blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
ade16edad2cbac40e9bacca1b0aba5e260577e2f | dfe925c32292ba1e054b86ea660546eb9eac921b | /example/gs/__init__.py | c371d57c6a673838f1d0eb1f56482200e99ebb74 | [] | no_license | keul/example.gs | bc64488d5e67492994b5a12a99d0fa64f1af87de | 5435e9f4fde66b810ff184c25e2dc26aa40900df | refs/heads/master | 2020-04-30T09:50:23.781896 | 2013-05-01T18:02:15 | 2013-05-01T18:02:15 | 9,789,567 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,012 | py | # -*- coding: utf8 -*-
import logging
from zope.i18nmessageid import MessageFactory
from example.gs import config
from example.gs.tool import FooTool
from Products.Archetypes import atapi
from Products.CMFCore import utils
logger = logging.getLogger('example.gs')
gsMessageFactory = MessageFactory('example.gs')
def initialize(context):
    """Initializer called when used as a Zope 2 product.

    Registers every Archetypes content type declared under
    config.PROJECTNAME with Zope, wiring each type's add permission and
    factory constructor into the product registry.
    """
    # process_types returns the type classes, their factory constructors and
    # the factory type information (ftis) declared for this project.
    content_types, constructors, ftis = atapi.process_types(
        atapi.listTypes(config.PROJECTNAME),
        config.PROJECTNAME)
    # Register each type individually so every type gets its own
    # add permission from config.ADD_PERMISSIONS.
    for atype, constructor in zip(content_types, constructors):
        utils.ContentInit('%s: %s' % (config.PROJECTNAME, atype.portal_type),
            content_types=(atype, ),
            permission=config.ADD_PERMISSIONS[atype.portal_type],
            extra_constructors=(constructor,),
            ).initialize(context)
    # utils.ToolInit("Foo Tool",
    #     tools=(FooTool,),
    #     icon="qm.gif",
    #     ).initialize(context)
| [
"[email protected]"
] | |
7714909e86d7cb824a84edc6d8ced3422f107600 | 54d17336ca03801bd9c9ef37be8642b332ab71c4 | /osm/SO/rwlaunchpad/plugins/rwautoscaler/rift/tasklets/rwautoscaler/engine.py | 3bd2645aeb390746fcbf31d24b4a18f0fad50d0f | [] | no_license | dennis-me/Pishahang | 2428379c4f7d3ee85df4b85727ce92e8fe69957a | cdd0abe80a76d533d08a51c7970d8ded06624b7d | refs/heads/master | 2020-09-07T12:35:54.734782 | 2020-01-24T20:11:33 | 2020-01-24T20:11:33 | 220,782,212 | 2 | 0 | null | 2019-11-10T11:46:44 | 2019-11-10T11:46:43 | null | UTF-8 | Python | false | false | 15,508 | py |
#
# Copyright 2016 RIFT.IO Inc
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import abc
import asyncio
import time
import numpy
from . import scaling_operation
from . import subscribers as monp_subscriber
from gi.repository import RwDts as rwdts
import rift.mano.dts as subscriber
class TimeSeries:
    """Sliding window of (timestamp, value) samples over a fixed time span.

    Only samples whose timestamp lies within ``threshold_time`` seconds of
    the newest sample are retained.
    """

    def __init__(self, threshold_time):
        """
        Args:
            threshold_time (int): window size in secs
        """
        # Row 0 holds the sample timestamps, row 1 the sampled values.
        self._series = numpy.array([[], []], dtype='int64')
        self.threshold_time = threshold_time

    def add_value(self, timestamp, value):
        """Append a sample and evict samples older than the window."""
        stamp = int(timestamp)
        sample = numpy.array([[stamp], [value]])
        combined = numpy.concatenate((self._series, sample), axis=1)
        # Keep only the columns whose timestamp falls inside the window
        # ending at the newest sample.
        keep = combined[0] >= (stamp - self.threshold_time)
        self._series = combined[:, keep]

    def average(self):
        """Return the mean of all values currently inside the window."""
        return numpy.average(self._series[1])

    def is_window_full(self):
        """Return True when the retained samples span the full window."""
        stamps = self._series[0]
        if len(stamps) < 2:
            return False
        return bool(stamps[-1] - stamps[0] >= self.threshold_time)
class ScalingCriteria:
    """Monitors one NSR monitoring parameter against a single scaling
    criteria's thresholds.

    Samples are accumulated in a sliding-window TimeSeries; once the window
    spans the configured size, the window *average* is compared against the
    scale-in/scale-out thresholds and the delegate is notified of breaches.
    """

    class Delegate:
        """Delegate: callbacks triggered by ScalingCriteria."""

        @abc.abstractmethod
        def threshold_out_breached(self, criteria_name, avg_value):
            """Called when the value has crossed the scale-out-threshold

            Args:
                criteria_name (str): Criteria name
                avg_value (float): The average value of the window.
            """
            pass

        @abc.abstractmethod
        def threshold_in_breached(self, criteria_name, avg_value):
            """Called when the value has dropped below the scale-in-threshold

            Args:
                criteria_name (str): Criteria name
                avg_value (float): The average value of the window.
            """
            pass

    def __init__(
            self,
            log,
            dts,
            loop,
            project,
            nsr_id,
            monp_id,
            scaling_criteria,
            window_size,
            sampling_period=1,
            delegate=None):
        """
        Args:
            log : Log
            dts : DTS handle
            loop : Event Handle
            project : Project instance
            nsr_id (str): NSR ID
            monp_id (str): Monitoring parameter
            scaling_criteria : Yang data model
            window_size (int): Length of the window in seconds
            sampling_period (int): Currently unused; sampling is push-driven
                by the subscriber callback.
            delegate : ScalingCriteria.Delegate
        """
        self.log = log
        self.dts = dts
        self.loop = loop
        self.sampling_period = sampling_period
        self.window_size = window_size
        self.delegate = delegate
        self.nsr_id, self.monp_id = nsr_id, monp_id
        self._scaling_criteria = scaling_criteria
        self._timeseries = TimeSeries(self.window_size)
        # Flag when set, triggers scale-in request.
        self._scl_in_limit_enabled = False
        # Push subscription: every published value for this mon-param is fed
        # into add_value below.
        self.nsr_monp_sub = monp_subscriber.NsrMonParamSubscriber(
            self.log,
            self.dts,
            self.loop,
            project,
            self.nsr_id,
            self.monp_id,
            callback=self.add_value)

    @property
    def name(self):
        return self._scaling_criteria.name

    @property
    def scale_in(self):
        return self._scaling_criteria.scale_in_threshold

    @property
    def scale_out(self):
        return self._scaling_criteria.scale_out_threshold

    @asyncio.coroutine
    def register(self):
        yield from self.nsr_monp_sub.register()

    def deregister(self):
        self.nsr_monp_sub.deregister()

    def trigger_action(self, timestamp, avg):
        """Compare the window average to the thresholds and fire callbacks.

        Values between scale-in and scale-out thresholds trigger nothing.

        Args:
            timestamp : time in unix epoch of the newest sample
            avg : Average of all the values in the window
        """
        if avg >= self.scale_out:
            self.log.info("Triggering a scaling-out request for the criteria {}".format(
                self.name))
            self.delegate.threshold_out_breached(self.name, avg)
        elif avg < self.scale_in:
            self.log.info("Triggering a scaling-in request for the criteria {}".format(
                self.name))
            self.delegate.threshold_in_breached(self.name, avg)

    def add_value(self, monp, action):
        """Callback from NsrMonParamSubscriber

        Args:
            monp : Yang model
            action : rwdts.QueryAction
        """
        if action == rwdts.QueryAction.DELETE:
            return
        value = monp.value_integer
        timestamp = time.time()
        self._timeseries.add_value(timestamp, value)
        if not self._timeseries.is_window_full():
            return
        self.log.debug("Sufficient sampling data obtained for criteria {}."
                       "Checking the scaling condition for the criteria".format(
                           self.name))
        if not self.delegate:
            return
        # Bug fix: forward the window *average* (as documented in Delegate)
        # instead of the most recent raw sample value.
        self.trigger_action(timestamp, self._timeseries.average())
class ScalingPolicy(ScalingCriteria.Delegate):
    """Aggregates all ScalingCriteria of one scaling group of an NSR.

    Listens for NSR monitoring parameters, lazily creates a ScalingCriteria
    monitor per matching criteria, combines the per-criteria breach flags
    with the policy's configured operation (AND/OR) and, when the combined
    condition holds and the policy is not in cooldown, asks the delegate to
    scale the group in or out.
    """

    class Delegate:
        @abc.abstractmethod
        def scale_in(self, scaling_group_name, nsr_id, instance_id):
            """Delegate called when all the criteria for scaling-in are met.

            Args:
                scaling_group_name (str): Scaling group ref
                nsr_id (str): NSR id
                instance_id : Scaling group instance to remove
            """
            pass

        @abc.abstractmethod
        def scale_out(self, scaling_group_name, nsr_id):
            """Delegate called when all the criteria for scaling-out are met.

            Args:
                scaling_group_name (str): Scaling group ref
                nsr_id (str): NSR id
            """
            pass

    def __init__(
            self,
            log,
            dts,
            loop,
            project,
            nsr_id,
            nsd_id,
            scaling_group_name,
            scaling_policy,
            store,
            delegate=None):
        """
        Args:
            log : Log
            dts : DTS handle
            loop : Event loop
            project : Project instance
            nsr_id (str): NSR id
            nsd_id (str): NSD id
            scaling_group_name (str): Scaling group ref
            scaling_policy : Yang model
            store (SubscriberStore): Subscriber store instance
            delegate (None, optional): ScalingPolicy.Delegate
        """
        self.loop = loop
        self.log = log
        self.dts = dts
        self.project = project
        self.nsd_id = nsd_id
        self.nsr_id = nsr_id
        self.scaling_group_name = scaling_group_name
        self._scaling_policy = scaling_policy
        self.delegate = delegate
        self.store = store
        # Stream of all NSR monitoring parameters; criteria monitors are
        # created on the fly as the first values arrive.
        self.monp_sub = monp_subscriber.NsrMonParamSubscriber(
            self.log,
            self.dts,
            self.loop,
            self.project,
            self.nsr_id,
            callback=self.handle_nsr_monp)
        self.nsr_scale_sub = monp_subscriber.NsrScalingGroupRecordSubscriber(
            self.log,
            self.dts,
            self.loop,
            self.project,
            self.nsr_id,
            self.scaling_group_name)
        # mon-param id -> ScalingCriteria monitor
        self.criteria_store = {}
        # Timestamp at which the scale-in/scale-out request was generated.
        self._last_triggered_time = None
        self.scale_in_status = {cri.name: False for cri in self.scaling_criteria}
        self.scale_out_status = {cri.name: False for cri in self.scaling_criteria}
        self.scale_out_count = 0

    def get_nsd_monp_cfg(self, nsr_monp):
        """Return the NSD mon-param config backing an NSR mon-param, or
        None when no NSD param matches.
        """
        nsd = self.store.get_nsd(self.nsd_id)
        for monp in nsd.monitoring_param:
            if monp.id == nsr_monp.nsd_mon_param_ref:
                return monp

    def handle_nsr_monp(self, monp, action):
        """Callback for NSR mon-param handler.

        Args:
            monp : Yang Model
            action : rwdts.QueryAction
        """
        def handle_create():
            # Create a criteria monitor as soon as the first monitoring data
            # for this mon-param is published.
            if monp.id in self.criteria_store:
                return
            nsd_monp = self.get_nsd_monp_cfg(monp)
            # NOTE(review): get_nsd_monp_cfg can return None; the comparison
            # below assumes every NSR mon-param maps to an NSD param -- confirm.
            for cri in self.scaling_criteria:
                if cri.ns_monitoring_param_ref != nsd_monp.id:
                    continue
                self.log.debug("Created a ScalingCriteria monitor for {}".format(
                    cri.as_dict()))
                criteria = ScalingCriteria(
                    self.log,
                    self.dts,
                    self.loop,
                    self.project,
                    self.nsr_id,
                    monp.id,
                    cri,
                    self.threshold_time,  # window size
                    delegate=self)
                self.criteria_store[monp.id] = criteria

                @asyncio.coroutine
                def task():
                    yield from criteria.register()

                self.loop.create_task(task())

        def handle_delete():
            # Tear down the monitor once the mon-param disappears.
            if monp.id in self.criteria_store:
                self.criteria_store[monp.id].deregister()
                del self.criteria_store[monp.id]

        if action in [rwdts.QueryAction.CREATE, rwdts.QueryAction.UPDATE]:
            handle_create()
        elif action == rwdts.QueryAction.DELETE:
            handle_delete()

    @property
    def scaling_criteria(self):
        return self._scaling_policy.scaling_criteria

    @property
    def scale_in_op(self):
        # Operation (AND/OR) used to combine the per-criteria scale-in flags.
        optype = self._scaling_policy.scale_in_operation_type
        return scaling_operation.get_operation(optype)

    @property
    def scale_out_op(self):
        # Operation (AND/OR) used to combine the per-criteria scale-out flags.
        optype = self._scaling_policy.scale_out_operation_type
        return scaling_operation.get_operation(optype)

    @property
    def name(self):
        return self._scaling_policy.name

    @property
    def threshold_time(self):
        return self._scaling_policy.threshold_time

    @property
    def cooldown_time(self):
        return self._scaling_policy.cooldown_time

    @asyncio.coroutine
    def register(self):
        yield from self.monp_sub.register()
        yield from self.nsr_scale_sub.register()

    def deregister(self):
        # NOTE(review): nsr_scale_sub is registered in register() but is not
        # deregistered here -- confirm whether matching cleanup is needed.
        self.monp_sub.deregister()

    def _is_in_cooldown(self):
        """Verify if the current policy is in cooldown.
        """
        if not self._last_triggered_time:
            return False
        if (time.time() - self._last_triggered_time) >= self.cooldown_time:
            return False
        return True

    def can_trigger_action(self):
        """Return False while the policy is cooling down after an action."""
        if self._is_in_cooldown():
            self.log.debug("In cooldown phase ignoring the scale action ")
            return False
        return True

    def threshold_in_breached(self, criteria_name, value):
        """Delegate callback when scale-in threshold is breached

        Args:
            criteria_name : Criteria name
            value : Average value
        """
        self.log.debug("Avg value {} has fallen below the threshold limit for "
                       "{}".format(value, criteria_name))
        if not self.can_trigger_action():
            return
        if self.scale_out_count < 1:
            self.log.debug('There is no scaled-out VNFs at this point. Hence ignoring the scale-in')
            return
        self.scale_in_status[criteria_name] = True
        # Bug fix: this log previously reported the scale-*out* operation and
        # statuses while evaluating the scale-in condition.
        self.log.info("Applying {} operation to check if all criteria {} for"
                      " scale-in-threshold are met".format(
                          self.scale_in_op,
                          self.scale_in_status))
        statuses = self.scale_in_status.values()
        is_breached = self.scale_in_op(statuses)
        if is_breached and self.delegate:
            self.log.info("Triggering a scale-in action for policy {} as "
                          "all criteria have been met".format(self.name))

            @asyncio.coroutine
            def check_and_scale_in():
                # FIXME: pick a real scaling group instance id from
                # nsr_scale_sub once available; 0 keeps the existing
                # scale_in delegate signature satisfied.
                instance_id = 0
                self._last_triggered_time = time.time()
                self.scale_out_count -= 1
                # Reset all statuses
                self.scale_in_status = {cri.name: False for cri in self.scaling_criteria}
                self.delegate.scale_in(self.scaling_group_name, self.nsr_id, instance_id)

            self.loop.create_task(check_and_scale_in())

    def threshold_out_breached(self, criteria_name, value):
        """Delegate callback when scale-out threshold is breached.

        Args:
            criteria_name : Criteria name
            value : Average value
        """
        self.log.debug("Avg value {} has gone above the threshold limit for "
                       "{}".format(value, criteria_name))
        if not self.can_trigger_action():
            return
        self.scale_out_status[criteria_name] = True
        self.log.info("Applying {} operation to check if all criteria {} for"
                      " scale-out-threshold are met".format(
                          self.scale_out_op,
                          self.scale_out_status))
        statuses = self.scale_out_status.values()
        is_breached = self.scale_out_op(statuses)
        if is_breached and self.delegate:
            self.log.info("Triggering a scale-out action for policy {} as "
                          "all criteria have been met".format(self.name))
            self._last_triggered_time = time.time()
            self.scale_out_count += 1
            # Reset all statuses
            self.scale_out_status = {cri.name: False for cri in self.scaling_criteria}
            self.delegate.scale_out(self.scaling_group_name, self.nsr_id)
| [
"[email protected]"
] | |
9dac850c79526f3beabdcec45af6f4746838cae8 | 8890ff61262ff98369464721c165d53aa9febe85 | /oauthlib/oauth2/draft25/parameters.py | 4343dcb259213b64ac4a8ce3151d29588e82f6c1 | [
"Apache-2.0"
] | permissive | Mause/tumblr_conn | 09e91bb86e6310ac3f9b0be292967283990558ea | e0ac78947355e41a8432a2a3e12fb86fb28a4c72 | refs/heads/master | 2022-06-15T05:30:27.737676 | 2013-04-29T20:19:57 | 2013-04-29T20:19:57 | 9,258,639 | 0 | 0 | null | 2022-05-17T03:16:49 | 2013-04-06T10:52:39 | JavaScript | UTF-8 | Python | false | false | 11,616 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
"""
oauthlib.oauth2_draft28.parameters
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
This module contains methods related to `Section 4`_ of the OAuth 2 draft.
.. _`Section 4`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-4
"""
import json
try:
import urlparse
except ImportError:
import urllib.parse as urlparse
from oauthlib.common import add_params_to_uri, add_params_to_qs, unicode_type
from .errors import raise_from_error, MissingTokenError, MissingTokenTypeError
from .errors import MismatchingStateError, MissingCodeError
from .errors import InsecureTransportError
from .utils import list_to_scope, scope_to_list
def prepare_grant_uri(uri, client_id, response_type, redirect_uri=None,
                      scope=None, state=None, **kwargs):
    """Prepare the authorization grant request URI.

    The client constructs the request URI by adding the following
    parameters to the query component of the authorization endpoint URI
    using the "application/x-www-form-urlencoded" format:

    response_type
            REQUIRED.  Value MUST be set to "code".
    client_id
            REQUIRED.  The client identifier as described in `Section 2.2`_.
    redirect_uri
            OPTIONAL.  As described in `Section 3.1.2`_.
    scope
            OPTIONAL.  The scope of the access request as described by
            `Section 3.3`_.
    state
            RECOMMENDED.  An opaque value used by the client to maintain
            state between the request and callback.  The authorization
            server includes this value when redirecting the user-agent back
            to the client.  The parameter SHOULD be used for preventing
            cross-site request forgery as described in `Section 10.12`_.

    Any additional truthy keyword arguments are appended as extra query
    parameters.

        GET /authorize?response_type=code&client_id=s6BhdRkqt3&state=xyz
        &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb HTTP/1.1
        Host: server.example.com

    .. _`Section 2.2`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-2.2
    .. _`Section 3.1.2`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-3.1.2
    .. _`Section 3.3`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-3.3
    .. _`section 10.12`: http://tools.ietf.org/html/draft-ietf-oauth-v2-28#section-10.12
    """
    # Consistency/bug fix: URI schemes are case-insensitive (RFC 3986,
    # section 3.1); match the check done by parse_authorization_code_response
    # and parse_implicit_response in this module.
    if not uri.lower().startswith('https://'):
        raise InsecureTransportError()
    params = [('response_type', response_type),
              ('client_id', client_id)]
    if redirect_uri:
        params.append(('redirect_uri', redirect_uri))
    if scope:
        params.append(('scope', list_to_scope(scope)))
    if state:
        params.append(('state', state))
    for k, v in kwargs.items():
        if v:
            params.append((unicode_type(k), v))
    return add_params_to_uri(uri, params)
def prepare_token_request(grant_type, body='', **kwargs):
    """Prepare the access token request entity body.

    Serializes ``grant_type`` plus every truthy keyword argument into the
    "application/x-www-form-urlencoded" format expected by the token
    endpoint, appending to any existing ``body`` content, e.g.::

        grant_type=authorization_code&code=SplxlOBeZQQYbYS6WxSbIA
        &redirect_uri=https%3A%2F%2Fclient%2Eexample%2Ecom%2Fcb

    A ``scope`` keyword given as a list is collapsed into a single
    space-separated string first.
    """
    if 'scope' in kwargs:
        kwargs['scope'] = list_to_scope(kwargs['scope'])
    fields = [('grant_type', grant_type)]
    for key, value in kwargs.items():
        if value:
            fields.append((unicode_type(key), value))
    return add_params_to_qs(body, fields)
def parse_authorization_code_response(uri, state=None):
    """Parse an authorization grant redirection URI into a dict.

    On approval the authorization server delivers the authorization code to
    the client by appending ``code`` (and, when the request carried one,
    ``state``) to the query component of the redirection URI, e.g.::

        https://client.example.com/cb?code=SplxlOBeZQQYbYS6WxSbIA&state=xyz

    :param uri: the full redirection URI; must use the https scheme.
    :param state: the state value sent in the original request, if any.
    :raises InsecureTransportError: if the URI does not use https.
    :raises MissingCodeError: if no ``code`` query parameter is present.
    :raises MismatchingStateError: if ``state`` was given and differs from
        the value in the response.
    """
    if not uri.lower().startswith('https://'):
        raise InsecureTransportError()
    parts = urlparse.urlparse(uri)
    response = dict(urlparse.parse_qsl(parts.query))
    if 'code' not in response:
        raise MissingCodeError("Missing code parameter in response.")
    if state and response.get('state', None) != state:
        raise MismatchingStateError()
    return response
def parse_implicit_response(uri, state=None, scope=None):
    """Parse an implicit grant redirection URI into a dict.

    In the implicit flow the access token is delivered in the *fragment*
    component of the redirection URI as "application/x-www-form-urlencoded"
    pairs, e.g.::

        https://example.com/cb#access_token=2YotnFZFEjr1zCsicMWpAA
        &state=xyz&token_type=example&expires_in=3600

    A ``scope`` entry is expanded into a list, and the token parameters are
    checked with validate_token_parameters before being returned.

    :param uri: the full redirection URI; must use the https scheme.
    :param state: the state value sent in the original request, if any.
    :param scope: the scope originally requested, used for change detection.
    :raises InsecureTransportError: if the URI does not use https.
    :raises ValueError: if ``state`` was given and differs from the value in
        the response.
    """
    if not uri.lower().startswith('https://'):
        raise InsecureTransportError()
    fragment = urlparse.urlparse(uri).fragment
    token = dict(urlparse.parse_qsl(fragment, keep_blank_values=True))
    if 'scope' in token:
        token['scope'] = scope_to_list(token['scope'])
    if state and token.get('state', None) != state:
        raise ValueError("Mismatching or missing state in params.")
    validate_token_parameters(token, scope)
    return token
def parse_token_response(body, scope=None):
    """Parse the JSON access token response body into a dict.

    The authorization server returns the token parameters as a JSON object
    in the entity body of a 200 response ("application/json" media type),
    e.g.::

        HTTP/1.1 200 OK
        Content-Type: application/json;charset=UTF-8
        Cache-Control: no-store
        Pragma: no-cache

        {
            "access_token":"2YotnFZFEjr1zCsicMWpAA",
            "token_type":"example",
            "expires_in":3600,
            "refresh_token":"tGzv3JOkF0XG5Qx2TlKWIA",
            "example_parameter":"example_value"
        }

    A ``scope`` member is expanded into a list, and the parameters are
    checked with validate_token_parameters before being returned.

    :param body: the JSON response body as text.
    :param scope: the scope originally requested, used for change detection.
    """
    token = json.loads(body)
    if 'scope' in token:
        token['scope'] = scope_to_list(token['scope'])
    validate_token_parameters(token, scope)
    return token
def validate_token_parameters(params, scope=None):
    """Ensure token presence, token type, expiration and scope in params.

    :param params: parsed token response parameters.
    :param scope: the scope originally requested, used to detect a changed
        scope grant.
    :raises MissingTokenError: if no ``access_token`` is present.
    :raises MissingTokenTypeError: if no ``token_type`` is present.
    :raises Warning: if the granted scope differs from the requested one.
    """
    if 'error' in params:
        raise_from_error(params.get('error'), params)
    if 'access_token' not in params:
        raise MissingTokenError(description="Missing access token parameter.")
    if 'token_type' not in params:
        raise MissingTokenTypeError()
    # If the issued access token scope is different from the one requested by
    # the client, the authorization server MUST include the "scope" response
    # parameter to inform the client of the actual scope granted.
    # http://tools.ietf.org/html/draft-ietf-oauth-v2-25#section-3.3
    new_scope = params.get('scope', None)
    scope = scope_to_list(scope)
    if scope and new_scope and set(scope) != set(new_scope):
        # NOTE(review): raising a Warning *instance* aborts the caller rather
        # than emitting a warning; kept as-is since callers may rely on it.
        raise Warning("Scope has changed to %s." % new_scope)
| [
"[email protected]"
] | |
5d1ed16f74021d81791fe06902bd4b73932fecc5 | 4f9930e15c02cb9a09af70d66b794480b8c9bd57 | /batch2/batch/driver/instance.py | 364815e0ed0adbb5c8d315f693c218f81632683b | [
"MIT"
] | permissive | gsarma/hail | d76aa16d718618c1915b629077fd80cbc4d3b526 | 6aa2d945bb7d57c463d5ab9afb686f18c2941b25 | refs/heads/master | 2020-06-20T06:09:43.408615 | 2019-10-29T21:40:23 | 2019-10-29T21:40:23 | 196,250,453 | 0 | 0 | MIT | 2019-07-10T17:44:48 | 2019-07-10T17:44:47 | null | UTF-8 | Python | false | false | 10,380 | py | import time
import logging
import googleapiclient.errors
import asyncio
import aiohttp
log = logging.getLogger('instance')
class Instance:
@staticmethod
def from_record(inst_pool, record):
ip_address = record['ip_address']
pending = ip_address is None
active = ip_address is not None
deleted = False
inst = Instance(inst_pool, record['name'], record['token'],
ip_address=ip_address, pending=pending,
active=active, deleted=deleted)
inst_pool.free_cores_mcpu += inst_pool.worker_capacity_mcpu # FIXME: this should get cores from db in future
if active:
inst_pool.n_active_instances += 1
inst_pool.instances_by_free_cores.add(inst)
else:
assert pending
inst_pool.n_pending_instances += 1
log.info(f'added instance {inst.name} to the instance pool with ip address {inst.ip_address}')
return inst
@staticmethod
async def create(inst_pool, name, token):
# FIXME: maybe add machine type, cores, batch_image etc.
await inst_pool.driver.db.instances.new_record(name=name,
token=token)
inst_pool.n_pending_instances += 1
inst_pool.free_cores_mcpu += inst_pool.worker_capacity_mcpu
return Instance(inst_pool, name, token, ip_address=None, pending=True,
active=False, deleted=False)
def __init__(self, inst_pool, name, token, ip_address, pending, active, deleted):
self.inst_pool = inst_pool
self.name = name
self.token = token
self.ip_address = ip_address
self.lock = asyncio.Lock()
self.pods = set()
self.free_cores_mcpu = inst_pool.worker_capacity_mcpu
# state: pending, active, deactivated (and/or deleted)
self.pending = pending
self.active = active
self.deleted = deleted
self.healthy = True
self.last_updated = time.time()
self.time_created = time.time()
self.last_ping = time.time()
log.info(f'{self.inst_pool.n_pending_instances} pending {self.inst_pool.n_active_instances} active workers')
def unschedule(self, pod):
assert not self.pending and self.active
self.pods.remove(pod)
if self.healthy:
self.inst_pool.instances_by_free_cores.remove(self)
self.free_cores_mcpu += pod.cores_mcpu
self.inst_pool.free_cores_mcpu += pod.cores_mcpu
self.inst_pool.instances_by_free_cores.add(self)
self.inst_pool.driver.changed.set()
else:
self.free_cores_mcpu += pod.cores_mcpu
def schedule(self, pod):
assert not self.pending and self.active and self.healthy
self.pods.add(pod)
self.inst_pool.instances_by_free_cores.remove(self)
self.free_cores_mcpu -= pod.cores_mcpu
self.inst_pool.free_cores_mcpu -= pod.cores_mcpu
assert self.inst_pool.free_cores_mcpu >= 0, (self.inst_pool.free_cores_mcpu, pod.cores_mcpu)
self.inst_pool.instances_by_free_cores.add(self)
# can't create more scheduling opportunities, don't set changed
async def activate(self, ip_address):
async with self.lock:
log.info(f'activating instance {self.name} after {time.time() - self.time_created} seconds since creation')
if self.active:
return
if self.deleted:
return
if self.pending:
self.pending = False
self.inst_pool.n_pending_instances -= 1
self.inst_pool.free_cores_mcpu -= self.inst_pool.worker_capacity_mcpu
self.active = True
self.ip_address = ip_address
self.inst_pool.n_active_instances += 1
self.inst_pool.instances_by_free_cores.add(self)
self.inst_pool.free_cores_mcpu += self.inst_pool.worker_capacity_mcpu
self.inst_pool.driver.changed.set()
await self.inst_pool.driver.db.instances.update_record(
self.name, ip_address=ip_address)
log.info(f'{self.inst_pool.n_pending_instances} pending {self.inst_pool.n_active_instances} active workers')
async def deactivate(self):
async with self.lock:
log.info(f'deactivating instance {self.name}')
start = time.time()
if self.pending:
self.pending = False
self.inst_pool.n_pending_instances -= 1
self.inst_pool.free_cores_mcpu -= self.inst_pool.worker_capacity_mcpu
assert not self.active
log.info(f'{self.inst_pool.n_pending_instances} pending {self.inst_pool.n_active_instances} active workers')
return
if not self.active:
return
self.mark_as_unhealthy()
pod_list = list(self.pods)
await asyncio.gather(*[p.unschedule() for p in pod_list])
assert not self.pods
for pod in pod_list:
asyncio.ensure_future(pod.put_on_ready())
self.active = False
log.info(f'took {time.time() - start} seconds to deactivate {self.name}')
log.info(f'{self.inst_pool.n_pending_instances} pending {self.inst_pool.n_active_instances} active workers')
def update_timestamp(self):
if self in self.inst_pool.instances:
self.inst_pool.instances.remove(self)
self.last_updated = time.time()
self.inst_pool.instances.add(self)
def mark_as_unhealthy(self):
if not self.active or not self.healthy:
return
self.inst_pool.instances.remove(self)
self.healthy = False
self.inst_pool.instances.add(self)
if self in self.inst_pool.instances_by_free_cores:
self.inst_pool.instances_by_free_cores.remove(self)
self.inst_pool.n_active_instances -= 1
self.inst_pool.free_cores_mcpu -= self.free_cores_mcpu
self.update_timestamp()
def mark_as_healthy(self):
self.last_ping = time.time()
if not self.active or self.healthy:
return
self.inst_pool.instances.remove(self)
self.healthy = True
self.inst_pool.instances.add(self)
if self not in self.inst_pool.instances_by_free_cores:
self.inst_pool.n_active_instances += 1
self.inst_pool.instances_by_free_cores.add(self)
self.inst_pool.free_cores_mcpu += self.free_cores_mcpu
self.inst_pool.driver.changed.set()
async def remove(self):
log.info(f'removing instance {self.name}')
await self.deactivate()
self.inst_pool.instances.remove(self)
if self.token in self.inst_pool.token_inst:
del self.inst_pool.token_inst[self.token]
await self.inst_pool.driver.db.instances.delete_record(self.name)
async def handle_call_delete_event(self):
log.info(f'handling call delete event for {self.name}')
await self.deactivate()
self.deleted = True
self.update_timestamp()
async def delete(self):
log.info(f'deleting instance {self.name}')
if self.deleted:
return
await self.deactivate()
try:
await self.inst_pool.driver.gservices.delete_instance(self.name)
except googleapiclient.errors.HttpError as e:
if e.resp['status'] == '404':
log.info(f'instance {self.name} was already deleted')
else:
raise e
self.deleted = True
async def handle_preempt_event(self):
log.info(f'handling preemption event for {self.name}')
await self.delete()
self.update_timestamp()
async def heal(self):
    """Reconcile our view of this instance with reality.

    Prefers a direct HTTP healthcheck against the worker; if the worker
    has no address, is inactive, or the check fails, falls back to asking
    GCE for the VM's status and deletes/removes instances that are
    terminated, stuck, or unresponsive.
    """
    log.info(f'healing instance {self.name}')

    async def _heal_gce():
        # Reconcile against the GCE-reported machine status.
        try:
            spec = await self.inst_pool.driver.gservices.get_instance(self.name)
        except googleapiclient.errors.HttpError as e:
            if e.resp['status'] == '404':
                # GCE no longer knows this VM; drop our record of it.
                await self.remove()
                return
            # NOTE(review): a non-404 HttpError falls through with `spec`
            # unbound, which would raise UnboundLocalError below — this
            # branch probably should re-raise. Confirm before changing.
        status = spec['status']
        log.info(f'heal gce: machine {self.name} status {status}')

        # preempted goes into terminated state
        if status == 'TERMINATED' and self.deleted:
            log.info(f'instance {self.name} is terminated and deleted, removing')
            await self.remove()
            return

        if status in ('TERMINATED', 'STOPPING'):
            log.info(f'instance {self.name} is {status}, deactivating')
            await self.deactivate()

        if status == 'TERMINATED' and not self.deleted:
            log.info(f'instance {self.name} is {status} and not deleted, deleting')
            await self.delete()

        # Stuck detector: running but silent for more than five minutes.
        if status == 'RUNNING' and self.active and not self.healthy and time.time() - self.last_ping > 60 * 5:
            log.info(f'instance {self.name} is {status} and not healthy and last ping was greater than 5 minutes, deleting')
            await self.delete()

        # Stuck detector: created over five minutes ago but never activated.
        if (status in ('STAGING', 'RUNNING')) and not self.active and time.time() - self.time_created > 60 * 5:
            log.info(f'instance {self.name} is {status} and not active and older than 5 minutes, deleting')
            await self.delete()

        self.update_timestamp()

    if self.ip_address and self.active:
        try:
            async with aiohttp.ClientSession(
                    raise_for_status=True, timeout=aiohttp.ClientTimeout(total=5)) as session:
                await session.get(f'http://{self.ip_address}:5000/healthcheck')
                self.mark_as_healthy()
                self.update_timestamp()
        except asyncio.CancelledError:  # pylint: disable=try-except-raise
            raise
        except Exception as err:  # pylint: disable=broad-except
            log.info(f'healthcheck failed for {self.name} due to err {err}; asking gce instead')
            self.mark_as_unhealthy()
            await _heal_gce()
    else:
        await _heal_gce()
def __str__(self):
    # Instances are identified by their VM name.
    return self.name
| [
"[email protected]"
] | |
db2b8203bfcc6e719473a13b065bcf0d51007f50 | b15fd3fa4431c3bc0e9098b8ece4cb1e3bb45d50 | /data_providers/downloader.py | ec29f6d09b6514f00c036b6841ea965efcc7c89b | [] | no_license | SoulDuck/DenseNet | 0cdbb86f0cb4a685585f562374c894c165b3459f | 96581dd8e2df973560cf69ff99da211e91af55bb | refs/heads/master | 2021-07-10T04:22:31.868745 | 2017-10-06T13:23:57 | 2017-10-06T13:23:57 | 105,623,435 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,044 | py | import sys ,os
from urllib import urlretrieve
import tarfile
import zipfile
def report_download_progress(count, block_size, total_size):
    """urlretrieve reporthook: rewrite the current console line with percent done."""
    # Fraction of the file transferred so far (count blocks of block_size bytes).
    fraction_done = float(count * block_size) / total_size
    out = sys.stdout
    out.write("\r {0:1%} already downloader".format(fraction_done))
    out.flush()
def download_data_url(url, download_dir):
filename = url.split('/')[-1]
file_path = os.path.join(download_dir , filename)
if not os.path.exists(file_path):
try:
os.makedirs(download_dir)
except Exception :
pass
print "Download %s to %s" %(url , file_path)
file_path , _ = urlretrieve(url=url,filename=file_path,reporthook=report_download_progress)
print file_path
print('\nExtracting files')
if file_path.endswith(".zip"):
zipfile.ZipFile(file=file_path , mode="r").extracall(download_dir)
elif file_path.endswith(".tar.gz" , ".tgz"):
tarfile.open(name=file_path , mode='r:gz').extractall(download_dir)
| [
"[email protected]"
] | |
2cd1a1a76fe6766a6854de9064bedf52a1da8564 | a2f9d55d686425c4b47ce150aa1a23ea933055cc | /crossposting/spawnprocess.py | 0fa69d71efbd3ebead59242be16e3f573bf5535b | [] | no_license | wd5/blombum | b31c581f2c36c220164901189be1ba95a8341e0e | fe11efb369fe2cec67af1e79bc8935a266df2f80 | refs/heads/master | 2020-12-25T02:23:30.297939 | 2010-06-29T10:03:31 | 2010-06-29T10:03:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 331 | py | #!/usr/bin/python
import subprocess
subprocess.Popen([
'/home/nide/code/kanobu/src/manage.py', 'rebuildindex', '--site_id', '4', '--parse', 'none'
])
subprocess.Popen([
'node', '/home/nide/code/blombum/crossposting/test.js'
], stdin = subprocess.PIPE).communicate('[{somevar: 1}, {somevar: 44}, {somevar: 22}]')
print 'kuku'
| [
"[email protected]"
] | |
51da8e312770d0a2581c84ac2ef664dca607d04f | 3d6bb3df9ca1d0de6f749b927531de0790aa2e1d | /full_segmentation_histogram_creator.py | 97bc397018dc6ce79e45c96098caf6d100fa396d | [] | no_license | standardgalactic/kuhner-python | da1d66a6d638a9a379ba6bae2affdf151f8c27c5 | 30b73554cc8bc9d532c8108b34dd1a056596fec7 | refs/heads/master | 2023-07-07T04:18:30.634268 | 2020-04-06T04:37:48 | 2020-04-06T04:37:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,715 | py | # -*- coding: utf-8 -*-
"""
Created on Thu Sep 1 12:43:28 2016
@author: lpsmith
"""
from __future__ import division
from os import walk
import lucianSNPLibrary as lsl
nsamples_min = 10 #Arbitrary value: minimum number of samples we require

# One bucket of mean-log2-ratio values per segment-size range, plus a
# combined bucket for all segments.
data10_12 = []
data13_20 = []
data21_50 = []
data51_500 = []
data501_5000 = []
data5001_50000 = []
data50001_plus = []
dataall =[]

#fullseg_filenames = ["three_formal_cy_omni_mix3_b37RB.txt"]
fullseg_filenames = []
# Collect the filenames in the top level of the output directory only
# (the `break` stops os.walk after the first directory).
for (_, _, f) in walk("full_segmentation_output/"):
    fullseg_filenames += f
    break

discrepancies = open("full_segmentation_histograms/discrepancies.txt", "w")

for file in fullseg_filenames:
    handle = open("full_segmentation_output/" + file, "r")
    # Each line is a tab-separated segment record; the header row starts
    # with the literal string "chr" and is skipped.
    for line in handle:
        (chr, start, end, pmean, pnmarkers, nmarkers, meanlog2r) = line.rstrip().split("\t")
        if (chr=="chr"):
            continue
        if (pnmarkers != "?"):
            pnmarkers = int(pnmarkers)
            nmarkers = int(nmarkers)
            # Cross-check Partek's marker count against the raw SNP count;
            # mismatches are reported and logged to the discrepancies file.
            if (pnmarkers != nmarkers):
                print "Anomaly in", file, ": different nmarkers from partek vs. raw SNP data:"
                print " ", line
                line = file + "\t" + line
                discrepancies.write(line)
        # NOTE(review): when pnmarkers == "?", nmarkers is still a string
        # here, so this Python-2 str-vs-int comparison silently never
        # filters those rows — confirm whether that is intended.
        if (nmarkers < nsamples_min):
            continue
        meanlog2r = float(meanlog2r)
        dataall.append(meanlog2r)
        # Bucket by number of markers in the segment.
        if (nmarkers < 13):
            data10_12.append(meanlog2r)
        elif (nmarkers < 21):
            data13_20.append(meanlog2r)
        elif (nmarkers < 51):
            data21_50.append(meanlog2r)
        elif (nmarkers < 501):
            data51_500.append(meanlog2r)
        elif (nmarkers < 5001):
            data501_5000.append(meanlog2r)
        elif (nmarkers < 50001):
            data5001_50000.append(meanlog2r)
        elif (nmarkers < 500001):
            data50001_plus.append(meanlog2r)

# Write one histogram per bucket (bin width in log2-ratio units).
binwidth = 0.001
lsl.createPrintAndSaveHistogram(data10_12, "full_segmentation_histograms/data10_12.txt", binwidth)
lsl.createPrintAndSaveHistogram(data13_20, "full_segmentation_histograms/data13_20.txt", binwidth)
lsl.createPrintAndSaveHistogram(data21_50, "full_segmentation_histograms/data21_50.txt", binwidth)
lsl.createPrintAndSaveHistogram(data51_500, "full_segmentation_histograms/data51_500.txt", binwidth)
lsl.createPrintAndSaveHistogram(data501_5000, "full_segmentation_histograms/data501_5000.txt", binwidth)
lsl.createPrintAndSaveHistogram(data5001_50000, "full_segmentation_histograms/data5001_50000.txt", binwidth)
lsl.createPrintAndSaveHistogram(data50001_plus, "full_segmentation_histograms/data50001_plus.txt", binwidth)
lsl.createPrintAndSaveHistogram(dataall, "full_segmentation_histograms/dataall.txt", binwidth)
| [
"[email protected]"
] | |
87eb88d102f76159683c01cee2a711c5e2d9b455 | 3fbd26091ebbc13913f9c7be1aaf10d477c79536 | /week01/zuoye/maoyan_scrapy/.history/manyan/manyan/spiders/maoyan_20200628205729.py | 1a0055be6d8db820c15c61f6e0e0a0a3b3f37f7a | [] | no_license | shuncon/Python001-class01 | d28faf3d5d8e9ea4cee93bcae7143a26fd8c472e | df19758181cdaf37f30d4b518600fc4612590499 | refs/heads/master | 2022-11-13T19:31:27.019214 | 2020-07-10T14:58:25 | 2020-07-10T14:58:25 | 273,135,541 | 0 | 0 | null | 2020-06-18T03:46:56 | 2020-06-18T03:46:55 | null | UTF-8 | Python | false | false | 63 | py | import scrapy
class Maoyanspider(scrapy.Spider):
    # Scrapy spider stub for maoyan.com. The spider name is still empty,
    # so it cannot yet be selected with `scrapy crawl <name>`.
    name= ''
"[email protected]"
] | |
7f83aa1abe81599af869a8bf8a594a065d07480b | cbf407a1c9f18a6d0b94099586c59f1422933bb5 | /tensorflow_probability/python/distributions/joint_distribution_test.py | 0c69d5acd7131ced38a4030055c85676aa8c2225 | [
"Apache-2.0"
] | permissive | idofr/probability | eef7db7709e0309a6a132d6ce5e2423ae5f9ff17 | c8fa646de563cc3ddc9b375538bf5b613a318a46 | refs/heads/master | 2020-05-05T04:05:34.757494 | 2019-04-04T21:09:57 | 2019-04-04T21:10:45 | 179,697,040 | 1 | 0 | null | 2019-04-05T14:26:21 | 2019-04-05T14:26:21 | null | UTF-8 | Python | false | false | 10,474 | py | # Copyright 2018 The TensorFlow Probability Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""Tests for the JointDistributionSequential."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# Dependency imports
from absl.testing import parameterized
import tensorflow as tf
import tensorflow_probability as tfp
from tensorflow_probability.python.internal import test_util as tfp_test_util
from tensorflow.python.framework import test_util # pylint: disable=g-direct-tensorflow-import
tfd = tfp.distributions
@test_util.run_all_in_graph_and_eager_modes
class JointDistributionSequentialTest(tf.test.TestCase, parameterized.TestCase):
  """Unit tests for `tfd.JointDistributionSequential`."""

  def test_sample_log_prob(self):
    # Five-component model; lambdas declare their dependencies through
    # argument names, and a bare class (`tfd.Normal`) is resolved the same way.
    d = tfd.JointDistributionSequential(
        [
            tfd.Independent(tfd.Exponential(rate=[100, 120]), 1),
            lambda e: tfd.Gamma(concentration=e[..., 0], rate=e[..., 1]),
            tfd.Normal(loc=0, scale=2.),
            tfd.Normal,  # Or, `lambda loc, scale: tfd.Normal(loc, scale)`.
            lambda m: tfd.Sample(tfd.Bernoulli(logits=m), 12),
        ],
        validate_args=True)

    # The resolved dependency graph: (name, (parent names...)) per component.
    self.assertEqual(
        (
            ('e', ()),
            ('scale', ('e',)),
            ('loc', ()),
            ('m', ('loc', 'scale')),
            ('x', ('m',)),
        ),
        d._resolve_graph())

    xs = d.sample(seed=tfp_test_util.test_seed())
    self.assertLen(xs, 5)
    # We'll verify the shapes work as intended when we plumb these back into the
    # respective log_probs.

    ds, _ = d.sample_distributions(value=xs)
    self.assertLen(ds, 5)
    self.assertIsInstance(ds[0], tfd.Independent)
    self.assertIsInstance(ds[1], tfd.Gamma)
    self.assertIsInstance(ds[2], tfd.Normal)
    self.assertIsInstance(ds[3], tfd.Normal)
    self.assertIsInstance(ds[4], tfd.Sample)

    # Static properties.
    self.assertAllEqual(
        [tf.float32, tf.float32, tf.float32, tf.float32, tf.int32],
        d.dtype)

    # Per-component event shapes, both static and dynamic.
    for expected, actual_tensorshape, actual_shapetensor in zip(
        [[2], [], [], [], [12]],
        d.event_shape,
        self.evaluate(d.event_shape_tensor())):
      self.assertAllEqual(expected, actual_tensorshape)
      self.assertAllEqual(expected, actual_shapetensor)

    # All components here are scalar-batch.
    for expected, actual_tensorshape, actual_shapetensor in zip(
        [[], [], [], []],
        d.batch_shape,
        self.evaluate(d.batch_shape_tensor())):
      self.assertAllEqual(expected, actual_tensorshape)
      self.assertAllEqual(expected, actual_shapetensor)

    # Joint log-prob equals the sum of the component log-probs.
    expected_jlp = sum(d_.log_prob(x) for d_, x in zip(ds, xs))
    actual_jlp = d.log_prob(xs)
    self.assertAllEqual(*self.evaluate([expected_jlp, actual_jlp]))

  def test_kl_divergence(self):
    # KL between two joints with matching structure decomposes componentwise.
    d0 = tfd.JointDistributionSequential(
        [
            tfd.Independent(tfd.Exponential(rate=[100, 120]), 1),
            tfd.Normal(loc=0, scale=2.),
        ],
        validate_args=True)
    d1 = tfd.JointDistributionSequential(
        [
            tfd.Independent(tfd.Exponential(rate=[10, 12]), 1),
            tfd.Normal(loc=1, scale=1.),
        ],
        validate_args=True)
    expected_kl = sum(tfd.kl_divergence(d0_, d1_) for d0_, d1_
                      in zip(d0.distribution_fn, d1.distribution_fn))
    actual_kl = tfd.kl_divergence(d0, d1)
    other_actual_kl = d0.kl_divergence(d1)
    expected_kl_, actual_kl_, other_actual_kl_ = self.evaluate([
        expected_kl, actual_kl, other_actual_kl])
    self.assertNear(expected_kl_, actual_kl_, err=1e-5)
    self.assertNear(expected_kl_, other_actual_kl_, err=1e-5)

  def test_cross_entropy(self):
    # Cross entropy likewise decomposes across components.
    d0 = tfd.JointDistributionSequential(
        [
            tfd.Independent(tfd.Exponential(rate=[100, 120]), 1),
            tfd.Normal(loc=0, scale=2.),
        ],
        validate_args=True)
    d1 = tfd.JointDistributionSequential(
        [
            tfd.Independent(tfd.Exponential(rate=[10, 12]), 1),
            tfd.Normal(loc=1, scale=1.),
        ],
        validate_args=True)
    expected_xent = sum(
        d0_.cross_entropy(d1_) for d0_, d1_
        in zip(d0.distribution_fn, d1.distribution_fn))
    actual_xent = d0.cross_entropy(d1)
    expected_xent_, actual_xent_ = self.evaluate([expected_xent, actual_xent])
    self.assertNear(actual_xent_, expected_xent_, err=1e-5)

  def test_norequired_args_maker(self):
    """Test that only non-default args are passed through."""
    # tfd.Bernoulli with no inferred parents must fail its own validation.
    d = tfd.JointDistributionSequential([tfd.Normal(0., 1.), tfd.Bernoulli])
    with self.assertRaisesWithPredicateMatch(
        ValueError, 'Must pass probs or logits, but not both.'):
      d.sample()

  def test_graph_resolution(self):
    # `_` parameters are positional placeholders and appear as-is in the graph.
    d = tfd.JointDistributionSequential(
        [
            tfd.Independent(tfd.Exponential(rate=[100, 120]), 1),
            lambda e: tfd.Gamma(concentration=e[..., 0], rate=e[..., 1]),
            tfd.HalfNormal(2.5),
            lambda s: tfd.Normal(loc=0, scale=s),
            tfd.Exponential(2),
            lambda df, loc, _, scale: tfd.StudentT(df, loc, scale),
        ],
        validate_args=True)
    self.assertEqual(
        (('e', ()),
         ('scale', ('e',)),
         ('s', ()),
         ('loc', ('s',)),
         ('df', ()),
         ('x', ('df', 'loc', '_', 'scale'))),
        d._resolve_graph())

  @parameterized.parameters('mean', 'mode', 'stddev', 'variance')
  def test_summary_statistic(self, attr):
    # Summary statistics are computed per component and returned as a tuple.
    d = tfd.JointDistributionSequential(
        [tfd.Normal(0., 1.), tfd.Bernoulli(logits=0.)],
        validate_args=True)
    expected = tuple(getattr(d_, attr)() for d_ in d.distribution_fn)
    actual = getattr(d, attr)()
    self.assertAllEqual(*self.evaluate([expected, actual]))

  @parameterized.parameters(('covariance',))
  def test_notimplemented_summary_statistic(self, attr):
    # Joint covariance is intentionally unsupported.
    d = tfd.JointDistributionSequential([tfd.Normal(0., 1.), tfd.Bernoulli],
                                        validate_args=True)
    with self.assertRaisesWithPredicateMatch(
        NotImplementedError,
        attr + ' is not implemented: JointDistributionSequential'):
      getattr(d, attr)()

  @parameterized.parameters(
      'quantile', 'log_cdf', 'cdf',
      'log_survival_function', 'survival_function',
  )
  def test_notimplemented_evaluative_statistic(self, attr):
    # CDF-style evaluations are intentionally unsupported on the joint.
    d = tfd.JointDistributionSequential([tfd.Normal(0., 1.), tfd.Bernoulli],
                                        validate_args=True)
    with self.assertRaisesWithPredicateMatch(
        NotImplementedError,
        attr + ' is not implemented: JointDistributionSequential'):
      getattr(d, attr)([0.]*len(d.distribution_fn))

  def test_copy(self):
    # copy() preserves the constructor parameters verbatim.
    pgm = [tfd.Normal(0., 1.), tfd.Bernoulli]
    d = tfd.JointDistributionSequential(pgm, validate_args=True)
    d_copy = d.copy()
    self.assertAllEqual(
        {'distribution_fn': pgm,
         'validate_args': True,
         'name': None},
        d_copy.parameters)

  def test_batch_slicing(self):
    # Slicing the joint slices every component's batch dimension.
    d = tfd.JointDistributionSequential(
        [
            tfd.Exponential(rate=[10, 12, 14]),
            lambda s: tfd.Normal(loc=0, scale=s),
            lambda: tfd.Beta(concentration0=[3, 2, 1], concentration1=1),
        ],
        validate_args=True)

    d0, d1 = d[:1], d[1:]
    x0 = d0.sample(seed=tfp_test_util.test_seed())
    x1 = d1.sample(seed=tfp_test_util.test_seed())

    self.assertLen(x0, 3)
    self.assertEqual([1], x0[0].shape)
    self.assertEqual([1], x0[1].shape)
    self.assertEqual([1], x0[2].shape)

    self.assertLen(x1, 3)
    self.assertEqual([2], x1[0].shape)
    self.assertEqual([2], x1[1].shape)
    self.assertEqual([2], x1[2].shape)

  def test_sample_shape_propagation_default_behavior(self):
    # By default the sample shape is applied once at the root and inherited
    # by downstream components (no double application).
    d = tfd.JointDistributionSequential(
        [
            tfd.Independent(tfd.Exponential(rate=[100, 120]), 1),
            lambda e: tfd.Gamma(concentration=e[..., 0], rate=e[..., 1]),
            tfd.HalfNormal(2.5),
            lambda s: tfd.Normal(loc=0, scale=s),
            tfd.Exponential(2),
            lambda df, loc, _, scale: tfd.StudentT(df, loc, scale),
        ],
        validate_args=True)
    x = d.sample([2, 3], seed=tfp_test_util.test_seed())
    self.assertLen(x, 6)
    self.assertEqual((2, 3, 2), x[0].shape)
    self.assertEqual((2, 3), x[1].shape)
    self.assertEqual((2, 3), x[2].shape)
    self.assertEqual((2, 3), x[3].shape)
    self.assertEqual((2, 3), x[4].shape)
    self.assertEqual((2, 3), x[5].shape)
    lp = d.log_prob(x)
    self.assertEqual((2, 3), lp.shape)

  def test_sample_shape_propagation_nondefault_behavior(self):
    # With `_always_use_specified_sample_shape`, each component re-applies
    # the sample shape once per lambda argument, multiplying the dims.
    d = tfd.JointDistributionSequential(
        [
            tfd.Independent(tfd.Exponential(rate=[100, 120]), 1),           # 0
            lambda e: tfd.Gamma(concentration=e[..., 0], rate=e[..., 1]),   # 1
            tfd.HalfNormal(2.5),                                            # 2
            lambda s: tfd.Normal(loc=0, scale=s),                           # 3
            tfd.Exponential(2),                                             # 4
            lambda df, loc, _, scale: tfd.StudentT(df, loc, scale),         # 5
        ],
        validate_args=False)  # So log_prob doesn't complain.
    # The following enables the nondefault sample shape behavior.
    d._always_use_specified_sample_shape = True
    sample_shape = (2, 3)
    x = d.sample(sample_shape, seed=tfp_test_util.test_seed())
    self.assertLen(x, 6)
    self.assertEqual(sample_shape + (2,), x[0].shape)
    self.assertEqual(sample_shape * 2, x[1].shape)  # Has 1 arg.
    self.assertEqual(sample_shape * 1, x[2].shape)  # Has 0 args.
    self.assertEqual(sample_shape * 2, x[3].shape)  # Has 1 arg.
    self.assertEqual(sample_shape * 1, x[4].shape)  # Has 0 args.
    # Has 3 args, one being scalar.
    self.assertEqual(sample_shape * 3, x[5].shape)
    lp = d.log_prob(x)
    self.assertEqual(sample_shape * 3, lp.shape)
if __name__ == '__main__':
  tf.test.main()  # Run the test suite when executed directly.
| [
"[email protected]"
] | |
a7cb86c2e4cbd4332442225c33eccf63b66b7f00 | de4e1332950d37707620c54a9613258c1dd9489c | /dongyeop/4주차/주식가격.py | 66a1fd2587ee46a60471b010445f940fe0c01ebf | [] | no_license | PnuLikeLion9th/Summer_algorithm | 8fe74066b9673fb891b7205f75f808a04c7fe750 | dcfcb6325854b3b4c529451d5c6b162298b53bc1 | refs/heads/master | 2023-07-10T13:57:05.511432 | 2021-08-15T07:50:00 | 2021-08-15T07:50:00 | 378,679,514 | 3 | 10 | null | 2021-08-15T07:50:01 | 2021-06-20T15:32:18 | Python | UTF-8 | Python | false | false | 943 | py | # 브루트포스
# def solution(prices):
# answer=[0]*len(prices)
# for i in range(len(prices)):
# for j in range(i+1,len(prices)):
# if prices[i] <=prices[j]:
# answer[i]+=1
# else:
# answer[i]+=1
# break
# return answer
def solution(prices):#스택
length = len(prices)
answer=[0]*length
stack = list()
for i,price in enumerate(prices):#가격들의 인덱스 값과 가격
while stack and price<prices[stack[-1]]:#스택이 존재하고 현재값이 더 작으면
index=stack.pop()#스택에서 빼주고
answer[index]=i-index#현재 인덱스와 스택에 담겼던 녀석의 인덱스를 빼면 시간임
stack.append(i)
while stack:#반복문이 다돌고 아직 남아있는 스택을 비워준다.
index=stack.pop()
answer[index] = length-index-1
return answer | [
"[email protected]"
] | |
a9959f969e1eb4d2abb88b4c50c283f909536ea4 | 3d19e1a316de4d6d96471c64332fff7acfaf1308 | /Users/H/hanse/vol_essen.py | b2d4485b5664a460fc906ebcf35661445fb64799 | [] | no_license | BerilBBJ/scraperwiki-scraper-vault | 4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc | 65ea6a943cc348a9caf3782b900b36446f7e137d | refs/heads/master | 2021-12-02T23:55:58.481210 | 2013-09-30T17:02:59 | 2013-09-30T17:02:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,428 | py | import scraperwiki
import lxml.html
pageCounter = 1
while True:
page = scraperwiki.scrape("http://essen.vol.at/welcome.asp?page=%d" % (pageCounter))
root = lxml.html.fromstring(page)
for entry in root.cssselect('div[class="Entry"]'):
data={
"Name":entry.cssselect('div[class="CompanyName"]')[0].text_content(),
"Street": entry.cssselect('div[class="CompanyStreet"]')[0].text_content(),
"City" : entry.cssselect('div[class="CompanyPlace"]')[0].text_content()
}
scraperwiki.sqlite.save(unique_keys=["Name"], data=data)
if root.cssselect('a[class="Next"]'):
pageCounter=pageCounter+1
else:
break import scraperwiki
import lxml.html
pageCounter = 1
while True:
page = scraperwiki.scrape("http://essen.vol.at/welcome.asp?page=%d" % (pageCounter))
root = lxml.html.fromstring(page)
for entry in root.cssselect('div[class="Entry"]'):
data={
"Name":entry.cssselect('div[class="CompanyName"]')[0].text_content(),
"Street": entry.cssselect('div[class="CompanyStreet"]')[0].text_content(),
"City" : entry.cssselect('div[class="CompanyPlace"]')[0].text_content()
}
scraperwiki.sqlite.save(unique_keys=["Name"], data=data)
if root.cssselect('a[class="Next"]'):
pageCounter=pageCounter+1
else:
break | [
"[email protected]"
] | |
bda191301750ca690fb5cac1d9f9abe3f859c48c | b773ca4e5f4a8642149316d3aded4c8b1e6037d2 | /sprint-challenge/aq_dashboard.py | e05d226a6c975acfb3676de3141310ccde108ea6 | [
"MIT"
] | permissive | echiyembekeza/DS-Unit-3-Sprint-3-Productization-and-Cloud | c2157e9078ec49b1f59d28220146a197dda3b25c | 64958ae8e9d2310d6c72606109a6ccf456bc5949 | refs/heads/master | 2020-08-04T18:39:27.405320 | 2019-12-11T03:11:28 | 2019-12-11T03:11:28 | 212,239,896 | 0 | 0 | MIT | 2019-10-02T02:27:48 | 2019-10-02T02:27:48 | null | UTF-8 | Python | false | false | 1,662 | py | """OpenAQ Air Quality Dashboard with Flask."""
from flask import Flask, request
from flask_sqlalchemy import SQLAlchemy
from decouple import config
from os import getenv
import openaq
APP = Flask(__name__)
APP.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///db.sqlite3'
DB = SQLAlchemy(APP)

API = openaq.OpenAQ()
# NOTE(review): this API call runs at *import* time, so importing the module
# requires network access. `measurements` returns a (status, body) pair;
# `body` is the decoded JSON payload.
mment = API.measurements(city='Los Angeles', parameter='pm25')
body = mment[1]
def LAquery(k):
    """Return (value, utc_timestamp) tuples for the cached LA PM2.5 results.

    Reads the module-level `body` payload fetched at import time.

    Note: the `k` parameter is retained for backward compatibility but is
    unused — the original implementation immediately shadowed it with the
    loop variable, so it never had any effect.
    """
    LAresults = body['results']
    values = []
    for result in LAresults:  # BUG FIX: loop variable no longer shadows `k`
        value = result.get('value')
        date = result.get('date')
        utc = date.get('utc')
        values.append((value, utc))
    return values
class Record(DB.Model):
    """One stored Open AQ measurement: a UTC timestamp string and a value."""
    id = DB.Column(DB.Integer, primary_key=True)
    # ISO-8601 UTC timestamp as reported by Open AQ.
    datetime = DB.Column(DB.String(25))
    value = DB.Column(DB.Float, nullable=False)

    def __repr__(self):
        return f"<id={self.id}, datetime={self.datetime}, value={self.value}>"
@APP.route('/')
def root():
    """Base view: list every stored record whose value is at least 10."""
    records = Record.query.filter(Record.value >= 10).all()
    pieces = []
    for rec in records:
        pieces.append('datetime = ' + rec.datetime)
        pieces.append(", ")
        pieces.append('value = ' + str(rec.value))
        pieces.append('</br>')
    return ''.join(pieces)
@APP.route('/refresh')
def refresh():
    """Pull fresh data from Open AQ and replace existing data."""
    # Rebuild the schema from scratch, dropping all previous rows.
    DB.drop_all()
    DB.create_all()
    # Insert one Record per measurement from the cached API payload.
    for item in body['results']:
        record = Record(datetime=item.get('date').get('utc'),
                        value=item.get('value'))
        DB.session.add(record)
    DB.session.commit()
    return 'Data refreshed!'
if __name__ == "__main__":
APP.run()
| [
"[email protected]"
] | |
695d0d073402440740fc8500b7e5f345c02b68c8 | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Projects/Learn/PyCharm/Introduction to Python/Condition expressions/Boolean operators/tests.py | 470706706d0ff91198294d3ffe34a83d348960e6 | [
"LicenseRef-scancode-other-permissive"
] | permissive | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 128 | py | version https://git-lfs.github.com/spec/v1
oid sha256:539e49395fbd7e0e4b30294456efe7d922a26823d5ac4c0eea6d348bb18cfba3
size 413
| [
"[email protected]"
] | |
318a74534f2ec00ecb9d3d2d90042ac5ad963a45 | 56a7dd75f2a3f45d599ca89aaa9ca45390fbd546 | /ejercicios_preparcialito/parcialito_2/diccionarios/ejercicio_62.py | 28412a76c350229698e62a66e240a63a6c3ce189 | [] | no_license | facundoPri/algoritmo-programacion-i-essaya | e030d74de832b7642ff84a77212f8ea429d560d8 | 5ff7a8fc66f6683d47bc9faf80a35f9902b1e1a3 | refs/heads/master | 2023-04-07T01:04:10.221473 | 2021-04-10T13:05:59 | 2021-04-10T13:05:59 | 299,450,415 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 563 | py | """
Escribir una función que reciba una cadena y devuelva un diccionario cuyas claves sean las letras y cuyos valores sean la cantidad de apariciones de dicha letra. Por ejemplo, si recibe 'catamarca' debe devolver: {'c':2, 'a':4, 't':1, 'r':1, 'm':1}.
"""
def contar_caracteres(cadena):
    """Count how many times each character appears in a string.

    Receives a string and returns a dict mapping each character to its
    number of occurrences. For example, 'catamarca' returns
    {'c': 2, 'a': 4, 't': 1, 'r': 1, 'm': 1}.
    """
    # collections.Counter tallies in C; convert back to a plain dict so the
    # return type matches the original hand-rolled implementation.
    from collections import Counter
    return dict(Counter(cadena))
print(contar_caracteres("facundo"))
| [
"[email protected]"
] | |
ec8498ae54869540f229014677d6853284fde9fc | d4c67b2a12e990d4193e7ab06f04824a348067bf | /rl_trainer/ddpg_impl/flower/actor_critic/tf_ddpg_agent.py | 68c2dcbba15a6708b6789a492d9ba35ba24c020f | [
"BSD-3-Clause"
] | permissive | Roboy/nips-2018-ai-for-prosthetics | 2d57688ce85126379793e8643cbf0030c8f56beb | acb69f267a0cc852842828edbbfb47d1840c0a17 | refs/heads/master | 2020-03-26T05:39:25.565440 | 2018-11-01T23:28:08 | 2018-11-01T23:28:08 | 144,567,613 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,073 | py | import numpy as np
from typing import Callable, Collection
import tensorflow as tf
from gym.spaces import Box
from overrides import overrides
from typeguard import typechecked
from rl_trainer.agent import GymAgent
from rl_trainer.agent.replay_buffer import ReplayBuffer, InMemoryReplayBuffer
from rl_trainer.commons import Episode, ExperienceTupleBatch
from rl_trainer.ddpg_impl.flower.actor_critic.tf_model_saver import TFModelSaver
from .action_noise import OrnsteinUhlenbeckActionNoise
from .q_network import OnlineQNetwork
from .policy_network import OnlinePolicyNetwork
class TensorFlowDDPGAgent(GymAgent):
    """DDPG agent (actor-critic with target networks) backed by TensorFlow.

    Wires together an online/target Q-network pair, an online/target
    policy-network pair, Ornstein-Uhlenbeck exploration noise, and a
    replay buffer, and trains one batch per `act()` call once the buffer
    holds enough samples.
    """

    def __init__(self, state_dim: int, action_space: Box, sess: tf.Session = None,
                 gamma: float = 0.99, replay_buffer: ReplayBuffer = None,
                 actor_noise: Callable = None, tau: float = 0.001,
                 critic_nn: OnlineQNetwork = None, actor_nn: OnlinePolicyNetwork = None,
                 tf_model_saver: TFModelSaver = None):
        action_dim = action_space.shape[0]
        self._gamma = gamma  # Discount factor for future rewards.
        self._sess = sess if sess else tf.Session()

        # Online critic Q and its slowly-tracking target copy Qʹ (rate tau).
        self._Q = critic_nn if critic_nn else OnlineQNetwork(
            sess=self._sess, state_dim=state_dim, action_dim=action_dim)
        self._Qʹ = self._Q.create_target_network(tau=tau)

        # Online policy μ and its target copy μʹ.
        self._μ = actor_nn if actor_nn else OnlinePolicyNetwork(
            action_bound=action_space.high, sess=self._sess,
            state_dim=state_dim, action_dim=action_dim, action_space=action_space)
        self._μʹ = self._μ.create_target_network(tau=tau)

        # The saver must be created in the same graph before variables are
        # initialized.
        with self._sess.graph.as_default():
            self._model_saver = tf_model_saver if tf_model_saver else TFModelSaver()
            self._sess.run(tf.global_variables_initializer())
        self._actor_noise = actor_noise if actor_noise else OrnsteinUhlenbeckActionNoise(
            mu=np.zeros(action_dim))
        self._replay_buffer = replay_buffer if replay_buffer else InMemoryReplayBuffer()
        self.episode_max_q = 0  # Max Q-value seen in the latest training batch.
        self._update_target_nets()

    def _update_target_nets(self):
        # Soft-update both target networks toward their online counterparts.
        self._μʹ.update()
        self._Qʹ.update()

    @typechecked
    @overrides
    def act(self, current_state: Collection[float]):
        """Train (if possible) and return a noisy action for `current_state`."""
        if self._replay_buffer.has_sufficient_samples():
            self._train()
        s = np.array([current_state])  # pack single state into tf action batch
        action = self._μ(s=s)
        return action[0] + self._actor_noise()  # unpack tf batch shape

    def _train(self):
        # One DDPG step: critic update, actor update, then target tracking.
        batch = self._replay_buffer.sample_batch()
        self._train_critic(batch)
        self._train_actor(batch)
        self._update_target_nets()

    @typechecked
    def _train_critic(self, batch: ExperienceTupleBatch) -> None:
        """Regress Q toward the one-step bootstrapped targets yᵢ."""
        μʹ = self._μʹ
        γ = self._gamma
        s2 = np.array(batch.states_2)
        dones = batch.states_2_are_terminal
        # Target Q-values of the successor states under the target policy.
        Qs_s2 = self._Qʹ(s=s2, a=μʹ(s=s2))
        # yᵢ = r + γ·Qʹ(s₂, μʹ(s₂)); the bootstrap term is dropped for
        # terminal successor states.
        yᵢ = [(r + (1-done)*γ*Q_s2) for r, done, Q_s2 in zip(batch.rewards, dones, Qs_s2)]
        yᵢ = np.array(yᵢ).reshape((-1, 1))
        s = np.array(batch.states_1)
        a = np.array(batch.actions)
        self._Q.train(s=s, a=a, y_i=yᵢ)
        self._log_max_q(batch=batch)

    @typechecked
    def _train_actor(self, batch: ExperienceTupleBatch) -> None:
        """Update the actor policy using the sampled gradient"""
        s = np.array(batch.states_1)
        μ = self._μ
        # ∂Q/∂a evaluated at the actor's own actions (deterministic policy
        # gradient).
        grads_a = self._Q.grads_a(s=s, a=μ(s))
        assert len(grads_a) == 1
        μ.train(s=s, grads_a=grads_a[0])  # unpack tf batch shape

    @typechecked
    def _log_max_q(self, batch: ExperienceTupleBatch):
        # Track the largest Q-value in the batch as a training diagnostic.
        s, a = batch.states_1, batch.actions
        q_vals = self._Q(s=s, a=a)
        self.episode_max_q = np.amax(q_vals)

    @typechecked
    @overrides
    def observe_episode(self, episode: Episode):
        """Store a finished episode in the replay buffer and checkpoint."""
        self._replay_buffer.extend(episode.experience_tuples)
        self._model_saver.step(self._sess)

    @typechecked
    @overrides
    def set_seed(self, seed: int):
        # Seeds TensorFlow's graph-level RNG only (not numpy's).
        tf.set_random_seed(seed)
| [
"[email protected]"
] | |
e1c04602eb11935c3019f76fedd8f5debbf6c2c4 | b9c4c4e2ba9a54cf79169bb2c43e29b6994618f4 | /source/webapp/models.py | 5d3cd21c50b5fbf0b491d7e211c3065189b6e5ec | [] | no_license | big-arturka/exam_9 | 37bf8be08e0fd922bf36b0663babd4611d1ffb04 | 3505e39d9e2110c2912fc7f474e6ec297a8df4dd | refs/heads/master | 2022-12-31T14:06:53.576579 | 2020-10-24T13:24:24 | 2020-10-24T13:24:24 | 306,677,149 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,384 | py | from django.contrib.auth import get_user_model
from django.contrib.auth.models import User
from django.db import models
class Photo(models.Model):
    """A user-uploaded photo with a caption."""
    image = models.ImageField(upload_to='images', verbose_name='Фото')
    signature = models.CharField(max_length=200, verbose_name='Подпись')
    created_at = models.DateTimeField(auto_now_add=True, verbose_name='Дата создания')
    # NOTE(review): `max_length` has no effect on a ForeignKey — confirm
    # and consider removing it.
    author = models.ForeignKey(get_user_model(), max_length=50, verbose_name='Автор',
                               related_name='image_author', on_delete=models.CASCADE)

    def fav_by(self, user):
        """Return this photo's Favorites rows created by the given user
        (empty queryset when the user has not favourited it)."""
        favs = self.favorite_photo.filter(author=user)
        return favs

    def __str__(self):
        return f'{self.signature}-{self.author}'

    class Meta:
        verbose_name = 'Изображение'
        verbose_name_plural = 'Изображения'
class Favorites(models.Model):
    """Join table marking that a user favourited a photo."""
    photo = models.ForeignKey('webapp.Photo', related_name='favorite_photo', verbose_name='Фото', on_delete=models.CASCADE)
    author = models.ForeignKey(get_user_model(), related_name='favorite_author',
                               verbose_name='Автор', on_delete=models.CASCADE)

    def __str__(self):
        return f'{self.photo}-{self.author}'

    class Meta:
        verbose_name = 'Избранное'
        verbose_name_plural = 'Избранные'
| [
"[email protected]"
] | |
f09b5ba0c63513fae5eb3bf59e46085513e482a1 | 77166c6ed9b872fa69b454d3682f63527f5f3951 | /arcade/text.py | eb69a21fd6aad532e3894f36ed8a51fdfd3f1d17 | [
"MIT"
] | permissive | biggzlar/arcade | d72d936f3c244a9d5173b6f36bca3ede3382a0ae | fc444db356452660ac6cb2ffe241f0b1a3d4bcf3 | refs/heads/master | 2020-12-14T06:30:18.997456 | 2020-01-18T04:44:03 | 2020-01-18T04:44:03 | 234,668,560 | 1 | 0 | NOASSERTION | 2020-01-18T02:07:41 | 2020-01-18T02:07:40 | null | UTF-8 | Python | false | false | 12,133 | py | # --- BEGIN TEXT FUNCTIONS # # #
from typing import Tuple, Union, cast
import PIL.Image
import PIL.ImageDraw
import PIL.ImageFont
from arcade.sprite import Sprite
from arcade.arcade_types import Color
from arcade.draw_commands import Texture
from arcade.arcade_types import RGBA
from arcade.draw_commands import get_four_byte_color
import pyglet.gl as gl
import pyglet
class Text:
    """Class used for managing text."""

    def __init__(self):
        # No rendered sprites yet; size stays (0, 0) until text is drawn.
        self.size = (0, 0)
        self.text_sprite_list = None
class CreateText:
    """Deprecated parameter holder for a two-step text draw call."""

    def __init__(self,
                 text: str,
                 color: Color,
                 font_size: float = 12,
                 width: int = 20,
                 align="left",
                 font_name=('Calibri', 'Arial'),
                 bold: bool = False,
                 italic: bool = False,
                 anchor_x="left",
                 anchor_y="baseline",
                 rotation=0):
        # Content and color.
        self.text = text
        self.color = color
        # Font settings.
        self.font_size = font_size
        self.font_name = font_name
        self.bold = bold
        self.italic = italic
        # Layout and placement.
        self.width = width
        self.align = align
        self.anchor_x = anchor_x
        self.anchor_y = anchor_y
        self.rotation = rotation
def create_text(text: str,
                color: Color,
                font_size: float = 12,
                width: int = 0,
                align="left",
                font_name=('Calibri', 'Arial'),
                bold: bool = False,
                italic: bool = False,
                anchor_x: str = "left",
                anchor_y: str = "baseline",
                rotation=0):
    """ Deprecated. Two step text drawing for backwards compatibility. """

    import warnings
    warnings.warn("create_text has been deprecated, please use draw_text instead.", DeprecationWarning)
    # Simply bundle the arguments; render_text() does the actual drawing.
    return CreateText(text, color, font_size, width, align, font_name,
                      bold, italic, anchor_x, anchor_y, rotation)
def render_text(text: CreateText, start_x: float, start_y: float):
    """ Deprecated. Two step text drawing for backwards compatibility. """

    import warnings
    warnings.warn("render_text has been deprecated, please use draw_text instead.", DeprecationWarning)
    # Forward every stored parameter to the one-step API.
    draw_text(text.text, start_x, start_y,
              color=text.color,
              font_size=text.font_size,
              width=text.width,
              align=text.align,
              font_name=text.font_name,
              bold=text.bold,
              italic=text.italic,
              anchor_x=text.anchor_x,
              anchor_y=text.anchor_y,
              rotation=text.rotation)
def draw_text(text: str,
              start_x: float, start_y: float,
              color: Color,
              font_size: float = 12,
              width: int = 0,
              align: str = "left",
              font_name: Union[str, Tuple[str, ...]] = ('calibri', 'arial'),
              bold: bool = False,
              italic: bool = False,
              anchor_x: str = "left",
              anchor_y: str = "baseline",
              rotation: float = 0
              ):
    """
    Draw ``text`` at (``start_x``, ``start_y``).

    The text is rasterised with PIL into an image, wrapped in a Sprite and
    drawn through a one-element SpriteList.  Finished labels are memoised in
    ``draw_text.cache`` keyed on the text/style parameters, so repeated calls
    with the same arguments only pay for re-positioning the cached sprite.

    :param str text: Text to draw
    :param float start_x: x coordinate of the anchor point
    :param float start_y: y coordinate of the anchor point
    :param Color color: Color of the text
    :param float font_size: Size of the text
    :param float width: Field width in pixels; 0 sizes the field to the text
    :param str align: Alignment inside ``width`` ("left" or "center")
    :param Union[str, Tuple[str, ...]] font_name: Font file name, or a tuple
        of candidate names tried in order
    :param bool bold: Part of the cache key only -- the PIL font load below
        never uses it; presumably bold must come from the font file itself
    :param bool italic: Same caveat as ``bold``
    :param str anchor_x: "left", "center" or "right"
    :param str anchor_y: "top", "center", "bottom" or "baseline"
    :param float rotation: Rotation of the rendered sprite, in degrees
    """
    # Scale the font up, so it matches with the sizes of the old code back
    # when Pyglet drew the text.
    font_size *= 1.25
    # Text isn't anti-aliased, so we'll draw big, and then shrink
    scale_up = 5
    scale_down = 5
    font_size *= scale_up
    # If the cache gets too large, dump it and start over.
    if len(draw_text.cache) > 5000: # type: ignore # dynamic attribute on function obj
        draw_text.cache = {} # type: ignore # dynamic attribute on function obj
    # Note: position/anchor/rotation are NOT in the key; they are re-applied
    # to the cached sprite below, so sharing across positions is intended.
    key = f"{text}{color}{font_size}{width}{align}{font_name}{bold}{italic}"
    if key in draw_text.cache: # type: ignore # dynamic attribute on function obj
        label = draw_text.cache[key] # type: ignore # dynamic attribute on function obj
        text_sprite = label.text_sprite_list[0]
        if anchor_x == "left":
            text_sprite.center_x = start_x + text_sprite.width / 2
        elif anchor_x == "center":
            text_sprite.center_x = start_x
        elif anchor_x == "right":
            text_sprite.right = start_x
        else:
            raise ValueError(f"anchor_x should be 'left', 'center', or 'right'. Not '{anchor_x}'")
        # NOTE(review): this cached path uses ``start_y - height / 2`` for
        # "top", while the fresh path below uses ``start_y + height / 2`` --
        # one of the two looks wrong; confirm which is intended.
        if anchor_y == "top":
            text_sprite.center_y = start_y - text_sprite.height / 2
        elif anchor_y == "center":
            text_sprite.center_y = start_y
        elif anchor_y == "bottom" or anchor_y == "baseline":
            text_sprite.bottom = start_y
        else:
            raise ValueError(f"anchor_y should be 'top', 'center', 'bottom', or 'baseline'. Not '{anchor_y}'")
        text_sprite.angle = rotation
    else:
        label = Text()
        # Figure out the font to use
        font = None
        # Font was specified with a string
        if isinstance(font_name, str):
            # Try the name as given, then with a ".ttf" suffix appended.
            try:
                font = PIL.ImageFont.truetype(font_name, int(font_size))
            except OSError:
                # print(f"1 Can't find font: {font_name}")
                pass
            if font is None:
                try:
                    temp_font_name = f"{font_name}.ttf"
                    font = PIL.ImageFont.truetype(temp_font_name, int(font_size))
                except OSError:
                    # print(f"2 Can't find font: {temp_font_name}")
                    pass
        # We were instead given a list of font names, in order of preference
        else:
            for font_string_name in font_name:
                try:
                    font = PIL.ImageFont.truetype(font_string_name, int(font_size))
                    # print(f"3 Found font: {font_string_name}")
                except OSError:
                    # print(f"3 Can't find font: {font_string_name}")
                    pass
                if font is None:
                    try:
                        temp_font_name = f"{font_string_name}.ttf"
                        font = PIL.ImageFont.truetype(temp_font_name, int(font_size))
                    except OSError:
                        # print(f"4 Can't find font: {temp_font_name}")
                        pass
                if font is not None:
                    break
        # Default font if no font
        if font is None:
            font_names = ("arial.ttf",
                          'Arial.ttf',
                          'NotoSans-Regular.ttf',
                          "/usr/share/fonts/truetype/freefont/FreeMono.ttf",
                          '/System/Library/Fonts/SFNSDisplay.ttf',
                          '/Library/Fonts/Arial.ttf')
            for font_string_name in font_names:
                try:
                    font = PIL.ImageFont.truetype(font_string_name, int(font_size))
                    break
                except OSError:
                    # print(f"5 Can't find font: {font_string_name}")
                    pass
        # This is stupid. We have to have an image to figure out what size
        # the text will be when we draw it. Of course, we don't know how big
        # to make the image. Catch-22. So we just make a small image we'll trash
        text_image_size = (10, 10)
        image = PIL.Image.new("RGBA", text_image_size)
        draw = PIL.ImageDraw.Draw(image)
        # Get size the text will be
        text_image_size = draw.multiline_textsize(text, font=font)
        # Create image of proper size
        text_height = text_image_size[1]
        text_width = text_image_size[0]
        image_start_x = 0
        if width == 0:
            width = text_image_size[0]
        else:
            # Wait! We were given a field width.
            if align == "center":
                # Center text on given field width
                field_width = width * scale_up
                text_image_size = field_width, text_height
                image_start_x = (field_width - text_width) // 2
                width = field_width
            else:
                image_start_x = 0
        # If we draw a y at 0, then the text is drawn with a baseline of 0,
        # cutting off letters that drop below the baseline. This shoves it
        # up a bit.
        image_start_y = - font_size * scale_up * 0.02
        image = PIL.Image.new("RGBA", text_image_size)
        draw = PIL.ImageDraw.Draw(image)
        # Convert to tuple if needed, because the multiline_text does not take a
        # list for a color
        if isinstance(color, list):
            color = cast(RGBA, tuple(color))
        draw.multiline_text((image_start_x, image_start_y), text, color, align=align, font=font)
        # Shrink back down from the over-sized render (poor man's anti-alias).
        image = image.resize((width // scale_down, text_height // scale_down), resample=PIL.Image.LANCZOS)
        text_sprite = Sprite()
        text_sprite._texture = Texture(key)
        text_sprite.texture.image = image
        text_sprite.image = image
        text_sprite.texture_name = key
        text_sprite.width = image.width
        text_sprite.height = image.height
        if anchor_x == "left":
            text_sprite.center_x = start_x + text_sprite.width / 2
        elif anchor_x == "center":
            text_sprite.center_x = start_x
        elif anchor_x == "right":
            text_sprite.right = start_x
        else:
            raise ValueError(f"anchor_x should be 'left', 'center', or 'right'. Not '{anchor_x}'")
        if anchor_y == "top":
            text_sprite.center_y = start_y + text_sprite.height / 2
        elif anchor_y == "center":
            text_sprite.center_y = start_y
        elif anchor_y == "bottom" or anchor_y == "baseline":
            text_sprite.bottom = start_y
        else:
            raise ValueError(f"anchor_y should be 'top', 'center', 'bottom', or 'baseline'. Not '{anchor_y}'")
        text_sprite.angle = rotation
        # Local import avoids a circular import at module load time.
        from arcade.sprite_list import SpriteList
        label.text_sprite_list = SpriteList()
        label.text_sprite_list.append(text_sprite)
        draw_text.cache[key] = label # type: ignore # dynamic attribute on function obj
    label.text_sprite_list.draw()
# Per-function memo of rendered labels; see the size cap inside draw_text.
draw_text.cache = {} # type: ignore # dynamic attribute on function obj
def draw_text_2(text: str,
                start_x: float, start_y: float,
                color: Color,
                font_size: float = 12,
                width: int = 0,
                align: str = "left",
                font_name: Union[str, Tuple[str, ...]] = ('calibri', 'arial'),
                bold: bool = False,
                italic: bool = False,
                anchor_x: str = "left",
                anchor_y: str = "baseline",
                rotation: float = 0
                ):
    """
    Draw text immediately through a pyglet Label (no sprite/cache path).

    :param str text: Text to draw
    :param float start_x: x coordinate of the anchor point
    :param float start_y: y coordinate of the anchor point
    :param Color color: Color of the text
    :param float font_size: Size of the text
    :param float width: Field width passed to the label
    :param str align: Text alignment
    :param Union[str, Tuple[str, ...]] font_name: Font (or fonts) to use
    :param bool bold: Bold style
    :param bool italic: Italic style
    :param str anchor_x: Horizontal anchor
    :param str anchor_y: Vertical anchor
    :param float rotation: Accepted for API symmetry; not applied here
    """
    rgba = get_four_byte_color(color)
    text_label = pyglet.text.Label(text,
                                   font_name=font_name,
                                   font_size=font_size,
                                   x=start_x, y=start_y,
                                   anchor_x=anchor_x, anchor_y=anchor_y,
                                   color=rgba,
                                   align=align,
                                   bold=bold,
                                   italic=italic,
                                   width=width)
    text_label.draw()
| [
"[email protected]"
] | |
d9688ce59735aea7ef8f1d52da614763b7f2d036 | dbe1f4110921a08cb13e22ea325d503bd5627195 | /chuhuo_2.7_clickhouse/bluedon/bdwafd/newscantools/plugins/SiteEngine5_xPagejumpScript.py | 36b3f98ef2796868c8a3a3a6381ac72f04f32ea9 | [] | no_license | Hehouhua/waf_branches | 92dc1b1cbecba20f24ef6c7372dde7caa43f9158 | ca76f3a1ed8150b423474c9e37aee37841a5ee35 | refs/heads/main | 2023-01-07T11:33:31.667688 | 2020-11-03T06:58:33 | 2020-11-03T06:58:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,184 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from lib.common import *
def run_domain(http,ob):
    """Probe a SiteEngine 5.x site for the api.php logout open-redirect.

    :param http: HTTP client handle forwarded to requestUrl().
    :param ob: dict describing the scan target; this code reads the keys
        scheme, domain, base_path, task_id, domain_id and level.
    :return: list of scan records; empty when the target is not vulnerable
        or an error occurred (errors are logged, not raised).
    """
    list = []
    try:
        # NOTE(review): 'domain' is assigned but never used below.
        domain = ob['domain']
        detail = u''
        url = "%s://%s%s" % (ob['scheme'],ob['domain'],ob['base_path'])
        # Static asset that fingerprints a SiteEngine install.
        expurl="%s%s"%(url,"admin/images/css.css")
        # Logout endpoint with an attacker-controlled redirect target.
        url+="api.php?action=logout&forward=http://www.baidu.com"
        r,c=requestUrl(http,expurl,ob['task_id'],ob['domain_id'])
        if c.find("siteengine")>=0:
            res, content = requestUrl(http,url,ob['task_id'],ob['domain_id'])
            # Vulnerable when the server blindly follows the forward param.
            if res.has_key('location') and res['location'] == 'http://www.baidu.com':
                request = getRequest(url)
                response = getResponse(res)
                list.append(getRecord(ob,ob['scheme']+"://"+ob['domain'],ob['level'],detail,request,response))
    except Exception,e:
        logging.getLogger().error("File:SITEENGINE5.xpagejumpscript.py, run_domain function :" + str(e) + ",task id:" + ob['task_id'] + ",domain id:" + ob['domain_id'])
        write_scan_log(ob['task_id'],ob['domain_id'],"File:SITEENGINE5.xpagejumpscript.py, run_domain function :" + str(e))
    #end try
    return list
#end def | [
"[email protected]"
] | |
39a870579ef4ed97598cbc4f4f6818c96489c04f | bf2704bf2a65eda229fe52dc3bc37d30655ad3db | /microsip_consolidador/settings/common.py | 9e54dbf1eb5d8ef31c6c19af059d8f79338e5a89 | [] | no_license | ruff0/microsip_consolidador | 29276c6f96e2f2d3fb9eb06006234e7773c1aa8f | e8763651c5935d12f93a5413ea593dea16043f64 | refs/heads/master | 2021-05-03T22:02:45.045087 | 2014-04-02T00:50:36 | 2014-04-02T00:50:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,516 | py | #encoding:utf-8
# Identificando la ruta del proyecto
import os
import fdb
import sqlite3
from local_settings import MICROSIP_MODULES
# Project root: the settings directory with the trailing 'settings' removed.
# NOTE(review): str.strip('settings') strips *characters* from both ends,
# not the literal suffix -- fragile if the path begins/ends with those letters.
RUTA_PROYECTO =os.path.dirname(os.path.realpath(__file__)).strip('settings')
ADMINS = (
    # ('Your Name', '[email protected]'),
)
MANAGERS = ADMINS
# All ORM traffic is routed through the project's custom database router.
DATABASE_ROUTERS = ['microsip_consolidador.libs.databases_routers.MainRouter']
# Populated below with one entry per discovered Microsip company database.
MICROSIP_DATABASES = {}
# Default connection: local SQLite file holding users and connection rows.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': RUTA_PROYECTO + 'data\USERS.sqlite',
        'USER': '',      # Not used with sqlite3.
        'PASSWORD': '',  # Not used with sqlite3.
        'HOST': '',      # Empty string = localhost. Not used with sqlite3.
        'PORT': '',      # Empty string = default. Not used with sqlite3.
        'ATOMIC_REQUESTS': True,
    },
}
# Discover Firebird databases for every connection registered in the local
# SQLite catalogue (auth_conexiondb) and register them as extra Django
# connections at import time.  Python 2 code.
try:
    users_conn = sqlite3.connect(RUTA_PROYECTO + 'data\USERS.sqlite')
    users_cur = users_conn.cursor()
    users_cur.execute('''SELECT * FROM auth_conexiondb''')
    conexiones_rows = users_cur.fetchall()
    users_conn.close()
    for conexion in conexiones_rows:
        # Row layout by position: 0=id, 3=host, 4=data folder, 5=user,
        # 6=password -- presumably the auth_conexiondb schema; TODO confirm.
        conexion_id = conexion[0]
        conexion_id = "%02d" % conexion_id
        host = conexion[3]
        password = conexion[6]
        user = conexion[5]
        carpeta_datos = conexion[4]
        conexion_exitosa = True
        # Probe the server by opening its CONFIG.FDB system database and
        # listing the companies (EMPRESAS) it hosts.
        try:
            db= fdb.connect(host=host, user=user, password=password, database="%s\System\CONFIG.FDB"%carpeta_datos )
        except fdb.DatabaseError:
            conexion_exitosa = False
        else:
            cur = db.cursor()
            cur.execute("SELECT NOMBRE_CORTO FROM EMPRESAS")
            empresas_rows = cur.fetchall()
            db.close()
        if conexion_exitosa:
            # Connection entry for the per-installation CONFIG database.
            DATABASES[ '%s-CONFIG'%conexion_id ] = {
                'ENGINE': 'django.db.backends.firebird',
                'NAME': '%s\System\CONFIG.FDB'% carpeta_datos,
                'USER': user,
                'PASSWORD': password,
                'HOST': host,
                'PORT': '3050',
                'OPTIONS' : {'charset':'ISO8859_1'},
                'ATOMIC_REQUESTS': True,
            }
            # One connection per company database found on the server;
            # spaces in company names become underscores in the alias.
            for empresa in empresas_rows:
                try:
                    name = '%s\%s.FDB'% (carpeta_datos, empresa[0])
                except UnicodeDecodeError:
                    pass
                else:
                    MICROSIP_DATABASES['%s-%s'%(conexion_id, empresa[0].replace(' ','_'))] = {
                        'ENGINE': 'django.db.backends.firebird',
                        'NAME': name,
                        'USER': user,
                        'PASSWORD': password,
                        'HOST': host,
                        'PORT': '3050',
                        'OPTIONS' : {'charset':'ISO8859_1'},
                        'ATOMIC_REQUESTS': True,
                    }
                    DATABASES['%s-%s'%(conexion_id, empresa[0].replace(' ','_'))] = {
                        'ENGINE': 'django.db.backends.firebird',
                        'NAME': name,
                        'USER': user,
                        'PASSWORD': password,
                        'HOST': host,
                        'PORT': '3050',
                        'OPTIONS' : {'charset':'ISO8859_1'},
                        'ATOMIC_REQUESTS': True,
                    }
except sqlite3.Error, e:
    # Keeps the module importable when the catalogue DB is missing/corrupt.
    print "Error %s:" % e.args[0]
# Local time zone for this installation (tz database name).
TIME_ZONE = 'America/Mazatlan'
# Language code for this installation.
LANGUAGE_CODE = 'es-mx'
SITE_ID = 1
# Internationalization / locale-aware formatting machinery.
USE_I18N = True
USE_L10N = True
# Use timezone-aware datetimes.
USE_TZ = True
#MEDIA_ROOT = ''
# User-uploaded files live under the project's media/ directory.
MEDIA_ROOT = os.path.join(RUTA_PROYECTO,'media')
# NOTE(review): MEDIA_URL is normally a URL with a trailing slash, not a
# filesystem path -- confirm this joins to what the templates expect.
MEDIA_URL = os.path.join(RUTA_PROYECTO,'media/')
# URL prefix for static files.
STATIC_URL = '/static/'
# Additional locations of static files (absolute paths, forward slashes).
STATICFILES_DIRS = (
)
# Finder classes that know how to locate static files.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    'dajaxice.finders.DajaxiceFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# SECURITY NOTE(review): the secret key is committed to source control;
# it should be rotated and loaded from the environment instead.
SECRET_KEY = '3pq$&*)sd$k_olmn@lup_5)-)d=qk-&)18!+5bw7+$z++n2jm@'
# Template source loaders, tried in order.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'microsip_api.middleware.CustomerMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    # 'django.middleware.cache.CacheMiddleware',
    'django.middleware.transaction.TransactionMiddleware',
    'django.middleware.cache.FetchFromCacheMiddleware',
    # Uncomment the next line for simple clickjacking protection:
    # 'django.middleware.clickjacking.XFrameOptionsMiddleware',
)
# Python dotted path to the WSGI application used by Django's runserver.
WSGI_APPLICATION = 'microsip_consolidador.wsgi.application'
TEMPLATE_DIRS = (
    (RUTA_PROYECTO + '/templates'),
)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.debug',
    'django.core.context_processors.i18n',
    'django.core.context_processors.media',
    'django.core.context_processors.static',
    'django.core.context_processors.request',
    'django.contrib.messages.context_processors.messages'
)
# Logging: e-mail the site admins on every HTTP 500 when DEBUG is False.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'filters': {
        'require_debug_false': {
            '()': 'django.utils.log.RequireDebugFalse'
        }
    },
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'filters': ['require_debug_false'],
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
#Configuraciones para enviar mensajes usando gmail
# Gmail SMTP settings.  SECURITY NOTE(review): credentials are hard-coded
# in the repo; move them to environment variables or local_settings.
EMAIL_USE_TLS = True
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = '[email protected]'
EMAIL_HOST_PASSWORD = 'clavedelcorreo'
EMAIL_PORT = 587
| [
"[email protected]"
] | |
f06b314effbea49196936e04d020d70611e2ee01 | 3f9dd28efb7fb66b95a7b33ae3d15f6e4d0925f5 | /pydar/format.py | d8e1bb84cbbd5663e6463bfea12ee2347e12622b | [
"MIT"
] | permissive | MomsFriendlyRobotCompany/pydar | 926cdbb9431204b60a0816815904c1b27f415f0d | 20d5a6b382b4f047ba19f8f82a15a67ab3537543 | refs/heads/master | 2020-03-20T11:19:28.451746 | 2019-12-25T01:46:29 | 2019-12-25T01:46:29 | 137,399,625 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 80 | py |
from collections import namedtuple
# One captured measurement: the scan payload plus the time it was taken.
Scan = namedtuple('Scan', ['scan', 'timestamp'])
| [
"[email protected]"
] | |
2e858c17d93645b79fec3bc950bfad4291ef27b3 | 4e96f383d4703ad8ee58869ed91a0c8432c8a051 | /Cura/Cura/cura/Backups/BackupsManager.py | ba6fcab8d75e54207a7423215cf29cc707d74109 | [
"LGPL-3.0-only",
"GPL-3.0-only"
] | permissive | flight7788/3d-printing-with-moveo-1 | b2dba26010c4fa31815bc1d2d0966161a8600081 | 7fcb9c6b5da9245d54ac917de8c2a7f5148e42b0 | refs/heads/Feature_Marlin_with_AlanBoy | 2022-08-30T18:36:44.785058 | 2020-05-30T07:52:58 | 2020-05-30T07:52:58 | 212,583,912 | 0 | 0 | MIT | 2020-05-16T07:39:47 | 2019-10-03T13:13:01 | C | UTF-8 | Python | false | false | 3,039 | py | # Copyright (c) 2018 Ultimaker B.V.
# Cura is released under the terms of the LGPLv3 or higher.
from typing import Dict, Optional, Tuple, TYPE_CHECKING
from UM.Logger import Logger
from cura.Backups.Backup import Backup
if TYPE_CHECKING:
from cura.CuraApplication import CuraApplication
## The BackupsManager is responsible for managing the creating and restoring of
# back-ups.
#
# Back-ups themselves are represented in a different class.
class BackupsManager:
    """Orchestrates creating and restoring configuration back-ups.

    The back-up payload itself is modelled by the ``Backup`` class; this
    manager only drives the process and pauses auto-save around it.
    """

    def __init__(self, application: "CuraApplication") -> None:
        self._application = application

    def createBackup(self) -> Tuple[Optional[bytes], Optional[Dict[str, str]]]:
        """Snapshot the current configuration.

        :return: The back-up as zipped bytes plus its metadata dict (e.g.
            version info).  Plain data is returned instead of a ``Backup``
            object so plugins only interact with this API.
        """
        self._disableAutoSave()
        snapshot = Backup(self._application)
        snapshot.makeFromCurrent()
        self._enableAutoSave()
        return snapshot.zip_file, snapshot.meta_data

    def restoreBackup(self, zip_file: bytes, meta_data: Dict[str, str]) -> None:
        """Restore a back-up from its zipped bytes and metadata dict.

        Nothing is restored unless ``meta_data`` carries a "cura_release"
        version entry.
        """
        if not meta_data.get("cura_release", None):
            # Without a version number we refuse to restore.
            Logger.log("w", "Tried to restore a backup without specifying a Cura version number.")
            return
        self._disableAutoSave()
        snapshot = Backup(self._application, zip_file = zip_file, meta_data = meta_data)
        if snapshot.restore():
            # Cura must restart for the restored data to take effect; skip
            # the usual save-on-exit so the just-restored files survive.
            self._application.windowClosed(save_data = False)

    def _disableAutoSave(self) -> None:
        """Pause the auto-save plug-in so it cannot interfere with a back-up."""
        autosave_plugin = self._application.getAutoSave()
        # Auto-save only exists once application init has finished.
        if not autosave_plugin:
            Logger.log("e", "Unable to disable the autosave as application init has not been completed")
            return
        autosave_plugin.setEnabled(False)

    def _enableAutoSave(self) -> None:
        """Resume the auto-save plug-in once the back-up work is done."""
        autosave_plugin = self._application.getAutoSave()
        # Auto-save only exists once application init has finished.
        if not autosave_plugin:
            Logger.log("e", "Unable to enable the autosave as application init has not been completed")
            return
        autosave_plugin.setEnabled(True)
| [
"[email protected]"
] | |
3c639d64247b4a49b28c974d5d915777ea97abc0 | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /egHeSWSjHTgzMysBX_11.py | 07299dceba5a669196df27a142df5458fa762af5 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 501 | py | """
Create a function that takes a fraction as a string ("numerator/denominator") and returns half of it.
### Examples
half_a_fraction("1/2") ➞ "1/4"
half_a_fraction("6/8") ➞ "3/8"
half_a_fraction("3/8") ➞ "3/16"
### Notes
Always return the simplified fraction.
"""
def half_a_fraction(fract):
    """Return half of the fraction given as a "num/den" string.

    The result is always reduced to lowest terms, as the spec requires:
    "1/2" -> "1/4", "6/8" -> "3/8", "3/9" -> "1/6".
    (Bug fix: the old code only halved the numerator when it was even and
    never reduced the result, e.g. it returned "3/18" for "3/9".)
    """
    from math import gcd  # stdlib; local import keeps module load unchanged
    numerator, denominator = (int(part) for part in fract.split("/"))
    # Halving a fraction doubles its denominator.
    denominator *= 2
    # Reduce to lowest terms.
    common = gcd(numerator, denominator)
    return '{}/{}'.format(numerator // common, denominator // common)
| [
"[email protected]"
] | |
22cca10d1314023d365cc0bdaae9d23ec9feeb56 | ac4b9385b7ad2063ea51237fbd8d1b74baffd016 | /.history/utils/ocr/handle_image_20210209170155.py | 17a1ffee42abc92b121aff59d84aa5bebaf2bf31 | [] | no_license | preethanpa/ssoemprep | 76297ef21b1d4893f1ac2f307f60ec72fc3e7c6f | ce37127845253c768d01aeae85e5d0d1ade64516 | refs/heads/main | 2023-03-09T00:15:55.130818 | 2021-02-20T06:54:58 | 2021-02-20T06:54:58 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,092 | py | import os
import cv2
import re
import numpy as np
from PIL import Image
import pytesseract
from pytesseract import Output
from fpdf import FPDF
'''
IMAGE HANDLING METHODS
'''
# get grayscale image
def get_grayscale(image):
    """Convert a BGR OpenCV image to a single-channel grayscale image."""
    grayscale = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
    return grayscale
# blur removal
def remove_blur(image):
    """Run a 5x5 median filter over the image (suppresses speckle noise)."""
    filtered = cv2.medianBlur(image, 5)
    return filtered
# noise removal
def remove_noise(image):
    """Denoise a colour image with OpenCV's non-local-means filter."""
    denoised = cv2.fastNlMeansDenoisingColored(image, None, 10, 10, 7, 15)
    return denoised
#thresholding
def thresholding(image):
    """Binarise a grayscale image using Otsu's automatic threshold."""
    _, binary = cv2.threshold(image, 0, 255, cv2.THRESH_BINARY + cv2.THRESH_OTSU)
    return binary
#dilation
def dilate(image):
    """Grow foreground regions by one 5x5 dilation pass."""
    structuring_element = np.ones((5, 5), np.uint8)
    return cv2.dilate(image, structuring_element, iterations=1)
#erosion
def erode(image):
    """Shrink foreground regions by one 5x5 erosion pass."""
    structuring_element = np.ones((5, 5), np.uint8)
    return cv2.erode(image, structuring_element, iterations=1)
def extract_pdf_from_image(fileName='', pdf_path='', action='', psm=3):
    '''
    OCR an image file and save the result as a searchable PDF.

    fileName -- path of the source image
    pdf_path -- directory the PDF is written to (created if missing)
    action   -- pre-processing step: 1 = denoise, 2 = grayscale, 3 = deblur;
                any other value (including the default '') uses the raw image
    psm      -- tesseract page segmentation mode (default 3)

    Returns the path of the generated PDF.
    '''
    custom_config = r'-l eng --psm ' + str(psm)
    pdfdir = pdf_path
    if not os.path.exists(pdfdir):
        os.makedirs(pdfdir)
    pdfFileName = os.path.basename(fileName).split('.')[0] + '.pdf'
    pdfFilePath = os.path.join(pdfdir, pdfFileName)
    img = cv2.imread(fileName)
    # Pre-process according to the requested action.  Bug fix: the old code
    # left img1 = None for an unrecognised action (including the default '')
    # and then crashed inside pytesseract; fall back to the raw image instead.
    if action == 1:
        img1 = remove_noise(img)
    elif action == 2:
        img1 = get_grayscale(img)
    elif action == 3:
        img1 = remove_blur(img)
    else:
        img1 = img
    # Honour the psm argument (previously computed but never passed on).
    text = pytesseract.image_to_pdf_or_hocr(img1, extension='pdf', config=custom_config)
    with open(pdfFilePath, mode='w+b') as f:
        f.write(text)
    return pdfFilePath
def convert_text_to_pdf(text='', pdf_path='', filename=''):
    '''
    Write `text` (bytes) to a temp .txt file, then render it line by line
    into a PDF named `<filename>.pdf` inside `pdf_path`.

    Bug fixes versus the original: a stray "(" after the pdfdir existence
    check was a SyntaxError, and the reading file handle was never closed.
    '''
    tempdir = "/tmp"
    pdfdir = pdf_path
    textFileName = tempdir + '/' + filename + ".txt"
    pdfFileName = pdfdir + '/' + filename + ".pdf"
    if not os.path.exists(tempdir):
        os.makedirs(tempdir)
    if not os.path.exists(pdfdir):
        os.makedirs(pdfdir)
    pdf = FPDF()
    pdf.add_page()
    pdf.set_font("Arial", size=15)
    # Persist the raw text first so it can be re-read as decoded lines.
    with open(textFileName, mode='w+b') as f:
        f.write(text)
    line = 1
    with open(textFileName, "r") as f:
        for x in f:
            # Strip typographic dashes/quotes FPDF's latin-1 font can't encode.
            x1 = re.sub(u"(\u2014|\u2018|\u2019|\u201c|\u201d)", "", x)
            pdf.cell(100, 10, txt=x1, ln=line, align='L')
            line = line + 1
    pdf.output(pdfFileName, 'F')
def mark_region(image_path):
    """Detect likely text regions in a document image.

    Returns (annotated_image, line_items_coordinates) where each coordinate
    entry is [(x, y), (2200, y + h)] for a detected region of interest.

    Bug fixes versus the original: a stray ")" on the return line was a
    SyntaxError, and `image` was unbound when no contour matched a filter.
    """
    im = cv2.imread(image_path)
    gray = cv2.cvtColor(im, cv2.COLOR_BGR2GRAY)
    blur = cv2.GaussianBlur(gray, (9, 9), 0)
    thresh = cv2.adaptiveThreshold(blur, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY_INV, 11, 30)
    # Dilate to combine adjacent text contours
    kernel = cv2.getStructuringElement(cv2.MORPH_RECT, (9, 9))
    dilate = cv2.dilate(thresh, kernel, iterations=4)
    # Find contours, highlight text areas, and extract ROIs
    cnts = cv2.findContours(dilate, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)
    cnts = cnts[0] if len(cnts) == 2 else cnts[1]
    image = im  # fallback when no rectangle is drawn
    line_items_coordinates = []
    for c in cnts:
        area = cv2.contourArea(c)
        x, y, w, h = cv2.boundingRect(c)
        if y >= 600 and x <= 1000:
            if area > 10000:
                image = cv2.rectangle(im, (x, y), (2200, y + h), color=(255, 0, 255), thickness=3)
                line_items_coordinates.append([(x, y), (2200, y + h)])
        if y >= 2400 and x <= 2000:
            image = cv2.rectangle(im, (x, y), (2200, y + h), color=(255, 0, 255), thickness=3)
            line_items_coordinates.append([(x, y), (2200, y + h)])
    return image, line_items_coordinates
"{[email protected]}"
] | |
b15ae00c90717a2a67c39cb9e72a1951ed5f1ae4 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_217/ch21_2019_08_26_19_58_29_478795.py | 8e46bdfeb1e79e43246166f70246709b75ed0188 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 184 | py | def valor_da_conta(valor):
valor = int(input('Qual valor da conta?:' ))
com10% = valor + valor*(10/100)
return com10%
print("Valor da conta com 10%: R${0}".format(com10%)) | [
"[email protected]"
] | |
a673490634f6d5ccbf0aab99ff19ac353dd7567a | c21faf85627b1cfd96494aac73cc40e5f11ebb46 | /results/test_188.py | 0b34ac896ae50c37329c5d2aad8aae65d061d40a | [] | no_license | ekkya/Cyclomatic-Complexity | d02c61e009087e7d51738e60605875741532b878 | 172db2efdd974f5abad964e335552aec974b47cb | refs/heads/master | 2021-08-28T17:13:14.718314 | 2017-12-12T22:04:13 | 2017-12-12T22:04:13 | 112,042,202 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 102,256 | py | """
Author: Ankit Agarwal (ankit167)
Usage: python google.py <keyword>
Description: Script googles the keyword and opens
top 5 (max) search results in separate
tabs in the browser
Version: 1.0
"""
import webbrowser, sys, pyperclip, requests, bs4
def main():
    """Google the command-line keyword (or the clipboard contents) and open
    up to five of the top results, each in its own browser tab."""
    # Fall back to the clipboard when no keyword was given on the CLI.
    keyword = ' '.join(sys.argv[1:]) if len(sys.argv) > 1 else pyperclip.paste()
    response = requests.get('http://google.com/search?q=' + keyword)
    response.raise_for_status()
    result_links = bs4.BeautifulSoup(response.text).select('.r a')
    for link in result_links[:5]:
        webbrowser.open('http://google.com' + link.get('href'))
if __name__ == '__main__':
main()"""Get the number of each character in any given text.
Inputs:
A txt file -- You will be asked for an input file. Simply input the name
of the txt file in which you have the desired text.
"""
import pprint
import collections
def main():
    """Prompt for a text file and print per-character counts
    (case-insensitive: input is upper-cased before counting)."""
    path = input('File Name: ')
    with open(path, 'r') as handle:
        contents = handle.read()
    counts = collections.Counter(contents.upper())
    print(pprint.pformat(counts))
if __name__ == "__main__":
main()# Script Name : pscheck.py
# Author : Craig Richards
# Created : 19th December 2011
# Last Modified : 17th June 2013
# Version : 1.1
# Modifications : 1.1 - 17/06/13 - CR - Changed to functions, and check os before running the program
# Description : Process check on Nix boxes, diplsay formatted output from ps command
import commands, os, string
def ps():
    """Prompt for a program name, grep it out of `ps -f`, and pretty-print
    the owner, PIDs and start time of the first match (Python 2)."""
    program = raw_input("Enter the name of the program to check: ")
    try:
        #perform a ps command and assign results to a list
        output = commands.getoutput("ps -f|grep " + program)
        proginfo = string.split(output)
        #display results
        # NOTE(review): the bare except below swallows IndexError when the
        # grep returns fewer fields than expected (e.g. no match).
        print "\n\
Full path:\t\t", proginfo[5], "\n\
Owner:\t\t\t", proginfo[0], "\n\
Process ID:\t\t", proginfo[1], "\n\
Parent process ID:\t", proginfo[2], "\n\
Time started:\t\t", proginfo[4]
    except:
        print "There was a problem with the program."
def main():
    """Run the process check on POSIX systems; refuse on Windows (Python 2)."""
    if os.name == "posix": # Unix/Linux/MacOS/BSD/etc
        ps() # Call the function
    elif os.name in ("nt", "dos", "ce"): # if the OS is windows
        print "You need to be on Linux or Unix to run this"
if __name__ == '__main__':
main()from bs4 import BeautifulSoup
import datetime
import mechanize
import urllib2
# Create a Browser
b = mechanize.Browser()
# Disable loading robots.txt
b.set_handle_robots(False)
# Present a desktop user-agent so the site serves the normal form.
b.addheaders = [('User-agent',
                 'Mozilla/4.0 (compatible; MSIE 5.0; Windows 98;)')]
# Navigate
b.open('http://cbseresults.nic.in/jee/jee_2015.htm')
# Choose a form
b.select_form(nr=0)
# Fill it out
b['regno'] = '37000304'
# Brute-force the date of birth one day at a time over this range.
currentdate = datetime.date(1997,3,10)
enddate = datetime.date(1998,4,1)
while currentdate <= enddate:
    ct=0
    #print currentdate
    yyyymmdd = currentdate.strftime("%Y/%m/%d")
    # Re-order YYYY/MM/DD into the DD/MM/YYYY format the form expects.
    ddmmyyyy = yyyymmdd[8:] + "/" + yyyymmdd[5:7] + "/" +yyyymmdd[:4]
    print(ddmmyyyy)
    b.open('http://cbseresults.nic.in/jee/jee_2015.htm')
    b.select_form(nr=0)
    b['regno'] = '37000304'
    b['dob'] = ddmmyyyy
    fd = b.submit()
    #print(fd.read())
    soup = BeautifulSoup(fd.read(),'html.parser')
    # Count result tables: presumably exactly 6 tables means "no result"
    # for this DOB guess -- TODO confirm against the live page markup.
    for writ in soup.find_all('table'):
        ct = ct + 1;
    #print (ct)
    if ct == 6:
        print("---fail---")
    else:
        print("--true--")
        break;
    currentdate += datetime.timedelta(days=1)
#print fd.read()# Script Name : new_script.py
# Author : Craig Richards
# Created : 20th November 2012
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This will create a new basic template for a new script
import os # Load the library module
import sys # Load the library module
import datetime # Load the library module
# Usage text shown for missing or invalid arguments (Python 2 script).
text = '''You need to pass an argument for the new script you want to create, followed by the script name. You can use
-python : Python Script
-bash : Bash Script
-ksh : Korn Shell Script
-sql : SQL Script'''
if len(sys.argv) < 3:
    print text
    sys.exit()
if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv:
    print text
    sys.exit()
else:
    # Map the language flag to its template config file and file extension.
    if '-python' in sys.argv[1]:
        config_file = "python.cfg"
        extension = ".py"
    elif '-bash' in sys.argv[1]:
        config_file = "bash.cfg"
        extension = ".bash"
    elif '-ksh' in sys.argv[1]:
        config_file = "ksh.cfg"
        extension = ".ksh"
    elif '-sql' in sys.argv[1]:
        config_file = "sql.cfg"
        extension = ".sql"
    else:
        print 'Unknown option - ' + text
        sys.exit()
# Template source and output directories come from environment variables.
confdir = os.getenv("my_config")
scripts = os.getenv("scripts")
dev_dir = "Development"
newfile = sys.argv[2]
output_file = (newfile + extension)
outputdir = os.path.join(scripts,dev_dir)
script = os.path.join(outputdir, output_file)
input_file = os.path.join(confdir,config_file)
# Placeholders rewritten while copying the template.
old_text = " Script Name : "
new_text = (" Script Name : " + output_file)
if not(os.path.exists(outputdir)):
    os.mkdir(outputdir)
newscript = open(script, 'w')
input = open(input_file, 'r')  # NOTE(review): shadows the builtin 'input'
today = datetime.date.today()
old_date = " Created :"
new_date = (" Created : " + today.strftime("%d %B %Y"))
# Copy the template line by line, stamping script name and creation date.
for line in input:
    line = line.replace(old_text, new_text)
    line = line.replace(old_date, new_date)
    newscript.write(line)
# Script Name : osinfo.py
# Authors : {'geekcomputers': 'Craig Richards', 'dmahugh': 'Doug Mahugh','rutvik1010':'Rutvik Narayana Nadimpally','y12uc231': 'Satyapriya Krishna', 'minto4644':'Mohit Kumar'}
# Created : 5th April 2012
# Last Modified : July 19 2016
# Version : 1.0
# Modification 1 : Changed the profile to list again. Order is important. Everytime we run script we don't want to see different ordering.
# Modification 2 : Fixed the AttributeError checking for all properties. Using hasttr().
# Modification 3 : Removed ': ' from properties inside profile.
# Description : Displays some information about the OS you are running this script on
import platform as pl
# Names of `platform` accessor functions to report, kept in a fixed order so
# repeated runs print identically (a list, not a set, on purpose).
profile = [
    'architecture',
    'linux_distribution',
    'mac_ver',
    'machine',
    'node',
    'platform',
    'processor',
    'python_build',
    'python_compiler',
    'python_version',
    'release',
    'system',
    'uname',
    'version',
]


class bcolors:
    # ANSI escape sequences for coloured/styled terminal output.
    HEADER = '\033[95m'
    OKBLUE = '\033[94m'
    OKGREEN = '\033[92m'
    WARNING = '\033[93m'
    FAIL = '\033[91m'
    ENDC = '\033[0m'       # reset all attributes
    BOLD = '\033[1m'
    UNDERLINE = '\033[4m'


# hasattr() guard: some accessors (e.g. linux_distribution, mac_ver) are
# absent on certain platforms / Python versions.
for key in profile:
    if hasattr(pl, key):
        print(key + bcolors.BOLD + ": " + str(getattr(pl, key)()) + bcolors.ENDC)
# author:[email protected]
#!/usr/bin/env python
# -*- coding=utf-8 -*-
import os
# define the result filename
resultfile = 'result.csv'


# the merge func
def merge():
    """Concatenate every *.csv file in the current directory into `resultfile`.

    The output file itself is skipped, so re-running is safe.  Each source
    file's content is written followed by a newline separator.
    """
    # os.path.splitext is robust for names with no dot or several dots;
    # the original `f.split('.')[1] == 'csv'` raised IndexError on
    # extension-less files and wrongly matched names like 'x.csv.bak'.
    csvfiles = [f for f in os.listdir('.')
                if f != resultfile and os.path.splitext(f)[1] == '.csv']
    # `with` guarantees the handles are closed even if a read fails.
    with open(resultfile, 'w') as writefile:
        for csvfile in csvfiles:
            with open(csvfile) as readfile:
                print('File {} readed.'.format(csvfile))
                # trailing newline keeps files from running together
                writefile.write(readfile.read() + '\n')
    print('\nFile {} wrote.'.format(resultfile))
# the main program
if __name__ == '__main__':
merge()import mechanize
import re
import urllib2
from random import *
# Scrape one page of Google results for PDF links (GATE/PSU papers) and
# download every hit.  (Python 2; needs the third-party `mechanize` module.)
br=mechanize.Browser()
# Pretend to be desktop Chrome so Google serves the normal results markup.
br.addheaders = [('User-Agent','Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36')]
br.set_handle_robots(False)
#For page exploration
page=raw_input('Enter Page No:')
#print type(page)
p=urllib2.Request('https://www.google.co.in/search?q=gate+psu+2017+ext:pdf&start='+page)
ht=br.open(p)
# Result URLs appear inside <cite class="_Rm"> elements on this page layout.
text='<cite\sclass="_Rm">(.+?)</cite>'
patt=re.compile(text)
h=ht.read()
urls=re.findall(patt,h)
# Strip the <b>/</b> highlighting Google inserts around matched terms.
# NOTE: `int` here shadows the builtin of the same name.
int=0
while int<len(urls):
    urls[int]=urls[int].replace("<b>","")
    urls[int]=urls[int].replace("</b>","")
    int=int+1
print urls
# Download each URL; the saved name is 'psu2' + last path component + '.pdf'.
for url in urls:
    try:
        temp=url.split("/")
        q=temp[len(temp)-1]
        if "http" in url:
            r=urllib2.urlopen(url)
        else:
            r=urllib2.urlopen("http://"+url)
        # NOTE: `file` shadows the (Python 2) builtin of the same name.
        file=open('psu2'+q+'.pdf','wb')
        file.write(r.read())
        file.close()
        print "Done"
    except urllib2.URLError as e:
        print "Sorry there exists a problem with this URL Please Download this Manually "+str(url)
# Script Name : logs.py
# Author : Craig Richards
# Created : 13th October 2011
# Last Modified : 14 February 2016
# Version : 1.2
#
# Modifications : 1.1 - Added the variable zip_program so you can set it for the zip program on whichever OS, so to run on a different OS just change the locations of these two variables.
# : 1.2 - Tidy up comments and syntax
#
# Description : This script will search for all *.log files in the given directory, zip them using the program you specify and then date stamp them
import os # Load the Library Module
from time import strftime # Load just the strftime Module from Time
logsdir = "c:\puttylogs" # Set the Variable logsdir
zip_program = "zip.exe" # Set the Variable zip_program - 1.1
for files in os.listdir(logsdir): # Find all the files in the directory
if files.endswith(".log"): # Check to ensure the files in the directory end in .log
files1 = files + "." + strftime("%Y-%m-%d") + ".zip" # Create the Variable files1, this is the files in the directory, then we add a suffix with the date and the zip extension
os.chdir(logsdir) # Change directory to the logsdir
os.system(zip_program + " " + files1 +" "+ files) # Zip the logs into dated zip files for each server. - 1.1
os.remove(files) # Remove the original log files"""
Author: Shreyas Daniel (shreydan)
Install: tweepy - "pip install tweepy"
API: Create a twitter app "apps.twitter.com" to get your OAuth requirements.
Version: 1.0
Tweet text and pics directly from the terminal.
"""
import tweepy, os
def getStatus():
    """Read lines from stdin until a blank line; return them newline-joined."""
    collected = []
    while True:
        entry = raw_input()
        if not entry:          # blank line terminates input
            break
        collected.append(entry)
    return '\n'.join(collected)
def tweetthis(type):
    # Post either a plain text tweet or a picture with a status, depending on
    # `type` ("text" or "pic").  Uses the module globals `api` and `user`
    # created by initialize().  NOTE: the parameter shadows builtin `type`.
    if type == "text":
        print "Enter your tweet "+user.name
        tweet = getStatus()
        try:
            api.update_status(tweet)
        except Exception as e:
            # tweepy raises on e.g. duplicate status or auth failure
            print e
            return
    elif type == "pic":
        print "Enter pic path "+user.name
        pic = os.path.abspath(raw_input())
        print "Enter status "+user.name
        title = getStatus()
        try:
            api.update_with_media(pic, status=title)
        except Exception as e:
            print e
            return
    print "\n\nDONE!!"
def initialize():
    # Build the authenticated tweepy API object and look up the current user.
    # Fill in the four OAuth credentials (from apps.twitter.com) before use.
    global api, auth, user
    ck = "here" # consumer key
    cks = "here" # consumer key SECRET
    at = "here" # access token
    ats = "here" # access token SECRET
    auth = tweepy.OAuthHandler(ck,cks)
    auth.set_access_token(at,ats)
    api = tweepy.API(auth)
    user = api.me()
def main():
    # Ask whether to tweet text (1) or a picture (2), then dispatch.
    doit = int(raw_input("\n1. text\n2. picture\n"))
    initialize()
    if doit == 1:
        tweetthis("text")
    elif doit == 2:
        tweetthis("pic")
    else:
        # any other number: prompt again (recursive retry)
        print "OK, Let's try again!"
        main()
main()# Script Name : dice.py
# Author : Craig Richards
# Created : 05th February 2017
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This will randomly select two numbers, like throwing dice, you can change the sides of the dice if you wish
import random
class Die(object):
    """A die with a configurable number of sides (default 6)."""

    def __init__(self):
        self.sides = 6

    def set_sides(self, sides_change):
        """Change the side count; values below 4 are rejected, because a
        die needs at least 4 faces."""
        # Bug fix: the original tested/assigned `self.sides_change`, an
        # attribute that never exists, so every call raised AttributeError.
        # The parameter itself is what must be checked and stored.
        if sides_change >= 4:
            self.sides = sides_change
            print("change sides!")
        else:
            print("wrong sides!")

    def roll(self):
        """Return a uniformly random face value in 1..sides."""
        return random.randint(1, self.sides)
# Demo: create two dice and set both to 4 sides before rolling them.
d = Die()
d1 = Die()
d.set_sides(4)
d1.set_sides(4)
print (d.roll(), d1.roll())from sys import argv # import argment variable
script, rows, columns = argv #define rows and columns for the table and assign them to the argument variable
def table(rows, columns):
    # Print a multiplication table: a header line of the indices 1..rows,
    # then one line per value 1..columns with its products.  `rows` and
    # `columns` arrive as strings from argv, hence the int() conversions.
    for i in range(1, int(rows) + 1 ): #it's safe to assume that the user would mean 12 rows when they provide 12 as an argument, b'coz 12 will produce 11 rows
        print "\t", i,
    print "\n\n"
    for i in range(1, int(columns) + 1 ):
        print i,
        for j in range(1, int(rows) + 1 ):
            print "\t",i*j,
        print "\n\n"
table(rows, columns)import os
import sys
import shutil
# Extension whitelists (upper-case, without the dot) for each destination
# category folder used by the arranger loop below.
Music = ['MP3', 'WAV', 'WMA', 'MKA', 'AAC', 'MID', 'RA', 'RAM', 'RM', 'OGG']
Codes = ['CPP', 'RB', 'PY', 'HTML', 'CSS', 'JS']
Compressed = ['RAR', 'JAR', 'ZIP', 'TAR', 'MAR', 'ISO', 'LZ', '7ZIP', 'TGZ', 'GZ', 'BZ2']
Documents = ['DOC', 'DOCX', 'PPT', 'PPTX', 'PAGES', 'PDF', 'ODT', 'ODP', 'XLSX', 'XLS', 'ODS', 'TXT', 'IN', 'OUT', 'MD']
Images = ['JPG', 'JPEG', 'GIF', 'PNG', 'SVG']
Executables = ['LNK','DEB', 'EXE', 'SH', 'BUNDLE']
Video = ['FLV', 'WMV', 'MOV', 'MP4', 'MPEG', '3GP', 'MKV','AVI']


# Trivial accessors for the whitelists above.
def getVideo():
    return Video
def getMusic():
    return Music
def getCodes():
    return Codes
def getCompressed():
    return Compressed
def getImages():
    return Images
def getExe():
    return Executables
def getDoc():
    return Documents
# taking the location of the Folder to Arrange
try:
    arrange_dir = str(sys.argv[1])
except IndexError:
    # no command-line argument supplied: prompt interactively instead
    arrange_dir = str(raw_input("Enter the Path of directory: "))
# when we make a folder that already exist then WindowsError happen
# changing directory may give WindowsError
def change(direc):
    # chdir into `direc`; on failure (WindowsError, so Windows-only handling)
    # keep prompting for a valid path and retry recursively.
    try:
        os.chdir(direc)
        #print "path changed"
    except WindowsError:
        print "Error! Cannot change the Directory"
        print "Enter a valid directory!"
        direc = str(raw_input("Enter the Path of directory: "))
        change(direc)
change(arrange_dir)
# now we will get the list of all the directories in the folder
list_dir = os.listdir(os.getcwd())
#print list_dir
#check_Folder = False # for organising Folders
# NOTE(review): none of these check_* flags is ever read below.
check_Music = False
check_Video = False
check_Exe = False
check_Code = False
check_Compressed = False
check_Img = False
check_Docs = False
# Category folder names that must never themselves be moved.
main_names = ['Video','Folders','Images','Documents','Music','Codes','Executables','Compressed']
for name in list_dir:
#print name.split('.')
if len(name.split('.')) == 2:
if name.split('.')[1].upper() in getVideo():
try:
os.mkdir("Video")
print "Video Folder Created"
except WindowsError:
print "Images Folder Exists"
old_dir = arrange_dir + "\\" + name
new_dir = arrange_dir + "\Video"
os.chdir(new_dir)
shutil.move(old_dir, new_dir + "\\" + name)
print os.getcwd()
os.chdir(arrange_dir)
#print "It is a folder"
elif name.split('.')[1].upper() in getImages():
try:
os.mkdir("Images")
print "Images Folder Created"
except WindowsError:
print "Images Folder Exists"
old_dir = arrange_dir + "\\" + name
new_dir = arrange_dir + "\Images"
os.chdir(new_dir)
shutil.move(old_dir, new_dir + "\\" + name)
print os.getcwd()
os.chdir(arrange_dir)
#print "It is a folder"
elif name.split('.')[1].upper() in getMusic():
try:
os.mkdir("Music")
print "Music Folder Created"
except WindowsError:
print "Music Folder Exists"
old_dir = arrange_dir + "\\" + name
new_dir = arrange_dir + "\Music"
os.chdir(new_dir)
shutil.move(old_dir, new_dir + "\\" + name)
print os.getcwd()
os.chdir(arrange_dir)
#print "It is a folder"
elif name.split('.')[1].upper() in getDoc():
try:
os.mkdir("Documents")
print "Documents Folder Created"
except WindowsError:
print "Documents Folder Exists"
old_dir = arrange_dir + "\\" + name
new_dir = arrange_dir + "\Documents"
os.chdir(new_dir)
shutil.move(old_dir, new_dir + "\\" + name)
print os.getcwd()
os.chdir(arrange_dir)
#print "It is a folder"
elif name.split('.')[1].upper() in getCodes():
try:
os.mkdir("Codes")
print "Codes Folder Created"
except WindowsError:
print "Codes Folder Exists"
old_dir = arrange_dir + "\\" + name
new_dir = arrange_dir + "\Codes"
os.chdir(new_dir)
shutil.move(old_dir, new_dir + "\\" + name)
print os.getcwd()
os.chdir(arrange_dir)
#print "It is a folder"
elif name.split('.')[1].upper() in getCompressed():
try:
os.mkdir("Compressed")
print "Compressed Folder Created"
except WindowsError:
print "Compressed Folder Exists"
old_dir = arrange_dir + "\\" + name
new_dir = arrange_dir + "\Compressed"
os.chdir(new_dir)
shutil.move(old_dir, new_dir + "\\" + name)
print os.getcwd()
os.chdir(arrange_dir)
#print "It is a folder"
elif name.split('.')[1].upper() in getExe():
try:
os.mkdir("Executables")
print "Executables Folder Created"
except WindowsError:
print "Executables Folder Exists"
old_dir = arrange_dir + "\\" + name
new_dir = arrange_dir + "\Executables"
os.chdir(new_dir)
shutil.move(old_dir, new_dir + "\\" + name)
print os.getcwd()
os.chdir(arrange_dir)
#print "It is a folder"
else:
if name not in main_names:
try:
os.mkdir("Folders")
print "Folders Folder Created"
except WindowsError:
print "Folders Folder Exists"
old_dir = arrange_dir + "\\" + name
new_dir = arrange_dir + "\Folders"
os.chdir(new_dir)
shutil.move(old_dir, new_dir + "\\" + name)
print os.getcwd()
os.chdir(arrange_dir)
print "Done Arranging Files and Folder in your specified directory""""
Written by: Shreyas Daniel - github.com/shreydan
Written on: 26 April 2017
Description: Download latest XKCD Comic with this program.
NOTE:
if this script is launched from the cloned repo, a new folder is created.
Please move the file to another directory to avoid messing with the folder structure.
"""
import requests
from lxml import html
import urllib.request
import os
def main():
    """Download the current front-page XKCD comic into ./comics/.

    The image is saved under its XKCD name with the extension stripped
    (matching the original behaviour).
    """
    # fetch the front page; bail out cleanly on any network failure
    try:
        front_page = requests.get("https://www.xkcd.com")
    except requests.exceptions.RequestException as err:
        print(err)
        exit()

    # the comic <img> lives inside the element with id="comic"
    document = html.fromstring(front_page.content)
    image_src = "https:" + str(document.xpath(".//*[@id='comic']/img/@src")[0])

    # last path component, minus its 4-character extension (e.g. ".png")
    comic_name = image_src.split('/')[-1][:-4]

    # make sure the save directory exists
    save_dir = os.getcwd() + '/comics/'
    if not os.path.exists(save_dir):
        os.makedirs(save_dir)

    # stream the image straight to disk
    urllib.request.urlretrieve(image_src, save_dir + comic_name)
if __name__ == "__main__":
main()# Script Name : check_for_sqlite_files.py
# Author : Craig Richards
# Created : 07 June 2013
# Last Modified : 14 February 2016
# Version : 1.0.1
# Modifications : 1.0.1 - Remove unecessary line and variable on Line 21
# Description : Scans directories to check if there are any sqlite files in there
from __future__ import print_function
import os
def isSQLite3(filename):
    """Return True iff `filename` exists and starts with the SQLite 3 magic.

    A valid database file is at least 100 bytes long (the size of the
    SQLite file header) and begins with b'SQLite format 3\\x00'.
    """
    from os.path import isfile, getsize

    if not isfile(filename):
        return False
    if getsize(filename) < 100:  # SQLite database file header is 100 bytes
        return False
    # Read in binary mode and compare against a bytes literal: the original
    # compared bytes to a str, which is always False on Python 3, and it
    # also leaked the file handle on early exits.
    with open(filename, 'rb') as fd:
        header = fd.read(100)
    return header[:16] == b'SQLite format 3\x00'
# Walk the tree below the current directory and record, for every file,
# whether it is a SQLite database.
log = open('sqlite_audit.txt', 'w')
for r, d, f in os.walk(r'.'):
    for files in f:
        # Bug fix: pass the full path to isSQLite3 -- the bare name only
        # resolved for files directly inside the current directory, so
        # everything in subdirectories was misreported as "NOT sqlite".
        if isSQLite3(os.path.join(r, files)):
            print(files)
            print("[+] '%s' **** is a SQLITE database file **** " % os.path.join(r, files))
            log.write("[+] '%s' **** is a SQLITE database file **** " % files + '\n')
        else:
            log.write("[-] '%s' is NOT a sqlite database file" % os.path.join(r, files) + '\n')
log.write("[-] '%s' is NOT a sqlite database file" % files+'\n')# Script Name : create_dir_if_not_there.py
# Author : Craig Richards
# Created : 09th January 2012
# Last Modified : 22nd October 2015
# Version : 1.0.1
# Modifications : Added exceptions
# : 1.0.1 Tidy up comments and syntax
#
# Description : Checks to see if a directory exists in the users home directory, if not then create it
import os # Import the OS module
MESSAGE = 'The directory already exists.'
TESTDIR = 'testdir'

try:
    # Resolve the current user's home directory and report it.
    home = os.path.expanduser("~")
    print(home)
    target = os.path.join(home, TESTDIR)   # build the full path once
    if os.path.exists(target):
        print(MESSAGE)
    else:
        # create the directory (and any missing parents) inside home
        os.makedirs(target)
except Exception as e:
    print(e)
# Script Name : move_files_over_x_days.py
# Author : Craig Richards
# Created : 8th December 2011
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This will move all the files from the src directory that are over 240 days old to the destination directory.
import shutil
import sys
import time
import os
src = 'u:\\test'  # Set the source directory
dst = 'c:\\test'  # Set the destination directory

now = time.time()  # Current time, for computing file age
# Move every plain file in `src` older than 240 days into `dst`.
for f in os.listdir(src):
    path = os.path.join(src, f)
    # Bug fix: stat/test the file *inside* src -- the original passed the
    # bare name, which was resolved against the current working directory
    # and failed unless the script happened to be run from inside src.
    if os.stat(path).st_mtime < now - 240 * 86400:  # 86400 s = 1 day
        if os.path.isfile(path):
            shutil.move(path, dst)
# Script Name : sqlite_table_check.py
# Author : Craig Richards
# Created : 07 June 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Checks the main SQLITE database to ensure all the tables should exist
import sqlite3
import sys
import os
# Locate the master database (in Dropbox) and the expected-table list
# (in the config directory) from environment variables.
dropbox = os.getenv("dropbox")
config = os.getenv("my_config")
dbfile = ("Databases\jarvis.db")
listfile = ("sqlite_master_table.lst")
master_db = os.path.join(dropbox, dbfile)
config_file = os.path.join(config, listfile)

# One connection for the whole check: the original reopened the database
# for every table in the list and never closed those extra handles.
conn = sqlite3.connect(master_db)
cursor = conn.cursor()

# Report whether the file is on the expected SQLite library version.
cursor.execute('SELECT SQLITE_VERSION()')
data = cursor.fetchone()
if str(data) == "(u'3.6.21',)":
    print ("\nCurrently " + master_db + " is on SQLite version: %s" % data + " - OK -\n")
else:
    print ("\nDB On different version than master version - !!!!! \n")

print ("\nChecking " + master_db + " against " + config_file + "\n")
# For each expected table name, confirm it exists in sqlite_master.
with open(config_file, 'r') as tablelist:
    for table in tablelist:
        cursor.execute("select count(*) from sqlite_master where name = ?", (table.strip(),))
        res = cursor.fetchone()
        if (res[0]):
            print ('[+] Table : ' + table.strip() + ' exists [+]')
        else:
            print ('[-] Table : ' + table.strip() + ' does not exist [-]')
conn.close()
# Script Name : puttylogs.py
# Author : Craig Richards
# Created : 13th October 2011
# Last Modified : 29th February 2012
# Version : 1.2
# Modifications : 1.1 - Added the variable zip_program so you can set it for the zip program on whichever OS, so to run on a different OS just change the locations of these two variables.
# : 1.2 - 29-02-12 - CR - Added shutil module and added one line to move the zipped up logs to the zipped_logs directory
# Description : Zip up all the logs in the given directory
import os # Load the Library Module
import shutil # Load the Library Module - 1.2
from time import strftime # Load just the strftime Module from Time
logsdir="c:\logs\puttylogs" # Set the Variable logsdir
zipdir="c:\logs\puttylogs\zipped_logs" # Set the Variable zipdir - 1.2
zip_program="zip.exe" # Set the Variable zip_program - 1.1
for files in os.listdir(logsdir): # Find all the files in the directory
if files.endswith(".log"): # Check to ensure the files in the directory end in .log
files1=files+"."+strftime("%Y-%m-%d")+".zip" # Create the Variable files1, this is the files in the directory, then we add a suffix with the date and the zip extension
os.chdir(logsdir) # Change directory to the logsdir
os.system(zip_program + " " + files1 +" "+ files) # Zip the logs into dated zip files for each server. - 1.1
shutil.move(files1, zipdir) # Move the zipped log files to the zipped_logs directory - 1.2
os.remove(files) # Remove the original log files
# Script Name : daily_checks.py
# Author : Craig Richards
# Created : 07th December 2011
# Last Modified : 01st May 2013
# Version : 1.5
#
# Modifications : 1.1 Removed the static lines for the putty sessions, it now reads a file, loops through and makes the connections.
# : 1.2 Added a variable filename=sys.argv[0] , as when you use __file__ it errors when creating an exe with py2exe.
# : 1.3 Changed the server_list.txt file name and moved the file to the config directory.
# : 1.4 Changed some settings due to getting a new pc
# : 1.5 Tidy comments and syntax
#
# Description : This simple script loads everything I need to carry out the daily checks for our systems.
import platform # Load Modules
import os
import subprocess
import sys
from time import strftime # Load just the strftime Module from Time
def clear_screen():
    """Clear the terminal, using the right command for the current OS."""
    if os.name == "posix":  # Unix/Linux/MacOS/BSD/etc
        os.system('clear')
    elif os.name in ("nt", "dos", "ce"):  # DOS/Windows
        os.system('CLS')


def print_docs():
    """Print the daily check sheet via Word's command-line automation."""
    print ("Printing Daily Check Sheets:")
    # Open Word, open the document, print it, then close Word again.
    subprocess.Popen(["C:\\Program Files (x86)\Microsoft Office\Office14\winword.exe", "P:\\\\Documentation\\Daily Docs\\Back office Daily Checks.doc", "/mFilePrintDefault", "/mFileExit"]).communicate()


def putty_sessions(conffilename):
    """Launch a PuTTY session for every server listed in `conffilename`.

    Bug fix: the original body read a module-global `conffilename`, but
    that name only ever existed as a local inside main(), so calling this
    function raised NameError.  The path is now passed in explicitly.
    """
    for server in open(conffilename):  # one server name per line
        subprocess.Popen(('putty -load ' + server))


def rdp_sessions():
    """Open the Euroclear RDP session."""
    print ("Loading RDP Sessions:")
    subprocess.Popen("mstsc eclr.rdp")


def euroclear_docs():
    """Open the Euroclear password document in Internet Explorer."""
    subprocess.Popen('"C:\\Program Files\\Internet Explorer\\iexplore.exe"' '"file://fs1\pub_b\Pub_Admin\Documentation\Settlements_Files\PWD\Eclr.doc"')


# Start of the Main Program
def main():
    """Run all the daily-check launchers in sequence."""
    filename = sys.argv[0]                           # this script's name
    confdir = os.getenv("my_config")                 # config directory - 1.3
    conffile = ('daily_checks_servers.conf')
    conffilename = os.path.join(confdir, conffile)   # full path to the server list
    clear_screen()
    # Welcome banner: user, script, timestamp, host and working directory.
    print ("Good Morning " + os.getenv('USERNAME') + ", " +
           filename, "ran at", strftime("%Y-%m-%d %H:%M:%S"), "on", platform.node(), "run from", os.getcwd())
    print_docs()
    putty_sessions(conffilename)
    rdp_sessions()
    euroclear_docs()

if __name__ == "__main__":
    main()
import serial
import sys
#A serial port-scanner for linux and windows platforms
#Author: Julio César Echeverri Marulanda
#e-mail: [email protected]
#blog: blogdelingeniero1.wordpress.com
#You should have installed the PySerial module to use this method.
#You can install pyserial with the following line: pip install pyserial
def ListAvailablePorts():
    #This function return a list containing the string names for Virtual Serial Ports
    #availables in the computer (this function works only for Windows & Linux Platforms but you can extend it)
    #if there isn't available ports, returns an empty List
    AvailablePorts = []
    platform = sys.platform   # NOTE: shadows the `platform` module name
    if platform == 'win32':
        # Probe COM0..COM254 by index; opening fails for absent ports.
        for i in range(255):
            try:
                ser = serial.Serial(i,9600)
            except serial.serialutil.SerialException:
                pass
            else:
                AvailablePorts.append(ser.portstr)
                ser.close()
    elif platform == 'linux':
        # Probe /dev/ttyUSB0../dev/ttyUSB254 the same way.
        for i in range(0,255):
            try:
                ser = serial.Serial('/dev/ttyUSB'+str(i))
            except serial.serialutil.SerialException:
                pass
            else:
                AvailablePorts.append('/dev/ttyUSB'+str(i))
                ser.close()
    else:
        print '''This method was developed only for linux and windows
the current platform isn't recognised'''
    return AvailablePorts
# EXAMPLE OF HOW IT WORKS
# if an Arduino is connected to the computer, the port will be show in the terminal
# print ListAvailablePorts()# Script Name : nslookup_check.py
# Author : Craig Richards
# Created : 5th January 2012
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This very simple script opens the file server_list.txt and the does an nslookup for each one to check the DNS entry
import subprocess # Import the subprocess module
for server in open('server_list.txt'): # Open the file and read each line
subprocess.Popen(('nslookup ' + server)) # Run the nslookup command for each server in the listimport urllib
import json
import sys
import os
# Fetch the first `limit` posts of a Facebook user via the Graph API.
# (Python 2; requires a valid OAuth access token pasted below.)
accessToken = 'TOKENVALUE' # YOUR ACCESS TOKEN GETS INSERTED HERE
userId = sys.argv[1] #USERID
limit=100
url='https://graph.facebook.com/'+userId+'/posts?access_token='+accessToken +'&limit='+str(limit) #FB Link
data = json.load(urllib.urlopen(url))
# Running row counter for the printed table.  NOTE: shadows builtin `id`.
id=0
print str(id)
for item in data['data']:
time=item['created_time'][11:19]
date=item['created_time'][5:10]
year=item['created_time'][0:4]
if 'shares' in item:
num_share=item['shares']['count']
else:
num_share=0
if 'likes' in item:
num_like=item['likes']['count']
else:
num_like=0
id+=1
print str(id)+'\t'+ time.encode('utf-8')+'\t'+date.encode('utf-8')+'\t'+year.encode('utf-8')+'\t'+ str(num_share)+'\t'+str(num_like)"""
Written by: Shreyas Daniel - github.com/shreydan
Description: an overview of 'timy' module - pip install timy
A great alternative to Pythons 'timeit' module and easier to use.
"""
import timy # begin by importing timy
# timy decorator: times the decorated function's execution and prints it;
# `ident` labels the report, `loops` is how many times the body is run.
@timy.timer(ident = 'listcomp', loops = 1) # timy decorator
def listcomprehension(): # the function whose execution time is calculated.
    # build the list of even numbers below 100000
    li = [x for x in range(0,100000,2)]
listcomprehension()
"""
this is how the above works:
timy decorator is created.
any function underneath the timy decorator is the function whose execution time
need to be calculated.
after the function is called. The execution time is printed.
in the timy decorator:
ident: an identity for each timy decorator, handy when using a lot of them
loops: no. of times this function has to be executed
"""
# this can also be accomplished by 'with' statement:
# tracking points in between code can be added
# to track specific instances in the program
def listcreator():
    # Same list as above, built with an explicit loop; timy.Timer() as a
    # context manager times the block, and track() records an intermediate
    # timing point (here: when i reaches 50000).
    with timy.Timer() as timer:
        li = []
        for i in range(0,100000,2):
            li.append(i)
            if i == 50000:
                timer.track('reached 50000')
listcreator()
"""
there are many more aspects to 'timy' module.
check it out here: https://github.com/ramonsaraiva/timy
"""'''Simple million word count program.
main idea is Python pairs words
with the number of times
that number appears in the triple quoted string.
Credit to William J. Turkel and Adam Crymble for the word
frequency code used below. I just merged the two ideas.
'''
wordstring = '''SCENE I. Yorkshire. Gaultree Forest.
Enter the ARCHBISHOP OF YORK, MOWBRAY, LORD HASTINGS, and others
ARCHBISHOP OF YORK
What is this forest call'd?
HASTINGS
'Tis Gaultree Forest, an't shall please your grace.
ARCHBISHOP OF YORK
Here stand, my lords; and send discoverers forth
To know the numbers of our enemies.
HASTINGS
We have sent forth already.
ARCHBISHOP OF YORK
'Tis well done.
My friends and brethren in these great affairs,
I must acquaint you that I have received
New-dated letters from Northumberland;
Their cold intent, tenor and substance, thus:
Here doth he wish his person, with such powers
As might hold sortance with his quality,
The which he could not levy; whereupon
He is retired, to ripe his growing fortunes,
To Scotland: and concludes in hearty prayers
That your attempts may overlive the hazard
And fearful melting of their opposite.
MOWBRAY
Thus do the hopes we have in him touch ground
And dash themselves to pieces.
Enter a Messenger
HASTINGS
Now, what news?
Messenger
West of this forest, scarcely off a mile,
In goodly form comes on the enemy;
And, by the ground they hide, I judge their number
Upon or near the rate of thirty thousand.
MOWBRAY
The just proportion that we gave them out
Let us sway on and face them in the field.
ARCHBISHOP OF YORK
What well-appointed leader fronts us here?
Enter WESTMORELAND
MOWBRAY
I think it is my Lord of Westmoreland.
WESTMORELAND
Health and fair greeting from our general,
The prince, Lord John and Duke of Lancaster.
ARCHBISHOP OF YORK
Say on, my Lord of Westmoreland, in peace:
What doth concern your coming?
WESTMORELAND
Then, my lord,
Unto your grace do I in chief address
The substance of my speech. If that rebellion
Came like itself, in base and abject routs,
Led on by bloody youth, guarded with rags,
And countenanced by boys and beggary,
I say, if damn'd commotion so appear'd,
In his true, native and most proper shape,
You, reverend father, and these noble lords
Had not been here, to dress the ugly form
Of base and bloody insurrection
With your fair honours. You, lord archbishop,
Whose see is by a civil peace maintained,
Whose beard the silver hand of peace hath touch'd,
Whose learning and good letters peace hath tutor'd,
Whose white investments figure innocence,
The dove and very blessed spirit of peace,
Wherefore do you so ill translate ourself
Out of the speech of peace that bears such grace,
Into the harsh and boisterous tongue of war;
Turning your books to graves, your ink to blood,
Your pens to lances and your tongue divine
To a trumpet and a point of war?
ARCHBISHOP OF YORK
Wherefore do I this? so the question stands.
Briefly to this end: we are all diseased,
And with our surfeiting and wanton hours
Have brought ourselves into a burning fever,
And we must bleed for it; of which disease
Our late king, Richard, being infected, died.
But, my most noble Lord of Westmoreland,
I take not on me here as a physician,
Nor do I as an enemy to peace
Troop in the throngs of military men;
But rather show awhile like fearful war,
To diet rank minds sick of happiness
And purge the obstructions which begin to stop
Our very veins of life. Hear me more plainly.
I have in equal balance justly weigh'd
What wrongs our arms may do, what wrongs we suffer,
And find our griefs heavier than our offences.
We see which way the stream of time doth run,
And are enforced from our most quiet there
By the rough torrent of occasion;
And have the summary of all our griefs,
When time shall serve, to show in articles;
Which long ere this we offer'd to the king,
And might by no suit gain our audience:
When we are wrong'd and would unfold our griefs,
We are denied access unto his person
Even by those men that most have done us wrong.
The dangers of the days but newly gone,
Whose memory is written on the earth
With yet appearing blood, and the examples
Of every minute's instance, present now,
Hath put us in these ill-beseeming arms,
Not to break peace or any branch of it,
But to establish here a peace indeed,
Concurring both in name and quality.
WESTMORELAND
When ever yet was your appeal denied?
Wherein have you been galled by the king?
What peer hath been suborn'd to grate on you,
That you should seal this lawless bloody book
Of forged rebellion with a seal divine
And consecrate commotion's bitter edge?
ARCHBISHOP OF YORK
My brother general, the commonwealth,
To brother born an household cruelty,
I make my quarrel in particular.
WESTMORELAND
There is no need of any such redress;
Or if there were, it not belongs to you.
MOWBRAY
Why not to him in part, and to us all
That feel the bruises of the days before,
And suffer the condition of these times
To lay a heavy and unequal hand
Upon our honours?
WESTMORELAND
O, my good Lord Mowbray,
Construe the times to their necessities,
And you shall say indeed, it is the time,
And not the king, that doth you injuries.
Yet for your part, it not appears to me
Either from the king or in the present time
That you should have an inch of any ground
To build a grief on: were you not restored
To all the Duke of Norfolk's signories,
Your noble and right well remember'd father's?
MOWBRAY
What thing, in honour, had my father lost,
That need to be revived and breathed in me?
The king that loved him, as the state stood then,
Was force perforce compell'd to banish him:
And then that Harry Bolingbroke and he,
Being mounted and both roused in their seats,
Their neighing coursers daring of the spur,
Their armed staves in charge, their beavers down,
Their eyes of fire sparking through sights of steel
And the loud trumpet blowing them together,
Then, then, when there was nothing could have stay'd
My father from the breast of Bolingbroke,
O when the king did throw his warder down,
His own life hung upon the staff he threw;
Then threw he down himself and all their lives
That by indictment and by dint of sword
Have since miscarried under Bolingbroke.
WESTMORELAND
You speak, Lord Mowbray, now you know not what.
The Earl of Hereford was reputed then
In England the most valiant gentlemen:
Who knows on whom fortune would then have smiled?
But if your father had been victor there,
He ne'er had borne it out of Coventry:
For all the country in a general voice
Cried hate upon him; and all their prayers and love
Were set on Hereford, whom they doted on
And bless'd and graced indeed, more than the king.
But this is mere digression from my purpose.
Here come I from our princely general
To know your griefs; to tell you from his grace
That he will give you audience; and wherein
It shall appear that your demands are just,
You shall enjoy them, every thing set off
That might so much as think you enemies.
MOWBRAY
But he hath forced us to compel this offer;
And it proceeds from policy, not love.
WESTMORELAND
Mowbray, you overween to take it so;
This offer comes from mercy, not from fear:
For, lo! within a ken our army lies,
Upon mine honour, all too confident
To give admittance to a thought of fear.
Our battle is more full of names than yours,
Our men more perfect in the use of arms,
Our armour all as strong, our cause the best;
Then reason will our heart should be as good
Say you not then our offer is compell'd.
MOWBRAY
Well, by my will we shall admit no parley.
WESTMORELAND
That argues but the shame of your offence:
A rotten case abides no handling.
HASTINGS
Hath the Prince John a full commission,
In very ample virtue of his father,
To hear and absolutely to determine
Of what conditions we shall stand upon?
WESTMORELAND
That is intended in the general's name:
I muse you make so slight a question.
ARCHBISHOP OF YORK
Then take, my Lord of Westmoreland, this schedule,
For this contains our general grievances:
Each several article herein redress'd,
All members of our cause, both here and hence,
That are insinew'd to this action,
Acquitted by a true substantial form
And present execution of our wills
To us and to our purposes confined,
We come within our awful banks again
And knit our powers to the arm of peace.
WESTMORELAND
This will I show the general. Please you, lords,
In sight of both our battles we may meet;
And either end in peace, which God so frame!
Or to the place of difference call the swords
Which must decide it.
ARCHBISHOP OF YORK
My lord, we will do so.
Exit WESTMORELAND
MOWBRAY
There is a thing within my bosom tells me
That no conditions of our peace can stand.
HASTINGS
Fear you not that: if we can make our peace
Upon such large terms and so absolute
As our conditions shall consist upon,
Our peace shall stand as firm as rocky mountains.
MOWBRAY
Yea, but our valuation shall be such
That every slight and false-derived cause,
Yea, every idle, nice and wanton reason
Shall to the king taste of this action;
That, were our royal faiths martyrs in love,
We shall be winnow'd with so rough a wind
That even our corn shall seem as light as chaff
And good from bad find no partition.
ARCHBISHOP OF YORK
No, no, my lord. Note this; the king is weary
Of dainty and such picking grievances:
For he hath found to end one doubt by death
Revives two greater in the heirs of life,
And therefore will he wipe his tables clean
And keep no tell-tale to his memory
That may repeat and history his loss
To new remembrance; for full well he knows
He cannot so precisely weed this land
As his misdoubts present occasion:
His foes are so enrooted with his friends
That, plucking to unfix an enemy,
He doth unfasten so and shake a friend:
So that this land, like an offensive wife
That hath enraged him on to offer strokes,
As he is striking, holds his infant up
And hangs resolved correction in the arm
That was uprear'd to execution.
HASTINGS
Besides, the king hath wasted all his rods
On late offenders, that he now doth lack
The very instruments of chastisement:
So that his power, like to a fangless lion,
May offer, but not hold.
ARCHBISHOP OF YORK
'Tis very true:
And therefore be assured, my good lord marshal,
If we do now make our atonement well,
Our peace will, like a broken limb united,
Grow stronger for the breaking.
MOWBRAY
Be it so.
Here is return'd my Lord of Westmoreland.
Re-enter WESTMORELAND
WESTMORELAND
The prince is here at hand: pleaseth your lordship
To meet his grace just distance 'tween our armies.
MOWBRAY
Your grace of York, in God's name then, set forward.
ARCHBISHOP OF YORK
Before, and greet his grace: my lord, we come.
Exeunt'''
wordlist = wordstring.split()
# Counter computes every word's frequency in one O(n) pass; the original
# `wordlist.count(w)` inside the comprehension re-scanned the whole list for
# every token (O(n^2)).  The resulting per-token frequency list is identical.
from collections import Counter
_counts = Counter(wordlist)
wordfreq = [_counts[w] for w in wordlist]
print("String\n {} \n".format(wordstring))
print("List\n {} \n".format(str(wordlist)))
print("Frequencies\n {} \n".format(str(wordfreq)))
print("Pairs\n {}".format(str(list(zip(wordlist, wordfreq)))))#!/usr/bin/python
import urllib2
import cookielib
from getpass import getpass
import sys
# Interactively collect way2sms credentials, a message and recipient numbers,
# log in by scraping the site, then POST the SMS once per recipient.
# Python 2 only (raw_input / urllib2 / cookielib).
# NOTE(review): credentials travel over plain HTTP, and the session id is
# scraped out of the cookie jar's repr - both are fragile and insecure.
username = raw_input('Enter mobile number:')
passwd = getpass()
message = raw_input('Enter Message:')
# Fill the list with Recipients
x=raw_input('Enter Mobile numbers seperated with comma:')
num=x.split(',')
# way2sms expects '+' instead of spaces in the message body
message = "+".join(message.split(' '))
# Logging into the SMS Site
url = 'http://site24.way2sms.com/Login1.action?'
data = 'username='+username+'&password='+passwd+'&Submit=Sign+in'
# For Cookies:
cj = cookielib.CookieJar()
opener = urllib2.build_opener(urllib2.HTTPCookieProcessor(cj))
# Adding Header detail (pretend to be a desktop browser):
opener.addheaders = [('User-Agent','Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/37.0.2062.120 Safari/537.36')]
try:
    usock = opener.open(url, data)
except IOError:
    print "Error while logging in."
    sys.exit(1)
# Extract the session token from the cookie jar's string form
jession_id = str(cj).split('~')[1].split(' ')[0]
send_sms_url = 'http://site24.way2sms.com/smstoss.action?'
opener.addheaders = [('Referer', 'http://site25.way2sms.com/sendSMS?Token='+jession_id)]
try:
    for number in num:
        send_sms_data = 'ssaction=ss&Token='+jession_id+'&mobile='+number+'&message='+message+'&msgLen=136'
        sms_sent_page = opener.open(send_sms_url,send_sms_data)
except IOError:
    print "Error while sending message"
    sys.exit(1)
print "SMS has been sent."# Script Name : get_info_remoute_srv.py
# Author : Pavel Sirotkin
# Created : 3th April 2016
# Last Modified : -
# Version : 1.0.0
# Modifications :
# Description : this will get info about remoute server on linux through ssh connection. Connect these servers must be through keys
# Run a fixed set of commands on each remote host over ssh (key-based auth
# assumed) and print the captured stdout, or stderr when a command produced
# no output.
import subprocess
HOSTS = ('proxy1', 'proxy')
COMMANDS = ('uname -a', 'uptime')
for host in HOSTS:
    result = []
    for command in COMMANDS:
        # shell=False with an argv list avoids shell interpretation on this side
        ssh = subprocess.Popen(["ssh", "%s" % host, command],
                               shell=False,
                               stdout=subprocess.PIPE,
                               stderr=subprocess.PIPE)
        result.append(ssh.stdout.readlines())
    print('--------------- ' + host + ' --------------- ')
    for res in result:
        if not res:
            # NOTE(review): `ssh` is the last Popen created, not necessarily
            # the one whose stdout was empty - verify before relying on this.
            print(ssh.stderr.readlines())
            break
        else:
print(res)# Script Name : portscanner.py
# Author : Craig Richards
# Created : 20 May 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Port Scanner, you just pass the host and the ports
import optparse # Import the module
from socket import * # Import the module
from threading import * # Import the module
screenLock = Semaphore(value=1) # Prevent other threads from preceeding
def connScan(tgtHost, tgtPort):  # Start of the function
    """Attempt a TCP connect to tgtHost:tgtPort and print open/closed (Python 2)."""
    try:
        connSkt = socket(AF_INET, SOCK_STREAM)  # Open a socket
        connSkt.connect((tgtHost, tgtPort))
        connSkt.send('')
        results=connSkt.recv(100)
        screenLock.acquire()  # Acquire the lock so thread output does not interleave
        print '[+] %d/tcp open'% tgtPort
        print '[+] ' + str(results)
    except:
        screenLock.acquire()
        print '[-] %d/tcp closed '% tgtPort
    finally:
        screenLock.release()
        # NOTE(review): if socket() itself raised, connSkt is unbound here and
        # close() would raise NameError - confirm intended.
        connSkt.close()
def portScan(tgtHost, tgtPorts):  # Start of the function
    """Resolve tgtHost and launch one connScan thread per requested port."""
    try:
        tgtIP = gethostbyname(tgtHost)  # Get the IP from the hostname
    except:
        print "[-] Cannot resolve '%s': Unknown host"%tgtHost
        return
    try:
        tgtName = gethostbyaddr(tgtIP)  # Get hostname from IP (reverse lookup)
        print '\n[+] Scan Results for: ' +tgtName[0]
    except:
        print '\n[+] Scan Results for: ' + tgtIP
    setdefaulttimeout(1)  # 1-second socket timeout for every connect attempt
    for tgtPort in tgtPorts:  # Scan host and ports, one thread per port
        t = Thread(target=connScan, args=(tgtHost, int(tgtPort)))
        t.start()
def main():
    """Parse -H/-p options and kick off the threaded port scan (Python 2)."""
    parser = optparse.OptionParser('usage %prog -H'+' <target host> -p <target port>')
    parser.add_option('-H', dest='tgtHost', type='string', help='specify target host')
    parser.add_option('-p', dest='tgtPort',type='string', help='specify target port[s] seperated by a comma')
    (options, args) = parser.parse_args()
    tgtHost = options.tgtHost
    tgtPorts = str(options.tgtPort).split(',')
    # NOTE(review): str(None) is 'None', so tgtPorts[0] == None can never be
    # true; a missing -p slips through as the literal port string 'None'.
    if (tgtHost == None) | (tgtPorts[0] == None):
        print parser.usage
        exit(0)
    portScan(tgtHost, tgtPorts)
main()# Script Name : work_connect.py
# Author : Craig Richards
# Created : 11th May 2012
# Last Modified : 31st October 2012
# Version : 1.1
# Modifications : 1.1 - CR - Added some extra code, to check an argument is passed to the script first of all, then check it's a valid input
# Description : This simple script loads everything I need to connect to work etc
import subprocess # Load the Library Module
import sys # Load the Library Module
import os # Load the Library Module
import time # Load the Library Module
# Flat script (Python 2, Windows-only paths): -c <password> connects the
# Checkpoint VPN, opens PuTTY CM, then RDPs to the work PC; -d disconnects.
dropbox = os.getenv("dropbox") # Set the variable dropbox, by getting the values of the environment setting for dropbox
rdpfile = ("remote\\workpc.rdp") # RDP profile stored inside the dropbox folder
conffilename=os.path.join(dropbox, rdpfile) # Full path to the RDP profile
remote = (r"c:\windows\system32\mstsc.exe ") # Set the variable remote with the path to mstsc
text = '''You need to pass an argument
-c Followed by login password to connect
-d to disconnect''' # Text to display if there is no argument passed or it's an invalid option - 1.2
if len(sys.argv) < 2: # Check there is at least one option passed to the script - 1.2
    print text # If not print the text above - 1.2
    sys.exit() # Exit the program - 1.2
if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv: # Help Menu if called
    print text # Print the text, stored in the text variable - 1.2
    sys.exit(0) # Exit the program
else:
    if sys.argv[1].lower().startswith('-c'): # If the first argument is -c then
        # NOTE(review): passing the password on the command line exposes it in
        # the process list - confirm this trade-off is acceptable.
        passwd = sys.argv[2] # Set the variable passwd as the second argument passed, in this case my login password
        subprocess.Popen((r"c:\Program Files\Checkpoint\Endpoint Connect\trac.exe connect -u username -p "+passwd))
        subprocess.Popen((r"c:\geektools\puttycm.exe"))
        time.sleep(15) # Sleep for 15 seconds, so the checkpoint software can connect before opening mstsc
        subprocess.Popen([remote, conffilename])
    elif sys.argv[1].lower().startswith('-d'): # If the first argument is -d then disconnect my checkpoint session.
        subprocess.Popen((r"c:\Program Files\Checkpoint\Endpoint Connect\trac.exe disconnect "))
    else:
print 'Unknown option - ' + text # If any other option is passed, then print Unknown option and the text from above - 1.2# Script Name : testlines.py
# Author : Craig Richards
# Created : 08th December 2011
# Last Modified :
# Version : 1.0
# Modifications : beven nyamande
# Description : This is a very simple script that opens up a file and writes whatever is set "
def write_to_file(filename, txt):
    """Write *txt* to *filename*, replacing any existing content.

    Mode 'w' truncates the file first; the with-statement guarantees the
    handle is closed even if the write raises. (The original bound the
    write() return value to an unused local - removed.)
    """
    with open(filename, 'w') as file_object:
        file_object.write(txt)
# Demo invocation when the script is run directly.
if __name__ == '__main__':
    write_to_file('test.txt', 'I am beven')
# Script Name : ping_subnet.py
# Author : Craig Richards
# Created : 12th January 2012
# Last Modified :
# Version : 1.0
# Modifications :
# Description : After supplying the first 3 octets it will scan the final range for available addresses
import os # Load the Library Module
import subprocess # Load the Library Module
import sys # Load the Library Module
# Flat script (Python 2): given the first three octets, ping .2-.254 of the
# /24 and record alive/dead per address in ping_<subnet>.log.
filename = sys.argv[0] # Sets a variable for the script name
if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv: # Help Menu if called
    print '''
You need to supply the first octets of the address Usage : ''' + filename + ''' 111.111.111 '''
    sys.exit(0)
else:
    if (len(sys.argv) < 2): # If no arguments are passed then display the help and instructions on how to run the script
        sys.exit (' You need to supply the first octets of the address Usage : ' + filename + ' 111.111.111')
subnet = sys.argv[1] # Set the variable subnet as the three octets you pass it
if os.name == "posix": # Check the os, if it's linux then
    myping = "ping -c 2 " # This is the ping command
elif os.name in ("nt", "dos", "ce"): # Check the os, if it's windows then
    myping = "ping -n 2 " # This is the ping command
f = open('ping_' + subnet + '.log', 'w') # Open a logfile
for ip in range(2,255): # Iterate the usable host addresses of the /24
    # shell=True because myping is a single command string
    ret = subprocess.call(myping + str(subnet) + "." + str(ip) ,
        shell=True, stdout=f, stderr=subprocess.STDOUT) # Run the command pinging the servers
    if ret == 0: # Exit code 0 means at least one reply
        f.write (subnet + "." + str(ip) + " is alive" + "\n") # Write out that you can receive a response
    else:
f.write (subnet + "." + str(ip) + " did not respond" + "\n") # Write out you can't reach the box# Script Name : ping_servers.py
# Author : Craig Richards
# Created : 9th May 2012
# Last Modified : 14th May 2012
# Version : 1.1
# Modifications : 1.1 - 14th May 2012 - CR Changed it to use the config directory to store the server files
# Description : This script will, depending on the arguments supplied will ping the servers associated with that application group.
import os # Load the Library Module
import subprocess # Load the Library Module
import sys # Load the Library Module
# Flat script (Python 2): ping every server in the <appgroup>_servers_<site>.txt
# config file and log alive/dead to ping_<appgroup>_<site>.log.
if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv: # Help Menu if called
    print '''
You need to supply the application group for the servers you want to ping, i.e.
dms
swaps
Followed by the site i.e.
155
bromley'''
    sys.exit(0)
else:
    # NOTE(review): `filename` is referenced here but only assigned further
    # down - this sys.exit message would raise NameError if reached. Verify.
    if (len(sys.argv) < 3): # If no arguments are passed,display the help/instructions on how to run the script
        sys.exit ('\nYou need to supply the app group. Usage : ' + filename + ' followed by the application group i.e. \n \t dms or \n \t swaps \n then the site i.e. \n \t 155 or \n \t bromley')
appgroup = sys.argv[1] # Set the variable appgroup as the first argument you supply
site = sys.argv[2] # Set the variable site as the second argument you supply
if os.name == "posix": # Check the os, if it's linux then
    myping = "ping -c 2 " # This is the ping command
elif os.name in ("nt", "dos", "ce"): # Check the os, if it's windows then
    myping = "ping -n 2 " # This is the ping command
if 'dms' in sys.argv: # If the argument passed is dms then
    appgroup = 'dms' # Set the variable appgroup to dms
elif 'swaps' in sys.argv: # Else if the argment passed is swaps then
    appgroup = 'swaps' # Set the variable appgroup to swaps
if '155' in sys.argv: # If the argument passed is 155 then
    site = '155' # Set the variable site to 155
elif 'bromley' in sys.argv: # Else if the argument passed is bromley
    site = 'bromley' # Set the variable site to bromley
filename = sys.argv[0] # Sets a variable for the script name
logdir = os.getenv("logs") # Set the variable logdir by getting the OS environment logs
logfile = 'ping_' + appgroup + '_' + site + '.log' # Set the variable logfile, using the arguments passed to create the logfile
logfilename = os.path.join(logdir, logfile) # Set the variable logfilename by joining logdir and logfile together
confdir = os.getenv("my_config") # Set the variable confdir from the OS environment variable - 1.2
conffile = (appgroup + '_servers_' + site + '.txt') # Set the variable conffile - 1.2
conffilename = os.path.join(confdir, conffile) # Set the variable conffilename by joining confdir and conffile together - 1.2
f = open(logfilename, "w") # Open a logfile to write out the output
for server in open(conffilename): # Open the config file and read each line - 1.2
    ret = subprocess.call(myping + server, shell=True, stdout=f, stderr=subprocess.STDOUT) # Run the ping command for each server in the list.
    if ret == 0: # Depending on the response
        f.write (server.strip() + " is alive" + "\n") # Write out that you can receive a response
    else:
        f.write (server.strip() + " did not respond" + "\n") # Write out you can't reach the box
print ("\n\tYou can see the results in the logfile : " + logfilename); # Show the location of the logfile# Script Name : backup_automater_services.py
# Author : Craig Richards
# Created : 24th October 2012
# Last Modified : 13th February 2016
# Version : 1.0.1
# Modifications : 1.0.1 - Tidy up the comments and syntax
# Description : This will go through and backup all my automator services workflows
import datetime # Load the library module
import os # Load the library module
import shutil # Load the library module
# Flat script: copy each Automator service listed in services.conf from
# ~/Library/Services into a dated backup folder under the Dropbox directory.
today = datetime.date.today() # Get Today's date
todaystr = today.isoformat() # ISO date (YYYY-MM-DD) used in the backup folder name
confdir = os.getenv("my_config") # Set the variable by getting the value from the OS setting
dropbox = os.getenv("dropbox") # Set the variable by getting the value from the OS setting
conffile = ('services.conf') # Set the variable as the name of the configuration file
conffilename = os.path.join(confdir, conffile) # Set the variable by combining the path and the file name
sourcedir = os.path.expanduser('~/Library/Services/') # Source directory of where the scripts are located
destdir = os.path.join(dropbox, "My_backups" + "/" +
                       "Automater_services" + todaystr + "/") # Combine several settings to create
# the destination backup directory
for file_name in open(conffilename): # Walk through the configuration file
    fname = file_name.strip() # Strip out the blank lines from the configuration file
    if fname: # For the lines that are not blank
        sourcefile = os.path.join(sourcedir, fname) # Get the name of the source files to backup
        destfile = os.path.join(destdir, fname) # Get the name of the destination file names
shutil.copytree(sourcefile, destfile) # Copy the directories# Script Name : powerup_checks.py
# Author : Craig Richards
# Created : 25th June 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Creates an output file by pulling all the servers for the given site from SQLITE database, then goes through the list pinging the servers to see if they are up on the network
import sys # Load the Library Module
import sqlite3 # Load the Library Module
import os # Load the Library Module
import subprocess # Load the Library Module
from time import strftime # Load just the strftime Module from Time
dropbox=os.getenv("dropbox") # Set the variable, by getting the value of the variable from the OS
config=os.getenv("my_config") # Set the variable, by getting the value of the variable from the OS
dbfile=("Databases/jarvis.db") # Set the variable to the database
master_db=os.path.join(dropbox, dbfile) # Create the variable by linking the path and the file
listfile=("startup_list.txt") # File that will hold the servers
serverfile=os.path.join(config,listfile) # Create the variable by linking the path and the file
outputfile=('server_startup_'+strftime("%Y-%m-%d-%H-%M")+'.log')
# Below is the help text
text = '''
You need to pass an argument, the options the script expects is
-site1 For the Servers relating to site1
-site2 For the Servers located in site2'''
def windows(): # This is the function to run if it detects the OS is windows.
    """Ping each server in serverfile (Windows ping syntax) and log results to outputfile."""
    f = open(outputfile, 'a') # Open the logfile
    for server in open(serverfile,'r'): # Read the list of servers from the list
        #ret = subprocess.call("ping -n 3 %s" % server.strip(), shell=True,stdout=open('NUL', 'w'),stderr=subprocess.STDOUT) # Ping the servers in turn
        ret = subprocess.call("ping -n 3 %s" % server.strip(),stdout=open('NUL', 'w'),stderr=subprocess.STDOUT) # Ping the servers in turn
        if ret == 0: # Exit code 0 means the host replied
            f.write ("%s: is alive" % server.strip().ljust(15) + "\n") # Write out to the logfile is the server is up
        else:
            f.write ("%s: did not respond" % server.strip().ljust(15) + "\n") # Write to the logfile if the server is down
def linux(): # This is the function to run if it detects the OS is nix.
    """Ping each server in serverfile (Unix ping syntax) and log results to a dated logfile."""
    f = open('server_startup_'+strftime("%Y-%m-%d")+'.log', 'a') # Open the logfile
    for server in open(serverfile,'r'): # Read the list of servers from the list
        ret = subprocess.call("ping -c 3 %s" % server, shell=True,stdout=open('/dev/null', 'w'),stderr=subprocess.STDOUT) # Ping the servers in turn
        if ret == 0: # Exit code 0 means the host replied
            f.write ("%s: is alive" % server.strip().ljust(15) + "\n") # Write out to the logfile is the server is up
        else:
            f.write ("%s: did not respond" % server.strip().ljust(15) + "\n") # Write to the logfile if the server is down
def get_servers(query): # Function to get the servers from the database
    """Fetch hostnames for the *query* location from SQLite and append them to serverfile."""
    conn = sqlite3.connect(master_db) # Connect to the database
    cursor = conn.cursor() # Create the cursor
    cursor.execute('select hostname from tp_servers where location =?',(query,)) # Parameterized SQL - safe from injection
    print ('\nDisplaying Servers for : ' + query + '\n')
    while True: # While there are results
        row = cursor.fetchone() # Return the results
        if row == None:
            break
        # NOTE(review): the file is reopened on every iteration; opening it
        # once before the loop would suffice.
        f = open(serverfile, 'a') # Open the serverfile
        f.write("%s\n" % str(row[0])) # Write the server out to the file
        print row[0] # Display the server to the screen
    f.close() # Close the file
def main(): # Main Function
    """Entry point: validate argv, pick the site, build the server list, then ping it."""
    if os.path.exists(serverfile): # Checks to see if there is an existing server file
        os.remove(serverfile) # If so remove it
    if len(sys.argv) < 2: # Check there is an argument being passed
        print text # Display the help text if there isn't one passed
        sys.exit() # Exit the script
    if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv: # If the ask for help
        print text # Display the help text if there isn't one passed
        sys.exit(0) # Exit the script after displaying help
    else:
        if sys.argv[1].lower().startswith('-site1'): # If the argument is site1
            query = 'site1' # Set the variable to have the value site
        elif sys.argv[1].lower().startswith('-site2'): # Else if the argument is site2
            query = 'site2' # Set the variable to have the value site2
        else:
            print '\n[-] Unknown option [-] ' + text # If an unknown option is passed, let the user know
            sys.exit(0)
    get_servers(query) # Call the get servers funtion, with the value from the argument
    if os.name == "posix": # If the OS is linux.
        linux() # Call the linux function
    elif os.name in ("nt", "dos", "ce"): # If the OS is Windows...
        windows() # Call the windows function
    print ('\n[+] Check the log file ' + outputfile + ' [+]\n') # Display the name of the log
if __name__ == '__main__':
main() # Call the main function# Script Name : password_cracker.py
# Author : Craig Richards
# Created : 20 May 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Old school password cracker using python
from sys import platform as _platform
# Check the current operating system to import the correct version of crypt
if _platform in ["linux", "linux2", "darwin"]: # darwin is _platform name for Mac OS X
import crypt # Import the module
elif _platform == "win32":
# Windows
try:
import fcrypt # Try importing the fcrypt module
except ImportError:
print 'Please install fcrypt if you are on Windows'
def testPass(cryptPass): # Start the function
    """Try every word in dictionary.txt against the crypt()-hashed password (Python 2)."""
    salt = cryptPass[0:2]  # classic DES crypt: the first two characters are the salt
    dictFile = open('dictionary.txt','r') # Open the dictionary file
    for word in dictFile.readlines(): # Scan through the file
        word = word.strip('\n')
        cryptWord = crypt.crypt(word, salt) # Hash the candidate with the same salt
        if (cryptWord == cryptPass):
            print "[+] Found Password: "+word+"\n"
            return
    print "[-] Password Not Found.\n"
    return
def main():
    """Read user:hash lines from passwords.txt and attempt to crack each one."""
    passFile = open('passwords.txt') # Open the password file
    for line in passFile.readlines(): # Read through the file
        if ":" in line:
            user = line.split(':')[0]
            cryptPass = line.split(':')[1].strip(' ') # Prepare the user name etc
            print "[*] Cracking Password For: " + user
            testPass(cryptPass) # Call it to crack the users password
if __name__ == "__main__":
main()# Script Name : check_file.py
# Author : Craig Richards
# Created : 20 May 2013
# Last Modified :
# Version : 1.0
# Modifications : with statement added to ensure correct file closure
# Description : Check a file exists and that we can read the file
from __future__ import print_function
import sys # Import the Modules
import os # Import the Modules
# Prints usage if not appropriate length of arguments are provided
def usage():
    """Print the command-line usage banner for check_file.py and terminate."""
    message = '[-] Usage: python check_file.py <filename1> [filename2] ... [filenameN]'
    print(message)
    exit(0)
# Readfile Functions which open the file that is passed to the script
def readfile(filename):
    """Print the full contents of *filename* to stdout."""
    # The context manager closes the handle even if read() raises.
    with open(filename, 'r') as handle:
        contents = handle.read()
    print(contents)
def main():
    """Validate each filename argument (exists, readable), then print its contents."""
    if len(sys.argv) >= 2: # Check the arguments passed to the script
        filenames = sys.argv[1:]
        # NOTE(review): removing items from `filenames` while iterating it
        # skips the element after each removal; iterating a copy
        # (filenames[:]) would be safer. Verify with multiple bad files.
        for filename in filenames: # Iterate for each filename passed in command line argument
            if not os.path.isfile(filename): # Check the File exists
                print ('[-] ' + filename + ' does not exist.')
                filenames.remove(filename) #remove non existing files from filenames list
                continue
            if not os.access(filename, os.R_OK): # Check you can read the file
                print ('[-] ' + filename + ' access denied')
                filenames.remove(filename) # remove non readable filenames
                continue
    else:
        usage() # Print usage if not all parameters passed/Checked
    # Read the content of each file
    for filename in filenames:
        print ('[+] Reading from : ' + filename) # Display Message and read the file contents
        readfile(filename)
if __name__ == '__main__':
    main()
# Script Name : factorial_perm_comp.py
# Author : Ebiwari Williams
# Created : 20th May 2017
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Find Factorial, Permutation and Combination of a Number
def factorial(n):
    """Return n! computed iteratively; factorial(0) == 1, as does any n < 1."""
    product = 1
    for i in range(2, n + 1):
        product *= i
    return product
def permutation(n, r):
    """Return nPr = n! / (n-r)! using exact integer arithmetic.

    The original computed factorial(n)/factorial(n-r) with true division,
    which returns a float and loses precision (or raises OverflowError)
    for large n; multiplying only the r highest factors keeps the result
    an exact int with the same numeric value.
    """
    result = 1
    for i in range(n - r + 1, n + 1):
        result *= i
    return result
def combination(n, r):
    """Return nCr using exact integer arithmetic.

    Multiplies and divides incrementally: after step i the accumulator is
    C(n-r+i, i), an exact binomial coefficient, so the floor division
    never truncates. Avoids the float result (and precision loss for
    large n) of the original permutation(n, r) / factorial(r).
    """
    result = 1
    for i in range(1, r + 1):
        result = result * (n - r + i) // i
    return result
def main():
    """Interactive menu: factorial, permutation or combination with validated integer input."""
    print('choose between operator 1,2,3')
    print('1) Factorial')
    print('2) Permutation')
    print('3) Combination')
    operation = input('\n')
    if(operation == '1'):
        print('Factorial Computation\n')
        while(True):  # loop until the user supplies a valid integer
            try:
                n = int(input('\n Enter Value for n '))
                print('Factorial of {} = {}'.format(n,factorial(n)))
                break
            except(ValueError):
                print('Invalid Value')
                continue
    elif(operation == '2'):
        print('Permutation Computation\n')
        while(True):
            try:
                n = int(input('\n Enter Value for n '))
                r = int(input('\n Enter Value for r '))
                print('Permutation of {}P{} = {}'.format(n,r,permutation(n,r)))
                break
            except(ValueError):
                print('Invalid Value')
                continue
    elif(operation == '3'):
        print('Combination Computation\n')
        while(True):
            try:
                n = int(input('\n Enter Value for n '))
                r = int(input('\n Enter Value for r '))
                print('Combination of {}C{} = {}'.format(n,r,combination(n,r)))
                break
            except(ValueError):
                print('Invalid Value')
                continue
if __name__ == '__main__':
main()# Script Name : nmap_scan.py
# Author : Craig Richards
# Created : 24th May 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This scans my scripts directory and gives a count of the different types of scripts, you need nmap installed to run this
import nmap # Import the module
import optparse # Import the module
def nmapScan(tgtHost, tgtPort): # Create the function, this fucntion does the scanning
    """Scan one host/port with python-nmap and print its reported state (Python 2)."""
    nmScan = nmap.PortScanner()
    nmScan.scan(tgtHost, tgtPort)
    state = nmScan[tgtHost]['tcp'][int(tgtPort)]['state']
    print "[*] " + tgtHost + " tcp/" + tgtPort + " " + state
def main(): # Main Program
    """Parse -H/-p and run nmapScan for each requested port."""
    parser = optparse.OptionParser('usage%prog ' + '-H <host> -p <port>') # Display options/help if required
    parser.add_option('-H', dest='tgtHost', type='string', help='specify host')
    parser.add_option('-p', dest='tgtPort', type='string', help='port')
    (options, args) = parser.parse_args()
    tgtHost = options.tgtHost
    tgtPorts = str(options.tgtPort).split(',')
    # NOTE(review): str(None) is 'None', so tgtPorts[0] == None never matches;
    # a missing -p reaches nmapScan as the literal string 'None'.
    if (tgtHost == None) | (tgtPorts[0] == None):
        print parser.usage
        exit(0)
    for tgtPort in tgtPorts: # Scan the hosts with the ports etc
        nmapScan(tgtHost, tgtPort)
if __name__ == '__main__':
    main()
# Script Created by Yash Ladha
# Requirements:
# youtube-dl
# aria2c
# 10 Feb 2017
import subprocess
import sys
video_link, threads = sys.argv[1], sys.argv[2]
subprocess.call([
"youtube-dl",
video_link,
"--external-downloader",
"aria2c",
"--external-downloader-args",
"-x"+threads
])import urllib2
# Quick connectivity probe (Python 2 / urllib2): try to fetch google.com
# with a 2-second timeout and report whether the network is reachable.
try:
    urllib2.urlopen("http://google.com", timeout=2)
    print ("working connection")
except urllib2.URLError:
print ("No internet connection")# Script Name : sqlite_check.py
# Author : Craig Richards
# Created : 20 May 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Runs checks to check my SQLITE database
import sqlite3 as lite
import sys
import os
# Flat script (Python 2): report the SQLite version, then list the tables in
# the jarvis.db database twice (fetchall vs row-at-a-time).
dropbox= os.getenv("dropbox")
# NOTE(review): backslash in the relative path makes this Windows-only; an
# os.path.join or raw string would be more portable.
dbfile=("Databases\jarvis.db")
master_db=os.path.join(dropbox, dbfile)
con = None
try:
    con = lite.connect(master_db)
    cur = con.cursor()
    cur.execute('SELECT SQLITE_VERSION()')
    data = cur.fetchone()
    print "SQLite version: %s" % data
except lite.Error, e:
    print "Error %s:" % e.args[0]
    sys.exit(1)
finally:
    if con:
        con.close()
# List tables by fetching all rows at once
con = lite.connect(master_db)
cur=con.cursor()
cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
rows = cur.fetchall()
for row in rows:
    print row
# Same query again, this time consuming one row at a time
con = lite.connect(master_db)
cur=con.cursor()
cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
while True:
    row = cur.fetchone()
    if row == None:
        break
print row[0]print list(x for x in range(2,100,2))import pygame, sys, time
from pygame.locals import *
# Pygame demo: draw a green triangle on a white 400x300 window and run the
# event loop until the window is closed.
pygame.init()
window = pygame.display.set_mode((400, 300), 0, 32)
pygame.display.set_caption("Shape")
WHITE = (255, 255, 255)
GREEN = ( 0, 255, 0)
window.fill(WHITE)
pygame.draw.polygon(window, GREEN, ((146, 0), (236, 277), (56, 277)))
# Game logic: blocks forever; QUIT event tears down pygame and exits
while True:
    for event in pygame.event.get():
        if event.type == QUIT:
            pygame.quit()
            sys.exit()
pygame.display.update()# Script Name : fileinfo.py
# Author : Not sure where I got this from
# Created : 28th November 2011
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Show file information for a given file
# get file information using os.stat()
# tested with Python24 vegsaeat 25sep2006
from __future__ import print_function
import os
import sys
import stat # index constants for os.stat()
import time
# Flat script (Python 2 raw_input): prompt for a file name (up to 5 tries -
# try_count halves 16,8,4,2,1 each attempt), then print os.stat() details.
try_count = 16
while try_count:
    file_name = raw_input("Enter a file name: ") # pick a file you have
    try_count >>= 1
    try:
        file_stats = os.stat(file_name)
        break
    except OSError:
        print ("\nNameError : [%s] No such file or directory\n", file_name)
if try_count == 0:
    print ("Trial limit exceded \nExiting program")
    sys.exit()
# create a dictionary to hold file info
file_info = {
    'fname': file_name,
    'fsize': file_stats[stat.ST_SIZE],
    'f_lm' : time.strftime("%d/%m/%Y %I:%M:%S %p",
                           time.localtime(file_stats[stat.ST_MTIME])),
    'f_la' : time.strftime("%d/%m/%Y %I:%M:%S %p",
                           time.localtime(file_stats[stat.ST_ATIME])),
    'f_ct' : time.strftime("%d/%m/%Y %I:%M:%S %p",
                           time.localtime(file_stats[stat.ST_CTIME]))
}
# NOTE(review): with print_function imported, these print two separate
# arguments - the %(...)s placeholders are never %-formatted against
# file_info. '... % file_info' was probably intended.
print ("\nfile name = %(fname)s", file_info)
print ("file size = %(fsize)s bytes", file_info)
print ("last modified = %(f_lm)s", file_info)
print ("last accessed = %(f_la)s", file_info)
print ("creation time = %(f_ct)s\n", file_info)
if stat.S_ISDIR(file_stats[stat.ST_MODE]):
    print ("This a directory")
else:
    print ("This is not a directory\n")
print ("A closer look at the os.stat(%s) tuple:" % file_name)
print (file_stats)
print ("\nThe above tuple has the following sequence:")
print ("""st_mode (protection bits), st_ino (inode number),
st_dev (device), st_nlink (number of hard links),
st_uid (user ID of owner), st_gid (group ID of owner),
st_size (file size, bytes), st_atime (last access time, seconds since epoch),
st_mtime (last modification time), st_ctime (time of creation, Windows)"""
)# Script Name : dir_test.py
# Author : Craig Richards
# Created : 29th November 2011
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Tests to see if the directory testdir exists, if not it will create the directory for you
from __future__ import print_function
import os # Import the OS Module
import sys
def main():
    """Prompt for a directory name; report if it exists, otherwise create it."""
    # Pick the right input builtin for Python 2 vs Python 3
    if sys.version_info.major >= 3:
        input_func = input
    else:
        input_func = raw_input
    CheckDir = input_func("Enter the name of the directory to check : ")
    print()
    if os.path.exists(CheckDir): # Checks if the dir exists
        print("The directory exists")
    else:
        print("No directory found for " + CheckDir) # Output if no directory
        print()
        os.makedirs(CheckDir) # Creates a new dir (including parents) for the given name
        print("Directory created for " + CheckDir)
if __name__ == '__main__':
main()import sys
from PIL import ImageDraw, ImageFont, Image
def input_par():
    """Prompt for overlay text, font size and RGB colour; return (text, size, [r, g, b])."""
    print('Enter the text to insert in image: ')
    text = str(input())
    print('Enter the desired size: ')
    size = int(input())
    print('Enter the color for the text(r, g, b): ')
    # Expect three space-separated integers, e.g. "255 0 0"
    color_value = [int(i) for i in input().split(' ')]
    return text, size, color_value
    pass
def main():
    """Open <argv[1]>.jpg, whiten a 100x100 corner, draw user-supplied text, save under a new name."""
    path_to_image = sys.argv[1]
    image_file = Image.open(path_to_image + '.jpg')
    image_file = image_file.convert("RGBA")
    pixdata = image_file.load()
    print(image_file.size)
    text, size, color_value = input_par()
    # NOTE(review): hard-coded Windows font path - fails on other platforms.
    font = ImageFont.truetype("C:\\Windows\\Fonts\\Arial.ttf", size=size)
    # Clean the background noise, if color != white, then set to black.
    # change with your color
    for y in range(100):
        for x in range(100):
            pixdata[x, y] = (255, 255, 255, 255)
    image_file.show()
    # Drawing text on the picture
    draw = ImageDraw.Draw(image_file)
    # NOTE(review): fixed y-offset 2300 assumes a very tall image - verify.
    draw.text((0, 2300), text, (color_value[0], color_value[1], color_value[2]), font=font)
    draw = ImageDraw.Draw(image_file)
    print('Enter the file name: ')
    file_name = str(input())
    image_file.save(file_name + ".jpg")
    pass
if __name__ == '__main__':
main()def get_user_input(start,end):
testcase = False
while testcase == False:
try:
userInput = int(input("Enter Your choice: "))
if userInput > 6 or userInput < 1:
print("Please try again.")
testcase = False
else:
return userInput
except ValueError:
print("Please try again.")
# Demo: prompt until the user enters a valid menu choice, then echo it.
x = get_user_input(1,6)
print(x)
###Asks user to enter something, ie. a number option from a menu.
###While type != interger, and not in the given range,
###Program gives error message and asks for new input."""
Created on Thu Apr 27 16:28:36 2017
@author: barnabysandeford
"""
# Currently works for Safari, but just change to whichever
# browser you're using.
# Flat script (Python 2 raw_input + Selenium/Safari): open a URL and refresh
# it `count` times, waiting one video-length between refreshes.
import time
#Changed the method of opening the browser.
#Selenium allows for the page to be refreshed.
from selenium import webdriver
#adding ability to change number of repeats
count = int(raw_input("Number of times to be repeated: "))
#Same as before
x = raw_input("Enter the URL (no https): ")
print( "Length of video:")
minutes = int(raw_input("Minutes "))
seconds = int(raw_input("Seconds "))
#Calculating the refreshrate (seconds between refreshes) from the user input
refreshrate = minutes * 60 + seconds
#Selecting Safari as the browser
driver = webdriver.Safari()
driver.get("http://"+x)
for i in range(count):
    #Sleep for one full video length before refreshing.
    time.sleep(refreshrate)
driver.refresh()# batch_file_rename.py
# Created: 6th August 2012
'''
This will batch rename a group of files in a given directory,
once you pass the current and new extensions
'''
__author__ = 'Craig Richards'
__version__ = '1.0'
import os
import sys
import argparse
def batch_rename(work_dir, old_ext, new_ext):
    '''
    Rename every file in *work_dir* whose extension is exactly *old_ext*
    (including the leading dot, e.g. ".txt") so that it carries *new_ext*
    instead. Files with other extensions are left untouched.
    '''
    for filename in os.listdir(work_dir):
        # splitext yields (root, ext); rebuilding the name as root + new_ext
        # replaces the original character-list slicing with the same result.
        root, file_ext = os.path.splitext(filename)
        if old_ext == file_ext:
            newfile = root + new_ext
            # Rename in place within work_dir
            os.rename(
                os.path.join(work_dir, filename),
                os.path.join(work_dir, newfile)
            )
def get_parser():
    """Build the argument parser for the batch-rename command line.

    Each positional argument is declared with nargs=1, so parsed values
    arrive as one-element lists.
    """
    parser = argparse.ArgumentParser(description='change extension of files in a working directory')
    positionals = (
        ('work_dir', 'WORK_DIR', 'the directory where to change extension'),
        ('old_ext', 'OLD_EXT', 'old extension'),
        ('new_ext', 'NEW_EXT', 'new extension'),
    )
    for arg_name, meta, help_text in positionals:
        parser.add_argument(arg_name, metavar=meta, type=str, nargs=1, help=help_text)
    return parser
def main():
    '''
    This will be called if the script is directly invoked.
    '''
    # adding command line argument
    parser = get_parser()
    args = vars(parser.parse_args())
    # Each value is a one-element list because the parser uses nargs=1.
    # Set the variable work_dir with the first argument passed
    work_dir = args['work_dir'][0]
    # Set the variable old_ext with the second argument passed
    old_ext = args['old_ext'][0]
    # Set the variable new_ext with the third argument passed
    new_ext = args['new_ext'][0]
    batch_rename(work_dir, old_ext, new_ext)
if __name__ == '__main__':
    main()
# Script Name : python_sms.py
# Author : Craig Richards
# Created : 16th February 2017
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This will text all the students Karate Club
# NOTE: Python 2 only (urllib2 and print statements). Reads (name, number)
# rows from a SQLite database and posts one SMS per student via txtlocal,
# appending every attempt to an output log file.
import urllib # URL functions
import urllib2 # URL functions
import os
from time import strftime
import sqlite3
import sys
# Both locations come from environment variables -- assumes 'dropbox' and
# 'scripts' are set; os.path.join below crashes otherwise. TODO confirm.
dropbox= os.getenv("dropbox")
scripts=os.getenv("scripts")
dbfile=("database/maindatabase.db")
master_db=os.path.join(dropbox, dbfile)
# Append-mode log of every send attempt.
f=open(scripts+'/output/student.txt','a')
tdate=strftime("%d-%m")
conn = sqlite3.connect(master_db)
cursor = conn.cursor()
# NOTE(review): 'table' looks like a placeholder table name -- verify the
# real table before running this against the production database.
loc_stmt='SELECT name, number from table'
cursor.execute(loc_stmt)
# Loop over every student row until the cursor is exhausted.
while True:
    row = cursor.fetchone()
    if row == None:
        break
    sname=row[0]
    snumber=row[1]
    message = (sname + ' There will be NO training tonight on the ' + tdate + ' Sorry for the late notice, I have sent a mail as well, just trying to reach everyone, please do not reply to this message as this is automated')
    # Account credentials for the txtlocal API -- placeholders to fill in.
    username = 'YOUR_USERNAME'
    sender = 'WHO_IS_SENDING_THE_MAIL'
    hash = 'YOUR HASH YOU GET FROM YOUR ACCOUNT'
    numbers = (snumber)
    # Set flag to 1 to simulate sending, this saves your credits while you are testing your code. # To send real message set this flag to 0
    test_flag = 0
    #-----------------------------------
    # No need to edit anything below this line
    #-----------------------------------
    # POST payload expected by the txtlocal sendsmspost endpoint.
    values = {'test' : test_flag,
              'uname' : username,
              'hash' : hash,
              'message' : message,
              'from' : sender,
              'selectednums' : numbers }
    url = 'http://www.txtlocal.com/sendsmspost.php'
    postdata = urllib.urlencode(values)
    req = urllib2.Request(url, postdata)
    print ('Attempting to send SMS to '+ sname + ' at ' + snumber + ' on ' + tdate)
    f.write ('Attempting to send SMS to '+ sname + ' at ' + snumber + ' on ' + tdate + '\n')
    try:
        response = urllib2.urlopen(req)
        response_url = response.geturl()
        # If we were not redirected, treat the post as a success.
        if response_url==url:
            print 'SMS sent!'
    except urllib2.URLError, e:
        print 'Send failed!'
print e.reasonfrom sys import argv
# Python 2 exercise script: print a whole file, rewind it, then print the
# first three lines with line numbers. Usage: script.py <input_file>
script, input_file = argv
def print_all(f):
    # Dump the entire remaining contents of the open file object.
    print f.read()
# seek(n) to read a file's content from byte-n
def rewind(f):
    # Move the read position back to the start of the file.
    f.seek(0)
def print_a_line(line_count, f):
    # Print the given line number followed by the next line of the file.
    print line_count, f.readline()
current_file = open(input_file)
print "First let's print the whole file:\n"
print_all(current_file)
print "Now let's rewind, kind of like a tape."
rewind(current_file)
print "Let's print three lines:"
current_line = 1
print_a_line(current_line, current_file)
current_line = current_line + 1
print_a_line(current_line, current_file)
current_line = current_line + 1
print_a_line(current_line, current_file)
current_file.close()# Script Name : recyclebin.py
# Author : Craig Richards
# Created : 07th June 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Scans the recyclebin and displays the files in there, originally got this script from the Violent Python book
import os # Load the Module
import optparse # Load the Module
from _winreg import * # Load the Module
def sid2user(sid): # Start of the function to gather the user
    """Translate a registry SID into the matching Windows user name.

    Looks the SID up under the ProfileList registry key and returns the
    last path component of ProfileImagePath; if the key is missing or
    unreadable the SID itself is returned unchanged.
    """
    try:
        key = OpenKey(HKEY_LOCAL_MACHINE, "SOFTWARE\Microsoft\Windows NT\CurrentVersion\ProfileList" + '\\' + sid)
        # Renamed from 'type' so the builtin is not shadowed.
        (value, value_type) = QueryValueEx(key, 'ProfileImagePath')
        # ProfileImagePath is e.g. C:\Users\craig -> keep the final component.
        user = value.split('\\')[-1]
        return user
    except OSError:
        # Registry lookups fail with OSError/WindowsError; catch only those
        # instead of the original bare except that swallowed everything.
        return sid
def returnDir(): # Start of the function to search through the recyclebin
    """Return the first recycle-bin directory that exists, or None."""
    candidates = ('c:\\Recycler\\', 'C:\\Recycled\\', 'C:\\$RECYCLE.BIN\\')
    for candidate in candidates:
        if os.path.isdir(candidate):
            return candidate
    return None
def findRecycled(recycleDir): # Start of the function, list the contents of the recyclebin
    """List every file in the recycle bin, grouped by owning user (Python 2)."""
    # Each top-level entry in the recycle bin is a per-user SID directory.
    dirList = os.listdir(recycleDir)
    for sid in dirList:
        files = os.listdir(recycleDir + sid)
        # Resolve the SID directory name to a readable user name.
        user = sid2user(sid)
        print '\n[*] Listing Files for User: ' + str(user)
        for file in files:
            print '[+] Found File: ' + str(file)
def main():
    """Locate the recycle-bin directory and list its contents per user."""
    # NOTE(review): returnDir() returns None when no recycle-bin directory
    # exists; findRecycled would then fail on os.listdir(None) -- confirm
    # this script is only run on Windows hosts.
    recycleDir = returnDir()
    findRecycled(recycleDir)
if __name__ == '__main__':
main()# Script Name : powerdown_startup.py
# Author : Craig Richards
# Created : 05th January 2012
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This goes through the server list and pings the machine, if it's up it will load the putty session, if its not it will notify you.
import os # Load the Library Module
import subprocess # Load the Library Module
from time import strftime # Load just the strftime Module from Time
def windows(): # This is the function to run if it detects the OS is windows.
    """Ping every server in startup_list.txt; open a PuTTY session for each
    one that answers and append the outcome to a dated log file.

    Fixes a resource leak: the log file, the server list and the per-ping
    NUL handle were all opened but never closed in the original.
    """
    logname = 'server_startup_'+strftime("%Y-%m-%d")+'.log'
    with open(logname, 'a') as f, open('startup_list.txt','r') as servers, \
            open('NUL', 'w') as devnull:
        for server in servers: # Read the list of servers from the list
            # Ping each server in turn, discarding the ping output.
            ret = subprocess.call("ping -n 3 %s" % server, shell=True,
                                  stdout=devnull, stderr=subprocess.STDOUT)
            if ret == 0: # If you get a response.
                f.write ("%s: is alive, loading PuTTY session" % server.strip() + "\n")
                subprocess.Popen(('putty -load '+server)) # Load the putty session
            else:
                f.write ("%s : did not respond" % server.strip() + "\n")
else:
f.write ("%s : did not respond" % server.strip() + "\n") # Write to the logfile if the server is down
def linux():
    """Ping every server in startup_list.txt; ssh to each one that answers
    and append the outcome to a dated log file.

    Fixes a resource leak: the log file, the server list and the per-ping
    /dev/null handle were all opened but never closed in the original.
    """
    logname = 'server_startup_'+strftime("%Y-%m-%d")+'.log'
    with open(logname, 'a') as f, open('startup_list.txt') as servers, \
            open('/dev/null', 'w') as devnull:
        for server in servers: # Read the list of servers from the list
            # Ping each server in turn, discarding the ping output.
            ret = subprocess.call("ping -c 3 %s" % server, shell=True,
                                  stdout=devnull, stderr=subprocess.STDOUT)
            if ret == 0: # If you get a response.
                f.write ("%s: is alive" % server.strip() + "\n") # Print a message
                subprocess.Popen(['ssh', server.strip()])
            else:
                f.write ("%s: did not respond" % server.strip() + "\n")
# End of the functions
# Start of the Main Program
if os.name == "posix": # If the OS is linux...
linux() # Call the linux function
elif os.name in ("nt", "dos", "ce"): # If the OS is Windows...
windows() # Call the windows functionfrom __future__ import print_function
import SimpleHTTPServer
import SocketServer
PORT = 8000 #This will serve at port 8080
Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
httpd = SocketServer.TCPServer(("", PORT), Handler)
print("serving at port", PORT)
httpd.serve_forever()#Author: OMKAR PATHAK
#This script helps to build a simple stopwatch application using Python's time module.
import time
# Simple interactive stopwatch: ENTER starts the clock, Ctrl+C stops it
# and prints the elapsed time.
print('Press ENTER to begin, Press Ctrl + C to stop')
while True:
    try:
        input() #For ENTER
        starttime = time.time()
        print('Started')
    except KeyboardInterrupt:
        # NOTE(review): pressing Ctrl+C before the first ENTER reaches this
        # branch with 'starttime' unbound, raising NameError -- confirm
        # whether that should be guarded.
        print('Stopped')
        endtime = time.time()
        print('Total Time:', round(endtime - starttime, 2),'secs')
break# Script Name : folder_size.py
# Author : Craig Richards
# Created : 19th July 2012
# Last Modified : 22 February 2016
# Version : 1.0.1
# Modifications : Modified the Printing method and added a few comments
# Description : This will scan the current directory and all subdirectories and display the size.
import os
import sys # Load the library module and the sys module for the argument vector'''
# Usage: folder_size.py <directory> -- totals the size of every file under
# the given directory tree.
try:
    directory = sys.argv[1] # Set the variable directory to be the argument supplied by user.
except IndexError:
    sys.exit("Must provide an argument.")
dir_size = 0 # Set the size to 0
# Multipliers converting a byte count into each display unit.
fsizedicr = {'Bytes': 1,
             'Kilobytes': float(1) / 1024,
             'Megabytes': float(1) / (1024 * 1024),
             'Gigabytes': float(1) / (1024 * 1024 * 1024)}
for (path, dirs, files) in os.walk(directory): # Walk through all the directories. For each iteration, os.walk returns the folders, subfolders and files in the dir.
    for file in files: # Get all the files
        filename = os.path.join(path, file)
        # NOTE(review): os.path.getsize can raise OSError on broken
        # symlinks -- confirm whether that case matters here.
        dir_size += os.path.getsize(filename) # Add the size of each file in the root dir to get the total size.
fsizeList = [str(round(fsizedicr[key] * dir_size, 2)) + " " + key for key in fsizedicr] # List of units
if dir_size == 0: print ("File Empty") # Sanity check to eliminate corner-case of empty file.
else:
for units in sorted(fsizeList)[::-1]: # Reverse sort list of units so smallest magnitude units print first.
print ("Folder Size: " + units)"""
Written by: Shreyas Daniel - github.com/shreydan
Description: Uses Pythons eval() function
as a way to implement calculator
Functions available:
+ : addition
- : subtraction
* : multiplication
/ : division
% : percentage
sine: sin(rad)
cosine: cos(rad)
tangent: tan(rad)
square root: sqrt(n)
pi: 3.141......
"""
import math
def main():
    """Read one expression from the user, translate calculator syntax into
    Python/math syntax and print the evaluated result (Python 2)."""
    def calc(k):
        # Prefix each supported math function/constant with 'math.' so the
        # eval below can resolve it.
        functions = ['sin', 'cos', 'tan', 'sqrt', 'pi']
        for i in functions:
            if i in k.lower():
                withmath = 'math.' + i
                k = k.replace(i, withmath)
        try:
            # SECURITY NOTE: eval on raw user input executes arbitrary
            # Python -- acceptable only for a local toy calculator.
            k = eval(k)
        except ZeroDivisionError:
            print ("Can't divide by 0")
            exit()
        except NameError:
            print ("Invalid input")
            exit()
        return k
    print ("\nScientific Calculator\nEg: pi * sin(90) - sqrt(81)")
    k = raw_input("\nWhat is ") # Using input() function is causing NameError. Changing it to raw_input() fixes this.
    # Normalise calculator notation: strip spaces/'='/'?', map '^' to '**'
    # and '%' to '/100'.
    k = k.replace(' ', '')
    k = k.replace('^', '**')
    k = k.replace('=', '')
    k = k.replace('?', '')
    k = k.replace('%', '/100')
    print ("\n" + str(calc(k)))
if __name__ == "__main__":
main()# Script Name : env_check.py
# Author : Craig Richards
# Created : 14th May 2012
# Last Modified : 14 February 2016
# Version : 1.0.1
# Modifications : 1.0.1 - Tidy up comments and syntax
# Description : This script will check to see if all of the environment variables I require are set
import os
# Python 2 script: reads variable names from env_check.conf and reports
# whether each is currently set in the environment.
confdir = os.getenv("my_config") # Set the variable confdir from the OS environment variable
# NOTE(review): if 'my_config' is unset, confdir is None and os.path.join
# below raises -- confirm the variable is always defined.
conffile = 'env_check.conf' # Set the variable conffile
conffilename = os.path.join(confdir, conffile) # Set the variable conffilename by joining confdir and conffile together
for env_check in open(conffilename): # Open the config file and read all the settings
    env_check = env_check.strip() # Set the variable as itsself, but strip the extra text out
    print '[{}]'.format(env_check) # Format the Output to be in Square Brackets
    newenv = os.getenv(env_check) # Set the variable newenv to get the settings from the OS what is currently set for the settings out the configfile
    if newenv is None: # If it doesn't exist
        print env_check, 'is not set' # Print it is not set
    else: # Else if it does exist
        print 'Current Setting for {}={}\n'.format(env_check, newenv) # Print out the details
print 'Current Setting for {}={}\n'.format(env_check, newenv) # Print out the details# Script Name : script_count.py
# Author : Craig Richards
# Created : 27th February 2012
# Last Modified : 20th July 2012
# Version : 1.3
# Modifications : 1.1 - 28-02-2012 - CR - Changed inside github and development functions, so instead of if os.name = "posix" do this else do this etc
# : I used os.path.join, so it condensed 4 lines down to 1
# : 1.2 - 10-05-2012 - CR - Added a line to include PHP scripts.
# : 1.3 - 20-07-2012 - CR - Added the line to include Batch scripts
# Description : This scans my scripts directory and gives a count of the different types of scripts
import os # Load the library module
path = os.getenv("scripts") # Set the variable path by getting the value from the OS environment variable scripts
dropbox = os.getenv("dropbox") # Set the variable dropbox by getting the value from the OS environment variable dropbox
def clear_screen():
    """Clear the terminal using the platform's native shell command."""
    # Map each platform family to its clear-screen command.
    commands = {"posix": 'clear', "nt": 'CLS', "dos": 'CLS', "ce": 'CLS'}
    command = commands.get(os.name)
    if command is not None:
        os.system(command)
def count_files(path, extensions):
    """Walk *path* recursively and count files whose name ends with
    *extensions* (a single suffix string or a tuple of suffixes)."""
    # str.endswith returns a bool, which sum() counts as 0/1.
    return sum(
        name.endswith(extensions)
        for _root, _dirs, names in os.walk(path)
        for name in names
    )
def github(): # Start of the function just to count the files in the github directory
    """Report how many files are waiting in the dropbox 'github' directory
    (Python 2; reads the module-level 'dropbox' variable)."""
    github_dir = os.path.join(dropbox, 'github') # Joins the paths to get the github directory - 1.1
    # Sum the file counts from every directory level under github_dir.
    github_count = sum((len(f) for _, _, f in os.walk(github_dir))) # Get a count for all the files in the directory
    if github_count > 5: # If the number of files is greater then 5, then print the following messages
        print '\nYou have too many in here, start uploading !!!!!'
        print 'You have: ' + str(github_count) + ' waiting to be uploaded to github!!'
    elif github_count == 0: # Unless the count is 0, then print the following messages
        print '\nGithub directory is all Clear'
    else: # If it is any other number then print the following message, showing the number outstanding.
        print '\nYou have: ' + str(github_count) + ' waiting to be uploaded to github!!'
def development(): # Start of the function just to count the files in the development directory
    """Report how many scripts are waiting in the 'development' directory
    (Python 2; reads the module-level 'path' variable)."""
    dev_dir = os.path.join(path, 'development') # Joins the paths to get the development directory - 1.1
    # Sum the file counts from every directory level under dev_dir.
    dev_count = sum((len(f) for _, _, f in os.walk(dev_dir))) # Get a count for all the files in the directory
    if dev_count > 10: # If the number of files is greater then 10, then print the following messages
        print '\nYou have too many in here, finish them or delete them !!!!!'
        print 'You have: ' + str(dev_count) + ' waiting to be finished!!'
    elif dev_count ==0: # Unless the count is 0, then print the following messages
        print '\nDevelopment directory is all clear'
    else:
        print '\nYou have: ' + str(dev_count) + ' waiting to be finished!!' # If it is any other number then print the following message, showing the number outstanding.
clear_screen() # Call the function to clear the screen
print '\nYou have the following :\n'
print 'AutoIT:\t' + str(count_files(path, '.au3')) # Run the count_files function to count the files with the extension we pass
print 'Batch:\t' + str(count_files(path, ('.bat', ',cmd'))) # 1.3
print 'Perl:\t' + str(count_files(path, '.pl'))
print 'PHP:\t' + str(count_files(path, '.php')) # 1.2
print 'Python:\t' + str(count_files(path, '.py'))
print 'Shell:\t' + str(count_files(path, ('.ksh', '.sh', '.bash')))
print 'SQL:\t' + str(count_files(path, '.sql'))
github() # Call the github function
development() # Call the development function#Made on May 27th, 2017
#Made by SlimxShadyx
#Dice Rolling Simulator
import random
#These variables are used for user input and while loop checking.
correct_word = False
dice_checker = False
dicer = False
roller_loop = False
#Checking the user input to start the program.
while correct_word == False:
user_input_raw = raw_input("\r\nWelcome to the Dice Rolling Simulator! We currently support 6, 8, and 12 sided die! Type [start] to begin!\r\n?>")
#Converting the user input to lower case.
user_input = (user_input_raw.lower())
if user_input == 'start':
correct_word = True
else:
print "Please type [start] to begin!\r\n"
#Main program loop. Exiting this, exits the program.
while roller_loop == False:
#Second While loop to ask the user for the certain die they want.
while dice_checker == False:
user_dice_chooser = raw_input("\r\nGreat! Begin by choosing a die! [6] [8] [10]\r\n?>")
user_dice_chooser = int(user_dice_chooser)
if user_dice_chooser == 6:
dice_checker = True
elif user_dice_chooser == 8:
dice_checker = True
elif user_dice_chooser == 12:
dice_checker = True
else:
print "\r\nPlease choose one of the applicable options!\r\n"
#Another inner while loop. This one does the actual rolling, as well as allowing the user to re-roll without restarting the program.
while dicer == False:
if user_dice_chooser == 6:
dice_6 = random.randint(1,6)
print "\r\nYou rolled a " + str(dice_6) + "!\r\n"
dicer = True
user_exit_checker_raw = raw_input("\r\nIf you want to roll another die, type [roll]. To exit, type [exit].\r\n?>")
user_exit_checker = (user_exit_checker_raw.lower())
if user_exit_checker == 'roll':
dicer = False
elif user_exit_checker == 'exit':
roller_loop = True
elif user_dice_chooser == 8:
dice_8 = random.randint(1,8)
print "\r\nYou rolled a " + str(dice_8) + "!"
dicer = True
user_exit_checker_raw = raw_input("\r\nIf you want to roll another die, type [roll]. To exit, type [exit].\r\n?>")
user_exit_checker = (user_exit_checker_raw.lower())
if user_exit_checker == 'roll':
dicer = False
elif user_exit_checker == 'exit':
roller_loop = True
elif user_dice_chooser == 12:
dice_12 = random.randint(1,12)
print "\r\nYou rolled a " + str(dice_12) + "!"
dicer = True
user_exit_checker_raw = raw_input("\r\nIf you want to roll another die, type [roll]. To exit, type [exit].\r\n?>")
user_exit_checker = (user_exit_checker_raw.lower())
if user_exit_checker == 'roll':
dicer = False
elif user_exit_checker == 'exit':
roller_loop = True
print "Thanks for using the Dice Rolling Simulator! Have a great day! =)"
# Script Name : script_listing.py
# Author : Craig Richards
# Created : 15th February 2012
# Last Modified : 29th May 2012
# Version : 1.2
# Modifications : 1.1 - 28-02-2012 - CR - Added the variable to get the logs directory, I then joined the output so the file goes to the logs directory
# : 1.2 - 29-05/2012 - CR - Changed the line so it doesn't ask for a directory, it now uses the environment varaible scripts
# Description : This will list all the files in the given directory, it will also go through all the subdirectories as well
import os # Load the library module
# Writes the full path of every file under the 'scripts' directory tree
# into a logfile in the 'logs' directory.
logdir = os.getenv("logs") # Set the variable logdir by getting the value from the OS environment variable logs
logfile = 'script_list.log' # Set the variable logfile
path = os.getenv("scripts") # Set the varable path by getting the value from the OS environment variable scripts - 1.2
#path = (raw_input("Enter dir: ")) # Ask the user for the directory to scan
logfilename = os.path.join(logdir, logfile) # Set the variable logfilename by joining logdir and logfile together
# NOTE(review): the log handle is never closed explicitly; the data is
# flushed only at interpreter exit -- consider a with-statement.
log = open(logfilename, 'w') # Set the variable log and open the logfile for writing
for dirpath, dirname, filenames in os.walk(path): # Go through the directories and the subdirectories
    for filename in filenames: # Get all the filenames
        log.write(os.path.join(dirpath, filename)+'\n') # Write the full path out to the logfile
print ("\nYour logfile " , logfilename, "has been created") # Small message informing the user the file has been created# Requirements:
# pip install numpy
# sudo apt-get install python-openCV
import numpy as np
import cv2
# Open the default camera (device 0) and show a live grayscale preview
# until the user presses 'q'.
cap = cv2.VideoCapture(0)
while(True):
    # Capture frame-by-frame
    ret, frame = cap.read()
    # Our operations on the frame come here
    # NOTE(review): 'ret' is not checked; if the camera fails, 'frame' is
    # None and cvtColor raises -- confirm a guard is not needed here.
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    # Display the resulting frame
    cv2.imshow('frame',gray)
    # waitKey returns the pressed key code (masked to 8 bits); 'q' quits.
    if cv2.waitKey(1) & 0xFF == ord('q'):
        break
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows() | [
"[email protected]"
] | |
6c907dbb07bf1ef1ce4fdced214be391d28b2ca8 | eda9187adfd53c03f55207ad05d09d2d118baa4f | /python3_base/python_class_method.py | 4bb8dc5866ce970db1d955879a443c4426f31c41 | [] | no_license | HuiZhaozh/python_tutorials | 168761c9d21ad127a604512d7c6c6b38b4faa3c7 | bde4245741081656875bcba2e4e4fcb6b711a3d9 | refs/heads/master | 2023-07-07T20:36:20.137647 | 2020-04-24T07:18:25 | 2020-04-24T07:18:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,137 | py | # -*- coding:utf-8 -*-
# /usr/bin/python
'''
@Author: Yan Errol
@Email:[email protected]
@Date: 2019-05-20 17:22
@File:class_method.py
@Describe:静态方法
'''
from math import sqrt
class Triangle(object):
    """A triangle defined by its three side lengths."""

    def __init__(self, a, b, c):
        """Store the three side lengths (assumed to form a valid triangle)."""
        self._a = a
        self._b = b
        self._c = c

    @staticmethod
    def is_valid(a, b, c):
        """Check the triangle inequality for every pairing of the sides."""
        pairings = ((a, b, c), (b, c, a), (a, c, b))
        return all(x + y > z for x, y, z in pairings)

    def perimeter(self):
        """Sum of the three side lengths."""
        return self._a + self._b + self._c

    def area(self):
        """Area of the triangle via Heron's formula."""
        s = self.perimeter() / 2
        product = s * (s - self._a) * (s - self._b) * (s - self._c)
        return sqrt(product)
def main():
    """Build a 3-4-5 triangle and print its perimeter and area."""
    a, b, c = 3, 4, 5
    # Static methods and class methods are both invoked by sending a
    # message to the class itself.
    if Triangle.is_valid(a, b, c):
        t = Triangle(a, b, c)
        print(t.perimeter())
        # An instance method can also be invoked through the class, as long
        # as the receiving object is passed in as the first argument:
        # print(Triangle.perimeter(t))
        print(t.area())
        # print(Triangle.area(t))
    else:
        print('无法构成三角形.')
if __name__ == "__main__":
main() | [
"[email protected]"
] | |
b6db4caaa1b3f409974642244d2e45e58bea2b74 | d94d22ab20a11ab6c473d8aba4038c97f75806c4 | /python小栗子/t57.py | c34766c279355c2457734c45293ae01587fccbaf | [] | no_license | songszw/python | a1d0419b995df13aee5997d24c09dccab91ac9e0 | 5135a3efcdcc2a37f7321ae19271c9315f48bcaf | refs/heads/master | 2020-02-26T16:24:28.411919 | 2017-04-26T09:08:41 | 2017-04-26T09:08:41 | 71,195,225 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,392 | py | print('|--- 欢迎进入宋氏通讯录 ---|')
print('|--- 1:查询联系人资料 ---|')
print('|--- 2:插入新的联系人 ---|')
print('|--- 3:删除已有联系人 ---|')
print('|--- 4:退出通讯录程序 ---|')
contacts = dict()
while 1:
num = int(input('please enter the number you want to do: '))
if num==1:
name = input('please enter the name you waht to check: ')
if name in contacts:
print(name+':'+contacts[name])
else:
print('sorry,the man who wasn\'t here')
if num==2:
name = input('please enter your name:')
if name in contacts:
print('sorry, the man is already in the contacts -->>',end=' ')
print(name+":"+contacts[name])
if input('do you want to change the name ?[YES/NO]:')=='YES':
contacts[name]=input('please enter the phone number:')
else:
contacts[name] =input('please enter the phone number:')
else:
contacts[name]=input('please enter the phone number:')
if num==3:
name = input('please enter the name who you want to delete:')
if name in contacts:
contacts.pop(name)
else:
print('sorry, the man who wasn\'t here')
if num==4:
break
print('|--- 感谢使用通讯录程序 ---|')
| [
"[email protected]"
] | |
100efc94fe97678e4d050167f49f1a7ead921301 | 4dd87e032760f77855727a36c02ab670d9ca8ff8 | /file-uploader/appengine_config.py | 21778b35475168809f3e6b5f3adbd15fdeca3de6 | [] | no_license | ramuta/gaedevs-examples | 09f14ae4fd444d898483f303aead08fd93839935 | f8a077bad98a2f40739d59321aad67dad6407c54 | refs/heads/master | 2020-04-08T09:56:13.315017 | 2018-11-27T17:01:33 | 2018-11-27T17:01:33 | 159,246,286 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 60 | py | from google.appengine.ext import vendor
vendor.add('libs')
| [
"[email protected]"
] | |
509b043958ecf41f0f38c5f2c9c22a9f3826f34b | 074279d6b63c9cd25c1353624710ed1fb422b30f | /j2ee模式-前端控制器模式.py | 53e5ab14544ddcb6c8ff3233c365a535f8179b88 | [] | no_license | qqizai/python36patterns | edd106f496a1aa7eda5d9070a6d82f142a808621 | 39052df13db9a54cb8322d87edbc2dbe6ff06a07 | refs/heads/master | 2022-11-12T14:01:32.341802 | 2020-06-29T02:23:46 | 2020-06-29T02:23:46 | 281,970,231 | 0 | 1 | null | 2020-07-23T14:13:31 | 2020-07-23T14:13:30 | null | UTF-8 | Python | false | false | 2,562 | py | # -*- coding: utf-8 -*-
# @Author : ydf
# @Time : 2019/10/9 0009 15:32
"""
前端控制器模式(Front Controller Pattern)是用来提供一个集中的请求处理机制,所有的请求都将由一个单一的处理程序处理。该处理程序可以做认证/授权/记录日志,或者跟踪请求,然后把请求传给相应的处理程序。以下是这种设计模式的实体。
前端控制器(Front Controller) - 处理应用程序所有类型请求的单个处理程序,应用程序可以是基于 web 的应用程序,也可以是基于桌面的应用程序。
调度器(Dispatcher) - 前端控制器可能使用一个调度器对象来调度请求到相应的具体处理程序。
视图(View) - 视图是为请求而创建的对象。
从java转化来,命名规范懒得改了。
"""
from abc import ABCMeta, abstractmethod
from monkey_print2 import print
class HomeView:
    """View object rendered for any non-student request."""
    def show(self):
        """Render the Home page (prints a placeholder message)."""
        print('显示 Home 页面')
class StudentView:
    """View object rendered for STUDENT requests."""
    def show(self):
        """Render the Student page (prints a placeholder message)."""
        print('显示 Student 页面')
class Dispatcher:
    """Routes an incoming request name to the view that renders it."""

    def __init__(self):
        """Create the two views this dispatcher can route to."""
        self.student_view = StudentView()
        self.home_view = HomeView()

    def dispatch(self, request: str):
        """Show the student view for 'STUDENT' (case-insensitive) and the
        home view for anything else."""
        target = (self.student_view
                  if request.upper() == 'STUDENT'
                  else self.home_view)
        target.show()
class FrontController:
    """Single entry point: logs the request, authenticates the user, then
    delegates dispatch to the internal Dispatcher."""

    def __init__(self):
        """Own the dispatcher used for every request."""
        self.__dispatcher = Dispatcher()

    def is_authentic_user(self):
        """Authenticate the caller (always succeeds in this demo)."""
        print("用户鉴权成功")
        return True

    def track_request(self, request):
        """Log which page was requested."""
        print("被请求页面: " + request)

    def dispatch_request(self, request):
        """Track the request, then dispatch it only if authentication passes."""
        self.track_request(request)
        if not self.is_authentic_user():
            return
        self.__dispatcher.dispatch(request)
if __name__ == '__main__':
front_controller = FrontController()
front_controller.dispatch_request("HOME")
front_controller.dispatch_request("STUDENT")
"""
"D:/coding2/python36patterns/j2ee模式-前端控制器模式.py:49" 16:54:03 被请求页面: HOME
"D:/coding2/python36patterns/j2ee模式-前端控制器模式.py:45" 16:54:03 用户鉴权成功
"D:/coding2/python36patterns/j2ee模式-前端控制器模式.py:20" 16:54:03 显示 Home 页面
"D:/coding2/python36patterns/j2ee模式-前端控制器模式.py:49" 16:54:03 被请求页面: STUDENT
"D:/coding2/python36patterns/j2ee模式-前端控制器模式.py:45" 16:54:03 用户鉴权成功
"D:/coding2/python36patterns/j2ee模式-前端控制器模式.py:25" 16:54:03 显示 Student 页面
"""
| [
"[email protected]"
] | |
d81ca3d2f986e4c7af9c64432aef10385266e46b | 8cc30a27835e205a3476783106ca1605a6a85c48 | /amy/workshops/migrations/0066_merge.py | ef0455252831a8b0cfaf3e51343f4267be07ade1 | [
"MIT"
] | permissive | gaybro8777/amy | d968edc78bbd3f63f3353450334721628dbbc0f4 | 3cf99aed58a0f0acf83d2645a30d8408208ccea9 | refs/heads/develop | 2023-03-07T22:08:28.692700 | 2021-02-23T18:06:06 | 2021-02-23T18:06:06 | 341,930,505 | 0 | 0 | MIT | 2021-02-24T17:22:08 | 2021-02-24T14:40:43 | null | UTF-8 | Python | false | false | 304 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('workshops', '0064_dc_instructor_badge'),
('workshops', '0065_multiple_memberships'),
]
operations = [
]
| [
"[email protected]"
] | |
f9023a1da5efba1124204d1d8a76778d9c15c29d | d18f74c0683fa412833fc7b68f737226dcf0f5df | /setup.py | 70e68c224d914b125b04f0aa01c8f602ff39fa0f | [] | no_license | phymhan/gomoku | ab22b19c2f59ea63aba3015f2b3ce53bf1b440e5 | e48e215fe24236ccccfa5edb0709a22bed4624b9 | refs/heads/master | 2021-08-28T23:06:50.620937 | 2017-12-13T07:49:45 | 2017-12-13T07:49:45 | 114,087,358 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,361 | py | import cx_Freeze
executables = [cx_Freeze.Executable("fivechessonline21.py")]
cx_Freeze.setup(
name = "Five-Chess",
options = {"build_exe": {"packages": ["pygame"],
"include_files": ["./sources/pics/board.png",
"./sources/pics/cp_k_29.png",
"./sources/pics/cp_w_29.png",
"./sources/pics/panel.png",
"./sources/pics/catsmall.png",
"./sources/music/BackgroundMusic.ogg",
"./sources/music/Snd_click.ogg"]}},
executables = executables
)
##cx_Freeze.setup(
## name = "Five-Chess",
## options = {"build_exe": {"packages": ["pygame"],
## "include_files": ["board.png",
## "cp_k_29.png",
## "cp_w_29.png",
## "panel.png",
## "catsmall.png",
## "BackgroundMusic.ogg",
## "Snd_click.ogg"]}},
## executables = executables
## )
| [
"[email protected]"
] | |
54fdb90defd17f79a01648b7ef2c33d62cb46d3b | c4b8e1e09dedbccd37ca008ecaaca4438610bbaf | /cpmpy/sliding_sum.py | 01b48796c45a4a65f16a0e45cf1d93b7cf1cdcf1 | [
"MIT"
] | permissive | hakank/hakank | 4806598b98cb36dd51b24b0ab688f52dadfe9626 | c337aaf8187f15dcdc4d5b09cd2ed0dbdb2e72c2 | refs/heads/master | 2023-08-15T00:21:52.750270 | 2023-07-27T16:21:40 | 2023-07-27T16:21:40 | 11,933,517 | 336 | 97 | MIT | 2023-07-27T11:19:42 | 2013-08-06T20:12:10 | JavaScript | UTF-8 | Python | false | false | 1,355 | py | """
Sliding sum constraint in cpmpy.
From Global Constraint Catalogue
http://www.emn.fr/x-info/sdemasse/gccat/Csliding_sum.html
'''
sliding_sum(LOW,UP,SEQ,VARIABLES)
Purpose
Constrains all sequences of SEQ consecutive variables of the collection VARIABLES so that the
sum of the variables belongs to interval [LOW, UP].
Example
(
3, 7, 4,<1, 4, 2, 0, 0, 3, 4>
)
The example considers all sliding sequences of SEQ=4 consecutive values of <1, 4, 2, 0,0,3, 4>
collection and constraints the sum to be in [LOW,UP] = [3, 7]. The sliding_sum constraint holds
since the sum associated with the corresponding subsequences 1 4 2 0, 4 2 0 0, 2 0 0 3, and
0 0 3 4 are respectively 7, 6, 5 and 7.
'''
This cpmpy model was written by Hakan Kjellerstrand ([email protected])
See also my cpmpy page: http://hakank.org/cpmpy/
"""
from cpmpy import *
import cpmpy.solvers
import numpy as np
from cpmpy_hakank import *
def sliding_sum_test(n=7,seq=4,low=3,up=7):
    """Enumerate all length-n arrays (domain 0..4) whose every window of
    'seq' consecutive variables sums to a value in [low, up].

    n   -- number of decision variables
    seq -- sliding-window length
    low -- lower bound on each window sum
    up  -- upper bound on each window sum
    """
    x = intvar(0,4,shape=n,name="x")
    # low = intvar(0,10,name="low")
    # up = intvar(0,10,name="up")
    model = Model(sliding_sum(low,up,seq,x))
    # Use the OR-tools backend; these parameter settings follow the
    # author's other cpmpy models for faster enumeration.
    ss = CPM_ortools(model)
    ss.ort_solver.parameters.linearization_level = 0
    ss.ort_solver.parameters.cp_model_probing_level = 0
    # Print every solution and report how many were found.
    num_solutions = ss.solveAll(display=x)
    print("num_solutions:", num_solutions)
sliding_sum_test()
| [
"[email protected]"
] | |
c73953e48af931827b1da62eb65e647668cfd10d | 5e45f1d1d9f58aa1456777b0d75334d6efd43840 | /challenges/hackerrank/algorithms/dynamic/max_subarray/python/max_subarray.py | 7a4bd11931255b0775dd3b4438356b773e6b06e1 | [] | no_license | missingdays/nerdy.school | 604953dc9b3c38a0f71793f066ce2707aa980dae | 051673e0ebc54bc2f7e96a6477697d1d528dc45c | refs/heads/master | 2021-01-17T08:10:19.558851 | 2016-06-06T15:29:01 | 2016-06-06T15:29:01 | 59,897,184 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,187 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2015 missingdays <missingdays@missingdays>
#
# Distributed under terms of the MIT license.
"""
Maximum subarray problem solution
"""
def max_subarray(array):
    """Return a maximum-sum contiguous subarray of *array* (Kadane's scan).

    Fixes the original behaviour for inputs with no positive-sum window
    (e.g. all-negative arrays): it used to return the first element
    regardless of value; it now returns the single largest element, which
    is the true maximum contiguous subarray in that case.
    """
    if not array:
        # Preserve the original result for empty input.
        return []
    curr_sum = 0            # sum of the window currently being grown
    curr_index = 0          # start index of that window
    best_sum = 0            # best positive window sum seen so far
    best_start_index = 0
    best_ending_index = 0
    for i in range(len(array)):
        val = curr_sum + array[i]
        if val > 0:
            if curr_sum == 0:
                # A new candidate window starts here.
                curr_index = i
            curr_sum = val
        else:
            # Running sum went non-positive: drop the current window.
            curr_sum = 0
        if curr_sum > best_sum:
            best_sum = curr_sum
            best_start_index = curr_index
            best_ending_index = i
    if best_sum == 0:
        # No window ever had a positive sum: the best contiguous subarray
        # is the single largest (least negative) element.
        best = max(array)
        idx = array.index(best)
        return array[idx:idx + 1]
    return array[best_start_index:best_ending_index + 1]
def sum_positive(array):
    """Best non-contiguous sum: the total of all positive elements, or the
    single largest element when nothing positive exists."""
    total = sum(elem for elem in array if elem > 0)
    if total == 0:
        # No positive contribution: scan for the maximum element (kept as
        # an explicit loop so empty input fails the same way as before).
        best = array[0]
        for elem in array[1:]:
            if elem > best:
                best = elem
        return best
    return total
# Driver loop: first line is the number of test cases; each case is an
# (unused) length line followed by the space-separated array. Prints the
# best contiguous sum then the best non-contiguous sum per case.
for i in range(int(input())):
    n = input()
    inp = list(map(int, input().split()))
    print(sum(max_subarray(inp)), end=" ")
    print(sum_positive(inp))
| [
"[email protected]"
] | |
878bdb34e11bc1501de1b6b0dfd2018dfcf3aa4a | 63191be7f688591af69263972d68423d76fb5f74 | /geekshop/adminapp/views/categories.py | b42b65dd0c4181601279fe52418c7aef8c7ee7a5 | [] | no_license | IliaNiyazof/Django | 5eee4c226a1f06178fdbb5626444fff406886de7 | 052cb4f3f142c4224454ebac9fb27f63de9cbc47 | refs/heads/master | 2021-07-19T05:52:56.620026 | 2020-06-05T16:17:47 | 2020-06-05T16:17:47 | 166,776,966 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,332 | py | from django.contrib.auth.decorators import user_passes_test
from django.shortcuts import render, HttpResponseRedirect, reverse, get_object_or_404
from mainapp.models import ProductCategory
from adminapp.models.categories import ProductCategoryEditForm
@user_passes_test(lambda u: u.is_superuser)
def categories(request):
    """Superuser-only admin view: render the list of all product categories."""
    title = 'админка/категории'
    categories_list = ProductCategory.objects.all()
    content = {
        'title': title,
        'objects': categories_list
    }
    return render(request, 'adminapp/categories/read.html', content)
@user_passes_test(lambda u: u.is_superuser)
def category_create(request):
    """Superuser-only admin view: create a product category.

    GET renders an empty form; a valid POST saves the category and
    redirects back to the category list.
    """
    title = 'категории/создание'
    if request.method == 'POST':
        category_form = ProductCategoryEditForm(request.POST, request.FILES)
        if category_form.is_valid():
            category_form.save()
            return HttpResponseRedirect(reverse('admin:categories'))
    else:
        category_form = ProductCategoryEditForm()
    content = {'title': title, 'update_form': category_form}
    return render(request, 'adminapp/categories/update.html', content)
@user_passes_test(lambda u: u.is_superuser)
def category_update(request, pk):
    """Superuser-only admin view: edit the category with primary key *pk*.

    GET renders the form pre-filled from the instance; a valid POST saves
    it and redirects back to this same edit page.
    """
    title = 'категории/редактирование'
    edit_category = get_object_or_404(ProductCategory, pk=pk)
    if request.method == 'POST':
        edit_form = ProductCategoryEditForm(request.POST, request.FILES, instance=edit_category)
        if edit_form.is_valid():
            edit_form.save()
            return HttpResponseRedirect(reverse('admin:category_update', args=[edit_category.pk]))
    else:
        edit_form = ProductCategoryEditForm(instance=edit_category)
    content = {'title': title, 'update_form': edit_form}
    return render(request, 'adminapp/categories/update.html', content)
@user_passes_test(lambda u: u.is_superuser)
def category_delete(request, pk):
    """Superuser-only admin view: soft-delete the category with pk *pk*.

    GET renders a confirmation page; POST marks the category inactive
    (is_active=False) rather than deleting the row, then redirects to the
    category list.
    """
    title = 'категории/удаление'
    category = get_object_or_404(ProductCategory, pk=pk)
    if request.method == 'POST':
        category.is_active = False
        category.save()
        return HttpResponseRedirect(reverse('admin:categories'))
    content = {'title': title, 'category_to_delete': category}
    return render(request, 'adminapp/categories/delete.html', content)
| [
"[email protected]"
] | |
91feb4ba59077254f4d6a9ed644bd5d3663554bf | 60bb3ef7dd8a147761918f1fa021918d6898202d | /.history/main_20210623141903.py | eee65d95a67254bb6a9d60f7a4da85315eba9d6c | [] | no_license | sanjayMamidipaka/bbtautomationscripts | c1d29d9ea5c0fa982a53895b10db50b66e475c8f | 12c35a3459cb0ead71ae616b2efad21c555cf8a0 | refs/heads/master | 2023-06-06T06:25:09.152797 | 2021-06-23T18:21:34 | 2021-06-23T18:21:34 | 362,836,875 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,370 | py | import pandas as pd
import numpy as np
import csv
import xlsxwriter
from timeprofile import timeprofile
from masterdata import masterdata
from virtualReference import virtualReference
from keyfigures import keyfigures
from planninglevels import planninglevels
from attributesaskf import attributesaskf
from attributes import attributes
# Steps to add a new report sheet:
# create class
# add import
# change writer

# Change this line to the desired output path of the generated workbook.
output_path = '/Users/sanjaymamidipaka/Downloads/Energizer_Latest_output1.xlsx'
writer = pd.ExcelWriter(output_path, engine='xlsxwriter')

paths = []

# FIXME(review): all six prompts below say 'Enter the masterdata path: '
# (copy/paste); moreover none of the collected values is used afterwards —
# the hard-coded `energizerlatestpaths` list is what actually drives the run.
masterdatapath = str(input('Enter the masterdata path: '))
plevelspath = str(input('Enter the masterdata path: '))
keyfigurespath = str(input('Enter the masterdata path: '))
attributesaskfpath = str(input('Enter the masterdata path: '))
timeprofilepath = str(input('Enter the masterdata path: '))
paa = str(input('Enter the masterdata path: '))

# Each list below bundles the six exported CSVs of one IBP snapshot, in the
# fixed order: masterdata, planning levels, key figures, attributes-as-KF,
# time profile, planning-area attributes.
energizerpaths = ['/Users/sanjaymamidipaka/Downloads/Energizer Production_ZSAPIBP1C_2021-05-04_21_01/ZSAPIBP1C_MASTERDATATYPES_2021-05-04_21_01.csv',
                  '/Users/sanjaymamidipaka/Downloads/Energizer Production_ZSAPIBP1C_2021-05-04_21_01/ZSAPIBP1C_PLEVELS_ATTRS_2021-05-04_21_01.csv',
                  '/Users/sanjaymamidipaka/Downloads/Energizer Production_ZSAPIBP1C_2021-05-04_21_01/ZSAPIBP1C_KEYFIGURES_2021-05-04_21_01.csv',
                  '/Users/sanjaymamidipaka/Downloads/Energizer Production_ZSAPIBP1C_2021-05-04_21_01/ZSAPIBP1C_ATTRIBUTES_AS_KEYFIGURE_2021-05-04_21_01.csv',
                  '/Users/sanjaymamidipaka/Downloads/Energizer Production_ZSAPIBP1C_2021-05-04_21_01/ZSAPIBP1C_TIMEPROFILE_2021-05-04_21_01.csv',
                  '/Users/sanjaymamidipaka/Downloads/Energizer Production_ZSAPIBP1C_2021-05-04_21_01/ZSAPIBP1C_PA_ATTRIBUTES_2021-05-04_21_01.csv']

natureswaypaths = ['/Users/sanjaymamidipaka/Downloads/natureswaydata/CFGSNA2_MASTERDATATYPES_2020-12-02_15_09.csv',
                   '/Users/sanjaymamidipaka/Downloads/natureswaydata/CFGSNA2_PLEVELS_ATTRS_2020-12-02_15_09.csv',
                   '/Users/sanjaymamidipaka/Downloads/natureswaydata/CFGSNA2_KEYFIGURES_2020-12-02_15_09.csv',
                   '/Users/sanjaymamidipaka/Downloads/natureswaydata/CFGSNA2_ATTRIBUTES_AS_KEYFIGURE_2020-12-02_15_09.csv',
                   '/Users/sanjaymamidipaka/Downloads/natureswaydata/CFGSNA2_TIMEPROFILE_2020-12-02_15_09.csv',
                   '/Users/sanjaymamidipaka/Downloads/natureswaydata/CFGSNA2_PA_ATTRIBUTES_2020-12-02_15_09.csv']

energizertestpaths = ['/Users/sanjaymamidipaka/Downloads/Energizer_2021-05-05_21_29_Test/ZSAPIBP1C_MASTERDATATYPES_2021-05-05_21_29.csv',
                      '/Users/sanjaymamidipaka/Downloads/Energizer_2021-05-05_21_29_Test/ZSAPIBP1C_PLEVELS_ATTRS_2021-05-05_21_29.csv',
                      '/Users/sanjaymamidipaka/Downloads/Energizer_2021-05-05_21_29_Test/ZSAPIBP1C_KEYFIGURES_2021-05-05_21_29.csv',
                      '/Users/sanjaymamidipaka/Downloads/Energizer_2021-05-05_21_29_Test/ZSAPIBP1C_ATTRIBUTES_AS_KEYFIGURE_2021-05-05_21_29.csv',
                      '/Users/sanjaymamidipaka/Downloads/Energizer_2021-05-05_21_29_Test/ZSAPIBP1C_TIMEPROFILE_2021-05-05_21_29.csv',
                      '/Users/sanjaymamidipaka/Downloads/Energizer_2021-05-05_21_29_Test/ZSAPIBP1C_PA_ATTRIBUTES_2021-05-05_21_29.csv']

energizerproductionspaths = ['/Users/sanjaymamidipaka/Downloads/Energizer_2021-05-05_21_32_Production/ZSAPIBP1C_MASTERDATATYPES_2021-05-05_21_32.csv',
                             '/Users/sanjaymamidipaka/Downloads/Energizer_2021-05-05_21_32_Production/ZSAPIBP1C_PLEVELS_ATTRS_2021-05-05_21_32.csv',
                             '/Users/sanjaymamidipaka/Downloads/Energizer_2021-05-05_21_32_Production/ZSAPIBP1C_KEYFIGURES_2021-05-05_21_32.csv',
                             '/Users/sanjaymamidipaka/Downloads/Energizer_2021-05-05_21_32_Production/ZSAPIBP1C_ATTRIBUTES_AS_KEYFIGURE_2021-05-05_21_32.csv',
                             '/Users/sanjaymamidipaka/Downloads/Energizer_2021-05-05_21_32_Production/ZSAPIBP1C_TIMEPROFILE_2021-05-05_21_32.csv',
                             '/Users/sanjaymamidipaka/Downloads/Energizer_2021-05-05_21_32_Production/ZSAPIBP1C_PA_ATTRIBUTES_2021-05-05_21_32.csv']

energizerlatestpaths = ['/Users/sanjaymamidipaka/Downloads/Energizer Latest/ZSAPIBP1C_MASTERDATATYPES_2021-05-05_23_58.csv',
                        '/Users/sanjaymamidipaka/Downloads/Energizer Latest/ZSAPIBP1C_PLEVELS_ATTRS_2021-05-05_23_58.csv',
                        '/Users/sanjaymamidipaka/Downloads/Energizer Latest/ZSAPIBP1C_KEYFIGURES_2021-05-05_23_58.csv',
                        '/Users/sanjaymamidipaka/Downloads/Energizer Latest/ZSAPIBP1C_ATTRIBUTES_AS_KEYFIGURE_2021-05-05_23_58.csv',
                        '/Users/sanjaymamidipaka/Downloads/Energizer Latest/ZSAPIBP1C_TIMEPROFILE_2021-05-05_23_58.csv',
                        '/Users/sanjaymamidipaka/Downloads/Energizer Latest/ZSAPIBP1C_PA_ATTRIBUTES_2021-05-05_23_58.csv']

# Each report object writes its own sheet(s) into the shared workbook.
timeprofile_instance = timeprofile(writer, energizerlatestpaths)
timeprofile_instance.run()

masterdata_instance = masterdata(writer, energizerlatestpaths)
masterdata_instance.run()

virtualReference_instance = virtualReference(writer, energizerlatestpaths)
virtualReference_instance.run()

attributes_instance = attributes(writer, energizerlatestpaths)
attributes_instance.run()

planninglevels_instance = planninglevels(writer, energizerlatestpaths)
planninglevels_instance.run()

keyfigures_instance = keyfigures(writer, energizerlatestpaths)
keyfigures_instance.run()

attributesaskf_instance = attributesaskf(writer, energizerlatestpaths)
attributesaskf_instance.run()

writer.book.close() #close the workbook
| [
"[email protected]"
] | |
ad56100aae986b9d39225d2009c1864556132f8f | 5a7a3447d434a458a7bb63f2aa11b64c284d5492 | /test/image_load.py | 834165562d2d100c68a6bd98d20ca2faaea7dd90 | [] | no_license | woshimayi/mypython | 35792e12036a7a05f12d3ef7006637b2b03f0e2e | 7f1eb38e8585bf6d2f21d3ad0f64dace61425875 | refs/heads/master | 2023-09-01T08:59:12.301836 | 2023-08-30T05:30:54 | 2023-08-30T05:30:54 | 130,017,052 | 4 | 0 | null | 2018-12-02T16:18:14 | 2018-04-18T06:50:36 | HTML | UTF-8 | Python | false | false | 622 | py | #coding=utf-8
import urllib
import re
def getHtml(url):
page = urllib.urlopen(url)
html = page.read()
return html
def getImg(html):
reg = r'src="(.+?\.jpg)" pic_ext'
imgre = re.compile(reg)
imglist = re.findall(imgre,html)
x = 0
for imgurl in imglist:
urllib.urlretrieve(imgurl,'%s.jpg' % x)
x+=1
html = getHtml("http://cn.bing.com/images/search?q=%E8%8B%B1%E5%9B%BD%E8%AE%AE%E4%BC%9A%E5%A4%A7%E5%8E%A6%E6%81%90%E6%80%96%E8%A2%AD%E5%87%BB&FORM=ISTRTH&id=F1E1C03F7EB1F290F78351F68318CB06438FD2B9&cat=%E4%BB%8A%E6%97%A5%E7%83%AD%E5%9B%BE&lpversion=")
print getImg(html)
| [
"[email protected]"
] | |
ef9743d94d29deebd30fc55ae31439a2db8b093d | f87f51ec4d9353bc3836e22ac4a944951f9c45c0 | /.history/HW06_20210715222321.py | fcf2188b6928a2756355ea80e53ded7f525f6620 | [] | no_license | sanjayMamidipaka/cs1301 | deaffee3847519eb85030d1bd82ae11e734bc1b7 | 9ddb66596497382d807673eba96853a17884d67b | refs/heads/main | 2023-06-25T04:52:28.153535 | 2021-07-26T16:42:44 | 2021-07-26T16:42:44 | 389,703,530 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,763 | py | """
Georgia Institute of Technology - CS1301
HW06 - Text Files & CSV
Collaboration Statement:
"""
#########################################
"""
Function Name: findCuisine()
Parameters: filename (str), cuisine (str)
Returns: list of restaurants (list)
"""
#########################################
########## WRITE FUNCTION HERE ##########
#########################################
def findCuisine(filename, cuisine):
    """Return the names of all restaurants whose cuisine matches `cuisine`.

    The file lists 4-line records; a restaurant's name sits on the line
    directly above its cuisine type, so each matching cuisine line yields
    the previous line (stripped) as a result.
    """
    # Fixed: use a context manager so the file is closed even on error.
    with open(filename, 'r') as file:
        content = file.readlines()
    listOfRestaurants = []
    for i in range(len(content)):
        if content[i].strip() == cuisine:
            # name of the restaurant is the previous line
            listOfRestaurants.append(content[i - 1].strip())
    return listOfRestaurants
"""
Function Name: restaurantFilter()
Parameters: filename (str)
Returns: dictionary that maps cuisine type (str)
to a list of restaurants of the same cuisine type (list)
"""
#########################################
########## WRITE FUNCTION HERE ##########
#########################################
def restaurantFilter(filename):
    """Map each cuisine type to the list of restaurant names of that cuisine.

    The file lists 4-line records: name, cuisine, category, separator.
    Keys appear in order of first occurrence, values in file order.
    """
    # Fixed: close the file via a context manager; renamed the accumulator
    # (was `dict`, shadowing the builtin); collapsed three passes into one.
    cuisine_map = {}
    with open(filename, 'r') as file:
        content = file.readlines()
    for i in range(0, len(content), 4):
        name = content[i].strip()
        cuisine = content[i + 1].strip()
        cuisine_map.setdefault(cuisine, []).append(name)
    return cuisine_map
"""
Function Name: createDirectory()
Parameters: filename (str), output filename (str)
Returns: None (NoneType)
"""
#########################################
########## WRITE FUNCTION HERE ##########
#########################################
def createDirectory(filename, outputFilename):
    """Write a restaurant directory grouped into Fast Food / Sit-down sections.

    Input records are 4 lines each: name, cuisine, category, separator.
    Each section numbers its entries independently starting at 1; the file
    ends without a trailing newline after the last sit-down entry.
    """
    # Fixed: both files are now closed via context managers.
    with open(filename, 'r') as readFile:
        content = readFile.readlines()
    fastfood = []
    sitdown = []
    for i in range(2, len(content), 4):
        restaurant = content[i - 2].strip()
        cuisine = content[i - 1].strip()
        group = content[i].strip()
        if group == 'Fast Food':
            fastfood.append(str(len(fastfood) + 1) + '. ' + restaurant + ' - ' + cuisine + '\n')
        else:
            sitdown.append(str(len(sitdown) + 1) + '. ' + restaurant + ' - ' + cuisine)
    with open(outputFilename, 'w') as writeFile:
        writeFile.write('Restaurant Directory' + '\n')
        writeFile.write('Fast Food' + '\n')
        writeFile.writelines(fastfood)
        writeFile.write('Sit-down' + '\n')
        # join() reproduces the original "no newline after last entry" format
        writeFile.write('\n'.join(sitdown))
"""
Function Name: extraHours()
Parameters: filename (str), hour (int)
Returns: list of (person, extra money) tuples (tuple)
"""
#########################################
########## WRITE FUNCTION HERE ##########
#########################################
def extraHours(filename, hour):
    """Return (name, extra pay) tuples for employees who worked over `hour`.

    CSV columns: name, age, wage, year hired, hours worked (header skipped).
    Extra pay = (hours worked - hour) * hourly wage.
    """
    # Fixed: close the file via a context manager.
    overtime = []
    with open(filename, 'r') as file:
        file.readline()  # skip the CSV header row
        for row in file:
            line = row.strip().split(',')
            name = line[0]
            wage = int(line[2])
            hoursWorked = int(line[4])
            if hoursWorked > hour:
                overtime.append((name, (hoursWorked - hour) * wage))
    return overtime
"""
Function Name: seniorStaffAverage()
Parameters: filename (str), year (int)
Returns: average age of senior staff members (float)
"""
#########################################
########## WRITE FUNCTION HERE ##########
#########################################
def seniorStaffAverage(filename, year):
    """Return the average age (float) of staff members hired before `year`.

    CSV columns: name, age, wage, year hired, hours worked (header skipped).
    Returns 0.0 when no employee qualifies.

    NOTE(review): the original body was an incomplete copy of extraHours()
    and raised NameError on undefined `hour`/`wage`/`overtime`. "Senior
    staff" is taken to mean a hire year strictly earlier than `year` —
    TODO confirm against the assignment spec.
    """
    ages = []
    with open(filename, 'r') as file:
        file.readline()  # skip the CSV header row
        for row in file:
            line = row.strip().split(',')
            age = int(line[1])
            yearHired = int(line[3])
            if yearHired < year:
                ages.append(age)
    if not ages:
        return 0.0
    return sum(ages) / len(ages)
"""
Function Name: ageDict()
Parameters: filename (str), list of age ranges represented by strings (list)
Returns: dictionary (dict) that maps each age range (str) to a list of employees (list)
"""
#########################################
########## WRITE FUNCTION HERE ##########
#########################################
# print(findCuisine('restaurants.txt', 'Mexican'))
#print(restaurantFilter('restaurants.txt'))
#print(createDirectory('restaurants.txt','output.txt'))
# print(extraHours('employees.csv', 40))
| [
"[email protected]"
] | |
27b40d5f4d6e34ca94bb8678bc5ab493499da234 | a1431c25ebd62daead742e0120a16253c4cf67ca | /env/bin/rst2pseudoxml.py | f5df7e1eeb081261c7377ab286cc4b34ed5a0fc3 | [] | no_license | KonradMarzec1991/my_MDB | f840cbf495c23272b3e39db68c241219a60d63bd | d77339a4c37a3d7ae21b6d28bd9644ce15130f10 | refs/heads/master | 2022-04-29T10:15:37.109422 | 2019-11-03T20:13:57 | 2019-11-03T20:13:57 | 207,375,063 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 636 | py | #!/home/konrad/PycharmProjects/my_mdb/env/bin/python3
# $Id: rst2pseudoxml.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: David Goodger <[email protected]>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing pseudo-XML.
"""
try:
    import locale
    # Best effort: honour the user's locale so output encoding matches it.
    locale.setlocale(locale.LC_ALL, '')
except Exception:
    # Fixed: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt. Locale setup is optional, so only real errors
    # are ignored here.
    pass

from docutils.core import publish_cmdline, default_description

description = ('Generates pseudo-XML from standalone reStructuredText '
               'sources (for testing purposes).  ' + default_description)

publish_cmdline(description=description)
"[email protected]"
] | |
1a17c0e753532ecf7f5f5d0b99fb308e5ec83ca9 | bdcab42a9124d7a3878a904076170bd4bff7451f | /src/hessian/random_sample_points.py | 44047bd5934ab4c7ec808b9b9c3a87972695717a | [] | no_license | hwang595/data_augmentation_playground | aa30685213083bb271ae56996d8aff831ef975ab | 5b11a5d5c2d9254b5ffa293eebf8e3e6269edd69 | refs/heads/master | 2021-01-25T09:14:30.059368 | 2020-03-01T21:33:06 | 2020-03-01T21:33:06 | 93,801,194 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 5,705 | py | import numpy as np
from sklearn.decomposition import PCA
import matplotlib.pyplot as plt
from mpl_toolkits.mplot3d import Axes3D
# Scale factor applied to every generated coordinate.
DIST_ = 20

def rand_point_generator(point_num=None):
    """Sample `point_num` labelled 2-D points per class.

    Positive points (label 1) land in x < 0, y > 0; negative points
    (label -1) land in x > 0, y < 0, both scaled by DIST_.
    Returns (pos_array, neg_array), each of shape (point_num, 3) with the
    label in the last column.
    """
    positives = []
    negatives = []
    while len(positives) < point_num or len(negatives) < point_num:
        # first settings: one candidate per class, drawn at 1/1000 resolution
        px = np.random.randint(low=-1000, high=-100) / float(1000) * DIST_
        py = np.random.randint(low=600, high=1400) / float(1000) * DIST_
        nx = np.random.randint(low=500, high=1500) / float(1000) * DIST_
        ny = np.random.randint(low=-1000, high=-200) / float(1000) * DIST_
        if [px, py] not in positives:
            positives.append([px, py, 1])
        if [nx, ny] not in negatives:
            negatives.append([nx, ny, -1])
    return np.array(positives), np.array(negatives)
def find_point_with_distance(center_point_0=None, center_point_1=None, distance=None):
    """Return the point `distance` away from `center_point_0` along the unit
    direction pointing towards `center_point_1`."""
    direction = center_point_1 - center_point_0
    unit = direction / float(np.linalg.norm(direction))
    return center_point_0 + distance * unit
def rand_point_generator_high_dim(point_num=None, dim=None, dist=None):
    """Sample `point_num` labelled points per class in `dim` dimensions.

    param: point_num: points to generate for each of the two classes
    param: dim: dimensionality of each point
    param: dist: scale factor applied to every coordinate

    Coordinates are drawn uniformly at 1/1000 resolution from fixed,
    disjoint per-class ranges; the RNG is seeded so output is reproducible.
    Returns (pos_points, neg_points, pos_labels, neg_labels).
    """
    np.random.seed(seed=42)
    POS_HIGH_ = -200
    POS_LOW_ = -1200
    NEG_HIGH_ = 1800
    NEG_LOW_ = 400
    sigma_ = 0.1  # kept for parity with the (disabled) Gaussian variant; unused

    pos_points, neg_points = [], []
    pos_labels, neg_labels = [], []

    # Draw one reference center per class. The centers themselves are not
    # used below, but drawing them keeps the RNG stream identical to the
    # original implementation.
    tmp_pos_ = np.zeros(dim)
    tmp_neg_ = np.zeros(dim)
    for axis in range(dim):
        tmp_pos_[axis] = np.random.randint(low=POS_LOW_, high=POS_HIGH_) / float(1000)
        tmp_neg_[axis] = np.random.randint(low=NEG_LOW_, high=NEG_HIGH_) / float(1000)

    while len(pos_points) < point_num or len(neg_points) < point_num:
        pos_candidate = np.zeros(dim)
        neg_candidate = np.zeros(dim)
        for axis in range(dim):
            pos_candidate[axis] = np.random.randint(low=POS_LOW_, high=POS_HIGH_) / float(1000) * dist
            neg_candidate[axis] = np.random.randint(low=NEG_LOW_, high=NEG_HIGH_) / float(1000) * dist
        pos_points.append(pos_candidate)
        neg_points.append(neg_candidate)
        pos_labels.append(1)
        neg_labels.append(-1)

    return np.array(pos_points), np.array(neg_points), np.array(pos_labels), np.array(neg_labels)
def get_transformation(angle=None):
    """Return the 2x2 rotation matrix for `angle` degrees, anti-clockwise."""
    theta = np.radians(angle)
    cos_t = np.cos(theta)
    sin_t = np.sin(theta)
    return np.array([[cos_t, -sin_t], [sin_t, cos_t]])
if __name__ == "__main__":
np.random.seed(seed=42)
X_pos, X_neg, y_pos, y_neg = rand_point_generator_high_dim(point_num=50, dim=6, dist=0.5)
X = np.concatenate((X_pos, X_neg), axis=0)
#plt.show()
'''
pca_pos = PCA(n_components=2)
pca_neg = PCA(n_components=2)
X_decomp_pos=pca_pos.fit_transform(X_pos)
X_decomp_neg=pca_neg.fit_transform(X_neg)
'''
pca = PCA(n_components=2)
X_decomp = pca.fit_transform(X)
# fig = plt.figure()
# ax = fig.add_subplot(111, projection='3d')
# ax.scatter(X_pos[:, 0], X_pos[:, 1], X_pos[:, 2], c='r', marker='^')
# ax.scatter(X_neg[:, 0], X_neg[:, 1], X_neg[:, 2], c='b', marker='s')
# plt.show()
#print(X_decomp_pos.shape)
#print(X_decomp_neg.shape)
plt.figure(2)
plt.hold(True)
for i in range(X_decomp.shape[0]):
if i < X_decomp.shape[0] / 2:
plt.plot(X_decomp[i, 0], X_decomp[i, 1], '^r')
else:
plt.plot(X_decomp[i, 0], X_decomp[i, 1], '^b')
#plt.plot(X_decomp_neg[:, 0], X_decomp_neg[:, 1], 'sb')
plt.show()
#print(np.linalg.norm(tmp_pos-new_neg))
#print(tmp_pos.shape)
#print(new_neg.shape)
'''
pos_data_points, neg_data_points=rand_point_generator(point_num=50)
dataset = np.concatenate((pos_data_points, neg_data_points), axis=0)
rotation_matrix = get_transformation(angle=60)
pos_transformed = np.dot(pos_data_points[:,0:2], rotation_matrix)
neg_transformed = np.dot(neg_data_points[:,0:2], rotation_matrix)
fig = plt.figure(1)
plt.scatter([x[0] for x in pos_data_points], [x[1] for x in pos_data_points], c='r')
plt.scatter([x[0] for x in neg_data_points], [x[1] for x in neg_data_points], c='b')
#fig_2 = plt.figure(2)
plt.scatter([x[0] for x in pos_transformed], [x[1] for x in pos_transformed], c='r', marker='^')
plt.scatter([x[0] for x in neg_transformed], [x[1] for x in neg_transformed], c='b', marker='^')
plt.show()
'''
| [
"[email protected]"
] | |
475f56cddea7da88cb0c4c18cc4e1649dc2a16ba | 7bededcada9271d92f34da6dae7088f3faf61c02 | /pypureclient/flashblade/FB_2_8/models/keytab_file_base64.py | e7f84a50b47b69c9b000c62ecaa4a1e8b70b635e | [
"BSD-2-Clause"
] | permissive | PureStorage-OpenConnect/py-pure-client | a5348c6a153f8c809d6e3cf734d95d6946c5f659 | 7e3c3ec1d639fb004627e94d3d63a6fdc141ae1e | refs/heads/master | 2023-09-04T10:59:03.009972 | 2023-08-25T07:40:41 | 2023-08-25T07:40:41 | 160,391,444 | 18 | 29 | BSD-2-Clause | 2023-09-08T09:08:30 | 2018-12-04T17:02:51 | Python | UTF-8 | Python | false | false | 2,912 | py | # coding: utf-8
"""
FlashBlade REST API
A lightweight client for FlashBlade REST API 2.8, developed by Pure Storage, Inc. (http://www.purestorage.com/).
OpenAPI spec version: 2.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re
import six
import typing
from ....properties import Property
if typing.TYPE_CHECKING:
from pypureclient.flashblade.FB_2_8 import models
class KeytabFileBase64(object):
    """Swagger-codegen model for a base64-encoded keytab file upload.

    Attributes:
        swagger_types (dict): The key is attribute name
            and the value is attribute type.
        attribute_map (dict): The key is attribute name
            and the value is json key in definition.
    """
    # This model declares no attributes, so all three maps are empty;
    # __setattr__ consequently rejects every assignment.
    swagger_types = {
    }

    attribute_map = {
    }

    required_args = {
    }

    def __init__(
        self,
    ):
        """
        Keyword args:
        """

    def __setattr__(self, key, value):
        """Reject any attribute not declared in `attribute_map`."""
        if key not in self.attribute_map:
            raise KeyError("Invalid key `{}` for `KeytabFileBase64`".format(key))
        self.__dict__[key] = value

    def __getattribute__(self, item):
        """Return None for unset `Property` placeholders, else the value."""
        value = object.__getattribute__(self, item)
        if isinstance(value, Property):
            return None
        else:
            return value

    def to_dict(self):
        """Returns the model properties as a dict"""
        result = {}

        for attr, _ in six.iteritems(self.swagger_types):
            if hasattr(self, attr):
                value = getattr(self, attr)
                if isinstance(value, list):
                    result[attr] = list(map(
                        lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
                        value
                    ))
                elif hasattr(value, "to_dict"):
                    result[attr] = value.to_dict()
                elif isinstance(value, dict):
                    result[attr] = dict(map(
                        lambda item: (item[0], item[1].to_dict())
                        if hasattr(item[1], "to_dict") else item,
                        value.items()
                    ))
                else:
                    result[attr] = value
        if issubclass(KeytabFileBase64, dict):
            for key, value in self.items():
                result[key] = value

        return result

    def to_str(self):
        """Returns the string representation of the model"""
        return pprint.pformat(self.to_dict())

    def __repr__(self):
        """For `print` and `pprint`"""
        return self.to_str()

    def __eq__(self, other):
        """Returns true if both objects are equal"""
        if not isinstance(other, KeytabFileBase64):
            return False

        return self.__dict__ == other.__dict__

    def __ne__(self, other):
        """Returns true if both objects are not equal"""
        return not self == other
| [
"[email protected]"
] | |
54aa25a623bcd141ceb60503e4862c6560334415 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_340/ch79_2020_04_08_17_16_37_430613.py | 3692e00bb61b220fb835ac8e529d71a5ac2851ad | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 133 | py | def monta_dicionario(lista1, lista2):
dicionario={}
for i in range(len(lista1)):
dicionario[lista1[i]]=lista2[i]
| [
"[email protected]"
] | |
ff93f81a89b9f25fa80f463b60f894e744aea0dd | 69a36ca23409b994a31759bad58971b197cad236 | /config/settings.py | 3445021fab5cfbc2c6ca87cdbd98f719463686c2 | [] | no_license | matt700395/awesome_repo | 56601cf817106df0e210e78c7bb1f11af1e60c3a | d49b1e55b6ade24f1f2058319ac6859b45e511bc | refs/heads/master | 2023-08-27T16:45:15.570115 | 2021-11-11T14:16:06 | 2021-11-11T14:16:06 | 427,026,282 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,346 | py | """
Django settings for config project.
Generated by 'django-admin startproject' using Django 3.0.
For more information on this file, see
https://docs.djangoproject.com/en/3.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.0/ref/settings/
"""
import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.0/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to the repository — move it to an
# environment variable before any production deployment.
SECRET_KEY = "cc)*5=(s+i2-&9x7&&&o+y7$g5!db3tvu85ykok#mwxf#6gir2"

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []


# Application definition

DJANGO_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
]
PROJECT_APPS = [
    "core.apps.CoreConfig",
    "users.apps.UsersConfig",
    "rooms.apps.RoomsConfig",
]

THIRD_PARTY_APPS = []

INSTALLED_APPS = DJANGO_APPS + PROJECT_APPS + THIRD_PARTY_APPS

MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
]

ROOT_URLCONF = "config.urls"

TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    },
]

WSGI_APPLICATION = "config.wsgi.application"


# Database
# https://docs.djangoproject.com/en/3.0/ref/settings/#databases

DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": os.path.join(BASE_DIR, "db.sqlite3"),
    }
}


# Password validation
# https://docs.djangoproject.com/en/3.0/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
    },
    {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
    {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
    {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
]


# Internationalization
# https://docs.djangoproject.com/en/3.0/topics/i18n/

LANGUAGE_CODE = "en-us"

TIME_ZONE = "UTC"

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.0/howto/static-files/

STATIC_URL = "/static/"

# User-uploaded media is stored under BASE_DIR/uploads and served at /media/.
MEDIA_ROOT = os.path.join(BASE_DIR, "uploads")

MEDIA_URL = "/media/"

# Auth: the project uses a custom user model from the users app.
AUTH_USER_MODEL = "users.User"
"[email protected]"
] | |
c474ddcdc642369145b11ba23644182f63331500 | 116a4a2fcd3e9c3d216f96103006c707daa6001a | /HelloDjango/apps/awards/migrations/0017_auto_20200726_0254.py | 1dccd60f9a05a0237dcea616506c43eae765cb60 | [] | no_license | Eldar1988/a_white_birds | 22d743ed1fa651062f070c0e81b7ac665be7a72a | 0430d5322b3a55b6f55e9541675d6670f5d8a518 | refs/heads/master | 2022-12-18T20:23:26.293059 | 2020-09-15T04:27:59 | 2020-09-15T04:27:59 | 283,169,602 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,212 | py | # Generated by Django 3.0.6 on 2020-07-25 20:54
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('awards', '0016_juryapproved_project'),
]
operations = [
migrations.RemoveField(
model_name='jury',
name='user',
),
migrations.DeleteModel(
name='Promocode',
),
migrations.AddField(
model_name='profile',
name='interview',
field=models.URLField(null=True, verbose_name='Ссылка на интервью (только для жюри)'),
),
migrations.AddField(
model_name='profile',
name='preview',
field=models.TextField(max_length=500, null=True, verbose_name='Краткая информация - один абзац (только для жюри)'),
),
migrations.AddField(
model_name='profile',
name='professional',
field=models.CharField(max_length=200, null=True, verbose_name='Профессия (только для жюри)'),
),
migrations.DeleteModel(
name='Jury',
),
]
| [
"[email protected]"
] | |
ec6e3a87299b3f0b27c39ebb22357a57cd9e2f35 | 04afb34356de112445c3e5733fd2b773d92372ef | /Sem1/FP/S13/venv/Scripts/pip-script.py | ecfdd60747e705166efa7dda1830c8ac7fb753a9 | [] | no_license | AndreeaCimpean/Uni | a4e48e5e1dcecbc0c28ad45ddd3b0989ff7985c8 | 27df09339e4f8141be3c22ae93c4c063ffd2b172 | refs/heads/master | 2020-08-21T19:12:49.840044 | 2020-05-15T17:22:50 | 2020-05-15T17:22:50 | 216,222,647 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | #!D:\Facultate\UniRepo\Sem1\FP\S13\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip'
__requires__ = 'pip==19.0.3'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('pip==19.0.3', 'console_scripts', 'pip')()
)
| [
"[email protected]"
] | |
07ee5ca8244bc40fdcfdffc0e184e8d66225d837 | 91d13f45f8527c368ebc6e44c75142a043f0583b | /test_zappa_cookiecutter/users/tests/test_drf_urls.py | 5c5a28e94cb0566c442fdcd429e5dbf1a914a39c | [
"MIT"
] | permissive | Andrew-Chen-Wang/cookiecutter-django-lambda | 6beed03d82eeecf95281c7f03a279c9c8b2ca85c | c4c64e174f653205c399ffa683918141f2f058d7 | refs/heads/master | 2022-11-16T12:20:00.589856 | 2020-07-19T20:19:41 | 2020-07-19T20:19:41 | 280,943,511 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 676 | py | import pytest
from django.urls import resolve, reverse
from test_zappa_cookiecutter.users.models import User
pytestmark = pytest.mark.django_db
def test_user_detail(user: User):
    """api:user-detail round-trips with /api/users/<username>/."""
    url = f"/api/users/{user.username}/"
    assert reverse("api:user-detail", kwargs={"username": user.username}) == url
    assert resolve(url).view_name == "api:user-detail"
def test_user_list():
    """api:user-list round-trips with /api/users/."""
    url = "/api/users/"
    assert reverse("api:user-list") == url
    assert resolve(url).view_name == "api:user-list"
def test_user_me():
    """api:user-me round-trips with /api/users/me/."""
    url = "/api/users/me/"
    assert reverse("api:user-me") == url
    assert resolve(url).view_name == "api:user-me"
| [
"[email protected]"
] | |
221eabeb7855ab26b445ce0626620cf82ea4dd10 | 781e2692049e87a4256320c76e82a19be257a05d | /all_data/exercism_data/python/series/a713022194c640d79ae14ee2e504dd88.py | eb7a127a4563a635852c50f164844820a748ca91 | [] | no_license | itsolutionscorp/AutoStyle-Clustering | 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | be0e2f635a7558f56c61bc0b36c6146b01d1e6e6 | refs/heads/master | 2020-12-11T07:27:19.291038 | 2016-03-16T03:18:00 | 2016-03-16T03:18:42 | 59,454,921 | 4 | 0 | null | 2016-05-23T05:40:56 | 2016-05-23T05:40:56 | null | UTF-8 | Python | false | false | 221 | py | def slices( N, size ):
if (size == 0 and len(N) > 0) or (len(N) < size):
raise ValueError('Bad input!')
return [ [ int(d) for d in N[s:s+size] ]
for s in range( len(N) - size + 1 ) ]
| [
"[email protected]"
] | |
deca411428980e2f3479946e16bec2cf5d7bc3c3 | 516932b326f58f9dc7c008e379f80cafd820acc0 | /src/helixtariff/test/logic/test_user_tariff.py | efde456f49fbfc657f1b000a05a4043a1fc4b16b | [] | no_license | sand8080/helixtariff | ffa4021fac16876bbbad8a4a8f1c53a9e4fd71d7 | 0bb56ad9e954509961db6bf636bce3a541709b93 | refs/heads/master | 2020-12-24T14:57:01.276045 | 2012-07-12T14:59:56 | 2012-07-12T14:59:56 | 1,605,281 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,923 | py | import unittest
from helixcore.error import RequestProcessingError
from helixtariff.test.logic.actor_logic_test import ActorLogicTestCase
class UserTariffTestCase(ActorLogicTestCase):
    """Logic tests for attaching, detaching and listing user tariffs."""

    # user id shared by every test in this case
    u_id = 22

    def test_add_user_tariff(self):
        """Attaching an existing tariff to a user succeeds."""
        t_id = self._add_tariff('tariff one', currency='RUB')
        self._add_user_tariff(t_id, self.u_id)

    def test_add_user_tariff_duplication(self):
        """Attaching the same tariff to the same user twice is rejected."""
        name = 'tariff one'
        t_id = self._add_tariff(name, currency='RUB')
        self._add_user_tariff(t_id, self.u_id)
        self.assertRaises(RequestProcessingError, self._add_user_tariff, t_id, self.u_id)

    def test_add_wrong_tariff(self):
        """Attaching a non-existent tariff id is rejected."""
        self.assertRaises(RequestProcessingError, self._add_user_tariff, 555, self.u_id)

    def test_delete_user_tariff(self):
        """Deleting a user's tariff removes the user's tariff record."""
        t_id = self._add_tariff('t', currency='RUB')
        self._add_user_tariff(t_id, self.u_id)
        user_tariffs = self._get_user_tariffs([self.u_id])
        self.assertEquals([t_id], user_tariffs[0]['tariff_ids'])

        sess = self.login_actor()
        req = {'session_id': sess.session_id, 'user_id': self.u_id,
            'tariff_ids': [t_id]}
        resp = self.delete_user_tariffs(**req)
        self.check_response_ok(resp)
        user_tariffs = self._get_user_tariffs([self.u_id])
        self.assertEquals(0, len(user_tariffs))

    def test_get_user_tariffs(self):
        """Listing returns only tariffs actually attached to the user."""
        self._add_tariff('t0', currency='RUB')
        t_id_1 = self._add_tariff('t1', currency='RUB')
        user_tariffs = self._get_user_tariffs([self.u_id])
        self.assertEquals(0, len(user_tariffs))

        self._add_user_tariff(t_id_1, self.u_id)
        user_tariffs = self._get_user_tariffs([self.u_id])
        self.assertEquals(1, len(user_tariffs))
        self.assertEquals(self.u_id, user_tariffs[0]['user_id'])
        self.assertEquals([t_id_1], user_tariffs[0]['tariff_ids'])


if __name__ == '__main__':
    unittest.main()
"[email protected]"
] | |
0e512d5cc3c40a98f88773bb04257a5009284703 | a8062308fb3bf6c8952257504a50c3e97d801294 | /test/test_1680_concatenation_of_consecutive_binary_numbers.py | 2e973fa95d840dec8ee1b362d393d6690776c76f | [] | no_license | wan-catherine/Leetcode | 650d697a873ad23c0b64d08ad525bf9fcdb62b1b | 238995bd23c8a6c40c6035890e94baa2473d4bbc | refs/heads/master | 2023-09-01T00:56:27.677230 | 2023-08-31T00:49:31 | 2023-08-31T00:49:31 | 143,770,000 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 460 | py | from unittest import TestCase
from problems.N1680_Concatenation_Of_Consecutive_Binary_Numbers import Solution
class TestSolution(TestCase):
def test_concatenatedBinary(self):
self.assertEqual(1, Solution().concatenatedBinary(1))
def test_concatenatedBinary_1(self):
self.assertEqual(27, Solution().concatenatedBinary(3))
def test_concatenatedBinary_2(self):
self.assertEqual(505379714, Solution().concatenatedBinary(12))
| [
"[email protected]"
] | |
c2f109d8653198c97abaf0506e538f09dafebf27 | a479a5773fd5607f96c3b84fed57733fe39c3dbb | /napalm_yang/models/openconfig/network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/__init__.py | 9f9a949d618408cef24874b85b55dba45410a6d5 | [
"Apache-2.0"
] | permissive | napalm-automation/napalm-yang | 839c711e9294745534f5fbbe115e0100b645dbca | 9148e015b086ebe311c07deb92e168ea36fd7771 | refs/heads/develop | 2021-01-11T07:17:20.226734 | 2019-05-15T08:43:03 | 2019-05-15T08:43:03 | 69,226,025 | 65 | 64 | Apache-2.0 | 2019-05-15T08:43:24 | 2016-09-26T07:48:42 | Python | UTF-8 | Python | false | false | 136,581 | py | # -*- coding: utf-8 -*-
from operator import attrgetter
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType
from pyangbind.lib.yangtypes import RestrictedClassType
from pyangbind.lib.yangtypes import TypedListType
from pyangbind.lib.yangtypes import YANGBool
from pyangbind.lib.yangtypes import YANGListType
from pyangbind.lib.yangtypes import YANGDynClass
from pyangbind.lib.yangtypes import ReferenceType
from pyangbind.lib.base import PybindBase
from collections import OrderedDict
from decimal import Decimal
from bitarray import bitarray
import six
# PY3 support of some PY2 keywords (needs improved)
if six.PY3:
import builtins as __builtin__
long = int
elif six.PY2:
import __builtin__
class state(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module openconfig-network-instance - based on the path /network-instances/network-instance/afts/aft/entries/entry/next-hops/next-hop/state. Each member element of
the container is represented as a class variable - with a specific
YANG type.
YANG Description: Operational state parameters relating to the AFT
next-hop entry
"""
__slots__ = (
"_path_helper",
"_extmethods",
"__index",
"__weight",
"__ip_address",
"__mac_address",
"__popped_mpls_label_stack",
"__pushed_mpls_label_stack",
"__decapsulate_header",
"__encapsulate_header",
"__origin_protocol",
)
_yang_name = "state"
_pybind_generated_by = "container"
def __init__(self, *args, **kwargs):
self._path_helper = False
self._extmethods = False
self.__index = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..18446744073709551615"]},
int_size=64,
),
is_leaf=True,
yang_name="index",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint64",
is_config=False,
)
self.__weight = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="weight",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint32",
is_config=False,
)
self.__ip_address = YANGDynClass(
base=[
RestrictedClassType(
base_type=RestrictedClassType(
base_type=six.text_type,
restriction_dict={
"pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?"
},
),
restriction_dict={"pattern": "[0-9\\.]*"},
),
RestrictedClassType(
base_type=RestrictedClassType(
base_type=six.text_type,
restriction_dict={
"pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?"
},
),
restriction_dict={"pattern": "[0-9a-fA-F:\\.]*"},
),
],
is_leaf=True,
yang_name="ip-address",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="inet:ip-address-no-zone",
is_config=False,
)
self.__mac_address = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_dict={"pattern": "[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}"},
),
is_leaf=True,
yang_name="mac-address",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:mac-address",
is_config=False,
)
self.__popped_mpls_label_stack = YANGDynClass(
base=TypedListType(
allowed_type=[
RestrictedClassType(
base_type=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
restriction_dict={"range": ["16..1048575"]},
),
RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"IPV4_EXPLICIT_NULL": {"value": 0},
"ROUTER_ALERT": {"value": 1},
"IPV6_EXPLICIT_NULL": {"value": 2},
"IMPLICIT_NULL": {"value": 3},
"ENTROPY_LABEL_INDICATOR": {"value": 7},
"NO_LABEL": {},
},
),
]
),
is_leaf=False,
yang_name="popped-mpls-label-stack",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-mplst:mpls-label",
is_config=False,
)
self.__pushed_mpls_label_stack = YANGDynClass(
base=TypedListType(
allowed_type=[
RestrictedClassType(
base_type=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
restriction_dict={"range": ["16..1048575"]},
),
RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"IPV4_EXPLICIT_NULL": {"value": 0},
"ROUTER_ALERT": {"value": 1},
"IPV6_EXPLICIT_NULL": {"value": 2},
"IMPLICIT_NULL": {"value": 3},
"ENTROPY_LABEL_INDICATOR": {"value": 7},
"NO_LABEL": {},
},
),
]
),
is_leaf=False,
yang_name="pushed-mpls-label-stack",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-mplst:mpls-label",
is_config=False,
)
self.__decapsulate_header = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"GRE": {}, "IPV4": {}, "IPV6": {}, "MPLS": {}},
),
is_leaf=True,
yang_name="decapsulate-header",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-aftt:encapsulation-header-type",
is_config=False,
)
self.__encapsulate_header = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"GRE": {}, "IPV4": {}, "IPV6": {}, "MPLS": {}},
),
is_leaf=True,
yang_name="encapsulate-header",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-aftt:encapsulation-header-type",
is_config=False,
)
self.__origin_protocol = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"BGP": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:BGP": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:BGP": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"ISIS": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:ISIS": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:ISIS": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"OSPF": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:OSPF": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:OSPF": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"OSPF3": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:OSPF3": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:OSPF3": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"STATIC": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:STATIC": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:STATIC": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"DIRECTLY_CONNECTED": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:DIRECTLY_CONNECTED": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:DIRECTLY_CONNECTED": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"LOCAL_AGGREGATE": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:LOCAL_AGGREGATE": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:LOCAL_AGGREGATE": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
},
),
is_leaf=True,
yang_name="origin-protocol",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"afts",
"aft",
"entries",
"entry",
"next-hops",
"next-hop",
"state",
]
def _get_index(self):
"""
Getter method for index, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/index (uint64)
YANG Description: A unique entry for the next-hop
"""
return self.__index
def _set_index(self, v, load=False):
"""
Setter method for index, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/index (uint64)
If this variable is read-only (config: false) in the
source YANG file, then _set_index is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_index() directly.
YANG Description: A unique entry for the next-hop
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..18446744073709551615"]},
int_size=64,
),
is_leaf=True,
yang_name="index",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint64",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """index must be of a type compatible with uint64""",
"defined-type": "uint64",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint64', is_config=False)""",
}
)
self.__index = t
if hasattr(self, "_set"):
self._set()
def _unset_index(self):
self.__index = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..18446744073709551615"]},
int_size=64,
),
is_leaf=True,
yang_name="index",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint64",
is_config=False,
)
def _get_weight(self):
"""
Getter method for weight, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/weight (uint32)
YANG Description: The weight of the next-hop. Traffic is balanced according to
the ratio described by the relative weights of the next hops
that exist for the AFT entry. Note that all next-hops that are
specified are assumed to be active next-hops and therefore
eligible (and selected) to be installed in the FIB, and hence
used for packet forwarding.
"""
return self.__weight
def _set_weight(self, v, load=False):
"""
Setter method for weight, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/weight (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_weight is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_weight() directly.
YANG Description: The weight of the next-hop. Traffic is balanced according to
the ratio described by the relative weights of the next hops
that exist for the AFT entry. Note that all next-hops that are
specified are assumed to be active next-hops and therefore
eligible (and selected) to be installed in the FIB, and hence
used for packet forwarding.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="weight",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint32",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """weight must be of a type compatible with uint32""",
"defined-type": "uint32",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)""",
}
)
self.__weight = t
if hasattr(self, "_set"):
self._set()
def _unset_weight(self):
self.__weight = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="weight",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint32",
is_config=False,
)
def _get_ip_address(self):
"""
Getter method for ip_address, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/ip_address (inet:ip-address-no-zone)
YANG Description: The IP address of the next-hop system.
"""
return self.__ip_address
def _set_ip_address(self, v, load=False):
"""
Setter method for ip_address, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/ip_address (inet:ip-address-no-zone)
If this variable is read-only (config: false) in the
source YANG file, then _set_ip_address is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_ip_address() directly.
YANG Description: The IP address of the next-hop system.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=[
RestrictedClassType(
base_type=RestrictedClassType(
base_type=six.text_type,
restriction_dict={
"pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?"
},
),
restriction_dict={"pattern": "[0-9\\.]*"},
),
RestrictedClassType(
base_type=RestrictedClassType(
base_type=six.text_type,
restriction_dict={
"pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?"
},
),
restriction_dict={"pattern": "[0-9a-fA-F:\\.]*"},
),
],
is_leaf=True,
yang_name="ip-address",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="inet:ip-address-no-zone",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """ip_address must be of a type compatible with inet:ip-address-no-zone""",
"defined-type": "inet:ip-address-no-zone",
"generated-type": """YANGDynClass(base=[RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), restriction_dict={'pattern': '[0-9\\.]*'}),RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'}), restriction_dict={'pattern': '[0-9a-fA-F:\\.]*'}),], is_leaf=True, yang_name="ip-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:ip-address-no-zone', is_config=False)""",
}
)
self.__ip_address = t
if hasattr(self, "_set"):
self._set()
def _unset_ip_address(self):
self.__ip_address = YANGDynClass(
base=[
RestrictedClassType(
base_type=RestrictedClassType(
base_type=six.text_type,
restriction_dict={
"pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?"
},
),
restriction_dict={"pattern": "[0-9\\.]*"},
),
RestrictedClassType(
base_type=RestrictedClassType(
base_type=six.text_type,
restriction_dict={
"pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?"
},
),
restriction_dict={"pattern": "[0-9a-fA-F:\\.]*"},
),
],
is_leaf=True,
yang_name="ip-address",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="inet:ip-address-no-zone",
is_config=False,
)
def _get_mac_address(self):
"""
Getter method for mac_address, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/mac_address (yang:mac-address)
YANG Description: The MAC address of the next-hop if resolved by the local
network instance.
"""
return self.__mac_address
def _set_mac_address(self, v, load=False):
"""
Setter method for mac_address, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/mac_address (yang:mac-address)
If this variable is read-only (config: false) in the
source YANG file, then _set_mac_address is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_mac_address() directly.
YANG Description: The MAC address of the next-hop if resolved by the local
network instance.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=RestrictedClassType(
base_type=six.text_type,
restriction_dict={"pattern": "[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}"},
),
is_leaf=True,
yang_name="mac-address",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:mac-address",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """mac_address must be of a type compatible with yang:mac-address""",
"defined-type": "yang:mac-address",
"generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'}), is_leaf=True, yang_name="mac-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:mac-address', is_config=False)""",
}
)
self.__mac_address = t
if hasattr(self, "_set"):
self._set()
def _unset_mac_address(self):
self.__mac_address = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_dict={"pattern": "[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}"},
),
is_leaf=True,
yang_name="mac-address",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:mac-address",
is_config=False,
)
def _get_popped_mpls_label_stack(self):
"""
Getter method for popped_mpls_label_stack, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/popped_mpls_label_stack (oc-mplst:mpls-label)
YANG Description: The MPLS label stack to be popped from the packet when
switched by the system. The stack is encoding as a leaf-list
whereby the other of the entries is such that the first entry
is the label lowest down the label stack to be popped.
If the local system pops the outer-most label 400, then the
value of this list is [400,]. If the local system removes two
labels, the outer-most being 500, and the second of which is
500, then the value of the list is [500, 400].
A swap operation is reflected by entries in the
popped-mpls-label-stack and pushed-mpls-label-stack nodes.
"""
return self.__popped_mpls_label_stack
def _set_popped_mpls_label_stack(self, v, load=False):
"""
Setter method for popped_mpls_label_stack, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/popped_mpls_label_stack (oc-mplst:mpls-label)
If this variable is read-only (config: false) in the
source YANG file, then _set_popped_mpls_label_stack is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_popped_mpls_label_stack() directly.
YANG Description: The MPLS label stack to be popped from the packet when
switched by the system. The stack is encoding as a leaf-list
whereby the other of the entries is such that the first entry
is the label lowest down the label stack to be popped.
If the local system pops the outer-most label 400, then the
value of this list is [400,]. If the local system removes two
labels, the outer-most being 500, and the second of which is
500, then the value of the list is [500, 400].
A swap operation is reflected by entries in the
popped-mpls-label-stack and pushed-mpls-label-stack nodes.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=TypedListType(
allowed_type=[
RestrictedClassType(
base_type=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
restriction_dict={"range": ["16..1048575"]},
),
RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"IPV4_EXPLICIT_NULL": {"value": 0},
"ROUTER_ALERT": {"value": 1},
"IPV6_EXPLICIT_NULL": {"value": 2},
"IMPLICIT_NULL": {"value": 3},
"ENTROPY_LABEL_INDICATOR": {"value": 7},
"NO_LABEL": {},
},
),
]
),
is_leaf=False,
yang_name="popped-mpls-label-stack",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-mplst:mpls-label",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """popped_mpls_label_stack must be of a type compatible with oc-mplst:mpls-label""",
"defined-type": "oc-mplst:mpls-label",
"generated-type": """YANGDynClass(base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['16..1048575']}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'IPV4_EXPLICIT_NULL': {'value': 0}, 'ROUTER_ALERT': {'value': 1}, 'IPV6_EXPLICIT_NULL': {'value': 2}, 'IMPLICIT_NULL': {'value': 3}, 'ENTROPY_LABEL_INDICATOR': {'value': 7}, 'NO_LABEL': {}},),]), is_leaf=False, yang_name="popped-mpls-label-stack", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-mplst:mpls-label', is_config=False)""",
}
)
self.__popped_mpls_label_stack = t
if hasattr(self, "_set"):
self._set()
def _unset_popped_mpls_label_stack(self):
self.__popped_mpls_label_stack = YANGDynClass(
base=TypedListType(
allowed_type=[
RestrictedClassType(
base_type=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
restriction_dict={"range": ["16..1048575"]},
),
RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"IPV4_EXPLICIT_NULL": {"value": 0},
"ROUTER_ALERT": {"value": 1},
"IPV6_EXPLICIT_NULL": {"value": 2},
"IMPLICIT_NULL": {"value": 3},
"ENTROPY_LABEL_INDICATOR": {"value": 7},
"NO_LABEL": {},
},
),
]
),
is_leaf=False,
yang_name="popped-mpls-label-stack",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-mplst:mpls-label",
is_config=False,
)
def _get_pushed_mpls_label_stack(self):
"""
Getter method for pushed_mpls_label_stack, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/pushed_mpls_label_stack (oc-mplst:mpls-label)
YANG Description: The MPLS label stack imposed when forwarding packets to the
next-hop
- the stack is encoded as a leaf list whereby the order of the
entries is such that the first entry in the list is the
label at the bottom of the stack to be pushed.
To this end, a packet which is to forwarded to a device using
a service label of 42, and a transport label of 8072 will be
represented with a label stack list of [42, 8072].
The MPLS label stack list is ordered by the user, such that no
system re-ordering of leaves is permitted by the system.
A swap operation is reflected by entries in the
popped-mpls-label-stack and pushed-mpls-label-stack nodes.
"""
return self.__pushed_mpls_label_stack
def _set_pushed_mpls_label_stack(self, v, load=False):
"""
Setter method for pushed_mpls_label_stack, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/pushed_mpls_label_stack (oc-mplst:mpls-label)
If this variable is read-only (config: false) in the
source YANG file, then _set_pushed_mpls_label_stack is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_pushed_mpls_label_stack() directly.
YANG Description: The MPLS label stack imposed when forwarding packets to the
next-hop
- the stack is encoded as a leaf list whereby the order of the
entries is such that the first entry in the list is the
label at the bottom of the stack to be pushed.
To this end, a packet which is to forwarded to a device using
a service label of 42, and a transport label of 8072 will be
represented with a label stack list of [42, 8072].
The MPLS label stack list is ordered by the user, such that no
system re-ordering of leaves is permitted by the system.
A swap operation is reflected by entries in the
popped-mpls-label-stack and pushed-mpls-label-stack nodes.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(
v,
base=TypedListType(
allowed_type=[
RestrictedClassType(
base_type=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
restriction_dict={"range": ["16..1048575"]},
),
RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"IPV4_EXPLICIT_NULL": {"value": 0},
"ROUTER_ALERT": {"value": 1},
"IPV6_EXPLICIT_NULL": {"value": 2},
"IMPLICIT_NULL": {"value": 3},
"ENTROPY_LABEL_INDICATOR": {"value": 7},
"NO_LABEL": {},
},
),
]
),
is_leaf=False,
yang_name="pushed-mpls-label-stack",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-mplst:mpls-label",
is_config=False,
)
except (TypeError, ValueError):
raise ValueError(
{
"error-string": """pushed_mpls_label_stack must be of a type compatible with oc-mplst:mpls-label""",
"defined-type": "oc-mplst:mpls-label",
"generated-type": """YANGDynClass(base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['16..1048575']}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'IPV4_EXPLICIT_NULL': {'value': 0}, 'ROUTER_ALERT': {'value': 1}, 'IPV6_EXPLICIT_NULL': {'value': 2}, 'IMPLICIT_NULL': {'value': 3}, 'ENTROPY_LABEL_INDICATOR': {'value': 7}, 'NO_LABEL': {}},),]), is_leaf=False, yang_name="pushed-mpls-label-stack", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-mplst:mpls-label', is_config=False)""",
}
)
self.__pushed_mpls_label_stack = t
if hasattr(self, "_set"):
self._set()
    def _unset_pushed_mpls_label_stack(self):
        # Reset pushed-mpls-label-stack to its default: an empty typed list
        # whose members are either numeric MPLS labels (16..1048575) or one of
        # the reserved-label names. Mirrors the construction in __init__.
        self.__pushed_mpls_label_stack = YANGDynClass(
            base=TypedListType(
                allowed_type=[
                    RestrictedClassType(
                        base_type=RestrictedClassType(
                            base_type=long,
                            restriction_dict={"range": ["0..4294967295"]},
                            int_size=32,
                        ),
                        restriction_dict={"range": ["16..1048575"]},
                    ),
                    RestrictedClassType(
                        base_type=six.text_type,
                        restriction_type="dict_key",
                        restriction_arg={
                            "IPV4_EXPLICIT_NULL": {"value": 0},
                            "ROUTER_ALERT": {"value": 1},
                            "IPV6_EXPLICIT_NULL": {"value": 2},
                            "IMPLICIT_NULL": {"value": 3},
                            "ENTROPY_LABEL_INDICATOR": {"value": 7},
                            "NO_LABEL": {},
                        },
                    ),
                ]
            ),
            is_leaf=False,
            yang_name="pushed-mpls-label-stack",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-mplst:mpls-label",
            is_config=False,
        )
def _get_decapsulate_header(self):
"""
Getter method for decapsulate_header, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/decapsulate_header (oc-aftt:encapsulation-header-type)
YANG Description: When forwarding a packet to the specified next-hop, the local
system performs a decapsulation of the packet - removing the
specified header type. In the case that no next-hop is
specified, the packet header is removed, and a subsequent
forwarding lookup is performed on the packet encapsulated
within the header, matched within the relevant AFT within the
specified network-instance.
"""
return self.__decapsulate_header
    def _set_decapsulate_header(self, v, load=False):
        """
        Setter method for decapsulate_header, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/decapsulate_header (oc-aftt:encapsulation-header-type)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_decapsulate_header is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_decapsulate_header() directly.

        YANG Description: When forwarding a packet to the specified next-hop, the local
        system performs a decapsulation of the packet - removing the
        specified header type. In the case that no next-hop is
        specified, the packet header is removed, and a subsequent
        forwarding lookup is performed on the packet encapsulated
        within the header, matched within the relevant AFT within the
        specified network-instance.
        """
        # Normalise union-typed wrapper values to their native type first.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Validate against the enum of permitted header types; YANGDynClass
        # raises TypeError/ValueError for anything outside GRE/IPV4/IPV6/MPLS.
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={"GRE": {}, "IPV4": {}, "IPV6": {}, "MPLS": {}},
                ),
                is_leaf=True,
                yang_name="decapsulate-header",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="oc-aftt:encapsulation-header-type",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """decapsulate_header must be of a type compatible with oc-aftt:encapsulation-header-type""",
                    "defined-type": "oc-aftt:encapsulation-header-type",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'GRE': {}, 'IPV4': {}, 'IPV6': {}, 'MPLS': {}},), is_leaf=True, yang_name="decapsulate-header", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-aftt:encapsulation-header-type', is_config=False)""",
                }
            )
        # Store the validated value and notify the parent container, if any.
        self.__decapsulate_header = t
        if hasattr(self, "_set"):
            self._set()
    def _unset_decapsulate_header(self):
        # Reset decapsulate-header to its default (unset) YANGDynClass value.
        self.__decapsulate_header = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={"GRE": {}, "IPV4": {}, "IPV6": {}, "MPLS": {}},
            ),
            is_leaf=True,
            yang_name="decapsulate-header",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-aftt:encapsulation-header-type",
            is_config=False,
        )
def _get_encapsulate_header(self):
"""
Getter method for encapsulate_header, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/encapsulate_header (oc-aftt:encapsulation-header-type)
YANG Description: When forwarding a packet to the specified next-hop the local
system performs an encapsulation of the packet - adding the
specified header type.
"""
return self.__encapsulate_header
    def _set_encapsulate_header(self, v, load=False):
        """
        Setter method for encapsulate_header, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/encapsulate_header (oc-aftt:encapsulation-header-type)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_encapsulate_header is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_encapsulate_header() directly.

        YANG Description: When forwarding a packet to the specified next-hop the local
        system performs an encapsulation of the packet - adding the
        specified header type.
        """
        # Normalise union-typed wrapper values to their native type first.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Validate against the enum of permitted header types.
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={"GRE": {}, "IPV4": {}, "IPV6": {}, "MPLS": {}},
                ),
                is_leaf=True,
                yang_name="encapsulate-header",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="oc-aftt:encapsulation-header-type",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """encapsulate_header must be of a type compatible with oc-aftt:encapsulation-header-type""",
                    "defined-type": "oc-aftt:encapsulation-header-type",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'GRE': {}, 'IPV4': {}, 'IPV6': {}, 'MPLS': {}},), is_leaf=True, yang_name="encapsulate-header", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-aftt:encapsulation-header-type', is_config=False)""",
                }
            )
        # Store the validated value and notify the parent container, if any.
        self.__encapsulate_header = t
        if hasattr(self, "_set"):
            self._set()
    def _unset_encapsulate_header(self):
        # Reset encapsulate-header to its default (unset) YANGDynClass value.
        self.__encapsulate_header = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={"GRE": {}, "IPV4": {}, "IPV6": {}, "MPLS": {}},
            ),
            is_leaf=True,
            yang_name="encapsulate-header",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-aftt:encapsulation-header-type",
            is_config=False,
        )
def _get_origin_protocol(self):
"""
Getter method for origin_protocol, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/origin_protocol (identityref)
YANG Description: The protocol from which the AFT entry was learned.
"""
return self.__origin_protocol
    def _set_origin_protocol(self, v, load=False):
        """
        Setter method for origin_protocol, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/origin_protocol (identityref)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_origin_protocol is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_origin_protocol() directly.

        YANG Description: The protocol from which the AFT entry was learned.
        """
        # Normalise union-typed wrapper values to their native type first.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Validate against the identityref map; each identity is accepted in
        # its bare form and with either of its module prefixes
        # (oc-pol-types: / oc-pt:), as generated from openconfig-policy-types.
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={
                        "BGP": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pol-types:BGP": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pt:BGP": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "ISIS": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pol-types:ISIS": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pt:ISIS": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "OSPF": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pol-types:OSPF": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pt:OSPF": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "OSPF3": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pol-types:OSPF3": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pt:OSPF3": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "STATIC": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pol-types:STATIC": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pt:STATIC": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "DIRECTLY_CONNECTED": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pol-types:DIRECTLY_CONNECTED": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pt:DIRECTLY_CONNECTED": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "LOCAL_AGGREGATE": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pol-types:LOCAL_AGGREGATE": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pt:LOCAL_AGGREGATE": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                    },
                ),
                is_leaf=True,
                yang_name="origin-protocol",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="identityref",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """origin_protocol must be of a type compatible with identityref""",
                    "defined-type": "openconfig-network-instance:identityref",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'BGP': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:BGP': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:BGP': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'ISIS': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:ISIS': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:ISIS': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'OSPF': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:OSPF': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:OSPF': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'OSPF3': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:OSPF3': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:OSPF3': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'STATIC': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:STATIC': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:STATIC': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'DIRECTLY_CONNECTED': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:DIRECTLY_CONNECTED': {'@module': 'openconfig-policy-types', 
'@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:DIRECTLY_CONNECTED': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'LOCAL_AGGREGATE': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:LOCAL_AGGREGATE': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:LOCAL_AGGREGATE': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}},), is_leaf=True, yang_name="origin-protocol", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)""",
                }
            )
        # Store the validated value and notify the parent container, if any.
        self.__origin_protocol = t
        if hasattr(self, "_set"):
            self._set()
    def _unset_origin_protocol(self):
        # Reset origin-protocol to its default (unset) identityref value; the
        # restriction map matches the one used in _set_origin_protocol.
        self.__origin_protocol = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={
                    "BGP": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pol-types:BGP": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pt:BGP": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "ISIS": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pol-types:ISIS": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pt:ISIS": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "OSPF": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pol-types:OSPF": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pt:OSPF": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "OSPF3": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pol-types:OSPF3": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pt:OSPF3": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "STATIC": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pol-types:STATIC": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pt:STATIC": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "DIRECTLY_CONNECTED": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pol-types:DIRECTLY_CONNECTED": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pt:DIRECTLY_CONNECTED": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "LOCAL_AGGREGATE": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pol-types:LOCAL_AGGREGATE": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pt:LOCAL_AGGREGATE": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                },
            ),
            is_leaf=True,
            yang_name="origin-protocol",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="identityref",
            is_config=False,
        )
index = __builtin__.property(_get_index)
weight = __builtin__.property(_get_weight)
ip_address = __builtin__.property(_get_ip_address)
mac_address = __builtin__.property(_get_mac_address)
popped_mpls_label_stack = __builtin__.property(_get_popped_mpls_label_stack)
pushed_mpls_label_stack = __builtin__.property(_get_pushed_mpls_label_stack)
decapsulate_header = __builtin__.property(_get_decapsulate_header)
encapsulate_header = __builtin__.property(_get_encapsulate_header)
origin_protocol = __builtin__.property(_get_origin_protocol)
_pyangbind_elements = OrderedDict(
[
("index", index),
("weight", weight),
("ip_address", ip_address),
("mac_address", mac_address),
("popped_mpls_label_stack", popped_mpls_label_stack),
("pushed_mpls_label_stack", pushed_mpls_label_stack),
("decapsulate_header", decapsulate_header),
("encapsulate_header", encapsulate_header),
("origin_protocol", origin_protocol),
]
)
class state(PybindBase):
    """
    This class was auto-generated by the PythonClass plugin for PYANG
    from YANG module openconfig-network-instance-l2 - based on the path /network-instances/network-instance/afts/aft/entries/entry/next-hops/next-hop/state. Each member element of
    the container is represented as a class variable - with a specific
    YANG type.

    YANG Description: Operational state parameters relating to the AFT
    next-hop entry
    """

    # __slots__ avoids a per-instance __dict__ (devices may hold very many
    # AFT entries); the double-underscore slot names are mangled to
    # _state__<leaf> on instances.
    __slots__ = (
        "_path_helper",
        "_extmethods",
        "__index",
        "__weight",
        "__ip_address",
        "__mac_address",
        "__popped_mpls_label_stack",
        "__pushed_mpls_label_stack",
        "__decapsulate_header",
        "__encapsulate_header",
        "__origin_protocol",
    )
    _yang_name = "state"
    _pybind_generated_by = "container"
    def __init__(self, *args, **kwargs):
        """
        Initialise every leaf of the state container to its default
        YANGDynClass value, then optionally copy-construct from a single
        positional argument that exposes the same attributes
        (``load=`` keyword controls how values are applied).
        """
        self._path_helper = False
        self._extmethods = False
        # Default values for each leaf, in YANG model order.
        self.__index = YANGDynClass(
            base=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..18446744073709551615"]},
                int_size=64,
            ),
            is_leaf=True,
            yang_name="index",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint64",
            is_config=False,
        )
        self.__weight = YANGDynClass(
            base=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..4294967295"]},
                int_size=32,
            ),
            is_leaf=True,
            yang_name="weight",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint32",
            is_config=False,
        )
        # ip-address is a union: an IPv4 or IPv6 textual address (no zone).
        self.__ip_address = YANGDynClass(
            base=[
                RestrictedClassType(
                    base_type=RestrictedClassType(
                        base_type=six.text_type,
                        restriction_dict={
                            "pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?"
                        },
                    ),
                    restriction_dict={"pattern": "[0-9\\.]*"},
                ),
                RestrictedClassType(
                    base_type=RestrictedClassType(
                        base_type=six.text_type,
                        restriction_dict={
                            "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?"
                        },
                    ),
                    restriction_dict={"pattern": "[0-9a-fA-F:\\.]*"},
                ),
            ],
            is_leaf=True,
            yang_name="ip-address",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="inet:ip-address-no-zone",
            is_config=False,
        )
        self.__mac_address = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_dict={"pattern": "[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}"},
            ),
            is_leaf=True,
            yang_name="mac-address",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="yang:mac-address",
            is_config=False,
        )
        # MPLS label stacks: typed lists whose members are numeric labels
        # (16..1048575) or reserved-label names.
        self.__popped_mpls_label_stack = YANGDynClass(
            base=TypedListType(
                allowed_type=[
                    RestrictedClassType(
                        base_type=RestrictedClassType(
                            base_type=long,
                            restriction_dict={"range": ["0..4294967295"]},
                            int_size=32,
                        ),
                        restriction_dict={"range": ["16..1048575"]},
                    ),
                    RestrictedClassType(
                        base_type=six.text_type,
                        restriction_type="dict_key",
                        restriction_arg={
                            "IPV4_EXPLICIT_NULL": {"value": 0},
                            "ROUTER_ALERT": {"value": 1},
                            "IPV6_EXPLICIT_NULL": {"value": 2},
                            "IMPLICIT_NULL": {"value": 3},
                            "ENTROPY_LABEL_INDICATOR": {"value": 7},
                            "NO_LABEL": {},
                        },
                    ),
                ]
            ),
            is_leaf=False,
            yang_name="popped-mpls-label-stack",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-mplst:mpls-label",
            is_config=False,
        )
        self.__pushed_mpls_label_stack = YANGDynClass(
            base=TypedListType(
                allowed_type=[
                    RestrictedClassType(
                        base_type=RestrictedClassType(
                            base_type=long,
                            restriction_dict={"range": ["0..4294967295"]},
                            int_size=32,
                        ),
                        restriction_dict={"range": ["16..1048575"]},
                    ),
                    RestrictedClassType(
                        base_type=six.text_type,
                        restriction_type="dict_key",
                        restriction_arg={
                            "IPV4_EXPLICIT_NULL": {"value": 0},
                            "ROUTER_ALERT": {"value": 1},
                            "IPV6_EXPLICIT_NULL": {"value": 2},
                            "IMPLICIT_NULL": {"value": 3},
                            "ENTROPY_LABEL_INDICATOR": {"value": 7},
                            "NO_LABEL": {},
                        },
                    ),
                ]
            ),
            is_leaf=False,
            yang_name="pushed-mpls-label-stack",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-mplst:mpls-label",
            is_config=False,
        )
        self.__decapsulate_header = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={"GRE": {}, "IPV4": {}, "IPV6": {}, "MPLS": {}},
            ),
            is_leaf=True,
            yang_name="decapsulate-header",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-aftt:encapsulation-header-type",
            is_config=False,
        )
        self.__encapsulate_header = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={"GRE": {}, "IPV4": {}, "IPV6": {}, "MPLS": {}},
            ),
            is_leaf=True,
            yang_name="encapsulate-header",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="oc-aftt:encapsulation-header-type",
            is_config=False,
        )
        # origin-protocol identityref: identities accepted bare and with
        # either module prefix (oc-pol-types: / oc-pt:).
        self.__origin_protocol = YANGDynClass(
            base=RestrictedClassType(
                base_type=six.text_type,
                restriction_type="dict_key",
                restriction_arg={
                    "BGP": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pol-types:BGP": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pt:BGP": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "ISIS": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pol-types:ISIS": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pt:ISIS": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "OSPF": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pol-types:OSPF": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pt:OSPF": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "OSPF3": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pol-types:OSPF3": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pt:OSPF3": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "STATIC": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pol-types:STATIC": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pt:STATIC": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "DIRECTLY_CONNECTED": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pol-types:DIRECTLY_CONNECTED": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pt:DIRECTLY_CONNECTED": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "LOCAL_AGGREGATE": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pol-types:LOCAL_AGGREGATE": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                    "oc-pt:LOCAL_AGGREGATE": {
                        "@module": "openconfig-policy-types",
                        "@namespace": "http://openconfig.net/yang/policy-types",
                    },
                },
            ),
            is_leaf=True,
            yang_name="origin-protocol",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="identityref",
            is_config=False,
        )
        # Optional copy-construction: a single positional argument must expose
        # every element in _pyangbind_elements; only changed values are copied.
        load = kwargs.pop("load", None)
        if args:
            if len(args) > 1:
                raise TypeError("cannot create a YANG container with >1 argument")
            all_attr = True
            for e in self._pyangbind_elements:
                if not hasattr(args[0], e):
                    all_attr = False
                    break
            if not all_attr:
                raise ValueError("Supplied object did not have the correct attributes")
            for e in self._pyangbind_elements:
                nobj = getattr(args[0], e)
                if nobj._changed() is False:
                    continue
                setmethod = getattr(self, "_set_%s" % e)
                if load is None:
                    setmethod(getattr(args[0], e))
                else:
                    setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path() + [self._yang_name]
else:
return [
"network-instances",
"network-instance",
"afts",
"aft",
"entries",
"entry",
"next-hops",
"next-hop",
"state",
]
def _get_index(self):
"""
Getter method for index, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/index (uint64)
YANG Description: A unique entry for the next-hop
"""
return self.__index
    def _set_index(self, v, load=False):
        """
        Setter method for index, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/index (uint64)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_index is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_index() directly.

        YANG Description: A unique entry for the next-hop
        """
        # Normalise union-typed wrapper values to their native type first.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Validate against the uint64 range restriction.
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=long,
                    restriction_dict={"range": ["0..18446744073709551615"]},
                    int_size=64,
                ),
                is_leaf=True,
                yang_name="index",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="uint64",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """index must be of a type compatible with uint64""",
                    "defined-type": "uint64",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..18446744073709551615']}, int_size=64), is_leaf=True, yang_name="index", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint64', is_config=False)""",
                }
            )
        # Store the validated value and notify the parent container, if any.
        self.__index = t
        if hasattr(self, "_set"):
            self._set()
    def _unset_index(self):
        # Reset index to its default (unset) uint64 YANGDynClass value.
        self.__index = YANGDynClass(
            base=RestrictedClassType(
                base_type=long,
                restriction_dict={"range": ["0..18446744073709551615"]},
                int_size=64,
            ),
            is_leaf=True,
            yang_name="index",
            parent=self,
            path_helper=self._path_helper,
            extmethods=self._extmethods,
            register_paths=True,
            namespace="http://openconfig.net/yang/network-instance",
            defining_module="openconfig-network-instance",
            yang_type="uint64",
            is_config=False,
        )
def _get_weight(self):
"""
Getter method for weight, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/weight (uint32)
YANG Description: The weight of the next-hop. Traffic is balanced according to
the ratio described by the relative weights of the next hops
that exist for the AFT entry. Note that all next-hops that are
specified are assumed to be active next-hops and therefore
eligible (and selected) to be installed in the FIB, and hence
used for packet forwarding.
"""
return self.__weight
    def _set_weight(self, v, load=False):
        """
        Setter method for weight, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/weight (uint32)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_weight is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_weight() directly.

        YANG Description: The weight of the next-hop. Traffic is balanced according to
        the ratio described by the relative weights of the next hops
        that exist for the AFT entry. Note that all next-hops that are
        specified are assumed to be active next-hops and therefore
        eligible (and selected) to be installed in the FIB, and hence
        used for packet forwarding.
        """
        # Normalise union-typed wrapper values to their native type first.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        # Validate against the uint32 range restriction.
        try:
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=long,
                    restriction_dict={"range": ["0..4294967295"]},
                    int_size=32,
                ),
                is_leaf=True,
                yang_name="weight",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="uint32",
                is_config=False,
            )
        except (TypeError, ValueError):
            raise ValueError(
                {
                    "error-string": """weight must be of a type compatible with uint32""",
                    "defined-type": "uint32",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), is_leaf=True, yang_name="weight", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='uint32', is_config=False)""",
                }
            )
        # Store the validated value and notify the parent container, if any.
        self.__weight = t
        if hasattr(self, "_set"):
            self._set()
def _unset_weight(self):
self.__weight = YANGDynClass(
base=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
is_leaf=True,
yang_name="weight",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="uint32",
is_config=False,
)
def _get_ip_address(self):
"""
Getter method for ip_address, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/ip_address (inet:ip-address-no-zone)
YANG Description: The IP address of the next-hop system.
"""
return self.__ip_address
    def _set_ip_address(self, v, load=False):
        """
        Setter method for ip_address, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/ip_address (inet:ip-address-no-zone)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_ip_address is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_ip_address() directly.
        YANG Description: The IP address of the next-hop system.
        """
        # Unwrap pyangbind wrapper values to their underlying native type
        # before attempting coercion below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # The base is a union of two restricted string types: an IPv4
            # dotted-quad pattern and an IPv6 pattern (no zone index).
            t = YANGDynClass(
                v,
                base=[
                    RestrictedClassType(
                        base_type=RestrictedClassType(
                            base_type=six.text_type,
                            restriction_dict={
                                "pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?"
                            },
                        ),
                        restriction_dict={"pattern": "[0-9\\.]*"},
                    ),
                    RestrictedClassType(
                        base_type=RestrictedClassType(
                            base_type=six.text_type,
                            restriction_dict={
                                "pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?"
                            },
                        ),
                        restriction_dict={"pattern": "[0-9a-fA-F:\\.]*"},
                    ),
                ],
                is_leaf=True,
                yang_name="ip-address",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="inet:ip-address-no-zone",
                is_config=False,
            )
        except (TypeError, ValueError):
            # Re-raise as a structured ValueError describing the expected
            # YANG type, as pyangbind callers expect.
            raise ValueError(
                {
                    "error-string": """ip_address must be of a type compatible with inet:ip-address-no-zone""",
                    "defined-type": "inet:ip-address-no-zone",
                    "generated-type": """YANGDynClass(base=[RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?'}), restriction_dict={'pattern': '[0-9\\.]*'}),RestrictedClassType(base_type=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?'}), restriction_dict={'pattern': '[0-9a-fA-F:\\.]*'}),], is_leaf=True, yang_name="ip-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='inet:ip-address-no-zone', is_config=False)""",
                }
            )
        self.__ip_address = t
        # Invoke the parent's change-notification hook when one is provided.
        if hasattr(self, "_set"):
            self._set()
def _unset_ip_address(self):
self.__ip_address = YANGDynClass(
base=[
RestrictedClassType(
base_type=RestrictedClassType(
base_type=six.text_type,
restriction_dict={
"pattern": "(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){3}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])(%[\\p{N}\\p{L}]+)?"
},
),
restriction_dict={"pattern": "[0-9\\.]*"},
),
RestrictedClassType(
base_type=RestrictedClassType(
base_type=six.text_type,
restriction_dict={
"pattern": "((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(%[\\p{N}\\p{L}]+)?"
},
),
restriction_dict={"pattern": "[0-9a-fA-F:\\.]*"},
),
],
is_leaf=True,
yang_name="ip-address",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="inet:ip-address-no-zone",
is_config=False,
)
def _get_mac_address(self):
"""
Getter method for mac_address, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/mac_address (yang:mac-address)
YANG Description: The MAC address of the next-hop if resolved by the local
network instance.
"""
return self.__mac_address
    def _set_mac_address(self, v, load=False):
        """
        Setter method for mac_address, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/mac_address (yang:mac-address)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_mac_address is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_mac_address() directly.
        YANG Description: The MAC address of the next-hop if resolved by the local
        network instance.
        """
        # Unwrap pyangbind wrapper values to their underlying native type
        # before attempting coercion below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Coerce v into a colon-separated MAC string (aa:bb:cc:dd:ee:ff).
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_dict={"pattern": "[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}"},
                ),
                is_leaf=True,
                yang_name="mac-address",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="yang:mac-address",
                is_config=False,
            )
        except (TypeError, ValueError):
            # Re-raise as a structured ValueError describing the expected
            # YANG type, as pyangbind callers expect.
            raise ValueError(
                {
                    "error-string": """mac_address must be of a type compatible with yang:mac-address""",
                    "defined-type": "yang:mac-address",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_dict={'pattern': '[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}'}), is_leaf=True, yang_name="mac-address", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='yang:mac-address', is_config=False)""",
                }
            )
        self.__mac_address = t
        # Invoke the parent's change-notification hook when one is provided.
        if hasattr(self, "_set"):
            self._set()
def _unset_mac_address(self):
self.__mac_address = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_dict={"pattern": "[0-9a-fA-F]{2}(:[0-9a-fA-F]{2}){5}"},
),
is_leaf=True,
yang_name="mac-address",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="yang:mac-address",
is_config=False,
)
def _get_popped_mpls_label_stack(self):
"""
Getter method for popped_mpls_label_stack, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/popped_mpls_label_stack (oc-mplst:mpls-label)
YANG Description: The MPLS label stack to be popped from the packet when
switched by the system. The stack is encoding as a leaf-list
whereby the other of the entries is such that the first entry
is the label lowest down the label stack to be popped.
If the local system pops the outer-most label 400, then the
value of this list is [400,]. If the local system removes two
labels, the outer-most being 500, and the second of which is
500, then the value of the list is [500, 400].
A swap operation is reflected by entries in the
popped-mpls-label-stack and pushed-mpls-label-stack nodes.
"""
return self.__popped_mpls_label_stack
    def _set_popped_mpls_label_stack(self, v, load=False):
        """
        Setter method for popped_mpls_label_stack, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/popped_mpls_label_stack (oc-mplst:mpls-label)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_popped_mpls_label_stack is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_popped_mpls_label_stack() directly.
        YANG Description: The MPLS label stack to be popped from the packet when
        switched by the system. The stack is encoding as a leaf-list
        whereby the other of the entries is such that the first entry
        is the label lowest down the label stack to be popped.
        If the local system pops the outer-most label 400, then the
        value of this list is [400,]. If the local system removes two
        labels, the outer-most being 500, and the second of which is
        500, then the value of the list is [500, 400].
        A swap operation is reflected by entries in the
        popped-mpls-label-stack and pushed-mpls-label-stack nodes.
        """
        # Unwrap pyangbind wrapper values to their underlying native type
        # before attempting coercion below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Each list entry is either a numeric label in 16..1048575 or one
            # of the IETF reserved-label enumeration names.
            t = YANGDynClass(
                v,
                base=TypedListType(
                    allowed_type=[
                        RestrictedClassType(
                            base_type=RestrictedClassType(
                                base_type=long,
                                restriction_dict={"range": ["0..4294967295"]},
                                int_size=32,
                            ),
                            restriction_dict={"range": ["16..1048575"]},
                        ),
                        RestrictedClassType(
                            base_type=six.text_type,
                            restriction_type="dict_key",
                            restriction_arg={
                                "IPV4_EXPLICIT_NULL": {"value": 0},
                                "ROUTER_ALERT": {"value": 1},
                                "IPV6_EXPLICIT_NULL": {"value": 2},
                                "IMPLICIT_NULL": {"value": 3},
                                "ENTROPY_LABEL_INDICATOR": {"value": 7},
                                "NO_LABEL": {},
                            },
                        ),
                    ]
                ),
                is_leaf=False,
                yang_name="popped-mpls-label-stack",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="oc-mplst:mpls-label",
                is_config=False,
            )
        except (TypeError, ValueError):
            # Re-raise as a structured ValueError describing the expected
            # YANG type, as pyangbind callers expect.
            raise ValueError(
                {
                    "error-string": """popped_mpls_label_stack must be of a type compatible with oc-mplst:mpls-label""",
                    "defined-type": "oc-mplst:mpls-label",
                    "generated-type": """YANGDynClass(base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['16..1048575']}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'IPV4_EXPLICIT_NULL': {'value': 0}, 'ROUTER_ALERT': {'value': 1}, 'IPV6_EXPLICIT_NULL': {'value': 2}, 'IMPLICIT_NULL': {'value': 3}, 'ENTROPY_LABEL_INDICATOR': {'value': 7}, 'NO_LABEL': {}},),]), is_leaf=False, yang_name="popped-mpls-label-stack", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-mplst:mpls-label', is_config=False)""",
                }
            )
        self.__popped_mpls_label_stack = t
        # Invoke the parent's change-notification hook when one is provided.
        if hasattr(self, "_set"):
            self._set()
def _unset_popped_mpls_label_stack(self):
self.__popped_mpls_label_stack = YANGDynClass(
base=TypedListType(
allowed_type=[
RestrictedClassType(
base_type=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
restriction_dict={"range": ["16..1048575"]},
),
RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"IPV4_EXPLICIT_NULL": {"value": 0},
"ROUTER_ALERT": {"value": 1},
"IPV6_EXPLICIT_NULL": {"value": 2},
"IMPLICIT_NULL": {"value": 3},
"ENTROPY_LABEL_INDICATOR": {"value": 7},
"NO_LABEL": {},
},
),
]
),
is_leaf=False,
yang_name="popped-mpls-label-stack",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-mplst:mpls-label",
is_config=False,
)
def _get_pushed_mpls_label_stack(self):
"""
Getter method for pushed_mpls_label_stack, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/pushed_mpls_label_stack (oc-mplst:mpls-label)
YANG Description: The MPLS label stack imposed when forwarding packets to the
next-hop
- the stack is encoded as a leaf list whereby the order of the
entries is such that the first entry in the list is the
label at the bottom of the stack to be pushed.
To this end, a packet which is to forwarded to a device using
a service label of 42, and a transport label of 8072 will be
represented with a label stack list of [42, 8072].
The MPLS label stack list is ordered by the user, such that no
system re-ordering of leaves is permitted by the system.
A swap operation is reflected by entries in the
popped-mpls-label-stack and pushed-mpls-label-stack nodes.
"""
return self.__pushed_mpls_label_stack
    def _set_pushed_mpls_label_stack(self, v, load=False):
        """
        Setter method for pushed_mpls_label_stack, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/pushed_mpls_label_stack (oc-mplst:mpls-label)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_pushed_mpls_label_stack is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_pushed_mpls_label_stack() directly.
        YANG Description: The MPLS label stack imposed when forwarding packets to the
        next-hop
        - the stack is encoded as a leaf list whereby the order of the
        entries is such that the first entry in the list is the
        label at the bottom of the stack to be pushed.
        To this end, a packet which is to forwarded to a device using
        a service label of 42, and a transport label of 8072 will be
        represented with a label stack list of [42, 8072].
        The MPLS label stack list is ordered by the user, such that no
        system re-ordering of leaves is permitted by the system.
        A swap operation is reflected by entries in the
        popped-mpls-label-stack and pushed-mpls-label-stack nodes.
        """
        # Unwrap pyangbind wrapper values to their underlying native type
        # before attempting coercion below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Each list entry is either a numeric label in 16..1048575 or one
            # of the IETF reserved-label enumeration names.
            t = YANGDynClass(
                v,
                base=TypedListType(
                    allowed_type=[
                        RestrictedClassType(
                            base_type=RestrictedClassType(
                                base_type=long,
                                restriction_dict={"range": ["0..4294967295"]},
                                int_size=32,
                            ),
                            restriction_dict={"range": ["16..1048575"]},
                        ),
                        RestrictedClassType(
                            base_type=six.text_type,
                            restriction_type="dict_key",
                            restriction_arg={
                                "IPV4_EXPLICIT_NULL": {"value": 0},
                                "ROUTER_ALERT": {"value": 1},
                                "IPV6_EXPLICIT_NULL": {"value": 2},
                                "IMPLICIT_NULL": {"value": 3},
                                "ENTROPY_LABEL_INDICATOR": {"value": 7},
                                "NO_LABEL": {},
                            },
                        ),
                    ]
                ),
                is_leaf=False,
                yang_name="pushed-mpls-label-stack",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="oc-mplst:mpls-label",
                is_config=False,
            )
        except (TypeError, ValueError):
            # Re-raise as a structured ValueError describing the expected
            # YANG type, as pyangbind callers expect.
            raise ValueError(
                {
                    "error-string": """pushed_mpls_label_stack must be of a type compatible with oc-mplst:mpls-label""",
                    "defined-type": "oc-mplst:mpls-label",
                    "generated-type": """YANGDynClass(base=TypedListType(allowed_type=[RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': ['16..1048575']}),RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'IPV4_EXPLICIT_NULL': {'value': 0}, 'ROUTER_ALERT': {'value': 1}, 'IPV6_EXPLICIT_NULL': {'value': 2}, 'IMPLICIT_NULL': {'value': 3}, 'ENTROPY_LABEL_INDICATOR': {'value': 7}, 'NO_LABEL': {}},),]), is_leaf=False, yang_name="pushed-mpls-label-stack", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-mplst:mpls-label', is_config=False)""",
                }
            )
        self.__pushed_mpls_label_stack = t
        # Invoke the parent's change-notification hook when one is provided.
        if hasattr(self, "_set"):
            self._set()
def _unset_pushed_mpls_label_stack(self):
self.__pushed_mpls_label_stack = YANGDynClass(
base=TypedListType(
allowed_type=[
RestrictedClassType(
base_type=RestrictedClassType(
base_type=long,
restriction_dict={"range": ["0..4294967295"]},
int_size=32,
),
restriction_dict={"range": ["16..1048575"]},
),
RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"IPV4_EXPLICIT_NULL": {"value": 0},
"ROUTER_ALERT": {"value": 1},
"IPV6_EXPLICIT_NULL": {"value": 2},
"IMPLICIT_NULL": {"value": 3},
"ENTROPY_LABEL_INDICATOR": {"value": 7},
"NO_LABEL": {},
},
),
]
),
is_leaf=False,
yang_name="pushed-mpls-label-stack",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-mplst:mpls-label",
is_config=False,
)
def _get_decapsulate_header(self):
"""
Getter method for decapsulate_header, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/decapsulate_header (oc-aftt:encapsulation-header-type)
YANG Description: When forwarding a packet to the specified next-hop, the local
system performs a decapsulation of the packet - removing the
specified header type. In the case that no next-hop is
specified, the packet header is removed, and a subsequent
forwarding lookup is performed on the packet encapsulated
within the header, matched within the relevant AFT within the
specified network-instance.
"""
return self.__decapsulate_header
    def _set_decapsulate_header(self, v, load=False):
        """
        Setter method for decapsulate_header, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/decapsulate_header (oc-aftt:encapsulation-header-type)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_decapsulate_header is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_decapsulate_header() directly.
        YANG Description: When forwarding a packet to the specified next-hop, the local
        system performs a decapsulation of the packet - removing the
        specified header type. In the case that no next-hop is
        specified, the packet header is removed, and a subsequent
        forwarding lookup is performed on the packet encapsulated
        within the header, matched within the relevant AFT within the
        specified network-instance.
        """
        # Unwrap pyangbind wrapper values to their underlying native type
        # before attempting coercion below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Valid values are the encapsulation-header-type identities.
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={"GRE": {}, "IPV4": {}, "IPV6": {}, "MPLS": {}},
                ),
                is_leaf=True,
                yang_name="decapsulate-header",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="oc-aftt:encapsulation-header-type",
                is_config=False,
            )
        except (TypeError, ValueError):
            # Re-raise as a structured ValueError describing the expected
            # YANG type, as pyangbind callers expect.
            raise ValueError(
                {
                    "error-string": """decapsulate_header must be of a type compatible with oc-aftt:encapsulation-header-type""",
                    "defined-type": "oc-aftt:encapsulation-header-type",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'GRE': {}, 'IPV4': {}, 'IPV6': {}, 'MPLS': {}},), is_leaf=True, yang_name="decapsulate-header", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-aftt:encapsulation-header-type', is_config=False)""",
                }
            )
        self.__decapsulate_header = t
        # Invoke the parent's change-notification hook when one is provided.
        if hasattr(self, "_set"):
            self._set()
def _unset_decapsulate_header(self):
self.__decapsulate_header = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"GRE": {}, "IPV4": {}, "IPV6": {}, "MPLS": {}},
),
is_leaf=True,
yang_name="decapsulate-header",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-aftt:encapsulation-header-type",
is_config=False,
)
def _get_encapsulate_header(self):
"""
Getter method for encapsulate_header, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/encapsulate_header (oc-aftt:encapsulation-header-type)
YANG Description: When forwarding a packet to the specified next-hop the local
system performs an encapsulation of the packet - adding the
specified header type.
"""
return self.__encapsulate_header
    def _set_encapsulate_header(self, v, load=False):
        """
        Setter method for encapsulate_header, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/encapsulate_header (oc-aftt:encapsulation-header-type)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_encapsulate_header is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_encapsulate_header() directly.
        YANG Description: When forwarding a packet to the specified next-hop the local
        system performs an encapsulation of the packet - adding the
        specified header type.
        """
        # Unwrap pyangbind wrapper values to their underlying native type
        # before attempting coercion below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Valid values are the encapsulation-header-type identities.
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={"GRE": {}, "IPV4": {}, "IPV6": {}, "MPLS": {}},
                ),
                is_leaf=True,
                yang_name="encapsulate-header",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="oc-aftt:encapsulation-header-type",
                is_config=False,
            )
        except (TypeError, ValueError):
            # Re-raise as a structured ValueError describing the expected
            # YANG type, as pyangbind callers expect.
            raise ValueError(
                {
                    "error-string": """encapsulate_header must be of a type compatible with oc-aftt:encapsulation-header-type""",
                    "defined-type": "oc-aftt:encapsulation-header-type",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'GRE': {}, 'IPV4': {}, 'IPV6': {}, 'MPLS': {}},), is_leaf=True, yang_name="encapsulate-header", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='oc-aftt:encapsulation-header-type', is_config=False)""",
                }
            )
        self.__encapsulate_header = t
        # Invoke the parent's change-notification hook when one is provided.
        if hasattr(self, "_set"):
            self._set()
def _unset_encapsulate_header(self):
self.__encapsulate_header = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={"GRE": {}, "IPV4": {}, "IPV6": {}, "MPLS": {}},
),
is_leaf=True,
yang_name="encapsulate-header",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="oc-aftt:encapsulation-header-type",
is_config=False,
)
def _get_origin_protocol(self):
"""
Getter method for origin_protocol, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/origin_protocol (identityref)
YANG Description: The protocol from which the AFT entry was learned.
"""
return self.__origin_protocol
    def _set_origin_protocol(self, v, load=False):
        """
        Setter method for origin_protocol, mapped from YANG variable /network_instances/network_instance/afts/aft/entries/entry/next_hops/next_hop/state/origin_protocol (identityref)
        If this variable is read-only (config: false) in the
        source YANG file, then _set_origin_protocol is considered as a private
        method. Backends looking to populate this variable should
        do so via calling thisObj._set_origin_protocol() directly.
        YANG Description: The protocol from which the AFT entry was learned.
        """
        # Unwrap pyangbind wrapper values to their underlying native type
        # before attempting coercion below.
        if hasattr(v, "_utype"):
            v = v._utype(v)
        try:
            # Valid values are the openconfig-policy-types INSTALL_PROTOCOL
            # identities; each identity is accepted in unprefixed form and
            # with either module-prefix spelling (oc-pol-types:/oc-pt:).
            t = YANGDynClass(
                v,
                base=RestrictedClassType(
                    base_type=six.text_type,
                    restriction_type="dict_key",
                    restriction_arg={
                        "BGP": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pol-types:BGP": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pt:BGP": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "ISIS": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pol-types:ISIS": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pt:ISIS": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "OSPF": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pol-types:OSPF": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pt:OSPF": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "OSPF3": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pol-types:OSPF3": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pt:OSPF3": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "STATIC": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pol-types:STATIC": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pt:STATIC": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "DIRECTLY_CONNECTED": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pol-types:DIRECTLY_CONNECTED": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pt:DIRECTLY_CONNECTED": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "LOCAL_AGGREGATE": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pol-types:LOCAL_AGGREGATE": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                        "oc-pt:LOCAL_AGGREGATE": {
                            "@module": "openconfig-policy-types",
                            "@namespace": "http://openconfig.net/yang/policy-types",
                        },
                    },
                ),
                is_leaf=True,
                yang_name="origin-protocol",
                parent=self,
                path_helper=self._path_helper,
                extmethods=self._extmethods,
                register_paths=True,
                namespace="http://openconfig.net/yang/network-instance",
                defining_module="openconfig-network-instance",
                yang_type="identityref",
                is_config=False,
            )
        except (TypeError, ValueError):
            # Re-raise as a structured ValueError describing the expected
            # YANG type, as pyangbind callers expect.
            raise ValueError(
                {
                    "error-string": """origin_protocol must be of a type compatible with identityref""",
                    "defined-type": "openconfig-network-instance:identityref",
                    "generated-type": """YANGDynClass(base=RestrictedClassType(base_type=six.text_type, restriction_type="dict_key", restriction_arg={'BGP': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:BGP': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:BGP': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'ISIS': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:ISIS': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:ISIS': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'OSPF': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:OSPF': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:OSPF': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'OSPF3': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:OSPF3': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:OSPF3': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'STATIC': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:STATIC': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:STATIC': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'DIRECTLY_CONNECTED': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:DIRECTLY_CONNECTED': {'@module': 'openconfig-policy-types', 
'@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:DIRECTLY_CONNECTED': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'LOCAL_AGGREGATE': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pol-types:LOCAL_AGGREGATE': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}, 'oc-pt:LOCAL_AGGREGATE': {'@module': 'openconfig-policy-types', '@namespace': 'http://openconfig.net/yang/policy-types'}},), is_leaf=True, yang_name="origin-protocol", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='http://openconfig.net/yang/network-instance', defining_module='openconfig-network-instance', yang_type='identityref', is_config=False)""",
                }
            )
        self.__origin_protocol = t
        # Invoke the parent's change-notification hook when one is provided.
        if hasattr(self, "_set"):
            self._set()
def _unset_origin_protocol(self):
self.__origin_protocol = YANGDynClass(
base=RestrictedClassType(
base_type=six.text_type,
restriction_type="dict_key",
restriction_arg={
"BGP": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:BGP": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:BGP": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"ISIS": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:ISIS": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:ISIS": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"OSPF": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:OSPF": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:OSPF": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"OSPF3": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:OSPF3": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:OSPF3": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"STATIC": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:STATIC": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:STATIC": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"DIRECTLY_CONNECTED": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:DIRECTLY_CONNECTED": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:DIRECTLY_CONNECTED": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"LOCAL_AGGREGATE": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pol-types:LOCAL_AGGREGATE": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
"oc-pt:LOCAL_AGGREGATE": {
"@module": "openconfig-policy-types",
"@namespace": "http://openconfig.net/yang/policy-types",
},
},
),
is_leaf=True,
yang_name="origin-protocol",
parent=self,
path_helper=self._path_helper,
extmethods=self._extmethods,
register_paths=True,
namespace="http://openconfig.net/yang/network-instance",
defining_module="openconfig-network-instance",
yang_type="identityref",
is_config=False,
)
index = __builtin__.property(_get_index)
weight = __builtin__.property(_get_weight)
ip_address = __builtin__.property(_get_ip_address)
mac_address = __builtin__.property(_get_mac_address)
popped_mpls_label_stack = __builtin__.property(_get_popped_mpls_label_stack)
pushed_mpls_label_stack = __builtin__.property(_get_pushed_mpls_label_stack)
decapsulate_header = __builtin__.property(_get_decapsulate_header)
encapsulate_header = __builtin__.property(_get_encapsulate_header)
origin_protocol = __builtin__.property(_get_origin_protocol)
_pyangbind_elements = OrderedDict(
[
("index", index),
("weight", weight),
("ip_address", ip_address),
("mac_address", mac_address),
("popped_mpls_label_stack", popped_mpls_label_stack),
("pushed_mpls_label_stack", pushed_mpls_label_stack),
("decapsulate_header", decapsulate_header),
("encapsulate_header", encapsulate_header),
("origin_protocol", origin_protocol),
]
)
| [
"[email protected]"
] | |
929c4b554d91766794b550e36a6c1d59d80404f6 | 2cfc228988a51857269edf2fe7b85c7f9a03e94b | /prysm/otf.py | f6583f6e4089a0dfd734bba53b56f1734431fde1 | [
"MIT"
] | permissive | fakahil/prysm | 93dd2523e6416afa2774435a6df796df8b7b6f37 | c6235043fae90540c392291051d454e8813d3884 | refs/heads/master | 2021-05-18T21:20:46.745298 | 2020-02-18T00:21:27 | 2020-02-18T00:21:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,626 | py | """A base optical transfer function interface."""
import warnings
from .conf import config
from .mathops import engine as e
from ._richdata import RichData
from .psf import PSF
from .fttools import forward_ft_unit
def transform_psf(psf, sample_spacing):
    """Fourier transform a PSF, returning (x, y, data) for an OTF.

    ifftshift re-centers the PSF before the FFT and fftshift moves DC to the
    array center afterwards, so the complex result carries a usable phase.
    sample_spacing is in microns; frequency axes come out in cy/mm.
    """
    centered = e.fft.ifftshift(psf.data)
    spectrum = e.fft.fftshift(e.fft.fft2(centered))
    # 1e3: convert the micron sample spacing to mm for cy/mm frequency axes
    y, x = (forward_ft_unit(sample_spacing / 1e3, s) for s in psf.shape)
    return x, y, spectrum
class OTF:
    """Optical Transfer Function: container pairing an MTF with a PTF."""
    def __init__(self, mtf, ptf):
        """Create a new OTF Instance.
        Will have .mtf and .ptf attributes holding the MTF and PTF.
        Parameters
        ----------
        mtf : `MTF`
            modulation transfer function instance
        ptf : `PTF`
            phase transfer function instance
        """
        self.mtf = mtf
        self.ptf = ptf
    @staticmethod
    def from_psf(psf, unwrap=True):
        """Create an OTF instance from a PSF.
        Parameters
        ----------
        psf : `PSF`
            Point Spread Function
        unwrap : `bool`, optional
            if True, unwrap phase
        Returns
        -------
        `OTF`
            new OTF instance with .mtf and .ptf attributes holding MTF and PTF instances
        """
        # a single FT of the PSF feeds both the magnitude (MTF) and phase (PTF)
        x, y, ft = transform_psf(psf, psf.sample_spacing)
        mtf = MTF.from_ftdata(ft=ft, x=x, y=y)
        ptf = PTF.from_ftdata(ft=ft, x=x, y=y, unwrap=unwrap)
        return OTF(mtf=mtf, ptf=ptf)
    @staticmethod
    def from_pupil(pupil, efl, Q=config.Q, unwrap=True):
        """Create an OTF instance by propagating a pupil to a PSF, then transforming.
        Parameters
        ----------
        pupil : `Pupil`
            pupil to propagate
        efl : `float`
            effective focal length or propagation distance
        Q : `float`, optional
            ratio of pupil sample count to PSF sample count
        unwrap : `bool`, optional
            if True, unwrap phase
        Returns
        -------
        `OTF`
            new OTF instance
        """
        psf = PSF.from_pupil(pupil, efl=efl, Q=Q)
        return OTF.from_psf(psf, unwrap=unwrap)
class MTF(RichData):
    """Modulation Transfer Function."""
    # RichData configuration: payload attribute name, plot kind, and whether
    # slices default to two-sided frequency axes.
    _data_attr = 'data'
    _data_type = 'image'
    _default_twosided = False
    def __init__(self, data, x, y, xy_unit=None, z_unit=None, labels=None):
        """Create a new `MTF` instance.
        Parameters
        ----------
        data : `numpy.ndarray`
            2D array of MTF data
        x : `numpy.ndarray`
            1D array of x spatial frequencies
        y : `numpy.ndarray`
            1D array of y spatial frequencies
        xy_unit : optional
            unit for the x/y axes; falls back to config.mtf_xy_unit
        z_unit : optional
            unit for the z axis; falls back to config.mtf_z_unit
        labels : `Labels`, optional
            labels instance, can be shared; falls back to config.mtf_labels
        """
        super().__init__(x=x, y=y, data=data,
                         xy_unit=xy_unit or config.mtf_xy_unit,
                         z_unit=z_unit or config.mtf_z_unit,
                         labels=labels or config.mtf_labels)
    @staticmethod
    def from_psf(psf):
        """Generate an MTF from a PSF.
        Parameters
        ----------
        psf : `PSF`
            PSF to compute an MTF from
        Returns
        -------
        `MTF`
            A new MTF instance
        """
        # some code duplication here:
        # MTF is a hot code path, and the drop of a shift operation
        # improves performance in exchange for sharing some code with
        # the OTF class definition
        dat = e.fft.fftshift(e.fft.fft2(psf.data)) # no need to ifftshift first - phase is unimportant
        x = forward_ft_unit(psf.sample_spacing / 1e3, psf.samples_x) # 1e3 for microns => mm
        y = forward_ft_unit(psf.sample_spacing / 1e3, psf.samples_y)
        return MTF.from_ftdata(ft=dat, x=x, y=y)
    @staticmethod
    def from_pupil(pupil, efl, Q=2):
        """Generate an MTF from a pupil, given a focal length (propagation distance).
        Parameters
        ----------
        pupil : `Pupil`
            A pupil to propagate to a PSF, and convert to an MTF
        efl : `float`
            Effective focal length or propagation distance of the wavefunction
        Q : `float`
            ratio of pupil sample count to PSF sample count. Q > 2 satisfies nyquist
        Returns
        -------
        `MTF`
            A new MTF instance
        """
        psf = PSF.from_pupil(pupil, efl=efl, Q=Q)
        return MTF.from_psf(psf)
    @staticmethod
    def from_ftdata(ft, x, y):
        """Generate an MTF from the Fourier transform of a PSF.
        Parameters
        ----------
        ft : `numpy.ndarray`
            2D ndarray of Fourier transform data
        x : `numpy.ndarray`
            1D ndarray of x (axis 1) coordinates
        y : `numpy.ndarray`
            1D ndarray of y (axis 0) coordinates
        Returns
        -------
        `MTF`
            a new MTF instance
        """
        # normalize by the DC bin (array center after fftshift) so MTF(0) == 1
        cy, cx = (int(e.ceil(s / 2)) for s in ft.shape)
        dat = abs(ft)
        dat /= dat[cy, cx]
        return MTF(data=dat, x=x, y=y)
    @property
    def tan(self):
        """Deprecated tangential slice; use .slices().x instead."""
        warnings.warn('.tan is deprecated and will be removed in v0.18, please use .slices().x')
        return self.slices().x
    @property
    def sag(self):
        """Deprecated sagittal slice; use .slices().y instead."""
        warnings.warn('.sag is deprecated and will be removed in v0.18, please use .slices().y')
        return self.slices().y
    def exact_tan(self, freq):
        """Deprecated exact tangential lookup; use .exact_x instead."""
        warnings.warn('.exact_tan is deprecated and will be removed in v0.18, please use .exact_x')
        return self.exact_x(freq)
    def exact_sag(self, freq):
        """Deprecated exact sagittal lookup; use .exact_y instead."""
        warnings.warn('.exact_sag is deprecated and will be removed in v0.18, please use .exact_y')
        return self.exact_y(freq)
class PTF(RichData):
    """Phase Transfer Function"""
    def __init__(self, data, x, y, xy_unit=None, z_unit=None, labels=None):
        """Create a new `PTF` instance.
        Parameters
        ----------
        data : `numpy.ndarray`
            2D array of PTF data
        x : `numpy.ndarray`
            1D array of x spatial frequencies
        y : `numpy.ndarray`
            1D array of y spatial frequencies
        xy_unit : optional
            unit for the x/y axes; falls back to config.ptf_xy_unit
        z_unit : optional
            unit for the z axis; falls back to config.ptf_z_unit
        labels : `Labels`, optional
            labels instance, can be shared
        """
        # NOTE(review): labels falls back to config.mtf_labels rather than a
        # ptf-specific default -- confirm this is intentional.
        super().__init__(x=x, y=y, data=data,
                         xy_unit=xy_unit or config.ptf_xy_unit,
                         z_unit=z_unit or config.ptf_z_unit,
                         labels=labels or config.mtf_labels)
    @staticmethod
    def from_psf(psf, unwrap=True):
        """Generate a PTF from a PSF.
        Parameters
        ----------
        psf : `PSF`
            PSF to compute a PTF from
        unwrap : `bool,` optional
            whether to unwrap the phase
        Returns
        -------
        `PTF`
            A new PTF instance
        """
        # some code duplication here:
        # MTF is a hot code path, and the drop of a shift operation
        # improves performance in exchange for sharing some code with
        # the OTF class definition
        # repeat this duplication in PTF for symmetry more than performance
        dat = e.fft.fftshift(e.fft.fft2(e.fft.ifftshift(psf.data)))
        x = forward_ft_unit(psf.sample_spacing / 1e3, psf.samples_x) # 1e3 for microns => mm
        y = forward_ft_unit(psf.sample_spacing / 1e3, psf.samples_y)
        return PTF.from_ftdata(ft=dat, x=x, y=y)
    @staticmethod
    def from_pupil(pupil, efl, Q=2, unwrap=True):
        """Generate a PTF from a pupil, given a focal length (propagation distance).
        Parameters
        ----------
        pupil : `Pupil`
            A pupil to propagate to a PSF, and convert to a PTF
        efl : `float`
            Effective focal length or propagation distance of the wavefunction
        Q : `float`, optional
            ratio of pupil sample count to PSF sample count. Q > 2 satisfies nyquist
        unwrap : `bool,` optional
            whether to unwrap the phase
        Returns
        -------
        `PTF`
            A new PTF instance
        """
        psf = PSF.from_pupil(pupil, efl=efl, Q=Q)
        return PTF.from_psf(psf, unwrap=unwrap)
    @staticmethod
    def from_ftdata(ft, x, y, unwrap=True):
        """Generate a PTF from the Fourier transform of a PSF.
        Parameters
        ----------
        ft : `numpy.ndarray`
            2D ndarray of Fourier transform data
        x : `numpy.ndarray`
            1D ndarray of x (axis 1) coordinates
        y : `numpy.ndarray`
            1D ndarray of y (axis 0) coordinates
        unwrap : `bool`, optional
            if True, unwrap phase
        Returns
        -------
        `PTF`
            a new PTF instance
        """
        ft = e.angle(ft)
        cy, cx = (int(e.ceil(s / 2)) for s in ft.shape)
        offset = ft[cy, cx]
        # NOTE(review): the central (DC) phase is divided out, not subtracted;
        # confirm division is the intended piston normalization here.
        if offset != 0:
            ft /= offset
        if unwrap:
            # deferred import: skimage is only required when unwrapping
            from skimage import restoration
            ft = restoration.unwrap_phase(ft)
        return PTF(ft, x, y)
def diffraction_limited_mtf(fno, wavelength, frequencies=None, samples=128):
    """Give the diffraction limited MTF for a circular pupil and the given parameters.
    Parameters
    ----------
    fno : `float`
        f/# of the lens.
    wavelength : `float`
        wavelength of light, in microns.
    frequencies : `numpy.ndarray`
        spatial frequencies of interest, in cy/mm if frequencies are given, samples is ignored.
    samples : `int`
        number of points in the output array, if frequencies not given.
    Returns
    -------
    if frequencies not given:
        frequencies : `numpy.ndarray`
            array of ordinate data
        mtf : `numpy.ndarray`
            array of coordinate data
    else:
        mtf : `numpy.ndarray`
            array of MTF data
    Notes
    -----
    If frequencies are given, just returns the MTF. If frequencies are not
    given, returns both the frequencies and the MTF.
    """
    # cutoff ("extinction") frequency in cy/mm; wavelength microns -> mm
    extinction = 1 / (wavelength / 1000 * fno)
    if frequencies is None:
        normalized_frequency = e.linspace(0, 1, samples)
    else:
        normalized_frequency = e.asarray(frequencies) / extinction
        # clamp to the [0, 1] domain of the analytic formula; the except arm
        # handles a scalar input, where boolean-mask assignment raises
        try:
            normalized_frequency[normalized_frequency > 1] = 1 # clamp values
        except TypeError: # single freq
            if normalized_frequency > 1:
                normalized_frequency = 1
    mtf = _difflim_mtf_core(normalized_frequency)
    if frequencies is None:
        return normalized_frequency * extinction, mtf
    else:
        return mtf
def _difflim_mtf_core(normalized_frequency):
    """Evaluate the diffraction-limited MTF at a normalized spatial frequency.

    Parameters
    ----------
    normalized_frequency : `numpy.ndarray`
        frequency as a fraction of the cutoff; defined over [0, 1]

    Returns
    -------
    `numpy.ndarray`
        diffraction-limited MTF values
    """
    nu = normalized_frequency
    # analytic autocorrelation of a circular pupil
    chord_term = e.arccos(nu) - nu * e.sqrt(1 - nu ** 2)
    return (2 / e.pi) * chord_term
def longexposure_otf(nu, Cn, z, f, lambdabar, h_z_by_r=2.91):
    """Compute the long-exposure OTF through atmospheric turbulence.

    Parameters
    ----------
    nu : `numpy.ndarray`
        spatial frequencies, cy/mm
    Cn : `float`
        atmospheric structure constant of refractive index, ranges ~ 10^-13 - 10^-17
    z : `float`
        propagation distance through atmosphere, m
    f : `float`
        effective focal length of the optical system, mm
    lambdabar : `float`
        mean wavelength, microns
    h_z_by_r : `float`, optional
        constant for h[z/r] -- see Eq. 8.5-37 & 8.5-38 in Statistical Optics, J. Goodman, 2nd ed.

    Returns
    -------
    `numpy.ndarray`
        the OTF
    """
    # homogenize units: frequencies cy/mm -> cy/m, focal length mm -> m,
    # wavelength microns -> m
    freq = nu / 1e3
    efl = f / 1e3
    wvl = lambdabar / 1e6
    exponent = 5 / 3
    # exponential coefficient, grouped as in Goodman Eq. 8.5-37/38
    scale = (- e.pi ** 2 * 2 * h_z_by_r * Cn ** 2) * (z * efl ** exponent / (wvl ** 3))
    return e.exp(scale * freq ** exponent)
def komogorov(r, r0):
    """Phase structure function D_phi in the Kolmogorov approximation.

    (Function name retains the original spelling for API compatibility.)

    Parameters
    ----------
    r : `numpy.ndarray`
        radial frequency parameter (object space)
    r0 : `float`
        Fried parameter

    Returns
    -------
    `numpy.ndarray`
        structure function values, 6.88 * (r / r0) ** (5/3)
    """
    ratio = r / r0
    return 6.88 * ratio ** (5 / 3)
def estimate_Cn(P=1013, T=273.15, Ct=1e-4):
    """Estimate Cn from meteorological data (Weng et al).

    Parameters
    ----------
    P : `float`
        atmospheric pressure in hPa
    T : `float`
        temperature in Kelvin
    Ct : `float`
        atmospheric structure constant of temperature, typically 10^-5 - 10^-2 near the surface

    Returns
    -------
    `float`
        Cn
    """
    pressure_term = 79 * P / (T ** 2)
    # 1e-12 scale factor per the empirical relation
    return pressure_term * Ct ** 2 * 1e-12
| [
"[email protected]"
] | |
461aedd8d00d14d677bdaaa5d221d39e7bd1f887 | d7016f69993570a1c55974582cda899ff70907ec | /sdk/storage/azure-mgmt-storage/azure/mgmt/storage/v2021_04_01/aio/operations/_table_services_operations.py | a773278f633a42e0385983cffb93ca8fbcf3224b | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | kurtzeborn/azure-sdk-for-python | 51ca636ad26ca51bc0c9e6865332781787e6f882 | b23e71b289c71f179b9cf9b8c75b1922833a542a | refs/heads/main | 2023-03-21T14:19:50.299852 | 2023-02-15T13:30:47 | 2023-02-15T13:30:47 | 157,927,277 | 0 | 0 | MIT | 2022-07-19T08:05:23 | 2018-11-16T22:15:30 | Python | UTF-8 | Python | false | false | 17,622 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
import sys
from typing import Any, Callable, Dict, IO, Optional, TypeVar, Union, overload
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._table_services_operations import (
build_get_service_properties_request,
build_list_request,
build_set_service_properties_request,
)
if sys.version_info >= (3, 8):
from typing import Literal # pylint: disable=no-name-in-module, ungrouped-imports
else:
from typing_extensions import Literal # type: ignore # pylint: disable=ungrouped-imports
# Generic return type for the optional ``cls`` response-transformer callback.
T = TypeVar("T")
# Signature of the ``cls`` callback accepted by every operation: receives the
# pipeline response, the deserialized body, and the response headers dict.
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class TableServicesOperations:
    """
    .. warning::
        **DO NOT** instantiate this class directly.

        Instead, you should access the following operations through
        :class:`~azure.mgmt.storage.v2021_04_01.aio.StorageManagementClient`'s
        :attr:`table_services` attribute.
    """
    models = _models
    def __init__(self, *args, **kwargs) -> None:
        # The autorest client plumbing passes (client, config, serializer,
        # deserializer) positionally; keyword args are the fallback.
        input_args = list(args)
        self._client = input_args.pop(0) if input_args else kwargs.pop("client")
        self._config = input_args.pop(0) if input_args else kwargs.pop("config")
        self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
        self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    @distributed_trace_async
    async def list(self, resource_group_name: str, account_name: str, **kwargs: Any) -> _models.ListTableServices:
        """List all table services for the storage account.
        :param resource_group_name: The name of the resource group within the user's subscription. The
        name is case insensitive. Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
        Storage account names must be between 3 and 24 characters in length and use numbers and
        lower-case letters only. Required.
        :type account_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: ListTableServices or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2021_04_01.models.ListTableServices
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # Map HTTP status codes to the exceptions raised by map_error below.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))  # type: Literal["2021-04-01"]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.ListTableServices]
        request = build_list_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.list.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore
        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize("ListTableServices", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices"}  # type: ignore
    @overload
    async def set_service_properties(
        self,
        resource_group_name: str,
        account_name: str,
        table_service_name: Union[str, _models.Enum35],
        parameters: _models.TableServiceProperties,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.TableServiceProperties:
        """Sets the properties of a storage account’s Table service, including properties for Storage
        Analytics and CORS (Cross-Origin Resource Sharing) rules.
        :param resource_group_name: The name of the resource group within the user's subscription. The
        name is case insensitive. Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
        Storage account names must be between 3 and 24 characters in length and use numbers and
        lower-case letters only. Required.
        :type account_name: str
        :param table_service_name: The name of the Table Service within the specified storage account.
        Table Service Name must be 'default'. "default" Required.
        :type table_service_name: str or ~azure.mgmt.storage.v2021_04_01.models.Enum35
        :param parameters: The properties of a storage account’s Table service, only properties for
        Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required.
        :type parameters: ~azure.mgmt.storage.v2021_04_01.models.TableServiceProperties
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
        Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TableServiceProperties or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2021_04_01.models.TableServiceProperties
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @overload
    async def set_service_properties(
        self,
        resource_group_name: str,
        account_name: str,
        table_service_name: Union[str, _models.Enum35],
        parameters: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.TableServiceProperties:
        """Sets the properties of a storage account’s Table service, including properties for Storage
        Analytics and CORS (Cross-Origin Resource Sharing) rules.
        :param resource_group_name: The name of the resource group within the user's subscription. The
        name is case insensitive. Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
        Storage account names must be between 3 and 24 characters in length and use numbers and
        lower-case letters only. Required.
        :type account_name: str
        :param table_service_name: The name of the Table Service within the specified storage account.
        Table Service Name must be 'default'. "default" Required.
        :type table_service_name: str or ~azure.mgmt.storage.v2021_04_01.models.Enum35
        :param parameters: The properties of a storage account’s Table service, only properties for
        Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Required.
        :type parameters: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
        Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TableServiceProperties or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2021_04_01.models.TableServiceProperties
        :raises ~azure.core.exceptions.HttpResponseError:
        """
    @distributed_trace_async
    async def set_service_properties(
        self,
        resource_group_name: str,
        account_name: str,
        table_service_name: Union[str, _models.Enum35],
        parameters: Union[_models.TableServiceProperties, IO],
        **kwargs: Any
    ) -> _models.TableServiceProperties:
        """Sets the properties of a storage account’s Table service, including properties for Storage
        Analytics and CORS (Cross-Origin Resource Sharing) rules.
        :param resource_group_name: The name of the resource group within the user's subscription. The
        name is case insensitive. Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
        Storage account names must be between 3 and 24 characters in length and use numbers and
        lower-case letters only. Required.
        :type account_name: str
        :param table_service_name: The name of the Table Service within the specified storage account.
        Table Service Name must be 'default'. "default" Required.
        :type table_service_name: str or ~azure.mgmt.storage.v2021_04_01.models.Enum35
        :param parameters: The properties of a storage account’s Table service, only properties for
        Storage Analytics and CORS (Cross-Origin Resource Sharing) rules can be specified. Is either a
        model type or a IO type. Required.
        :type parameters: ~azure.mgmt.storage.v2021_04_01.models.TableServiceProperties or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
        Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TableServiceProperties or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2021_04_01.models.TableServiceProperties
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))  # type: Literal["2021-04-01"]
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.TableServiceProperties]
        content_type = content_type or "application/json"
        _json = None
        _content = None
        # Raw IO/bytes bodies are sent as-is; model bodies are serialized to JSON.
        if isinstance(parameters, (IO, bytes)):
            _content = parameters
        else:
            _json = self._serialize.body(parameters, "TableServiceProperties")
        request = build_set_service_properties_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            table_service_name=table_service_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self.set_service_properties.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore
        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize("TableServiceProperties", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    set_service_properties.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/{tableServiceName}"}  # type: ignore
    @distributed_trace_async
    async def get_service_properties(
        self, resource_group_name: str, account_name: str, table_service_name: Union[str, _models.Enum35], **kwargs: Any
    ) -> _models.TableServiceProperties:
        """Gets the properties of a storage account’s Table service, including properties for Storage
        Analytics and CORS (Cross-Origin Resource Sharing) rules.
        :param resource_group_name: The name of the resource group within the user's subscription. The
        name is case insensitive. Required.
        :type resource_group_name: str
        :param account_name: The name of the storage account within the specified resource group.
        Storage account names must be between 3 and 24 characters in length and use numbers and
        lower-case letters only. Required.
        :type account_name: str
        :param table_service_name: The name of the Table Service within the specified storage account.
        Table Service Name must be 'default'. "default" Required.
        :type table_service_name: str or ~azure.mgmt.storage.v2021_04_01.models.Enum35
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TableServiceProperties or the result of cls(response)
        :rtype: ~azure.mgmt.storage.v2021_04_01.models.TableServiceProperties
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})
        api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-04-01"))  # type: Literal["2021-04-01"]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.TableServiceProperties]
        request = build_get_service_properties_request(
            resource_group_name=resource_group_name,
            account_name=account_name,
            table_service_name=table_service_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.get_service_properties.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore
        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )
        response = pipeline_response.http_response
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            error = self._deserialize.failsafe_deserialize(_models.CloudErrorAutoGenerated, pipeline_response)
            raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
        deserialized = self._deserialize("TableServiceProperties", pipeline_response)
        if cls:
            return cls(pipeline_response, deserialized, {})
        return deserialized
    get_service_properties.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Storage/storageAccounts/{accountName}/tableServices/{tableServiceName}"}  # type: ignore
| [
"[email protected]"
] | |
6c3f3138f5a174c373d308b7a48067eb2479d0e0 | c208954de92470c0144fad2e07a92ed1822edd59 | /selenia/out_dis.py | 183b10d388d465444ad227f3c3a25273230c321e | [
"MIT"
] | permissive | rendy026/reverse-enginnering | 4217f3b723569fb792bac0f22a56a305199db1dc | f04cec0bf518a2617fc4fd7155f755fafc2af799 | refs/heads/master | 2023-01-07T15:49:15.791052 | 2020-10-13T09:22:02 | 2020-10-13T09:22:02 | 303,575,571 | 0 | 0 | MIT | 2020-10-13T09:41:59 | 2020-10-13T03:17:42 | Python | UTF-8 | Python | false | false | 67,675 | py | # FileNames : <EzzKun>
# Python Bytecode : 3.8.5
# Disassembly of <module> starting at line 1 of out.pyc
# Timestamp In Code : (2020-09-18 18:10:25)
# Method Name: <module>
# Filename: <EzzKun>
# Argument count: 0
# Kw-only arguments: 0
# Number of locals: 0
# Stack size: 10
# Flags: 0x00000040 (NOFREE)
# First Line: 1
# Constants:
# 0: 0
# 1: None
# 2: ('datetime',)
# 3: ('*',)
# 4: 'Mozilla/5.0 (Linux; Android 7.0; Redmi Note 4 Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/79.0.3945.93 Mobile Safari/537.36'
# 5: 'application/x-www-form-urlencoded'
# 6: '/'
# 7: 'com.reland.relandicebot'
# 8: 'cross-site'
# 9: 'cors'
# 10: 'gzip, deflate'
# 11: 'id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7'
# 12: 'lang=id'
# 13: ('user-agent', 'content-type', 'accept', 'x-requested-with', 'sec-fetch-site', 'sec-fetch-mode', 'accept-encoding', 'accept-language', 'cookie')
# 14: 'https://www.999doge.com/api/web.aspx'
# 15: '\x1b[0;34m'
# 16: '\x1b[0m'
# 17: '\x1b[1;33m'
# 18: '\x1b[1;32m'
# 19: '\x1b[1;31m'
# 20: '\x1b[0;36m'
# 21: '\x1b[1;35m'
# 22: '\x1b[5;37;42m'
# 23: '\x1b[5;37;41m'
# 24: '{:.8f}'
# 25: '{:0>1.0f}'
# 26: 'Username'
# 27: 'Password'
# 28: 'BaseTrade'
# 29: 'C1'
# 30: 'C2'
# 31: 'TradeCount_1'
# 32: 'TradeCount_2'
# 33: 200
# 34: 'Number Of Trade Out of Limit'
# 35: 'MultiplyOnWin'
# 36: '0'
# 37: 1
# 38: 'MultiplyOnLose'
# 39: 'MaxBaseTrade'
# 40: 'Toogle'
# 41: 'ON'
# 42: 'Max'
# 43: 100000000
# 44: 'ResetOnLoseMaxTrade'
# 45: 'StopOnLoseMaxTrade'
# 46: 'OFF'
# 47: 'ForceTC1AfterLose'
# 48: 'ChangeTCAfterLose'
# 49: 'TargetProfit'
# 50: 'ClientSeed'
# 51: 'RecoveryMultiplier'
# 52: 'RecoveryIncrease'
# 53: 'AddDelayTrade'
# 54: 'AddDelayTradeWin'
# 55: 'AddDelayTradeLose'
# 56: 'StopLoseBalance'
# 57: 'ContinueLastBase'
# 58: 'SmartRecovery'
# 59: -999999999999999999999999999
# 60: -100000000
# 61: 'doge'
# 62: 'Offline'
# 63: <code object withdraw at 0xa8bab020, file "<EzzKun>", line 86>
# 64: 'withdraw'
# 65: <code object harga_license at 0xa8a88f28, file "<EzzKun>", line 107>
# 66: 'harga_license'
# 67: <code object post at 0xa8a03020, file "<EzzKun>", line 121>
# 68: 'post'
# 69: <code object login at 0xa89fff90, file "<EzzKun>", line 125>
# 70: 'login'
# 71: <code object autobet at 0xa89fff28, file "<EzzKun>", line 171>
# 72: 'autobet'
# 73: <code object ainfo at 0xa89ffec0, file "<EzzKun>", line 347>
# 74: 'ainfo'
# 75: <code object verify at 0xa89ffe58, file "<EzzKun>", line 373>
# 76: 'verify'
# 77: <code object register at 0xa89ffc50, file "<EzzKun>", line 382>
# 78: 'register'
# 79: <code object check_license at 0xa89ff9e0, file "<EzzKun>", line 434>
# 80: 'check_license'
# 81: <code object check_license_platinum at 0xa89ff8a8, file "<EzzKun>", line 482>
# 82: 'check_license_platinum'
# 83: <code object gblnc at 0xa89ff840, file "<EzzKun>", line 522>
# 84: 'gblnc'
# 85: 'clear'
# 86: 'https://layscape.xyz/selenia/info.php'
# 87: 'Server Status Code ['
# 88: ']'
# 89: 'Alive'
# 90: 'versi'
# 91: 'Server Down Try Again or Check Latest Version Script'
# 92: 'ERROR CONNECTION TRY AGAIN'
# 93: 'a=GetBalance&s='
# 94: '&Currency=doge'
# 95: 'Balance'
# 96: '\x1b[1;31m====================================================\x1b[0m'
# 97: '\x1b[1;32m[+]\x1b[0m \x1b[0;36mDO WITH YOUR OWN RISK \x1b[0m \x1b[1;32m[+]\x1b[0m'
# 98: '\x1b[1;32m[+]\x1b[0m \x1b[1;33mCreator : Layscape\x1b[0m \x1b[1;32m[+]\x1b[0m'
# 99: '\x1b[1;32m[+]\x1b[0m \x1b[1;33mVersi Script V3.0\x1b[0m \x1b[1;32m[+]\x1b[0m'
# 100: '\x1b[1;32m[+]\x1b[0m \x1b[1;33mJoin Group Whatsapp For News and Update\x1b[0m \x1b[1;32m[+]\x1b[0m'
# 101: "Disclaimer : \nScript Not Working Don't Blame Creator :). \nRead/Watch How to Use As Well"
# 102: 'Info :'
# 103: 'notice5'
# 104: 'Information Script :'
# 105: 'Versi :'
# 106: 'Creator :'
# 107: 'created'
# 108: 'Youtube :'
# 109: 'youtube'
# 110: 'Script :'
# 111: 'script'
# 112: '3.0'
# 113: 'New Version'
# 114: 'New Version '
# 115: ' Release'
# 116: 'Please Update'
# 117: 'Type This Command:\n- git stash\n- git pull'
# 118: 'Notice :\n'
# 119: 'notice1'
# 120: 'notice2'
# 121: 'notice3'
# 122: 'notice4'
# 123: '- Attention to Your Connection'
# 124: 'Buy License Here : \nhttps://layscape.xyz/selenia/license'
# 125: ''
# 126: 'Online'
# 127: 'Re-Login for Refresh'
# 128: 'License Out of Date'
# 129: 'Buy New One'
# 130: 'Informasi Status Login :'
# 131: 'Account ID :'
# 132: 'Username :'
# 133: 'Doge Balance :'
# 134: 'Doge Deposit Wallet :'
# 135: 'License Type : '
# 136: 'Free License'
# 137: 'Expired Date : None'
# 138: 'SG Server Status :'
# 139: 'Max Balance : 150 DOGE'
# 140: 'Premium License'
# 141: 'Platinum License'
# 142: 'Date :'
# 143: '%Y-%m-%d'
# 144: 'Expired Date :'
# 145: 'Expired In :'
# 146: 'Days'
# 147: 'Max Balance : Unlimited'
# 148: 'Currency Available : DOGE'
# 149: 'Information Status Login :'
# 150: '\nPilih Menu :'
# 151: '1. Login Premium License'
# 152: '2. Login For Free'
# 153: '3. Login Platinum License'
# 154: '4. Register Account SELENIA'
# 155: '5. Price List License'
# 156: '0. Keluar'
# 157: '6. Start Trade'
# 158: '7. Withdraw'
# 159: '8. Account Information'
# 160: '==>'
# 161: '1'
# 162: '2'
# 163: '3'
# 164: '4'
# 165: '6'
# 166: '5'
# 167: '7'
# 168: '8'
# 169: 'NO MENU SELECTED'
# 170: (0,)
# Names:
# 0: cloudscraper
# 1: sys
# 2: os
# 3: time
# 4: random
# 5: requests
# 6: datetime
# 7: config
# 8: headers
# 9: create_scraper
# 10: scr
# 11: url
# 12: birutua
# 13: putih
# 14: kuning
# 15: hijau
# 16: merah
# 17: biru
# 18: ungu
# 19: bghijau_white
# 20: bgmerah_black
# 21: format
# 22: num_format
# 23: num_PayIn
# 24: account
# 25: Username
# 26: Password
# 27: float
# 28: tradeset
# 29: BaseTrade
# 30: C1
# 31: C2
# 32: int
# 33: TC1
# 34: TC2
# 35: print
# 36: exit
# 37: str
# 38: IncreaseOnWinPercent
# 39: ResetOnWin
# 40: IncreaseOnLosePercent
# 41: ResetOnLose
# 42: MaxBase
# 43: MaxBaseTrade
# 44: ResetOnLoseMaxTrade
# 45: StopOnLoseMaxTrade
# 46: tools
# 47: ForceTC1AfterLose
# 48: ChangeTCAfterLose
# 49: TargetProfit
# 50: ClientSeed
# 51: RecoveryMultiplier
# 52: RecoveryIncrease
# 53: AddDelayTrade
# 54: AddDelayTradeWin
# 55: AddDelayTradeLose
# 56: StopLoseBalance
# 57: ContinueLastBase
# 58: SmartRecovery
# 59: Currency
# 60: statslogin
# 61: limit
# 62: withdraw
# 63: harga_license
# 64: post
# 65: login
# 66: autobet
# 67: ainfo
# 68: verify
# 69: register
# 70: check_license
# 71: check_license_platinum
# 72: gblnc
# 73: system
# 74: get
# 75: srv
# 76: status_code
# 77: status
# 78: json
# 79: info
# 80: version
# 81: Exception
# 82: e
# 83: ses
# 84: getbalance
# 85: req
# 86: dogebalance
# 87: sleep
# 88: Expired
# 89: accid
# 90: dogewallet
# 91: logintype
# 92: statssrv
# 93: now
# 94: mydatetime
# 95: strftime
# 96: userdate
# 97: input
# 98: smenu
1 0 LOAD_CONST 0 (0)
2 LOAD_CONST 1 (None)
4 IMPORT_NAME 0 (cloudscraper)
6 STORE_NAME 0 (cloudscraper)
8 LOAD_CONST 0 (0)
10 LOAD_CONST 1 (None)
12 IMPORT_NAME 1 (sys)
14 STORE_NAME 1 (sys)
16 LOAD_CONST 0 (0)
18 LOAD_CONST 1 (None)
20 IMPORT_NAME 2 (os)
22 STORE_NAME 2 (os)
24 LOAD_CONST 0 (0)
26 LOAD_CONST 1 (None)
28 IMPORT_NAME 3 (time)
30 STORE_NAME 3 (time)
32 LOAD_CONST 0 (0)
34 LOAD_CONST 1 (None)
36 IMPORT_NAME 4 (random)
38 STORE_NAME 4 (random)
40 LOAD_CONST 0 (0)
42 LOAD_CONST 1 (None)
44 IMPORT_NAME 5 (requests)
46 STORE_NAME 5 (requests)
2 48 LOAD_CONST 0 (0)
50 LOAD_CONST 2 (('datetime',))
52 IMPORT_NAME 6 (datetime)
54 IMPORT_FROM 6 (datetime)
56 STORE_NAME 6 (datetime)
58 POP_TOP
3 60 LOAD_CONST 0 (0)
62 LOAD_CONST 3 (('*',))
64 IMPORT_NAME 7 (config)
66 IMPORT_STAR
5 68 LOAD_CONST 4 ('Mozilla/5.0 (Linux; Android 7.0; Redmi Note 4 Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/79.0.3945.93 Mobile Safari/537.36')
6 70 LOAD_CONST 5 ('application/x-www-form-urlencoded')
7 72 LOAD_CONST 6 ('/')
8 74 LOAD_CONST 7 ('com.reland.relandicebot')
9 76 LOAD_CONST 8 ('cross-site')
10 78 LOAD_CONST 9 ('cors')
11 80 LOAD_CONST 10 ('gzip, deflate')
12 82 LOAD_CONST 11 ('id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7')
13 84 LOAD_CONST 12 ('lang=id')
4 86 LOAD_CONST 13 (('user-agent', 'content-type', 'accept', 'x-requested-with', 'sec-fetch-site', 'sec-fetch-mode', 'accept-encoding', 'accept-language', 'cookie'))
88 BUILD_CONST_KEY_MAP 9
90 STORE_NAME 8 (headers)
15 92 LOAD_NAME 0 (cloudscraper)
94 LOAD_METHOD 9 (create_scraper)
96 CALL_METHOD 0
98 STORE_NAME 10 (scr)
16 100 LOAD_CONST 14 ('https://www.999doge.com/api/web.aspx')
102 STORE_NAME 11 (url)
17 104 LOAD_CONST 15 ('\x1b[0;34m')
106 STORE_NAME 12 (birutua)
18 108 LOAD_CONST 16 ('\x1b[0m')
110 STORE_NAME 13 (putih)
19 112 LOAD_CONST 17 ('\x1b[1;33m')
114 STORE_NAME 14 (kuning)
20 116 LOAD_CONST 18 ('\x1b[1;32m')
118 STORE_NAME 15 (hijau)
21 120 LOAD_CONST 19 ('\x1b[1;31m')
122 STORE_NAME 16 (merah)
22 124 LOAD_CONST 20 ('\x1b[0;36m')
126 STORE_NAME 17 (biru)
23 128 LOAD_CONST 21 ('\x1b[1;35m')
130 STORE_NAME 18 (ungu)
24 132 LOAD_CONST 22 ('\x1b[5;37;42m')
134 STORE_NAME 19 (bghijau_white)
25 136 LOAD_CONST 23 ('\x1b[5;37;41m')
138 STORE_NAME 20 (bgmerah_black)
26 140 LOAD_CONST 24 ('{:.8f}')
142 LOAD_ATTR 21 (format)
144 STORE_NAME 22 (num_format)
27 146 LOAD_CONST 25 ('{:0>1.0f}')
148 LOAD_ATTR 21 (format)
150 STORE_NAME 23 (num_PayIn)
28 152 LOAD_NAME 24 (account)
154 LOAD_CONST 26 ('Username')
156 BINARY_SUBSCR
158 STORE_NAME 25 (Username)
29 160 LOAD_NAME 24 (account)
162 LOAD_CONST 27 ('Password')
164 BINARY_SUBSCR
166 STORE_NAME 26 (Password)
31 168 LOAD_NAME 27 (float)
170 LOAD_NAME 28 (tradeset)
172 LOAD_CONST 28 ('BaseTrade')
174 BINARY_SUBSCR
176 CALL_FUNCTION 1
178 STORE_GLOBAL 29 (BaseTrade)
32 180 LOAD_NAME 27 (float)
182 LOAD_NAME 28 (tradeset)
184 LOAD_CONST 29 ('C1')
186 BINARY_SUBSCR
188 CALL_FUNCTION 1
190 STORE_NAME 30 (C1)
33 192 LOAD_NAME 27 (float)
194 LOAD_NAME 28 (tradeset)
196 LOAD_CONST 30 ('C2')
198 BINARY_SUBSCR
200 CALL_FUNCTION 1
202 STORE_NAME 31 (C2)
34 204 LOAD_NAME 32 (int)
206 LOAD_NAME 28 (tradeset)
208 LOAD_CONST 31 ('TradeCount_1')
210 BINARY_SUBSCR
212 CALL_FUNCTION 1
214 STORE_NAME 33 (TC1)
35 216 LOAD_NAME 32 (int)
218 LOAD_NAME 28 (tradeset)
220 LOAD_CONST 32 ('TradeCount_2')
222 BINARY_SUBSCR
224 CALL_FUNCTION 1
226 STORE_NAME 34 (TC2)
36 228 LOAD_NAME 33 (TC1)
230 LOAD_CONST 33 (200)
232 COMPARE_OP 4 (>)
234 POP_JUMP_IF_TRUE 246
236 LOAD_NAME 34 (TC2)
238 LOAD_CONST 33 (200)
240 COMPARE_OP 4 (>)
242 EXTENDED_ARG 1
244 POP_JUMP_IF_FALSE 262
37 >> 246 LOAD_NAME 35 (print)
248 LOAD_CONST 34 ('Number Of Trade Out of Limit')
250 CALL_FUNCTION 1
252 POP_TOP
38 254 LOAD_NAME 1 (sys)
256 LOAD_METHOD 36 (exit)
258 CALL_METHOD 0
260 POP_TOP
39 >> 262 LOAD_NAME 37 (str)
264 LOAD_NAME 28 (tradeset)
266 LOAD_CONST 35 ('MultiplyOnWin')
268 BINARY_SUBSCR
270 CALL_FUNCTION 1
272 STORE_NAME 38 (IncreaseOnWinPercent)
40 274 LOAD_NAME 38 (IncreaseOnWinPercent)
276 LOAD_CONST 36 ('0')
278 COMPARE_OP 2 (==)
280 EXTENDED_ARG 1
282 POP_JUMP_IF_FALSE 290
41 284 LOAD_CONST 37 (1)
286 STORE_NAME 39 (ResetOnWin)
288 JUMP_FORWARD 4 (to 294)
43 >> 290 LOAD_CONST 0 (0)
292 STORE_NAME 39 (ResetOnWin)
44 >> 294 LOAD_NAME 37 (str)
296 LOAD_NAME 28 (tradeset)
298 LOAD_CONST 38 ('MultiplyOnLose')
300 BINARY_SUBSCR
302 CALL_FUNCTION 1
304 STORE_NAME 40 (IncreaseOnLosePercent)
45 306 LOAD_NAME 40 (IncreaseOnLosePercent)
308 LOAD_CONST 36 ('0')
310 COMPARE_OP 2 (==)
312 EXTENDED_ARG 1
314 POP_JUMP_IF_FALSE 322
46 316 LOAD_CONST 37 (1)
318 STORE_NAME 41 (ResetOnLose)
320 JUMP_FORWARD 4 (to 326)
48 >> 322 LOAD_CONST 0 (0)
324 STORE_NAME 41 (ResetOnLose)
49 >> 326 LOAD_NAME 28 (tradeset)
328 LOAD_CONST 39 ('MaxBaseTrade')
330 BINARY_SUBSCR
332 LOAD_CONST 40 ('Toogle')
334 BINARY_SUBSCR
336 STORE_NAME 42 (MaxBase)
50 338 LOAD_NAME 42 (MaxBase)
340 LOAD_CONST 41 ('ON')
342 COMPARE_OP 2 (==)
344 EXTENDED_ARG 1
346 POP_JUMP_IF_FALSE 426
51 348 LOAD_NAME 27 (float)
350 LOAD_NAME 28 (tradeset)
352 LOAD_CONST 39 ('MaxBaseTrade')
354 BINARY_SUBSCR
356 LOAD_CONST 42 ('Max')
358 BINARY_SUBSCR
360 CALL_FUNCTION 1
362 LOAD_CONST 43 (100000000)
364 BINARY_MULTIPLY
366 STORE_GLOBAL 43 (MaxBaseTrade)
52 368 LOAD_NAME 28 (tradeset)
370 LOAD_CONST 39 ('MaxBaseTrade')
372 BINARY_SUBSCR
374 LOAD_CONST 44 ('ResetOnLoseMaxTrade')
376 BINARY_SUBSCR
378 LOAD_CONST 41 ('ON')
380 COMPARE_OP 2 (==)
382 EXTENDED_ARG 1
384 POP_JUMP_IF_FALSE 392
53 386 LOAD_CONST 37 (1)
388 STORE_NAME 44 (ResetOnLoseMaxTrade)
390 JUMP_FORWARD 4 (to 396)
55 >> 392 LOAD_CONST 0 (0)
394 STORE_NAME 44 (ResetOnLoseMaxTrade)
56 >> 396 LOAD_NAME 28 (tradeset)
398 LOAD_CONST 39 ('MaxBaseTrade')
400 BINARY_SUBSCR
402 LOAD_CONST 45 ('StopOnLoseMaxTrade')
404 BINARY_SUBSCR
406 LOAD_CONST 41 ('ON')
408 COMPARE_OP 2 (==)
410 EXTENDED_ARG 1
412 POP_JUMP_IF_FALSE 420
57 414 LOAD_CONST 37 (1)
416 STORE_NAME 45 (StopOnLoseMaxTrade)
418 JUMP_FORWARD 4 (to 424)
59 >> 420 LOAD_CONST 0 (0)
422 STORE_NAME 45 (StopOnLoseMaxTrade)
>> 424 JUMP_FORWARD 22 (to 448)
60 >> 426 LOAD_NAME 42 (MaxBase)
428 LOAD_CONST 46 ('OFF')
430 COMPARE_OP 2 (==)
432 EXTENDED_ARG 1
434 POP_JUMP_IF_FALSE 448
61 436 LOAD_CONST 0 (0)
438 STORE_GLOBAL 43 (MaxBaseTrade)
62 440 LOAD_CONST 0 (0)
442 STORE_NAME 44 (ResetOnLoseMaxTrade)
63 444 LOAD_CONST 0 (0)
446 STORE_NAME 45 (StopOnLoseMaxTrade)
66 >> 448 LOAD_NAME 46 (tools)
450 LOAD_CONST 47 ('ForceTC1AfterLose')
452 BINARY_SUBSCR
454 STORE_NAME 47 (ForceTC1AfterLose)
67 456 LOAD_NAME 46 (tools)
458 LOAD_CONST 48 ('ChangeTCAfterLose')
460 BINARY_SUBSCR
462 LOAD_CONST 40 ('Toogle')
464 BINARY_SUBSCR
466 STORE_NAME 48 (ChangeTCAfterLose)
68 468 LOAD_NAME 27 (float)
470 LOAD_NAME 46 (tools)
472 LOAD_CONST 49 ('TargetProfit')
474 BINARY_SUBSCR
476 CALL_FUNCTION 1
478 STORE_NAME 49 (TargetProfit)
69 480 LOAD_NAME 32 (int)
482 LOAD_NAME 28 (tradeset)
484 LOAD_CONST 50 ('ClientSeed')
486 BINARY_SUBSCR
488 CALL_FUNCTION 1
490 STORE_NAME 50 (ClientSeed)
70 492 LOAD_NAME 27 (float)
494 LOAD_NAME 46 (tools)
496 LOAD_CONST 51 ('RecoveryMultiplier')
498 BINARY_SUBSCR
500 CALL_FUNCTION 1
502 STORE_NAME 51 (RecoveryMultiplier)
71 504 LOAD_NAME 27 (float)
506 LOAD_NAME 46 (tools)
508 LOAD_CONST 52 ('RecoveryIncrease')
510 BINARY_SUBSCR
512 CALL_FUNCTION 1
514 STORE_NAME 52 (RecoveryIncrease)
72 516 LOAD_NAME 27 (float)
518 LOAD_NAME 46 (tools)
520 LOAD_CONST 53 ('AddDelayTrade')
522 BINARY_SUBSCR
524 CALL_FUNCTION 1
526 STORE_NAME 53 (AddDelayTrade)
73 528 LOAD_NAME 27 (float)
530 LOAD_NAME 46 (tools)
532 LOAD_CONST 54 ('AddDelayTradeWin')
534 BINARY_SUBSCR
536 CALL_FUNCTION 1
538 STORE_NAME 54 (AddDelayTradeWin)
74 540 LOAD_NAME 27 (float)
542 LOAD_NAME 46 (tools)
544 LOAD_CONST 55 ('AddDelayTradeLose')
546 BINARY_SUBSCR
548 CALL_FUNCTION 1
550 STORE_NAME 55 (AddDelayTradeLose)
75 552 LOAD_NAME 27 (float)
554 LOAD_NAME 46 (tools)
556 LOAD_CONST 56 ('StopLoseBalance')
558 BINARY_SUBSCR
560 CALL_FUNCTION 1
562 STORE_NAME 56 (StopLoseBalance)
76 564 LOAD_NAME 46 (tools)
566 LOAD_CONST 57 ('ContinueLastBase')
568 BINARY_SUBSCR
570 STORE_GLOBAL 57 (ContinueLastBase)
77 572 LOAD_NAME 46 (tools)
574 LOAD_CONST 58 ('SmartRecovery')
576 BINARY_SUBSCR
578 STORE_NAME 58 (SmartRecovery)
# Reconstructed module prologue of the decompiled bot script (<EzzKun>);
# mirrors the bytecode listing above.  Everything here runs at import time and
# populates module-level configuration read from config.py via the star import
# (which provides `account`, `tradeset` and `tools`).
import cloudscraper, sys, os, time, random, requests
from datetime import datetime
from config import *
# Browser-imitating request headers sent with every API call.
headers = {'user-agent':'Mozilla/5.0 (Linux; Android 7.0; Redmi Note 4 Build/NRD90M; wv) AppleWebKit/537.36 (KHTML, like Gecko) Version/4.0 Chrome/79.0.3945.93 Mobile Safari/537.36',
 'content-type':'application/x-www-form-urlencoded',
 'accept':'/',
 'x-requested-with':'com.reland.relandicebot',
 'sec-fetch-site':'cross-site',
 'sec-fetch-mode':'cors',
 'accept-encoding':'gzip, deflate',
 'accept-language':'id-ID,id;q=0.9,en-US;q=0.8,en;q=0.7',
 'cookie':'lang=id'}
scr = cloudscraper.create_scraper()  # Cloudflare-aware HTTP session
url = 'https://www.999doge.com/api/web.aspx'
# ANSI escape sequences for coloured terminal output (variable names are
# Indonesian colour words: birutua = dark blue, putih = white/reset,
# kuning = yellow, hijau = green, merah = red, biru = cyan, ungu = magenta).
birutua = '\x1b[0;34m'
putih = '\x1b[0m'
kuning = '\x1b[1;33m'
hijau = '\x1b[1;32m'
merah = '\x1b[1;31m'
biru = '\x1b[0;36m'
ungu = '\x1b[1;35m'
bghijau_white = '\x1b[5;37;42m'
bgmerah_black = '\x1b[5;37;41m'
num_format = '{:.8f}'.format  # 8-decimal formatter for coin amounts
num_PayIn = '{:0>1.0f}'.format  # integer formatter for PayIn values
Username = account['Username']
Password = account['Password']
BaseTrade = float(tradeset['BaseTrade'])  # bytecode uses STORE_GLOBAL here
C1 = float(tradeset['C1'])
C2 = float(tradeset['C2'])
TC1 = int(tradeset['TradeCount_1'])
TC2 = int(tradeset['TradeCount_2'])
# Trade counts above 200 are rejected outright.
if TC1 > 200 or (TC2 > 200):
    print('Number Of Trade Out of Limit')
    sys.exit()
# A MultiplyOnWin of '0' means "reset base trade on win" instead of multiplying.
IncreaseOnWinPercent = str(tradeset['MultiplyOnWin'])
if IncreaseOnWinPercent == '0':
    ResetOnWin = 1
else:
    ResetOnWin = 0
# Same convention for MultiplyOnLose.
IncreaseOnLosePercent = str(tradeset['MultiplyOnLose'])
if IncreaseOnLosePercent == '0':
    ResetOnLose = 1
else:
    ResetOnLose = 0
# Optional cap on the base trade, with reset/stop behaviour when it is hit.
# The 100000000 factor converts whole DOGE to the API's integer base unit.
MaxBase = tradeset['MaxBaseTrade']['Toogle']
if MaxBase == 'ON':
    MaxBaseTrade = float(tradeset['MaxBaseTrade']['Max']) * 100000000
    if tradeset['MaxBaseTrade']['ResetOnLoseMaxTrade'] == 'ON':
        ResetOnLoseMaxTrade = 1
    else:
        ResetOnLoseMaxTrade = 0
    if tradeset['MaxBaseTrade']['StopOnLoseMaxTrade'] == 'ON':
        StopOnLoseMaxTrade = 1
    else:
        StopOnLoseMaxTrade = 0
elif MaxBase == 'OFF':
    MaxBaseTrade = 0
    ResetOnLoseMaxTrade = 0
    StopOnLoseMaxTrade = 0
# Remaining strategy knobs from the `tools` section of the config.
ForceTC1AfterLose = tools['ForceTC1AfterLose']
ChangeTCAfterLose = tools['ChangeTCAfterLose']['Toogle']
TargetProfit = float(tools['TargetProfit'])
ClientSeed = int(tradeset['ClientSeed'])
RecoveryMultiplier = float(tools['RecoveryMultiplier'])
RecoveryIncrease = float(tools['RecoveryIncrease'])
AddDelayTrade = float(tools['AddDelayTrade'])
AddDelayTradeWin = float(tools['AddDelayTradeWin'])
AddDelayTradeLose = float(tools['AddDelayTradeLose'])
StopLoseBalance = float(tools['StopLoseBalance'])
ContinueLastBase = tools['ContinueLastBase']
SmartRecovery = tools['SmartRecovery']
78 580 LOAD_NAME 56 (StopLoseBalance)
582 LOAD_CONST 36 ('0')
584 COMPARE_OP 2 (==)
586 EXTENDED_ARG 2
588 POP_JUMP_IF_TRUE 600
590 LOAD_NAME 56 (StopLoseBalance)
592 LOAD_CONST 0 (0)
594 COMPARE_OP 2 (==)
596 EXTENDED_ARG 2
598 POP_JUMP_IF_FALSE 606
79 >> 600 LOAD_CONST 59 (-999999999999999999999999999)
602 STORE_NAME 56 (StopLoseBalance)
604 JUMP_FORWARD 28 (to 634)
80 >> 606 LOAD_NAME 56 (StopLoseBalance)
608 LOAD_CONST 36 ('0')
610 COMPARE_OP 3 (!=)
612 EXTENDED_ARG 2
614 POP_JUMP_IF_TRUE 626
616 LOAD_NAME 56 (StopLoseBalance)
618 LOAD_CONST 0 (0)
620 COMPARE_OP 3 (!=)
622 EXTENDED_ARG 2
624 POP_JUMP_IF_FALSE 634
81 >> 626 LOAD_NAME 56 (StopLoseBalance)
628 LOAD_CONST 60 (-100000000)
630 BINARY_MULTIPLY
632 STORE_NAME 56 (StopLoseBalance)
82 >> 634 LOAD_CONST 61 ('doge')
636 STORE_NAME 59 (Currency)
84 638 LOAD_CONST 62 ('Offline')
640 STORE_GLOBAL 60 (statslogin)
85 642 LOAD_CONST 0 (0)
644 STORE_NAME 61 (limit)
# --- skip function ---
86 646 LOAD_CONST 63 (<code object withdraw at 0xa8bab020, file "<EzzKun>", line 86>)
648 LOAD_CONST 64 ('withdraw')
650 MAKE_FUNCTION 0
652 STORE_NAME 62 (withdraw)
107 654 LOAD_CONST 65 (<code object harga_license at 0xa8a88f28, file "<EzzKun>", line 107>)
656 LOAD_CONST 66 ('harga_license')
658 MAKE_FUNCTION 0
660 STORE_NAME 63 (harga_license)
121 662 LOAD_CONST 67 (<code object post at 0xa8a03020, file "<EzzKun>", line 121>)
664 LOAD_CONST 68 ('post')
666 MAKE_FUNCTION 0
668 STORE_NAME 64 (post)
125 670 LOAD_CONST 69 (<code object login at 0xa89fff90, file "<EzzKun>", line 125>)
672 LOAD_CONST 70 ('login')
674 MAKE_FUNCTION 0
676 STORE_NAME 65 (login)
171 678 LOAD_CONST 170 ((0,))
680 LOAD_CONST 71 (<code object autobet at 0xa89fff28, file "<EzzKun>", line 171>)
682 LOAD_CONST 72 ('autobet')
684 MAKE_FUNCTION 1 (defaults)
686 STORE_NAME 66 (autobet)
347 688 LOAD_CONST 73 (<code object ainfo at 0xa89ffec0, file "<EzzKun>", line 347>)
690 LOAD_CONST 74 ('ainfo')
692 MAKE_FUNCTION 0
694 STORE_NAME 67 (ainfo)
373 696 LOAD_CONST 75 (<code object verify at 0xa89ffe58, file "<EzzKun>", line 373>)
698 LOAD_CONST 76 ('verify')
700 MAKE_FUNCTION 0
702 STORE_NAME 68 (verify)
382 704 LOAD_CONST 77 (<code object register at 0xa89ffc50, file "<EzzKun>", line 382>)
706 LOAD_CONST 78 ('register')
708 MAKE_FUNCTION 0
710 STORE_NAME 69 (register)
434 712 LOAD_CONST 79 (<code object check_license at 0xa89ff9e0, file "<EzzKun>", line 434>)
714 LOAD_CONST 80 ('check_license')
716 MAKE_FUNCTION 0
718 STORE_NAME 70 (check_license)
482 720 LOAD_CONST 81 (<code object check_license_platinum at 0xa89ff8a8, file "<EzzKun>", line 482>)
722 LOAD_CONST 82 ('check_license_platinum')
724 MAKE_FUNCTION 0
726 STORE_NAME 71 (check_license_platinum)
522 728 LOAD_CONST 83 (<code object gblnc at 0xa89ff840, file "<EzzKun>", line 522>)
730 LOAD_CONST 84 ('gblnc')
732 MAKE_FUNCTION 0
734 STORE_NAME 72 (gblnc)
# --- all function bodies were saved to separate files in the ``object`` directory ---
529 736 SETUP_FINALLY 86 (to 824)
530 738 LOAD_NAME 2 (os)
740 LOAD_METHOD 73 (system)
742 LOAD_CONST 85 ('clear')
744 CALL_METHOD 1
746 POP_TOP
531 748 LOAD_NAME 10 (scr)
750 LOAD_METHOD 74 (get)
752 LOAD_CONST 86 ('https://layscape.xyz/selenia/info.php')
754 CALL_METHOD 1
756 STORE_NAME 75 (srv)
532 758 LOAD_NAME 75 (srv)
760 LOAD_ATTR 76 (status_code)
762 STORE_NAME 77 (status)
533 764 LOAD_NAME 35 (print)
766 LOAD_CONST 87 ('Server Status Code [')
768 LOAD_NAME 77 (status)
770 LOAD_CONST 88 (']')
772 CALL_FUNCTION 3
774 POP_TOP
534 776 LOAD_NAME 77 (status)
778 LOAD_CONST 33 (200)
780 COMPARE_OP 2 (==)
782 EXTENDED_ARG 3
784 POP_JUMP_IF_FALSE 820
535 786 LOAD_NAME 35 (print)
788 LOAD_NAME 15 (hijau)
790 LOAD_CONST 89 ('Alive')
792 BINARY_ADD
794 LOAD_NAME 13 (putih)
796 BINARY_ADD
798 CALL_FUNCTION 1
800 POP_TOP
536 802 LOAD_NAME 75 (srv)
804 LOAD_METHOD 78 (json)
806 CALL_METHOD 0
808 STORE_NAME 79 (info)
537 810 LOAD_NAME 79 (info)
812 LOAD_CONST 90 ('versi')
814 BINARY_SUBSCR
816 STORE_NAME 80 (version)
818 JUMP_FORWARD 0 (to 820)
539 >> 820 POP_BLOCK
822 JUMP_FORWARD 86 (to 910)
540 >> 824 DUP_TOP
826 LOAD_NAME 81 (Exception)
828 COMPARE_OP 10 (exception match)
830 EXTENDED_ARG 3
832 POP_JUMP_IF_FALSE 908
834 POP_TOP
836 STORE_NAME 82 (e)
838 POP_TOP
840 SETUP_FINALLY 54 (to 896)
541 842 LOAD_NAME 35 (print)
844 LOAD_NAME 16 (merah)
846 LOAD_CONST 91 ('Server Down Try Again or Check Latest Version Script')
848 BINARY_ADD
850 LOAD_NAME 13 (putih)
852 BINARY_ADD
854 CALL_FUNCTION 1
856 POP_TOP
542 858 LOAD_NAME 35 (print)
860 LOAD_CONST 87 ('Server Status Code [')
862 LOAD_NAME 77 (status)
864 LOAD_CONST 88 (']')
866 CALL_FUNCTION 3
868 POP_TOP
543 870 LOAD_NAME 35 (print)
872 LOAD_NAME 16 (merah)
874 LOAD_CONST 92 ('ERROR CONNECTION TRY AGAIN')
876 LOAD_NAME 13 (putih)
878 BINARY_ADD
880 CALL_FUNCTION 2
882 POP_TOP
544 884 LOAD_NAME 1 (sys)
886 LOAD_METHOD 36 (exit)
888 CALL_METHOD 0
890 POP_TOP
892 POP_BLOCK
894 BEGIN_FINALLY
>> 896 LOAD_CONST 1 (None)
898 STORE_NAME 82 (e)
900 DELETE_NAME 82 (e)
902 END_FINALLY
904 POP_EXCEPT
906 JUMP_FORWARD 2 (to 910)
>> 908 END_FINALLY
# StopLoseBalance normalisation: a value of '0' / 0 disables the stop-loss by
# pushing the threshold to an unreachably low number; otherwise it is converted
# to a negative amount in the API's integer base unit (x 100000000).
if StopLoseBalance == '0' or StopLoseBalance == 0:
    StopLoseBalance = -999999999999999999999999999
else:
    # NOTE(review): StopLoseBalance came from float(...) above, so the string
    # comparison can never match and this `or` condition is always true when
    # reached.  Faithful to the bytecode listing above, not simplified here.
    if StopLoseBalance != '0' or (StopLoseBalance != 0):
        StopLoseBalance = StopLoseBalance * -100000000
Currency = 'doge'
statslogin = 'Offline'  # session starts logged out (STORE_GLOBAL in bytecode)
limit = 0
try:
    os.system('clear')
    # Fetch the author's version/notice feed; sets `status`, `info`, `version`.
    srv = scr.get('https://layscape.xyz/selenia/info.php')
    status = srv.status_code
    print('Server Status Code [', status, ']')
    if status == 200:
        print(hijau + 'Alive' + putih)
        info = srv.json()
        version = info['versi']
    else:
        pass
except Exception as e:
    try:
        print(merah + 'Server Down Try Again or Check Latest Version Script' + putih)
        # NOTE(review): `status` is only bound if scr.get() succeeded; when the
        # request itself failed this print raises NameError.
        print('Server Status Code [', status, ']')
        print(merah, 'ERROR CONNECTION TRY AGAIN' + putih)
        sys.exit()
    finally:
        # Interpreter's `except ... as e` name cleanup, rendered explicitly by
        # the decompiler (see STORE_NAME/DELETE_NAME e in the listing above).
        e = None
        del e
545 >> 910 LOAD_NAME 77 (status)
912 LOAD_CONST 33 (200)
914 COMPARE_OP 2 (==)
916 EXTENDED_ARG 8
918 POP_JUMP_IF_FALSE 2064
546 920 SETUP_FINALLY 36 (to 958)
547 922 LOAD_CONST 93 ('a=GetBalance&s=')
924 LOAD_GLOBAL 83 (ses)
926 BINARY_ADD
928 LOAD_CONST 94 ('&Currency=doge')
930 BINARY_ADD
932 STORE_NAME 84 (getbalance)
548 934 LOAD_NAME 64 (post)
936 LOAD_NAME 84 (getbalance)
938 CALL_FUNCTION 1
940 POP_TOP
549 942 LOAD_GLOBAL 85 (req)
944 LOAD_CONST 95 ('Balance')
946 BINARY_SUBSCR
948 LOAD_CONST 43 (100000000)
950 BINARY_TRUE_DIVIDE
952 STORE_GLOBAL 86 (dogebalance)
954 POP_BLOCK
956 JUMP_FORWARD 12 (to 970)
550 >> 958 POP_TOP
960 POP_TOP
962 POP_TOP
551 964 POP_EXCEPT
966 JUMP_FORWARD 2 (to 970)
968 END_FINALLY
552 >> 970 LOAD_NAME 3 (time)
972 LOAD_METHOD 87 (sleep)
974 LOAD_CONST 37 (1)
976 CALL_METHOD 1
978 POP_TOP
553 980 LOAD_NAME 2 (os)
982 LOAD_METHOD 73 (system)
984 LOAD_CONST 85 ('clear')
986 CALL_METHOD 1
988 POP_TOP
554 990 LOAD_NAME 35 (print)
992 LOAD_CONST 96 ('\x1b[1;31m====================================================\x1b[0m')
994 CALL_FUNCTION 1
996 POP_TOP
555 998 LOAD_NAME 35 (print)
1000 LOAD_CONST 97 ('\x1b[1;32m[+]\x1b[0m \x1b[0;36mDO WITH YOUR OWN RISK \x1b[0m \x1b[1;32m[+]\x1b[0m')
1002 CALL_FUNCTION 1
1004 POP_TOP
556 1006 LOAD_NAME 35 (print)
1008 LOAD_CONST 98 ('\x1b[1;32m[+]\x1b[0m \x1b[1;33mCreator : Layscape\x1b[0m \x1b[1;32m[+]\x1b[0m')
1010 CALL_FUNCTION 1
1012 POP_TOP
557 1014 LOAD_NAME 35 (print)
1016 LOAD_CONST 99 ('\x1b[1;32m[+]\x1b[0m \x1b[1;33mVersi Script V3.0\x1b[0m \x1b[1;32m[+]\x1b[0m')
1018 CALL_FUNCTION 1
1020 POP_TOP
558 1022 LOAD_NAME 35 (print)
1024 LOAD_CONST 100 ('\x1b[1;32m[+]\x1b[0m \x1b[1;33mJoin Group Whatsapp For News and Update\x1b[0m \x1b[1;32m[+]\x1b[0m')
1026 CALL_FUNCTION 1
1028 POP_TOP
559 1030 LOAD_NAME 35 (print)
1032 LOAD_CONST 96 ('\x1b[1;31m====================================================\x1b[0m')
1034 CALL_FUNCTION 1
1036 POP_TOP
560 1038 LOAD_NAME 35 (print)
1040 LOAD_CONST 101 ("Disclaimer : \nScript Not Working Don't Blame Creator :). \nRead/Watch How to Use As Well")
1042 CALL_FUNCTION 1
1044 POP_TOP
561 1046 LOAD_NAME 35 (print)
1048 LOAD_CONST 96 ('\x1b[1;31m====================================================\x1b[0m')
1050 CALL_FUNCTION 1
1052 POP_TOP
562 1054 LOAD_NAME 35 (print)
1056 LOAD_NAME 14 (kuning)
1058 LOAD_CONST 102 ('Info :')
1060 BINARY_ADD
1062 LOAD_NAME 79 (info)
1064 LOAD_CONST 103 ('notice5')
1066 BINARY_SUBSCR
1068 BINARY_ADD
1070 LOAD_NAME 13 (putih)
1072 BINARY_ADD
1074 CALL_FUNCTION 1
1076 POP_TOP
563 1078 LOAD_NAME 35 (print)
1080 LOAD_CONST 96 ('\x1b[1;31m====================================================\x1b[0m')
1082 CALL_FUNCTION 1
1084 POP_TOP
564 1086 LOAD_NAME 35 (print)
1088 LOAD_NAME 15 (hijau)
1090 LOAD_CONST 104 ('Information Script :')
1092 BINARY_ADD
1094 CALL_FUNCTION 1
1096 POP_TOP
565 1098 LOAD_NAME 35 (print)
1100 LOAD_CONST 105 ('Versi :')
1102 LOAD_NAME 79 (info)
1104 LOAD_CONST 90 ('versi')
1106 BINARY_SUBSCR
1108 CALL_FUNCTION 2
1110 POP_TOP
566 1112 LOAD_NAME 35 (print)
1114 LOAD_CONST 106 ('Creator :')
1116 LOAD_NAME 79 (info)
1118 LOAD_CONST 107 ('created')
1120 BINARY_SUBSCR
1122 CALL_FUNCTION 2
1124 POP_TOP
567 1126 LOAD_NAME 35 (print)
1128 LOAD_CONST 108 ('Youtube :')
1130 LOAD_NAME 79 (info)
1132 LOAD_CONST 109 ('youtube')
1134 BINARY_SUBSCR
1136 CALL_FUNCTION 2
1138 POP_TOP
568 1140 LOAD_NAME 35 (print)
1142 LOAD_CONST 110 ('Script :')
1144 LOAD_NAME 79 (info)
1146 LOAD_CONST 111 ('script')
1148 BINARY_SUBSCR
1150 LOAD_NAME 13 (putih)
1152 BINARY_ADD
1154 CALL_FUNCTION 2
1156 POP_TOP
569 1158 LOAD_CONST 112 ('3.0')
1160 LOAD_NAME 80 (version)
1162 COMPARE_OP 2 (==)
1164 EXTENDED_ARG 4
1166 POP_JUMP_IF_FALSE 1186 # this is if
570 1168 LOAD_NAME 35 (print)
1170 LOAD_NAME 15 (hijau)
1172 LOAD_CONST 113 ('New Version')
1174 BINARY_ADD
1176 LOAD_NAME 13 (putih)
1178 BINARY_ADD
1180 CALL_FUNCTION 1
1182 POP_TOP
1184 JUMP_FORWARD 74 (to 1260)
571 >> 1186 LOAD_NAME 80 (version)
1188 LOAD_CONST 112 ('3.0')
1190 COMPARE_OP 4 (>)
1192 EXTENDED_ARG 4
1194 POP_JUMP_IF_FALSE 1260 # this is elif
572 1196 LOAD_NAME 35 (print)
1198 LOAD_NAME 16 (merah)
1200 LOAD_CONST 114 ('New Version ')
1202 BINARY_ADD
1204 LOAD_NAME 80 (version)
1206 BINARY_ADD
1208 LOAD_CONST 115 (' Release')
1210 BINARY_ADD
1212 LOAD_NAME 13 (putih)
1214 BINARY_ADD
1216 CALL_FUNCTION 1
1218 POP_TOP
573 1220 LOAD_NAME 35 (print)
1222 LOAD_NAME 16 (merah)
1224 LOAD_CONST 116 ('Please Update')
1226 BINARY_ADD
1228 LOAD_NAME 13 (putih)
1230 BINARY_ADD
1232 CALL_FUNCTION 1
1234 POP_TOP
574 1236 LOAD_NAME 35 (print)
1238 LOAD_NAME 15 (hijau)
1240 LOAD_CONST 117 ('Type This Command:\n- git stash\n- git pull')
1242 BINARY_ADD
1244 LOAD_NAME 13 (putih)
1246 BINARY_ADD
1248 CALL_FUNCTION 1
1250 POP_TOP
575 1252 LOAD_NAME 1 (sys)
1254 LOAD_METHOD 36 (exit)
1256 CALL_METHOD 0
1258 POP_TOP
576 >> 1260 LOAD_NAME 35 (print)
1262 LOAD_NAME 14 (kuning)
1264 LOAD_CONST 118 ('Notice :\n')
1266 BINARY_ADD
1268 LOAD_NAME 79 (info)
1270 LOAD_CONST 119 ('notice1')
1272 BINARY_SUBSCR
1274 BINARY_ADD
1276 CALL_FUNCTION 1
1278 POP_TOP
577 1280 LOAD_NAME 35 (print)
1282 LOAD_NAME 79 (info)
1284 LOAD_CONST 120 ('notice2')
1286 BINARY_SUBSCR
1288 CALL_FUNCTION 1
1290 POP_TOP
578 1292 LOAD_NAME 35 (print)
1294 LOAD_NAME 79 (info)
1296 LOAD_CONST 121 ('notice3')
1298 BINARY_SUBSCR
1300 CALL_FUNCTION 1
1302 POP_TOP
579 1304 LOAD_NAME 35 (print)
1306 LOAD_NAME 79 (info)
1308 LOAD_CONST 122 ('notice4')
1310 BINARY_SUBSCR
1312 CALL_FUNCTION 1
1314 POP_TOP
580 1316 LOAD_NAME 35 (print)
1318 LOAD_CONST 123 ('- Attention to Your Connection')
1320 LOAD_NAME 13 (putih)
1322 BINARY_ADD
1324 CALL_FUNCTION 1
1326 POP_TOP
581 1328 LOAD_NAME 35 (print)
1330 LOAD_CONST 124 ('Buy License Here : \nhttps://layscape.xyz/selenia/license')
1332 CALL_FUNCTION 1
1334 POP_TOP
582 1336 LOAD_NAME 35 (print)
1338 LOAD_CONST 125 ('')
1340 CALL_FUNCTION 1
1342 POP_TOP
583 1344 LOAD_GLOBAL 60 (statslogin)
1346 LOAD_CONST 126 ('Online')
1348 COMPARE_OP 2 (==)
1350 EXTENDED_ARG 6
1352 POP_JUMP_IF_FALSE 1666
584 1354 LOAD_NAME 35 (print)
1356 LOAD_NAME 15 (hijau)
1358 LOAD_CONST 127 ('Re-Login for Refresh')
1360 BINARY_ADD
1362 LOAD_NAME 13 (putih)
1364 CALL_FUNCTION 2
1366 POP_TOP
# Main-menu preamble: only reached when the info feed answered 200.
if status == 200:
    # Refresh the DOGE balance if a session token exists; failures (e.g. `ses`
    # not yet defined because the user has not logged in) are swallowed.
    try:
        getbalance = 'a=GetBalance&s=' + ses + '&Currency=doge'
        post(getbalance)
        dogebalance = req['Balance'] / 100000000  # base units -> DOGE
    except:
        pass
    else:
        # NOTE(review): rendered as try/else by the decompiler, but in the
        # bytecode listing above both the try exit and the handler jump to the
        # same offset (970), i.e. the sleep actually runs unconditionally.
        time.sleep(1)
    os.system('clear')
    # Banner / disclaimer block.
    print('\x1b[1;31m====================================================\x1b[0m')
    print('\x1b[1;32m[+]\x1b[0m \x1b[0;36mDO WITH YOUR OWN RISK \x1b[0m \x1b[1;32m[+]\x1b[0m')
    print('\x1b[1;32m[+]\x1b[0m \x1b[1;33mCreator : Layscape\x1b[0m \x1b[1;32m[+]\x1b[0m')
    print('\x1b[1;32m[+]\x1b[0m \x1b[1;33mVersi Script V3.0\x1b[0m \x1b[1;32m[+]\x1b[0m')
    print('\x1b[1;32m[+]\x1b[0m \x1b[1;33mJoin Group Whatsapp For News and Update\x1b[0m \x1b[1;32m[+]\x1b[0m')
    print('\x1b[1;31m====================================================\x1b[0m')
    print("Disclaimer : \nScript Not Working Don't Blame Creator :). \nRead/Watch How to Use As Well")
    print('\x1b[1;31m====================================================\x1b[0m')
    print(kuning + 'Info :' + info['notice5'] + putih)
    print('\x1b[1;31m====================================================\x1b[0m')
    print(hijau + 'Information Script :')
    print('Versi :', info['versi'])
    print('Creator :', info['created'])
    print('Youtube :', info['youtube'])
    print('Script :', info['script'] + putih)
    # Compare the hard-coded local version against the server's.
    if '3.0' == version:
        print(hijau + 'New Version' + putih)
    elif version > '3.0':  # NOTE(review): lexicographic str comparison; a numeric (float) compare was likely intended
        print(merah + 'New Version ' + version + ' Release' + putih)
        print(merah + 'Please Update' + putih)
        print(hijau + 'Type This Command:\n- git stash\n- git pull' + putih)
        sys.exit()
    # Server-pushed notices plus static hints.
    print(kuning + 'Notice :\n' + info['notice1'])
    print(info['notice2'])
    print(info['notice3'])
    print(info['notice4'])
    print('- Attention to Your Connection' + putih)
    print('Buy License Here : \nhttps://layscape.xyz/selenia/license')
    print('')
    if statslogin == 'Online':
        print(hijau + 'Re-Login for Refresh', putih)
    else:
        pass
585 1368 SETUP_FINALLY 56 (to 1426)
586 1370 LOAD_GLOBAL 88 (Expired)
1372 LOAD_CONST 0 (0)
1374 COMPARE_OP 1 (<=)
1376 EXTENDED_ARG 5
1378 POP_JUMP_IF_FALSE 1422
587 1380 LOAD_NAME 35 (print)
1382 LOAD_NAME 16 (merah)
1384 LOAD_CONST 128 ('License Out of Date')
1386 BINARY_ADD
1388 LOAD_NAME 13 (putih)
1390 BINARY_ADD
1392 CALL_FUNCTION 1
1394 POP_TOP
588 1396 LOAD_NAME 35 (print)
1398 LOAD_NAME 14 (kuning)
1400 LOAD_CONST 129 ('Buy New One')
1402 BINARY_ADD
1404 LOAD_NAME 13 (putih)
1406 BINARY_ADD
1408 CALL_FUNCTION 1
1410 POP_TOP
589 1412 LOAD_NAME 1 (sys)
1414 LOAD_METHOD 36 (exit)
1416 CALL_METHOD 0
1418 POP_TOP
1420 JUMP_FORWARD 0 (to 1422)
591 >> 1422 POP_BLOCK
1424 JUMP_FORWARD 12 (to 1438)
592 >> 1426 POP_TOP
1428 POP_TOP
1430 POP_TOP
593 1432 POP_EXCEPT
1434 JUMP_FORWARD 2 (to 1438)
1436 END_FINALLY
594 >> 1438 LOAD_NAME 35 (print)
1440 LOAD_CONST 130 ('Informasi Status Login :')
1442 LOAD_NAME 15 (hijau)
1444 LOAD_GLOBAL 60 (statslogin)
1446 BINARY_ADD
1448 LOAD_NAME 13 (putih)
1450 BINARY_ADD
1452 CALL_FUNCTION 2
1454 POP_TOP
595 1456 LOAD_NAME 35 (print)
1458 LOAD_CONST 131 ('Account ID :')
1460 LOAD_GLOBAL 89 (accid)
1462 CALL_FUNCTION 2
1464 POP_TOP
596 1466 LOAD_NAME 35 (print)
1468 LOAD_CONST 132 ('Username :')
1470 LOAD_NAME 25 (Username)
1472 CALL_FUNCTION 2
1474 POP_TOP
597 1476 LOAD_NAME 35 (print)
1478 LOAD_CONST 133 ('Doge Balance :')
1480 LOAD_NAME 22 (num_format)
1482 LOAD_GLOBAL 86 (dogebalance)
1484 CALL_FUNCTION 1
1486 CALL_FUNCTION 2
1488 POP_TOP
598 1490 LOAD_NAME 35 (print)
1492 LOAD_CONST 134 ('Doge Deposit Wallet :')
1494 LOAD_GLOBAL 90 (dogewallet)
1496 CALL_FUNCTION 2
1498 POP_TOP
599 1500 LOAD_NAME 35 (print)
1502 LOAD_CONST 135 ('License Type : ')
1504 LOAD_NAME 91 (logintype)
1506 CALL_FUNCTION 2
1508 POP_TOP
600 1510 LOAD_NAME 91 (logintype)
1512 LOAD_CONST 136 ('Free License')
1514 COMPARE_OP 2 (==)
1516 EXTENDED_ARG 6
1518 POP_JUMP_IF_FALSE 1560
601 1520 LOAD_CONST 62 ('Offline')
1522 STORE_GLOBAL 92 (statssrv)
602 1524 LOAD_NAME 35 (print)
1526 LOAD_CONST 137 ('Expired Date : None')
1528 CALL_FUNCTION 1
1530 POP_TOP
603 1532 LOAD_NAME 35 (print)
1534 LOAD_CONST 138 ('SG Server Status :')
1536 LOAD_NAME 16 (merah)
1538 LOAD_GLOBAL 92 (statssrv)
1540 BINARY_ADD
1542 LOAD_NAME 13 (putih)
1544 BINARY_ADD
1546 CALL_FUNCTION 2
1548 POP_TOP
604 1550 LOAD_NAME 35 (print)
1552 LOAD_CONST 139 ('Max Balance : 150 DOGE')
1554 CALL_FUNCTION 1
1556 POP_TOP
1558 JUMP_FORWARD 96 (to 1656)
605 >> 1560 LOAD_NAME 91 (logintype)
1562 LOAD_CONST 140 ('Premium License')
1564 COMPARE_OP 2 (==)
1566 EXTENDED_ARG 6
1568 POP_JUMP_IF_TRUE 1580
1570 LOAD_NAME 91 (logintype)
1572 LOAD_CONST 141 ('Platinum License')
1574 COMPARE_OP 2 (==)
1576 EXTENDED_ARG 6
1578 POP_JUMP_IF_FALSE 1656
606 >> 1580 LOAD_CONST 126 ('Online')
1582 STORE_GLOBAL 92 (statssrv)
607 1584 LOAD_NAME 6 (datetime)
1586 LOAD_METHOD 93 (now)
1588 CALL_METHOD 0
1590 STORE_NAME 94 (mydatetime)
608 1592 LOAD_NAME 35 (print)
1594 LOAD_CONST 138 ('SG Server Status :')
1596 LOAD_NAME 15 (hijau)
1598 LOAD_GLOBAL 92 (statssrv)
1600 BINARY_ADD
1602 LOAD_NAME 13 (putih)
1604 BINARY_ADD
1606 CALL_FUNCTION 2
1608 POP_TOP
609 1610 LOAD_NAME 35 (print)
1612 LOAD_CONST 142 ('Date :')
1614 LOAD_NAME 94 (mydatetime)
1616 LOAD_METHOD 95 (strftime)
1618 LOAD_CONST 143 ('%Y-%m-%d')
1620 CALL_METHOD 1
1622 CALL_FUNCTION 2
1624 POP_TOP
610 1626 LOAD_NAME 35 (print)
1628 LOAD_CONST 144 ('Expired Date :')
1630 LOAD_GLOBAL 96 (userdate)
1632 CALL_FUNCTION 2
1634 POP_TOP
611 1636 LOAD_NAME 35 (print)
1638 LOAD_CONST 145 ('Expired In :')
1640 LOAD_GLOBAL 88 (Expired)
1642 LOAD_CONST 146 ('Days')
1644 CALL_FUNCTION 3
1646 POP_TOP
612 1648 LOAD_NAME 35 (print)
1650 LOAD_CONST 147 ('Max Balance : Unlimited')
1652 CALL_FUNCTION 1
1654 POP_TOP
613 >> 1656 LOAD_NAME 35 (print)
1658 LOAD_CONST 148 ('Currency Available : DOGE')
1660 CALL_FUNCTION 1
1662 POP_TOP
1664 JUMP_FORWARD 10 (to 1676)
615 >> 1666 LOAD_NAME 35 (print)
1668 LOAD_CONST 149 ('Information Status Login :')
1670 LOAD_GLOBAL 60 (statslogin)
1672 CALL_FUNCTION 2
1674 POP_TOP
617 >> 1676 LOAD_NAME 35 (print)
1678 LOAD_NAME 15 (hijau)
1680 LOAD_CONST 150 ('\nPilih Menu :')
1682 BINARY_ADD
1684 CALL_FUNCTION 1
1686 POP_TOP
618 1688 LOAD_NAME 35 (print)
1690 LOAD_NAME 14 (kuning)
1692 LOAD_CONST 151 ('1. Login Premium License')
1694 BINARY_ADD
1696 CALL_FUNCTION 1
1698 POP_TOP
619 1700 LOAD_NAME 35 (print)
1702 LOAD_CONST 152 ('2. Login For Free')
1704 CALL_FUNCTION 1
1706 POP_TOP
620 1708 LOAD_NAME 35 (print)
1710 LOAD_CONST 153 ('3. Login Platinum License')
1712 CALL_FUNCTION 1
1714 POP_TOP
621 1716 LOAD_NAME 35 (print)
1718 LOAD_CONST 154 ('4. Register Account SELENIA')
1720 CALL_FUNCTION 1
1722 POP_TOP
622 1724 LOAD_NAME 35 (print)
1726 LOAD_CONST 155 ('5. Price List License')
1728 CALL_FUNCTION 1
1730 POP_TOP
623 1732 LOAD_NAME 35 (print)
1734 LOAD_CONST 156 ('0. Keluar')
1736 CALL_FUNCTION 1
1738 POP_TOP
624 1740 LOAD_GLOBAL 60 (statslogin)
1742 LOAD_CONST 126 ('Online')
1744 COMPARE_OP 2 (==)
1746 EXTENDED_ARG 6
1748 POP_JUMP_IF_FALSE 1774
625 1750 LOAD_NAME 35 (print)
1752 LOAD_CONST 157 ('6. Start Trade')
1754 CALL_FUNCTION 1
1756 POP_TOP
626 1758 LOAD_NAME 35 (print)
1760 LOAD_CONST 158 ('7. Withdraw')
1762 CALL_FUNCTION 1
1764 POP_TOP
627 1766 LOAD_NAME 35 (print)
1768 LOAD_CONST 159 ('8. Account Information')
1770 CALL_FUNCTION 1
1772 POP_TOP
629 >> 1774 LOAD_NAME 97 (input)
1776 LOAD_CONST 160 ('==>')
1778 CALL_FUNCTION 1
1780 STORE_NAME 98 (smenu)
630 1782 LOAD_NAME 98 (smenu)
1784 LOAD_CONST 161 ('1')
1786 COMPARE_OP 2 (==)
1788 EXTENDED_ARG 7
1790 POP_JUMP_IF_FALSE 1820
631 1792 LOAD_CONST 0 (0)
1794 STORE_NAME 61 (limit)
632 1796 LOAD_CONST 140 ('Premium License')
1798 STORE_NAME 91 (logintype)
633 1800 LOAD_NAME 65 (login)
1802 CALL_FUNCTION 0
1804 POP_TOP
634 1806 LOAD_NAME 68 (verify)
1808 CALL_FUNCTION 0
1810 POP_TOP
635 1812 LOAD_NAME 70 (check_license)
1814 CALL_FUNCTION 0
1816 POP_TOP
1818 JUMP_FORWARD 30 (to 1850)
636 >> 1820 LOAD_NAME 98 (smenu)
1822 LOAD_CONST 162 ('2')
1824 COMPARE_OP 2 (==)
1826 EXTENDED_ARG 7
1828 POP_JUMP_IF_FALSE 1850
637 1830 LOAD_CONST 136 ('Free License')
1832 STORE_NAME 91 (logintype)
638 1834 LOAD_NAME 65 (login)
1836 CALL_FUNCTION 0
1838 POP_TOP
639 1840 LOAD_GLOBAL 86 (dogebalance)
1842 STORE_NAME 61 (limit)
640 1844 LOAD_NAME 68 (verify)
1846 CALL_FUNCTION 0
1848 POP_TOP
641 >> 1850 LOAD_NAME 98 (smenu)
1852 LOAD_CONST 163 ('3')
1854 COMPARE_OP 2 (==)
1856 EXTENDED_ARG 7
1858 POP_JUMP_IF_FALSE 1888
642 1860 LOAD_CONST 0 (0)
1862 STORE_NAME 61 (limit)
643 1864 LOAD_CONST 141 ('Platinum License')
1866 STORE_NAME 91 (logintype)
644 1868 LOAD_NAME 65 (login)
1870 CALL_FUNCTION 0
1872 POP_TOP
645 1874 LOAD_NAME 68 (verify)
1876 CALL_FUNCTION 0
1878 POP_TOP
646 1880 LOAD_NAME 71 (check_license_platinum)
1882 CALL_FUNCTION 0
1884 POP_TOP
1886 JUMP_FORWARD 172 (to 2060)
647 >> 1888 LOAD_NAME 98 (smenu)
1890 LOAD_CONST 164 ('4')
1892 COMPARE_OP 2 (==)
1894 EXTENDED_ARG 7
1896 POP_JUMP_IF_FALSE 1906
648 1898 LOAD_NAME 69 (register)
1900 CALL_FUNCTION 0
1902 POP_TOP
1904 JUMP_FORWARD 154 (to 2060)
649 >> 1906 LOAD_NAME 98 (smenu)
1908 LOAD_CONST 165 ('6')
1910 COMPARE_OP 2 (==)
1912 EXTENDED_ARG 7
1914 POP_JUMP_IF_FALSE 1958
1916 LOAD_GLOBAL 60 (statslogin)
1918 LOAD_CONST 126 ('Online')
1920 COMPARE_OP 2 (==)
1922 EXTENDED_ARG 7
1924 POP_JUMP_IF_FALSE 1958
650 1926 LOAD_NAME 91 (logintype)
1928 LOAD_CONST 136 ('Free License')
1930 COMPARE_OP 2 (==)
1932 EXTENDED_ARG 7
1934 POP_JUMP_IF_FALSE 1948
651 1936 LOAD_NAME 72 (gblnc)
1938 CALL_FUNCTION 0
1940 POP_TOP
652 1942 LOAD_GLOBAL 86 (dogebalance)
1944 STORE_NAME 61 (limit)
1946 JUMP_FORWARD 0 (to 1948)
655 >> 1948 LOAD_NAME 66 (autobet)
1950 LOAD_NAME 61 (limit)
1952 CALL_FUNCTION 1
1954 POP_TOP
1956 JUMP_FORWARD 102 (to 2060)
656 >> 1958 LOAD_NAME 98 (smenu)
1960 LOAD_CONST 166 ('5')
1962 COMPARE_OP 2 (==)
1964 EXTENDED_ARG 7
1966 POP_JUMP_IF_FALSE 1976
657 1968 LOAD_NAME 63 (harga_license)
1970 CALL_FUNCTION 0
1972 POP_TOP
1974 JUMP_FORWARD 84 (to 2060)
658 >> 1976 LOAD_NAME 98 (smenu)
1978 LOAD_CONST 167 ('7')
1980 COMPARE_OP 2 (==)
1982 EXTENDED_ARG 7
1984 POP_JUMP_IF_FALSE 2004
1986 LOAD_GLOBAL 60 (statslogin)
1988 LOAD_CONST 126 ('Online')
1990 COMPARE_OP 2 (==)
1992 EXTENDED_ARG 7
1994 POP_JUMP_IF_FALSE 2004
659 1996 LOAD_NAME 62 (withdraw)
1998 CALL_FUNCTION 0
2000 POP_TOP
2002 JUMP_FORWARD 56 (to 2060)
660 >> 2004 LOAD_NAME 98 (smenu)
2006 LOAD_CONST 168 ('8')
2008 COMPARE_OP 2 (==)
2010 EXTENDED_ARG 7
2012 POP_JUMP_IF_FALSE 2032
2014 LOAD_GLOBAL 60 (statslogin)
2016 LOAD_CONST 126 ('Online')
2018 COMPARE_OP 2 (==)
2020 EXTENDED_ARG 7
2022 POP_JUMP_IF_FALSE 2032
661 2024 LOAD_NAME 67 (ainfo)
2026 CALL_FUNCTION 0
2028 POP_TOP
2030 JUMP_FORWARD 28 (to 2060)
662 >> 2032 LOAD_NAME 98 (smenu)
2034 LOAD_CONST 36 ('0')
2036 COMPARE_OP 2 (==)
2038 EXTENDED_ARG 8
2040 POP_JUMP_IF_FALSE 2052
663 2042 LOAD_NAME 1 (sys)
2044 LOAD_METHOD 36 (exit)
2046 CALL_METHOD 0
2048 POP_TOP
2050 JUMP_FORWARD 8 (to 2060)
665 >> 2052 LOAD_NAME 35 (print)
2054 LOAD_CONST 169 ('NO MENU SELECTED')
2056 CALL_FUNCTION 1
2058 POP_TOP
>> 2060 EXTENDED_ARG 3
2062 JUMP_ABSOLUTE 910
try:
if Expired <= 0:
print(merah + 'License Out of Date' + putih)
print(kuning + 'Buy New One' + putih)
sys.exit()
else:
pass
except:
pass
else:
print('Informasi Status Login :', hijau + statslogin + putih)
print('Account ID :', accid)
print('Username :', Username)
print('Doge Balance :', num_format(dogebalance))
print('Doge Deposit Wallet :', dogewallet)
print('License Type : ', logintype)
if logintype == 'Free License':
statssrv = 'Offline'
print('Expired Date : None')
print('SG Server Status :', merah + statssrv + putih)
print('Max Balance : 150 DOGE')
else:
if not logintype == 'Premium License':
if logintype == 'Platinum License':
pass
statssrv = 'Online'
mydatetime = datetime.now()
print('SG Server Status :', hijau + statssrv + putih)
print('Date :', mydatetime.strftime('%Y-%m-%d'))
print('Expired Date :', userdate)
print('Expired In :', Expired, 'Days')
print('Max Balance : Unlimited')
print('Currency Available : DOGE')
print('Information Status Login :', statslogin)
print(hijau + '\nPilih Menu :')
print(kuning + '1. Login Premium License')
print('2. Login For Free')
print('3. Login Platinum License')
print('4. Register Account SELENIA')
print('5. Price List License')
print('0. Keluar')
if statslogin == 'Online':
print('6. Start Trade')
print('7. Withdraw')
print('8. Account Information')
smenu = input('==>')
if smenu == '1':
limit = 0
logintype = 'Premium License'
login()
verify()
check_license()
if smenu == '2':
logintype = 'Free License'
login()
limit = dogebalance
verify()
if smenu == '3':
limit = 0
logintype = 'Platinum License'
login()
verify()
check_license_platinum()
if smenu == '4':
register()
if smenu == '6':
if statslogin == 'Online':
if logintype == 'Free License':
gblnc()
limit = dogebalance
else:
pass
autobet(limit)
if smenu == '5':
harga_license()
if smenu == '7':
if statslogin == 'Online':
withdraw()
if smenu == '8':
if statslogin == 'Online':
ainfo()
if smenu == '0':
sys.exit()
print('NO MENU SELECTED')
667 >> 2064 LOAD_CONST 1 (None)
2066 RETURN_VALUE
| [
"[email protected]"
] | |
20a59d30363f13db08a271bd7d4156a4795b5037 | 9fa71d5834dae1c8900b3444f564b11326374d36 | /packages/ipm_cloud_postgresql/folha/rotinas_envio/tipo-afastamento.py | 81f76c9ccfb467f9f87b432e8845eb17d8d9c18f | [] | no_license | JoaoPauloLeal/toolbox | a85e726cfeb74603cb64d73c4af64757a9a60db7 | 924c063ba81395aeddc039a51f8365c02e527963 | refs/heads/master | 2023-06-07T02:17:42.069985 | 2021-06-28T19:06:40 | 2021-06-28T19:06:40 | 381,128,045 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,411 | py | import packages.ipm_cloud_postgresql.model as model
import bth.interacao_cloud as interacao_cloud
import json
import logging
from datetime import datetime
tipo_registro = 'tipo-afastamento'
sistema = 300
limite_lote = 500
url = "https://pessoal.cloud.betha.com.br/service-layer/v1/api/tipo-afastamento"
def iniciar_processo_envio(params_exec, *args, **kwargs):
    """Collect, pre-validate and (unless told otherwise) send the records.

    When ``params_exec['somente_pre_validar']`` is truthy the pipeline stops
    after pre-validation; otherwise the data is POSTed and the sent lots are
    checked against the control table.
    """
    coletados = coletar_dados(params_exec)
    validados = pre_validar(params_exec, coletados)
    if params_exec.get('somente_pre_validar'):
        return
    iniciar_envio(params_exec, validados, 'POST')
    model.valida_lotes_enviados(params_exec, tipo_registro=tipo_registro)
def coletar_dados(params_exec):
    """Run the SQL query for this record type and return the result.

    :param params_exec: execution parameters forwarded to ``model.get_consulta``
    :return: DataFrame indexed by ``id``, or ``None`` if the query failed
             (the error is reported on stdout instead of being raised)
    """
    print('- Iniciando a consulta dos dados a enviar.')
    df = None
    try:
        query = model.get_consulta(params_exec, tipo_registro + '.sql')
        pgcnn = model.PostgreSQLConnection()
        df = pgcnn.exec_sql(query, index_col='id')
        print(f'- Consulta finalizada. {len(df.index)} registro(s) encontrado(s).')
    except Exception as error:
        print(f'Erro ao executar função {tipo_registro}. {error}')
    # Return OUTSIDE the try/finally: a `return` inside a finally block
    # silently swallows any in-flight exception (e.g. KeyboardInterrupt).
    return df
def pre_validar(params_exec, dados):
    """Pre-validate the collected records.

    Converts *dados* (a DataFrame) to a list of dicts and currently keeps
    every row — no rejection rule is implemented yet, so ``registro_erros``
    stays empty. Errors are logged and whatever was validated so far is
    returned instead of raising.

    :param params_exec: execution parameters (unused here, kept for symmetry)
    :param dados: DataFrame with the rows to validate
    :return: list of validated row dicts
    """
    print('- Iniciando pré-validação dos registros.')
    dados_validados = []
    registro_erros = []
    try:
        for linha in dados.to_dict('records'):
            # Placeholder rule: every record is currently accepted.
            registro_valido = True
            if registro_valido:
                dados_validados.append(linha)
        print(f'- Pré-validação finalizada. Registros validados com sucesso: '
              f'{len(dados_validados)} | Registros com advertência: {len(registro_erros)}')
    except Exception as error:
        logging.error(f'Erro ao executar função "pre_validar". {error}')
    # Return OUTSIDE the finally block: `return` in finally would silently
    # swallow any in-flight exception (e.g. KeyboardInterrupt).
    return dados_validados
def iniciar_envio(params_exec, dados, metodo, *args, **kwargs):
    """Build the service-layer payloads and send them in batches.

    For each record a hash id is generated from its business keys
    (entity id + code), the JSON body is assembled, and both the
    migration-control rows and the request batches are handed to the
    persistence/HTTP helpers.
    """
    print('- Iniciando envio dos dados.')
    lista_payloads = []
    lista_controle = []
    hoje = datetime.now().strftime("%Y-%m-%d")  # kept for parity; unused below
    token = params_exec['token']
    gerados = 0
    for registro in dados:
        hash_chaves = model.gerar_hash_chaves(sistema, tipo_registro, registro['id_entidade'], registro['codigo'])
        # dict.get(key) defaults to None, matching the service-layer contract
        # for absent optional fields.
        payload = {
            'idIntegracao': hash_chaves,
            'conteudo': {
                'descricao': registro.get('descricao'),
                'classificacao': registro.get('classificacao'),
                'tipoMovimentacaoPessoal': registro.get('tipomovimentacaopessoal'),
                'diasPrevistos': registro.get('diasprevistos'),
                'perdeTempoServico': registro.get('perdetemposervico'),
                'consideraVencimento': registro.get('consideravencimento'),
                'reduz13Salario': registro.get('reduz13salario'),
                'reduzFerias': registro.get('reduzferias'),
                'justificado': registro.get('justificado'),
                'reduzFgts': registro.get('reduzfgts')
            }
        }
        gerados += 1
        print(f'Dados gerados ({gerados}): ', payload)
        lista_payloads.append(payload)
        lista_controle.append({
            'sistema': sistema,
            'tipo_registro': tipo_registro,
            'hash_chave_dsk': hash_chaves,
            'descricao_tipo_registro': 'Cadastro de Tipo de Afastamento',
            'id_gerado': None,
            'i_chave_dsk1': registro['id_entidade'],
            'i_chave_dsk2': registro['codigo']
        })
    model.insere_tabela_controle_migracao_registro2(params_exec, lista_req=lista_controle)
    req_res = interacao_cloud.preparar_requisicao(lista_dados=lista_payloads,
                                                  token=token,
                                                  url=url,
                                                  tipo_registro=tipo_registro,
                                                  tamanho_lote=limite_lote)
    model.insere_tabela_controle_lote(req_res)
    print('- Envio de dados finalizado.')
"[email protected]"
] | |
c3a9262abc44ac5508726e238bdcacc3f8454599 | 24cee07743790afde5040c38ef95bb940451e2f6 | /acode/abc284/e/update.py | cbe323dede2e63602d87336c493cc58525a7c3eb | [] | no_license | tinaba96/coding | fe903fb8740d115cf5a7f4ff5af73c7d16b9bce1 | d999bf5620e52fabce4e564c73b9f186e493b070 | refs/heads/master | 2023-09-01T02:24:33.476364 | 2023-08-30T15:01:47 | 2023-08-30T15:01:47 | 227,594,153 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,006 | py | import sys
sys.setrecursionlimit(500005)
#sys.setrecursionlimit(10**9)
#import pypyjit # this is for solving slow issue for pypy when using recursion but python will not need this (test will fail but submit works)
#pypyjit.set_param('max_unroll_recursion=-1')
# Read N vertices and M undirected edges, building a 1-indexed adjacency list.
N, M = list(map(int, input().split()))
mp = [[] for n in range(N+1)]  # mp[0] is unused
for i in range(M):
    u, v = list(map(int, input().split()))
    mp[u].append(v)
    mp[v].append(u)
al = set()   # set of path strings already counted
cnt = 0      # number of distinct simple paths found so far
def dfs(p, e):
    """Count simple paths from vertex 1 via depth-first search.

    p: the path so far, encoded as the vertex numbers concatenated into one
       string (e.g. '123'); e: the current vertex.
    NOTE(review): concatenating bare digits is ambiguous for vertices >= 10
    ('1'+'23' is indistinguishable from '12'+'3'), so `str(n) in p` can
    wrongly treat an unvisited vertex as visited — a separator between
    vertices would fix this. Also `str(n) in p` costs O(len(p)) per check,
    a likely TLE source (both issues are discussed in the postmortem string
    at the bottom of this file).
    """
    global cnt
    if p not in al:
        al.add(p)
        cnt += 1
    if len(al) > 10**6:
        # Once more than 10**6 paths are seen, print the cap and stop early.
        print(10**6)
        exit()
    for n in mp[e]:
        if str(n) in p:
            continue
        dfs(p+str(n), n)
    return
# Start the search at vertex 1 with the path string '1', then print the count.
dfs('1', 1)
print(cnt)
# WA: 全探索ができていない?
# TLE: len(al)やstr(n) in p に時間を要している? それともpythonの再帰だから? -> len(al) is O(1), str(n) in p is almopst O(NlogN) (this is the cause of TLE)
# len(al) can costs almost 10**6 specially at the end. -> this is wrong see below
# str(n) in p costs O(len(p)) which is O(N) at maximum -> almost O(NlogN)
'''
ask question in LINE
ME
ABC284Eなのですが、このように実装して提出した結果、AC: 21 WA: 9 TLE: 3というような結果になってしまいました。
TLEになる原因は、len(al)やstr(n) in p だと思うのですが、WAになる原因が分かりません。パスを文字列として、setに格納していく実装なのですが、WAの原因分かる方いらっしゃいますでしょうか。
answer1
p = '1'+'2'のときに12も行ったことになるとか?
path graph (一直線のグラフ)だとalに入る文字数がO(n^2)になって大変なことになりませんか
ME
そうですね!確かにこれだと0-9までの頂点しか機能しないですね!
ありがとうございます!
ans2
dfs(p+‘$’+str(n), n)
とかってしたらこの問題は解決できそうですね
ME
al.add(p)のpの(文字列の)長さlen(p)がO(n^2)なるということでしょうか。(for ans1)
確かに頭に文字列をつければ、探索する際も特定できますね!ありがとうございます!(for ans2)
ans1
alに入っている文字列の合計の長さです
単純グラフなので、DFSする限りでは毎回必ず違ったpになるので、個数だけ管理しておけばよいです
ME
確かにそうなりますね!気づきませんでした、、
これは単純にメモリ制限的に引っかかるという考え方で良いのでしょうか。
勉強になります!
ans1
基本的にそのはず…賢い言語実装だとメモリ節約してくれるのもあった気がしますが
ME
ありがとうございます!
ちなみに、dfsの部分はO(N+M)だと思っているのですが、
それに加え、len(al)やstr(n) in p の部分がさらにO(N)かかり、全体的にO(N(N+M))ではないかと考えたのですが、考え方はあっているのでしょうか。
len(al)やstr(n) in pの部分はそれぞれalとpの長さの分計算コストかかると思っているのですが、それぞれの長さがNくらいになるのは最後の方だけだと思います。全体としてO(N(N+M)と考えて良いのでしょうか。
len(al)やstr(n) in pの部分は、ならし計算量でもO(1)にならないと思うので、ならし計算量でO(1)にならなければ、O(N)と考えれば良いのでしょうか?
asn3
(余計なお世話かもしれませんがnを文字列で表した時の長さはO(log n)なのでalに含まれる文字列の長さの合計にもlogが付くと思います)
ans4
len は定数時間じゃないですか?
ME
ありがとうございます!
これは、グラフの分岐があるためlogがつくということでしょうか。
一直線のグラフなどの最悪ケースでO(n^2)になるという理解で良いでしょうか? (for ans3)
pythonは長さを別で用意していて、len()はO(1)のようでした。
ご指摘ありがとうございます!(for ans4)
ans3
nを文字列で表そうとすると、その桁数分の文字が必要で、その桁数というのがO(log n)なので文字列の長さ、つまり文字の個数の合計にlogが付くという話です
例えば1や3は1桁なので1文字で良いですが、100000は6桁なので6文字必要です
ans5
その問題、再帰関数を用いたdfsが一般的だと思うのですが、スタックを用いたdfs で実装するのは厳しそうですかね?
ME
そういうことですね!理解できました。ありがとうございます!(for ans3)
となると、TLEの原因はstr(n) in pの部分でpの長さ分コストがかかるという理解で良いのでしょうか。pは最大N回文字列が足され、それぞれ足される文字列の長さがO(logN)と考えるとpの長さは O (NlogN)という感じでしょうか。
実装まではしていないのですが、pythonの再帰処理が苦手であることを考えるとスタックによる実装の方が早くなるとは思います。
ただこれがTLEの原因なのでしょうか。それとも上記のstr(n) in pがボトルネックになっているのでしょうか。(for ans5)
ans3
正しいと思います
TLEの原因がこれで、もしTLが無限であった場合今度はalのメモリが原因でMLEになると思います
ans4
+str(n) も PyPy だと遅そうなのと、なんか "123" か 1 → 2 → 3 なのか 1 → 23 なのかの曖昧性があって壊れませんか?
後者が WA になってそうで、例えば
1 → 23 → 2 のときに、2 が踏めないと判断されそうです
あ、既に指摘されてましたごめんなさい
ME
ありがとうございます!非常に納得がいき、勉強になりました!(for ans3)
いえいえ!ありがとうございます!
具体例も非常に勉強になりました!(for ans4)
'''
| [
"[email protected]"
] | |
00e61e3359148ae5195cff96ee8a1f87917fa3ba | 6f05f7d5a67b6bb87956a22b988067ec772ba966 | /data/test/python/e0a24819976e888969becc8f9ec8d2f0e7e377efurls.py | e0a24819976e888969becc8f9ec8d2f0e7e377ef | [
"MIT"
] | permissive | harshp8l/deep-learning-lang-detection | 93b6d24a38081597c610ecf9b1f3b92c7d669be5 | 2a54293181c1c2b1a2b840ddee4d4d80177efb33 | refs/heads/master | 2020-04-07T18:07:00.697994 | 2018-11-29T23:21:23 | 2018-11-29T23:21:23 | 158,597,498 | 0 | 0 | MIT | 2018-11-21T19:36:42 | 2018-11-21T19:36:41 | null | UTF-8 | Python | false | false | 591 | py | #coding:utf-8
from controller.index import Index
from controller.article import Article
from controller.signin import Signin
from controller.write import Write
from controller.signout import Signout
from controller.page import About,Type
from controller.api import Article as ART,Comment as com
# Tornado-style URL routing table: (regex, handler class).
urls = [
    # Home page
    (r'/', Index),
    # Article detail
    (r'/article/([^\n]*)',Article),
    # Sign in
    (r'/signin',Signin),
    # Publish a post
    (r'/write',Write),
    # Article API
    (r'/api/article/([^\n]*)',ART),
    (r'/api/comment',com),
    # Sign out
    (r'/signout',Signout),
    # About page
    (r'/about',About),
    # Categories
    (r'/type',Type)
]
"[email protected]"
] | |
626c922de9219080952e7221d26a8a4a2740ad29 | 6e8b606bca1eaddd8858fffc0fdeda039a438af5 | /source/precipitation/precip_stats_for_central_arctic_to_timeseries.py | 3af2eff5c86951f4471ed3fd8fddbaeec12bb877 | [] | no_license | andypbarrett/SnowOnSeaIce | 1f93a0523933fff0bfdd89fc87ad32b371bae359 | b8fe84a23bf790eb8efc43f4b89725fb7ba7d73c | refs/heads/master | 2023-01-20T11:53:30.835890 | 2023-01-18T17:43:19 | 2023-01-18T17:43:19 | 137,275,118 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,836 | py | #----------------------------------------------------------------------
# Calculates mean of precipitation stats for Arctic Ocean excluding
# Barents and Kara seas. This region conforms to the regions with
# data from the NP drifting stations.
#----------------------------------------------------------------------
import pandas as pd
import os
import utilities as util
from constants import arctic_mask_region as region
from constants import accumulation_period_filepath
def make_outfilepath(fili):
    """Return the output CSV path for *fili*.

    The input file's extension is swapped for ``.npsnow_region.csv``.
    Built from the ``os.path.splitext`` root rather than ``str.replace``,
    because ``replace`` substitutes the FIRST occurrence of the extension
    anywhere in the path (e.g. ``/a/b.nc.dir/era.nc`` would be mangled).
    """
    root, _ = os.path.splitext(fili)
    return root + '.npsnow_region.csv'
def precip_stats_for_central_arctic_to_time_series(reanalysis, verbose=False):
    """Average precipitation statistics over the NPSNOW Arctic region.

    Loads the annual accumulation for *reanalysis*, derives the drizzle
    component, averages over the central Arctic seas (Barents and Kara
    excluded, matching NP drifting-station coverage) and writes the
    resulting time series to a CSV file.
    """
    ds = util.load_annual_accumulation(reanalysis)
    ds['drizzle'] = ds['precTot'] - ds['wetdayTot']
    # Build the region mask from the NPSNOW seas.
    mask = util.read_region_mask()
    seas = ['CENTRAL_ARCTIC', 'BEAUFORT', 'CHUKCHI', 'LAPTEV', 'EAST_SIBERIAN']
    newmask = (mask == region[seas[0]])
    for sea in seas[1:]:
        newmask = newmask | (mask == region[sea])
    region_mean = ds.where(newmask).mean(dim=['x', 'y']).to_dataframe()
    filo = make_outfilepath(accumulation_period_filepath[reanalysis])
    print(f'Writing time series to {filo}')
    region_mean.to_csv(filo)
if __name__ == "__main__":
    import argparse

    # Command-line entry point: the reanalysis name is the only argument.
    parser = argparse.ArgumentParser(description="Calculates time series of average precip stats for NPSNOW Arctic region")
    parser.add_argument('reanalysis', type=str, help='Reanalysis to process')

    args = parser.parse_args()

    precip_stats_for_central_arctic_to_time_series(args.reanalysis, verbose=True)
| [
"[email protected]"
] | |
b586bc73c8abf2ab0858af5c05cb97731f7d31fa | a366db0f2a117e0a8cf923e9b4de5c643155e047 | /bench/state.py | 08dad6e8fdc6e4fca68111ef1035197012e312ac | [] | no_license | genome/nessy-server | d2ff6aa7bb692f50e5cabb435a380670be75b2b9 | f8207310d33bf259130df806b4d759ef1a883e56 | refs/heads/master | 2021-01-10T18:59:38.910186 | 2014-12-29T22:11:16 | 2014-12-29T22:11:16 | 15,785,645 | 0 | 0 | null | 2014-12-29T22:11:16 | 2014-01-10T01:57:38 | Python | UTF-8 | Python | false | false | 2,115 | py | import collections
import datetime
class State(object):
    """Tracks which state each resource is in, plus request-timing stats."""

    # Sentinel default so callers can pass claim_url=None to clear an entry.
    UNSET = object()

    def __init__(self, resource_names):
        # state name -> set of resources currently in that state
        self._state_index = collections.defaultdict(set)
        self._state_index['released'].update(resource_names)
        # resource -> its current state name
        self._resource_index = {r: 'released' for r in resource_names}
        # resource -> claim URL (present only while claimed)
        self._claim_urls = {}
        self.transition_count = 0
        # tag -> list of request durations in seconds
        self._request_times = collections.defaultdict(list)

    def get_claim_url(self, resource):
        """Return the claim URL recorded for *resource* (KeyError if none)."""
        return self._claim_urls[resource]

    def resources_in_states(self, *states):
        """Return the union of resources currently in any of *states*."""
        # set().union(...) also handles the zero-argument call gracefully
        # (the old set.union(*[...]) crashed when no states were given).
        return set().union(*(self._state_index[s] for s in states))

    def set_resource_state(self, resource, state, claim_url=UNSET):
        """Move *resource* to *state*, optionally updating its claim URL.

        Passing claim_url=None clears any stored URL; leaving it at UNSET
        keeps the stored URL untouched.
        """
        self.transition_count += 1
        old_state = self._resource_index.pop(resource)
        self._resource_index[resource] = state
        self._state_index[old_state].remove(resource)
        self._state_index[state].add(resource)
        if claim_url is not self.UNSET:
            if claim_url is None:
                # Clearing: drop the entry if present. (The old code stored
                # a literal None when the entry was absent, which made
                # get_claim_url() return None instead of raising.)
                self._claim_urls.pop(resource, None)
            else:
                self._claim_urls[resource] = claim_url

    def noop(self):
        """Count a request that did not change any state."""
        self.transition_count += 1

    def start_timer(self):
        self._begin_time = datetime.datetime.now()

    def stop_timer(self):
        self._end_time = datetime.datetime.now()

    @property
    def _total_runtime(self):
        """Seconds elapsed between start_timer() and stop_timer()."""
        return (self._end_time - self._begin_time).total_seconds()

    def report(self):
        """Summarize throughput: totals plus per-tag mean/number/rps."""
        tag_times = {
            tag: {
                'mean': sum(times) / len(times),
                'number': len(times),
                'rps': len(times) / sum(times),
            }
            # .items() works on both Python 2 and 3; .iteritems() was
            # Python-2-only and raised AttributeError on Python 3.
            for tag, times in self._request_times.items()
        }
        return {
            'total_requests': self.transition_count,
            'total_runtime': self._total_runtime,
            'rps': self.transition_count / self._total_runtime,
            'times': tag_times,
        }

    def register_request(self, tag, seconds):
        """Record that a request tagged *tag* took *seconds* seconds."""
        self._request_times[tag].append(seconds)
| [
"[email protected]"
] | |
fad0a9d402c2a9c652ef1ffc6eb8328b5bf559c7 | 5257652fc34ec87fe45d390ba49b15b238860104 | /nn_interpretation/nn_unique/get_nn_unique.py | 0aefadbf1cc44379399634748c270b52f7fc9a45 | [] | no_license | thekingofall/alzheimers_parkinsons | cd247fa2520c989e8dd853ed22b58a9bff564391 | 4ceae6ea3eb4c58919ff41aed8803855bca240c8 | refs/heads/master | 2022-11-30T22:36:37.201334 | 2020-08-12T01:23:55 | 2020-08-12T01:23:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 233 | py | buddies_nn=set(open('buddies_nn.txt','r').read().strip().split('\n'))
sig_svm=set(open('sig_svm.txt','r').read().strip().split('\n'))
nn_unique=buddies_nn-sig_svm
outf=open('nn_unique.txt','w')
outf.write('\n'.join(nn_unique)+'\n')
| [
"[email protected]"
] | |
392775843ef9a141cf72d2566d5ca45de26aa634 | 5aa27e52058d014bf4fb784d63a70c7d1f565330 | /Games/Tanks.py | 49cfe3c4acab326b37343a174e0e53d14d9bffe0 | [] | no_license | a5vh/AdPro | 2e5c5f952bb917d3b98c277a512670b67ce0718f | 595c1c3e5088ae3cfb85078282dffceb44c1901e | refs/heads/master | 2020-04-11T00:16:37.088549 | 2019-05-05T19:24:53 | 2019-05-05T19:24:53 | 161,381,037 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,458 | py | import sys, time, random, math, pygame
from pygame.locals import *
from MyLibrary import *
class Bullet():
    """A cannon shell: a small colored dot that flies in a straight line."""
    def __init__(self,position):
        self.alive = True
        self.color = (250,20,20)
        # Copy the spawn point so the firing tank's Point is not aliased.
        self.position = Point(position.x,position.y)
        self.velocity = Point(0,0)
        self.rect = Rect(0,0,4,4)
        self.owner = ""   # "player" or "enemy"; set by the fire helpers
    def update(self,ticks):
        """Advance the bullet; kill it once it leaves the 800x600 screen."""
        self.position.x += self.velocity.x * 10.0
        self.position.y += self.velocity.y * 10.0
        if self.position.x < 0 or self.position.x > 800 \
        or self.position.y < 0 or self.position.y > 600:
            self.alive = False
        # Keep the collision rect in sync with the current position.
        self.rect = Rect(self.position.x, self.position.y, 4, 4)
    def draw(self,surface):
        """Draw the bullet as a filled 4-pixel circle."""
        pos = (int(self.position.x), int(self.position.y))
        pygame.draw.circle(surface, self.color, pos, 4, 0)
def fire_cannon(tank):
    """Spawn a bullet at *tank*'s turret, aimed along the turret angle.

    Appends the bullet to the global ``bullets`` list, plays the shoot
    sound, and returns the bullet so the caller can set owner/color.
    """
    position = Point(tank.turret.X, tank.turret.Y)
    bullet = Bullet(position)
    angle = tank.turret.rotation
    bullet.velocity = angular_velocity(angle)
    bullets.append(bullet)
    play_sound(shoot_sound)
    return bullet
def player_fire_cannon():
    """Fire the player's cannon: a green bullet tagged owner="player"."""
    bullet = fire_cannon(player)
    bullet.owner = "player"
    bullet.color = (30,250,30)
def enemy_fire_cannon():
    """Fire the current enemy's cannon: a red bullet tagged owner="enemy".

    NOTE(review): relies on the main loop's ``for e in enemy_group`` loop
    variable leaking in as a global — fragile; passing the tank as a
    parameter would be safer.
    """
    bullet = fire_cannon(e)
    bullet.owner = "enemy"
    bullet.color = (250,30,30)
class Tank(MySprite):
    """Player tank: an animated, rotating chassis plus an aimable turret."""
    def __init__(self,tank_file="tank.png",turret_file="turret.png"):
        MySprite.__init__(self)
        self.load(tank_file, 50, 60, 4)
        self.speed = 0.0
        self.scratch = None           # rotated copy of the current frame
        self.float_pos = Point(0,0)   # sub-pixel position (X/Y stay ints)
        self.velocity = Point(0,0)
        self.turret = MySprite()
        self.turret.load(turret_file, 32, 64, 4)
        self.fire_timer = 0           # ticks at the last shot (rate limiting)
    def update(self,ticks):
        #update chassis animation and its rotated image
        MySprite.update(self,ticks,150)
        self.rotation = wrap_angle(self.rotation)
        self.scratch = pygame.transform.rotate(self.image, -self.rotation)
        angle = wrap_angle(self.rotation)
        # `dir` is a module-level global (90 forward / -90 reverse, toggled
        # by the S key in the main loop); note it shadows the builtin dir().
        self.velocity = angular_velocity(angle+dir)
        self.float_pos.x += self.velocity.x
        self.float_pos.y += self.velocity.y
        #warp tank around screen edges (keep it simple)
        if self.float_pos.x < -50: self.float_pos.x = 800
        elif self.float_pos.x > 800: self.float_pos.x = -50
        if self.float_pos.y < -60: self.float_pos.y = 600
        elif self.float_pos.y > 600: self.float_pos.y = -60
        #transfer float position to integer position for drawing
        self.X = int(self.float_pos.x)
        self.Y = int(self.float_pos.y)
        #update turret: it follows the chassis; rotation is set by the caller
        self.turret.position = (self.X,self.Y)
        self.turret.last_frame = 0
        self.turret.update(ticks,100)
        self.turret.rotation = wrap_angle(self.turret.rotation)
        # +90 offset — presumably the turret art's resting orientation; verify
        angle = self.turret.rotation+90
        self.turret.scratch = pygame.transform.rotate(self.turret.image, -angle)
    def draw(self,surface):
        #draw the chassis centered on (X, Y)
        width,height = self.scratch.get_size()
        center = Point(width/2,height/2)
        surface.blit(self.scratch, (self.X-center.x, self.Y-center.y))
        #draw the turret centered on its own position
        width,height = self.turret.scratch.get_size()
        center = Point(width/2,height/2)
        surface.blit(self.turret.scratch, (self.turret.X-center.x, self.turret.Y-center.y))
    def __str__(self):
        return MySprite.__str__(self) + "," + str(self.velocity)
class EnemyTank(Tank):
    """Autonomous enemy tank; same structure as Tank but self-driving.

    NOTE(review): update() is almost a copy of Tank.update() — it differs
    only in the animation rate (100 vs 150) and the drive direction
    (angle-90 instead of angle+dir); the duplication could be factored out.
    """
    def __init__(self,tank_file="enemy_tank.png",turret_file="enemy_turret.png"):
        Tank.__init__(self,tank_file,turret_file)
    def update(self,ticks):
        MySprite.update(self,ticks,100)
        self.rotation = wrap_angle(self.rotation)
        self.scratch = pygame.transform.rotate(self.image, -self.rotation)
        angle = wrap_angle(self.rotation)
        # The enemy always drives relative to its heading (angle-90).
        self.velocity = angular_velocity(angle-90)
        self.float_pos.x += self.velocity.x
        self.float_pos.y += self.velocity.y
        #warp tank around screen edges (keep it simple)
        if self.float_pos.x < -50: self.float_pos.x = 800
        elif self.float_pos.x > 800: self.float_pos.x = -50
        if self.float_pos.y < -60: self.float_pos.y = 600
        elif self.float_pos.y > 600: self.float_pos.y = -60
        #transfer float position to integer position for drawing
        self.X = int(self.float_pos.x)
        self.Y = int(self.float_pos.y)
        #update turret
        self.turret.position = (self.X,self.Y)
        self.turret.last_frame = 0
        self.turret.update(ticks,100)
        self.turret.rotation = wrap_angle(self.turret.rotation)
        angle = self.turret.rotation+90
        self.turret.scratch = pygame.transform.rotate(self.turret.image, -angle)
    def draw(self,surface):
        Tank.draw(self,surface)
#this function initializes the game
def game_init():
    """Create the window, fonts, crosshair, player tank, five enemy tanks,
    and the shared bullet list (all stored as module-level globals)."""
    global screen, backbuffer, font, timer, player_group, player, \
        enemy_tank, bullets, crosshair, crosshair_group, enemy_group, explosion
    pygame.init()
    screen = pygame.display.set_mode((800,600))
    backbuffer = pygame.Surface((800,600))
    pygame.display.set_caption("Tank Battle Game")
    font = pygame.font.Font(None, 30)
    timer = pygame.time.Clock()
    # Hide the OS cursor; the crosshair sprite replaces it.
    pygame.mouse.set_visible(False)
    #load mouse cursor
    crosshair = MySprite()
    crosshair.load("crosshair.png")
    crosshair_group = pygame.sprite.GroupSingle()
    crosshair_group.add(crosshair)
    #create player tank at screen center
    player = Tank()
    player.float_pos = Point(400,300)
    enemy_group = pygame.sprite.Group()
    #create enemy tanks at random positions/headings
    for n in range(0, 5):
        enemy_tank = EnemyTank()
        enemy_tank.float_pos = Point(random.randint(50,760), random.randint(50, 550))
        enemy_tank.rotation = random.randint(100, 180)
        enemy_group.add(enemy_tank)
    #create bullets
    bullets = list()
# this function initializes the audio system
def audio_init():
    """Initialize the pygame mixer and load the three sound effects."""
    global shoot_sound, boom_sound, move_sound
    #initialize the audio mixer
    pygame.mixer.init()
    #load sound files
    shoot_sound = pygame.mixer.Sound("shoot.wav")
    boom_sound = pygame.mixer.Sound("boom.wav")
    move_sound = pygame.mixer.Sound("move.wav")
# this function uses any available channel to play a sound clip
def play_sound(sound):
    """Play *sound* on any free mixer channel at half volume."""
    channel = pygame.mixer.find_channel(True)
    channel.set_volume(0.5)
    channel.play(sound)
#main program begins
dir = 90
wait = 3
game_init()
audio_init()
game_over = False
player_score = 0
enemy_score = 0
last_time = 0
mouse_x = mouse_y = 0
#main loop: events -> input -> update -> collisions -> draw, at 30 FPS
while True:
    timer.tick(30)
    ticks = pygame.time.get_ticks()
    #reset mouse state variables
    mouse_up = mouse_down = 0
    mouse_up_x = mouse_up_y = 0
    mouse_down_x = mouse_down_y = 0
    #event section
    for event in pygame.event.get():
        if event.type == QUIT:
            sys.exit()
        elif event.type == MOUSEMOTION:
            mouse_x, mouse_y = event.pos
            move_x, move_y = event.rel
        elif event.type == MOUSEBUTTONDOWN:
            mouse_down = event.button
            mouse_down_x, mouse_down_y = event.pos
        elif event.type == MOUSEBUTTONUP:
            mouse_up = event.button
            mouse_up_x, mouse_up_y = event.pos
    #get key states
    keys = pygame.key.get_pressed()
    if keys[K_ESCAPE]:
        sys.exit()
    elif keys[K_LEFT] or keys[K_a]:
        #rotate hull left
        player.rotation -= 2.0
    elif keys[K_RIGHT] or keys[K_d]:
        #rotate hull right
        player.rotation += 2.0
    elif keys[K_s] or keys[K_DOWN]:
        #reverse direction, throttled by the `wait` cooldown counter
        play_sound(move_sound)
        if dir == -90 and wait < 1:
            dir = 90
            wait = 3
        elif wait < 1:
            dir = -90
            wait = 3
        wait -= 1
    elif keys[K_p] and game_over:
        #restart the game
        dir = 90
        wait = 3
        game_init()
        game_over = False
        player_score = 0
    elif player_score == 5:
        #first to five kills ends the round
        game_over = True
    #fire cannon! (500 ms cooldown between shots)
    if keys[K_SPACE] or mouse_up > 0:
        if ticks > player.fire_timer + 500:
            player.fire_timer = ticks
            player_fire_cannon()
            play_sound(move_sound)
    #update section (frozen while the game is over)
    if not game_over:
        crosshair.position = (mouse_x, mouse_y)
        crosshair_group.update(ticks)
        #point tank turret toward crosshair center
        angle = target_angle(player.turret.X, player.turret.Y,
                             crosshair.X + crosshair.frame_width/2,
                             crosshair.Y + crosshair.frame_height/2)
        player.turret.rotation = angle
        #move tank
        player.update(ticks)
        #update enemies
        for e in enemy_group:
            e.update(ticks)
        #enemies fire once per second
        for e in enemy_group:
            if ticks > e.fire_timer + 1000:
                e.fire_timer = ticks
                enemy_fire_cannon()
        #update bullets and resolve collisions
        for bullet in bullets:
            bullet.update(ticks)
            if bullet.owner == "player":
                for e in enemy_group:
                    if pygame.sprite.collide_rect(bullet, e):
                        player_score += 1
                        bullet.alive = False
                        play_sound(boom_sound)
                        enemy_group.remove(e)
            elif bullet.owner == "enemy":
                if pygame.sprite.collide_rect(bullet, player):
                    enemy_score += 1
                    bullet.alive = False
                    play_sound(boom_sound)
                    game_over = True
    #drawing section
    backbuffer.fill((100, 100, 20))
    for bullet in bullets:
        bullet.draw(backbuffer)
    for e in enemy_group:
        e.draw(backbuffer)
    player.draw(backbuffer)
    crosshair_group.draw(backbuffer)
    screen.blit(backbuffer, (0, 0))
    #scoreboard is shown in both states; extra text only when game is over
    print_text(font, 0, 0, "PLAYER " + str(player_score))
    print_text(font, 700, 0, "ENEMY " + str(enemy_score))
    if game_over:
        print_text(font, 300, 380, "GAME OVER")
        print_text(font, 300, 400, "PRESS P TO PLAY AGAIN!")
    pygame.display.update()
    #remove expired bullets. The original called bullets.remove() while
    #iterating over the same list, which skips the element following each
    #removal; rebuilding the list avoids that bug.
    bullets = [bullet for bullet in bullets if bullet.alive]
"[email protected]"
] | |
184bfebb357383b520e0be4fda111faf8a4b9ffa | e4fcd551a9d83e37a2cd6d5a2b53a3cc397ccb10 | /codes/eval_metrics/writing/mmocr/tools/dataset_converters/textdet/synthtext_converter.py | 811b1cc0e669b8dd185dbcf8156595002713a850 | [
"Apache-2.0"
] | permissive | eslambakr/HRS_benchmark | 20f32458a47c6e1032285b44e70cf041a64f842c | 9f153d8c71d1119e4b5c926b899bb556a6eb8a59 | refs/heads/main | 2023-08-08T11:57:26.094578 | 2023-07-22T12:24:51 | 2023-07-22T12:24:51 | 597,550,499 | 33 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,177 | py | # Copyright (c) OpenMMLab. All rights reserved.
import argparse
import json
import os.path as osp
import time
import lmdb
import mmcv
import mmengine
import numpy as np
from scipy.io import loadmat
from shapely.geometry import Polygon
from mmocr.utils import check_argument
def trace_boundary(char_boxes):
    """Trace the closed boundary polygon around one piece of text.

    Args:
        char_boxes (list[ndarray]): One 4x2 corner array per character,
            ordered left to right.

    Returns:
        ndarray: nx2 integer points — the top edge walked left-to-right
        followed by the bottom edge walked right-to-left.
    """
    assert check_argument.is_type_list(char_boxes, np.ndarray)
    # top-left and top-right corners of each char, left to right
    top_pts = [box[0:2] for box in char_boxes]
    # bottom-right and bottom-left corners, right to left
    bottom_pts = [box[[2, 3], :] for box in reversed(char_boxes)]
    return np.concatenate(top_pts + bottom_pts).astype(int)
def match_bbox_char_str(bboxes, char_bboxes, strs):
    """match the bboxes, char bboxes, and strs.

    Groups every character (its index, glyph, and box) under the word box
    it belongs to, and traces a boundary polygon per word.

    Args:
        bboxes (ndarray): The text boxes of size (2, 4, num_box).
        char_bboxes (ndarray): The char boxes of size (2, 4, num_char_box).
        strs (ndarray): The string of size (num_strs,)

    Returns:
        tuple: (poly_list, poly_box_list, poly_boundary_list,
        poly_charbox_list, poly_char_idx_list, poly_char_list) — each a
        list with one entry per word box.
    """
    assert isinstance(bboxes, np.ndarray)
    assert isinstance(char_bboxes, np.ndarray)
    assert isinstance(strs, np.ndarray)
    bboxes = bboxes.astype(np.int32)
    char_bboxes = char_bboxes.astype(np.int32)
    # A single box arrives as (2, 4); restore the trailing box axis, then
    # reorder both arrays to (num_box, 4 corners, 2 coords).
    if len(char_bboxes.shape) == 2:
        char_bboxes = np.expand_dims(char_bboxes, axis=2)
    char_bboxes = np.transpose(char_bboxes, (2, 1, 0))
    if len(bboxes.shape) == 2:
        bboxes = np.expand_dims(bboxes, axis=2)
    bboxes = np.transpose(bboxes, (2, 1, 0))
    # All characters with whitespace stripped; indexes align with char_bboxes.
    chars = ''.join(strs).replace('\n', '').replace(' ', '')
    num_boxes = bboxes.shape[0]
    # NOTE(review): `iter` here shadows the built-in; harmless locally.
    poly_list = [Polygon(bboxes[iter]) for iter in range(num_boxes)]
    poly_box_list = [bboxes[iter] for iter in range(num_boxes)]
    poly_char_list = [[] for iter in range(num_boxes)]
    poly_char_idx_list = [[] for iter in range(num_boxes)]
    poly_charbox_list = [[] for iter in range(num_boxes)]
    words = []
    for s in strs:
        words += s.split()
    # Cumulative word lengths give the exclusive end index of each word
    # within `chars`.
    words_len = [len(w) for w in words]
    words_end_inx = np.cumsum(words_len)
    start_inx = 0
    for word_inx, end_inx in enumerate(words_end_inx):
        for char_inx in range(start_inx, end_inx):
            poly_char_idx_list[word_inx].append(char_inx)
            poly_char_list[word_inx].append(chars[char_inx])
            poly_charbox_list[word_inx].append(char_bboxes[char_inx])
        start_inx = end_inx
    # Every word box must have received at least one character.
    for box_inx in range(num_boxes):
        assert len(poly_charbox_list[box_inx]) > 0
    poly_boundary_list = []
    for item in poly_charbox_list:
        boundary = np.ndarray((0, 2))
        if len(item) > 0:
            boundary = trace_boundary(item)
        poly_boundary_list.append(boundary)
    return (poly_list, poly_box_list, poly_boundary_list, poly_charbox_list,
            poly_char_idx_list, poly_char_list)
def convert_annotations(root_path, gt_name, lmdb_name):
    """Convert the annotation into lmdb dataset.

    Reads the SynthText ``gt.mat`` file, builds one COCO-style JSON record
    per image (bbox + segmentation boundary per word), and stores the
    records in an lmdb database keyed by image index.

    Args:
        root_path (str): The root path of dataset.
        gt_name (str): The ground truth filename.
        lmdb_name (str): The output lmdb filename.
    """
    assert isinstance(root_path, str)
    assert isinstance(gt_name, str)
    assert isinstance(lmdb_name, str)
    start_time = time.time()
    gt = loadmat(gt_name)
    img_num = len(gt['imnames'][0])
    # 40 GB map size: lmdb needs an upper bound on the database size.
    env = lmdb.open(lmdb_name, map_size=int(1e9 * 40))
    with env.begin(write=True) as txn:
        for img_id in range(img_num):
            # Progress/ETA report every 1000 images.
            if img_id % 1000 == 0 and img_id > 0:
                total_time_sec = time.time() - start_time
                avg_time_sec = total_time_sec / img_id
                eta_mins = (avg_time_sec * (img_num - img_id)) / 60
                print(f'\ncurrent_img/total_imgs {img_id}/{img_num} | '
                      f'eta: {eta_mins:.3f} mins')
            # for each img: read it only to record its height/width
            img_file = osp.join(root_path, 'imgs', gt['imnames'][0][img_id][0])
            img = mmcv.imread(img_file, 'unchanged')
            height, width = img.shape[0:2]
            img_json = {}
            img_json['file_name'] = gt['imnames'][0][img_id][0]
            img_json['height'] = height
            img_json['width'] = width
            img_json['annotations'] = []
            wordBB = gt['wordBB'][0][img_id]
            charBB = gt['charBB'][0][img_id]
            txt = gt['txt'][0][img_id]
            poly_list, _, poly_boundary_list, _, _, _ = match_bbox_char_str(
                wordBB, charBB, txt)
            # One annotation per word polygon: axis-aligned bbox plus the
            # traced boundary as the segmentation.
            for poly_inx in range(len(poly_list)):
                polygon = poly_list[poly_inx]
                min_x, min_y, max_x, max_y = polygon.bounds
                bbox = [min_x, min_y, max_x - min_x, max_y - min_y]
                anno_info = dict()
                anno_info['iscrowd'] = 0
                anno_info['category_id'] = 1
                anno_info['bbox'] = bbox
                anno_info['segmentation'] = [
                    poly_boundary_list[poly_inx].flatten().tolist()
                ]
                img_json['annotations'].append(anno_info)
            string = json.dumps(img_json)
            txn.put(str(img_id).encode('utf8'), string.encode('utf8'))
        # Record the image count so readers know the key range.
        key = b'total_number'
        value = str(img_num).encode('utf8')
        txn.put(key, value)
def parse_args():
    """Parse command-line options for the SynthText-to-lmdb converter."""
    parser = argparse.ArgumentParser(
        description='Convert synthtext to lmdb dataset')
    parser.add_argument('synthtext_path', help='synthetic root path')
    parser.add_argument('-o', '--out-dir', help='output path')
    return parser.parse_args()
# TODO: Refactor synthtext
def main():
    """Entry point: convert SynthText's gt.mat annotations into an lmdb DB."""
    args = parse_args()
    root = args.synthtext_path
    # Fall back to the dataset root when no output directory was given.
    out_dir = args.out_dir or root
    mmengine.mkdir_or_exist(out_dir)
    convert_annotations(root, osp.join(root, 'gt.mat'),
                        osp.join(out_dir, 'synthtext.lmdb'))
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
dba9826fd017a5155e4aeb88ce6828001cac6adb | f4a4c9a68a4ead50c0882832f3f73b9cb29271f6 | /backend/cardgameapp_22189/settings.py | 5d1e2e15e8ab4ff8efd7b0549a39e0e1e23558cb | [] | no_license | crowdbotics-apps/cardgameapp-22189 | 276e0c18661a3e1ae474f2deb11b6fc32b66eb38 | 200ca6880781d3d832be39f44b8aa290db481ec2 | refs/heads/master | 2023-01-11T11:45:35.488695 | 2020-11-01T17:50:20 | 2020-11-01T17:50:20 | 309,153,974 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,111 | py | """
Django settings for cardgameapp_22189 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
# All deployment knobs are read from the environment via django-environ.
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
# SECRET_KEY has no default: the process refuses to start without it.
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
# Trust the proxy's forwarded-proto header when deciding if a request is HTTPS.
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
    "django.contrib.admin",
    "django.contrib.auth",
    "django.contrib.contenttypes",
    "django.contrib.sessions",
    "django.contrib.messages",
    "django.contrib.staticfiles",
    "django.contrib.sites",
    "course",
]
LOCAL_APPS = [
    "home",
    "users.apps.UsersConfig",
]
THIRD_PARTY_APPS = [
    "rest_framework",
    "rest_framework.authtoken",
    "rest_auth",
    "rest_auth.registration",
    "bootstrap4",
    "allauth",
    "allauth.account",
    "allauth.socialaccount",
    "allauth.socialaccount.providers.google",
    "django_extensions",
    "drf_yasg",
    # start fcm_django push notifications
    "fcm_django",
    # end fcm_django push notifications
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
    "django.middleware.security.SecurityMiddleware",
    "django.contrib.sessions.middleware.SessionMiddleware",
    "django.middleware.common.CommonMiddleware",
    "django.middleware.csrf.CsrfViewMiddleware",
    "django.contrib.auth.middleware.AuthenticationMiddleware",
    "django.contrib.messages.middleware.MessageMiddleware",
    "django.middleware.clickjacking.XFrameOptionsMiddleware",
]
ROOT_URLCONF = "cardgameapp_22189.urls"
TEMPLATES = [
    {
        "BACKEND": "django.template.backends.django.DjangoTemplates",
        "DIRS": [],
        "APP_DIRS": True,
        "OPTIONS": {
            "context_processors": [
                "django.template.context_processors.debug",
                "django.template.context_processors.request",
                "django.contrib.auth.context_processors.auth",
                "django.contrib.messages.context_processors.messages",
            ],
        },
    },
]
WSGI_APPLICATION = "cardgameapp_22189.wsgi.application"
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
# Default to a local SQLite file; a DATABASE_URL env var (if set) wins below.
DATABASES = {
    "default": {
        "ENGINE": "django.db.backends.sqlite3",
        "NAME": os.path.join(BASE_DIR, "db.sqlite3"),
    }
}
if env.str("DATABASE_URL", default=None):
    DATABASES = {"default": env.db()}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
    {
        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.MinimumLengthValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.CommonPasswordValidator",
    },
    {
        "NAME": "django.contrib.auth.password_validation.NumericPasswordValidator",
    },
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = "en-us"
TIME_ZONE = "UTC"
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = "/static/"
# WhiteNoise serves the collected static files from the app process itself.
MIDDLEWARE += ["whitenoise.middleware.WhiteNoiseMiddleware"]
AUTHENTICATION_BACKENDS = (
    "django.contrib.auth.backends.ModelBackend",
    "allauth.account.auth_backends.AuthenticationBackend",
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, "static")]
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
# allauth / users: email-based accounts, no username.
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = "email"
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
    # Replace password reset serializer to fix 500 error
    "PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
    # Use custom serializer that has no username and matches web signup
    "REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
# Outbound email via SendGrid SMTP (credentials from the environment).
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# start fcm_django push notifications
FCM_DJANGO_SETTINGS = {"FCM_SERVER_KEY": env.str("FCM_SERVER_KEY", "")}
# end fcm_django push notifications
# Swagger settings for api docs
SWAGGER_SETTINGS = {
    "DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
    # output email to console instead of sending
    if not DEBUG:
        logging.warning(
            "You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails."
        )
    EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| [
"[email protected]"
] | |
cf025e4e4a83d5bcf74a7018aca2af88294b8cb1 | ec645951ce4a1f7d404ebca438dfff8fb74cc3f4 | /venv/bin/jupyter-troubleshoot | 8f347cbe928d8562e8843ae244df1a6cae1c7e4f | [] | no_license | Josue23/scrapy | 695c0e93e6c1f0c0de5a04bd2eaced3e5520801e | 469cc4cff7d986264c4bcc291e9eb5f5a6256da9 | refs/heads/master | 2021-01-09T06:19:59.794012 | 2017-03-04T00:21:52 | 2017-03-04T00:21:52 | 80,957,631 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 264 | #!/home/josh/Documentos/labs/scrapy/venv/bin/python3.5
# -*- coding: utf-8 -*-
import re
import sys
from jupyter_core.troubleshoot import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
e1286fa2a637e5aa1f0465a38d82e1bd3905c8d1 | 659a7a65c877f2eb0adbb6001a1f85f063d01acd | /mscreen/autodocktools_prepare_py3k/AutoDockTools/VisionInterface/Adt/Input/PublicServerLigandDB.py | 26991dad68bfc2d248eec6fec64dacb18f2d6a6b | [
"MIT"
] | permissive | e-mayo/mscreen | da59771be250ebe341feb102e0cbf41aab70de43 | a50f0b2f7104007c730baa51b4ec65c891008c47 | refs/heads/main | 2023-06-21T17:47:06.519307 | 2021-08-09T16:06:29 | 2021-08-09T16:06:29 | 345,008,321 | 10 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,110 | py | #########################################################################
#
# Date: Nov 2001 Authors: Michel Sanner
#
# [email protected]
#
# The Scripps Research Institute (TSRI)
# Molecular Graphics Lab
# La Jolla, CA 92037, USA
#
# Copyright: Michel Sanner and TSRI
#
#########################################################################
from NetworkEditor.items import NetworkNode
from AutoDockTools.VisionInterface.Adt.LigandDB import LigandDB
from mglutil.util.packageFilePath import getResourceFolderWithVersion
import os
import time
import urllib.request, urllib.error, urllib.parse
class PublicServerLigandDB(NetworkNode):
    """
    List of available public libraries on the virtual screening server.
    A description of the ligand libraries can be found on
    http://nbcr.sdsc.edu/pub/wiki/index.php?title=Virtual_Screening_Libraries

    Input: a public ligand library name
    Output: LigandDB object containing info about the info
    """
    def __init__(self, name='PublicServerLigandDB', **kw):
        kw['name'] = name
        # The original called NetworkNode.__init__ twice in a row; once is enough.
        NetworkNode.__init__(*(self,), **kw)
        ip = self.inputPortsDescr
        ip.append(datatype='string', name='server_lib', required=True, )
        fqdn = "kryptonite.nbcr.net"
        url = "http://" + fqdn + "/pub_ligand_libs.txt"
        # The library list is cached under the user's resource folder.
        publibdir = os.path.join(getResourceFolderWithVersion(), 'ws')
        if not os.path.exists(publibdir):
            os.makedirs(publibdir)
        publiblocal = os.path.join(publibdir, 'publibs.txt')
        # A lock file guards concurrent downloads; stale locks (>15 s old)
        # are assumed dead and removed.
        lock = publiblocal + '.lock'
        if os.path.exists(lock) and time.time() - os.path.getmtime(lock) > 15:
            os.remove(lock)
        try:
            if not os.path.exists(lock):
                open(lock, 'w').close()
                publibweb = urllib.request.urlopen(url)
                outfile = open(publiblocal, 'w')
                outfile.write(publibweb.read())
                outfile.close()
                os.remove(lock)
        except Exception:
            # Download failed: fall back to the cached copy read below.
            print("[INFO]: Getting list of public server libs from cache")
        try:
            f = open(publiblocal, 'r')
            self.choices = f.read().split()
            f.close()
        except Exception:
            self.choices = []
            print("[ERROR]: Unable to public server libs from the web and from cache")
        self.widgetDescr['server_lib'] = {
            'class':'NEComboBox', 'master':'node',
            'choices':self.choices,
            'fixedChoices':True,
            'entryfield_entry_width':18,
            'labelGridCfg':{'sticky':'w'},
            'widgetGridCfg':{'sticky':'w'},
            'labelCfg':{'text':'Server Libraries:'}}
        op = self.outputPortsDescr
        op.append(datatype='LigandDB', name='ligDB')
        # Source of the node's compute function, executed by the framework.
        code = """def doit(self, server_lib):
    ligDB = LigandDB(server_lib=server_lib)
    self.outputData(ligDB=ligDB)
"""
        self.setFunction(code)
| [
"[email protected]"
] | |
a625b979deaf6a06f61b88dd43ac56027f5f5322 | c59d6587ed5d7e7c4f4cbad2e4c8188eee741ad9 | /conftest.py | 134e36c3a4f3b5465cce6e8c54ef587ba3565484 | [] | no_license | n1k0din/kekino-api | 921d83b1be0c50e7dfb0b2411ba63fd9f3dc8039 | 4fef4b1c6bdec970fae1b599be4c719eee06e999 | refs/heads/master | 2023-08-27T18:21:16.640031 | 2021-11-07T11:13:01 | 2021-11-07T11:13:01 | 425,223,294 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 228 | py | import pytest
from django.conf import settings
@pytest.fixture(scope='session')
def django_db_setup():
settings.DATABASES['default'] = {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': 'db.sqlite3',
}
| [
"[email protected]"
] | |
d564c8aa72b8618e3d89a78ea6866c695c94cd74 | 7462f315c3f011f50dc0d1ce89cf3d5f2eb024db | /tramp/likelihoods/abs_likelihood.py | 8ad31af49a0340c934ae371dcc2c870f70851570 | [
"MIT"
] | permissive | Artaxerces/tramp | 060bcceb50f59ad5de96ab4eba8aa322651d90cf | e5351e65676f2e9a1b90d0f4eaf11d8259b548ef | refs/heads/master | 2023-04-03T04:49:14.345162 | 2021-04-08T08:55:54 | 2021-04-08T08:55:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,252 | py | import numpy as np
from scipy.stats import norm
from .base_likelihood import Likelihood
from ..utils.integration import gaussian_measure_2d
class AbsLikelihood(Likelihood):
    """Likelihood for the absolute-value channel y = |x|."""

    def __init__(self, y, y_name="y"):
        self.y_name = y_name
        self.size = self.get_size(y)
        self.repr_init()
        self.y = y

    def sample(self, X):
        """Generate observations y = |X|."""
        return np.abs(X)

    def math(self):
        return r"$\mathrm{abs}$"

    def compute_backward_posterior(self, az, bz, y):
        """Posterior mean rz and average variance vz of z given (az, bz, y)."""
        t = np.tanh(bz * y)
        rz = y * t
        # 1 / cosh**2 overflows for large arguments; 1 - tanh**2 is stable.
        vz = np.mean((y**2) * (1 - t**2))
        return rz, vz

    def beliefs_measure(self, az, tau_z, f):
        "NB: Assumes that f(bz, y) pair in y."
        sz_eff = np.sqrt(az * np.maximum(0, az * tau_z - 1))

        def f_scaled(xi_b, xi_y):
            bz = sz_eff * xi_b
            return f(bz, bz / az + xi_y / np.sqrt(az))

        return gaussian_measure_2d(0, 1, 0, 1, f_scaled)

    def measure(self, y, f):
        """Sum f over both preimages of y under the abs map."""
        return f(+y) + f(-y)

    def compute_log_partition(self, az, bz, y):
        terms = -0.5*az*(y**2) + np.logaddexp(bz*y, -bz*y)
        return np.sum(terms)
| [
"[email protected]"
] | |
f357eb496bccb34a809712c97c9517ac6f0fdd70 | 8ed3d2d285bb7255209b56a5ff9ec83bb4b8f430 | /setup.py | 6a083b3b06d7ee5d3ed16d73aacfe015edf07f6e | [] | no_license | MarkLuro/requests-html | f4af9211353e09908f254a9edc0965c084c59a36 | f43f3241f0c63cd50bb4286edffcc1f8ee5ae7bd | refs/heads/master | 2021-01-24T02:11:25.628019 | 2018-02-25T13:23:40 | 2018-02-25T13:23:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,023 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Note: To use the 'upload' functionality of this file, you must:
# $ pip install twine
import io
import os
import sys
from shutil import rmtree
from setuptools import find_packages, setup, Command
# Package meta-data.
NAME = 'requests-html'
DESCRIPTION = 'HTML Parsing for Humans.'
URL = 'https://github.com/requests/requests'
EMAIL = '[email protected]'
AUTHOR = 'Kenneth Reitz'
VERSION = '0.1.0'
# What packages are required for this module to be executed?
REQUIRED = [
    'requests', 'pyquery', 'html2text', 'fake-useragent', 'parse'
]
# The rest you shouldn't have to touch too much :)
# ------------------------------------------------
# Except, perhaps the License and Trove Classifiers!
# If you do change the License, remember to change the Trove Classifier for that!
here = os.path.abspath(os.path.dirname(__file__))
# Import the README and use it as the long-description.
# Note: this will only work if 'README.rst' is present in your MANIFEST.in file!
with io.open(os.path.join(here, 'README.rst'), encoding='utf-8') as f:
    long_description = '\n' + f.read()
class UploadCommand(Command):
    """Support setup.py upload."""

    description = 'Build and publish the package.'
    user_options = []

    @staticmethod
    def status(s):
        """Prints things in bold."""
        print('\033[1m{0}\033[0m'.format(s))

    def initialize_options(self):
        pass

    def finalize_options(self):
        pass

    def run(self):
        # Clear any stale build artifacts first; absence is fine.
        dist_dir = os.path.join(here, 'dist')
        try:
            self.status('Removing previous builds…')
            rmtree(dist_dir)
        except OSError:
            pass
        self.status('Building Source and Wheel (universal) distribution…')
        os.system('{0} setup.py sdist bdist_wheel --universal'.format(sys.executable))
        self.status('Uploading the package to PyPi via Twine…')
        os.system('twine upload dist/*')
        sys.exit()
# Where the magic happens:
setup(
    name=NAME,
    version=VERSION,
    description=DESCRIPTION,
    long_description=long_description,
    author=AUTHOR,
    author_email=EMAIL,
    url=URL,
    # If your package is a single module, use this instead of 'packages':
    py_modules=['requests_html'],
    # entry_points={
    #     'console_scripts': ['mycli=mymodule:cli'],
    # },
    install_requires=REQUIRED,
    include_package_data=True,
    license='MIT',
    classifiers=[
        # Trove classifiers
        # Full list: https://pypi.python.org/pypi?%3Aaction=list_classifiers
        'License :: OSI Approved :: MIT License',
        'Programming Language :: Python',
        'Programming Language :: Python :: 3.4',
        'Programming Language :: Python :: 3.5',
        'Programming Language :: Python :: 3.6',
        'Programming Language :: Python :: Implementation :: CPython',
        'Programming Language :: Python :: Implementation :: PyPy'
    ],
    # $ setup.py publish support: `python setup.py upload` runs UploadCommand.
    cmdclass={
        'upload': UploadCommand,
    },
)
| [
"[email protected]"
] | |
233da41e7bd6d8bc26423d834ec30979432da47b | 77f65ea86ebc544c3f3e66c0152086e45669068c | /ch09-objects/e42b2_recent_dict.py | 6449ace10c6bb5805d30a84d5cf9f40f10adaedd | [] | no_license | Cptgreenjeans/python-workout | e403f48b0694ff4db32fe5fc3f87f02f48a1a68e | b9c68520d572bf70eff8e554a8ee9c8702c88e6e | refs/heads/master | 2023-07-16T21:49:14.198660 | 2021-08-29T13:49:12 | 2021-08-29T13:49:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 374 | py | #!/usr/bin/env python3
"""Solution to chapter 9, exercise 42, beyond 2: recent_dict"""
class RecentDict(dict):
    """A dict that keeps only the ``maxsize`` most recently added keys.

    When inserting a new key pushes the size past ``maxsize``, the
    oldest-inserted key is evicted (dicts preserve insertion order).
    """

    def __init__(self, maxsize):
        """Create an empty RecentDict holding at most *maxsize* items."""
        super().__init__()
        self.maxsize = maxsize

    def __setitem__(self, key, value):
        # Store the key as given. The original stringified it
        # (dict.__setitem__(self, str(key), value)), which made
        # d[1] = x unretrievable via d[1] — set and get disagreed.
        super().__setitem__(key, value)
        if len(self) > self.maxsize:
            # Evict the oldest insertion: the first key in iteration order.
            self.pop(next(iter(self)))
| [
"[email protected]"
] | |
d1d8b0f0745d0cbadc0870e6c03600d69579d87f | 52b5773617a1b972a905de4d692540d26ff74926 | /.history/coins_20200607123443.py | 742c4c7e6e1d456cb4ac4cfeb1fabd621a8f9488 | [] | no_license | MaryanneNjeri/pythonModules | 56f54bf098ae58ea069bf33f11ae94fa8eedcabc | f4e56b1e4dda2349267af634a46f6b9df6686020 | refs/heads/master | 2022-12-16T02:59:19.896129 | 2020-09-11T12:05:22 | 2020-09-11T12:05:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 115 | py |
def change(amount, coins):
    """Return the minimum number of coins from *coins* needed to make *amount*.

    NOTE(review): the original snippet in this history file was left
    unfinished (`if mount` — a syntax error). It is completed here as the
    standard minimum-coin-count dynamic program; returns None when
    *amount* cannot be formed from *coins*.
    """
    INF = float('inf')
    # best[s] = fewest coins summing to s (INF while unreachable).
    best = [0] + [INF] * amount
    for sub_amount in range(1, amount + 1):
        for coin in coins:
            if coin <= sub_amount and best[sub_amount - coin] + 1 < best[sub_amount]:
                best[sub_amount] = best[sub_amount - coin] + 1
    return best[amount] if best[amount] != INF else None


change(5, [1, 2, 5])
| [
"[email protected]"
] | |
c53ec5397ec182007a22d88243b2d4ec32a3f966 | d6ca0b326f1bd0ce381c6db611f6331096bf4187 | /examples/example_20_using_deap_manual_runs.py | ab46bba8dfec0020018669d017e35953cbda71ea | [
"BSD-3-Clause"
] | permissive | SmokinCaterpillar/pypet | aa35355d70e8f44be015313494376d993f645d80 | 3d454ac65f89e7833baaf89510f73c546e90d8f6 | refs/heads/develop | 2023-08-08T16:01:54.087819 | 2023-02-14T14:59:32 | 2023-02-14T14:59:32 | 12,901,526 | 89 | 22 | BSD-3-Clause | 2023-07-24T00:46:12 | 2013-09-17T17:06:00 | Python | UTF-8 | Python | false | false | 6,723 | py | """ An example showing how to use DEAP optimization (http://pythonhosted.org/deap/).
DEAP can be combined with *pypet* to keep track of all the data and the full trajectory
of points created by a genetic algorithm.
Note that *pypet* adds quite some overhead to the optimization algorithm.
Using *pypet* in combination with DEAP is only suitable in case the
evaluation of an individual (i.e. a single run) takes a considerable amount of time
(i.e. 1 second or longer) and, thus, pypet's overhead is only marginal.
This *OneMax* problem serves only as an example and is not a well suited problem.
Suitable would be the genetic optimization of neural networks where running and evaluating
the network may take a few seconds.
Here we avoid using an Environment and *manually* execute runs using multiprocessing.
"""
__author__ = 'Robert Meyer'
import random
import os
import multiprocessing as multip
try:
from itertools import izip
except ImportError:
# For Python 3
izip = zip
from deap import base
from deap import creator
from deap import tools
from pypet import Trajectory, cartesian_product, manual_run, MultiprocContext
@manual_run(store_meta_data=True)  # Important decorator for manual execution of runs
def eval_one_max(traj, individual):
    """The fitness function: OneMax, i.e. the number of ones in *individual*.

    Stores the individual and its fitness in the trajectory before returning.
    """
    score = sum(individual)
    traj.f_add_result('$set.$.individual', list(individual))
    traj.f_add_result('$set.$.fitness', score)
    traj.f_store()
    return (score,)  # DEAP wants a tuple here!
def eval_wrapper(the_tuple):
    """Unpack a (traj, individual) tuple and forward it to eval_one_max.

    The pool's map function accepts only a single iterable, so the two
    arguments are zipped into tuples beforehand and unpacked again here.
    """
    traj, individual = the_tuple
    return eval_one_max(traj, individual)
def main():
    """Run the OneMax GA, tracking every evaluation as a pypet single run.

    Runs are executed *manually* (no pypet Environment): a multiprocessing
    pool maps the fitness function over trajectory copies produced by
    ``traj.f_iter_runs``, while a MultiprocContext makes storage safe.
    """
    # No environment here ;-)
    filename = os.path.join('experiments', 'example_20.hdf5')
    traj = Trajectory('onemax', filename=filename, overwrite_file=True)

    # ------- Add parameters ------- #
    traj.f_add_parameter('popsize', 100)
    traj.f_add_parameter('CXPB', 0.5)
    traj.f_add_parameter('MUTPB', 0.2)
    traj.f_add_parameter('NGEN', 20)
    traj.f_add_parameter('generation', 0)
    traj.f_add_parameter('ind_idx', 0)
    traj.f_add_parameter('ind_len', 50)
    traj.f_add_parameter('indpb', 0.005)
    traj.f_add_parameter('tournsize', 3)
    traj.f_add_parameter('seed', 42)
    traj.f_store(only_init=True)  # store skeleton; run data is stored per run

    # ------- Create and register functions with DEAP ------- #
    creator.create("FitnessMax", base.Fitness, weights=(1.0,))
    creator.create("Individual", list, fitness=creator.FitnessMax)
    toolbox = base.Toolbox()
    # Attribute generator
    toolbox.register("attr_bool", random.randint, 0, 1)
    # Structure initializers
    toolbox.register("individual", tools.initRepeat, creator.Individual,
                     toolbox.attr_bool, traj.ind_len)
    toolbox.register("population", tools.initRepeat, list, toolbox.individual)
    # Operator registering
    toolbox.register("mate", tools.cxTwoPoint)
    toolbox.register("mutate", tools.mutFlipBit, indpb=traj.indpb)
    toolbox.register("select", tools.selTournament, tournsize=traj.tournsize)
    toolbox.register("evaluate", eval_wrapper)
    pool = multip.Pool(4)
    toolbox.register("map", pool.map) # We use the pool's map function!

    # ------- Initialize Population -------- #
    random.seed(traj.seed)
    pop = toolbox.population(n=traj.popsize)
    CXPB, MUTPB, NGEN = traj.CXPB, traj.MUTPB, traj.NGEN
    start_idx = 0 # We need to count executed runs
    print("Start of evolution")
    for g in range(traj.NGEN):
        print("-- Generation %i --" % g)
        # Determine individuals that need to be evaluated
        eval_pop = [ind for ind in pop if not ind.fitness.valid]
        # Add as many explored runs as individuals that need to be evaluated
        traj.f_expand(cartesian_product({'generation': [g], 'ind_idx': range(len(eval_pop))}))
        # We need to make the storage service multiprocessing safe
        mc = MultiprocContext(traj, wrap_mode='QUEUE')
        mc.f_start()
        # Create a single iterable to be passed to our fitness function (wrapper).
        # `yields='copy'` is important, the pool's `map` function will
        # go over the whole iterator at once and store it in memory.
        # So for every run we need a copy of the trajectory.
        # Alternatively, you could use `yields='self'` and use the pool's `imap` function.
        zip_iterable = izip(traj.f_iter_runs(start_idx, yields='copy'), eval_pop)
        fitnesses = toolbox.map(eval_wrapper, zip_iterable)
        # fitnesses is just a list of tuples [(fitness,), ...]
        for idx, fitness in enumerate(fitnesses):
            # Update fitnesses
            eval_pop[idx].fitness.values = fitness
        # Finalize the multiproc wrapper
        mc.f_finalize()
        # Update start index
        start_idx += len(eval_pop)
        print(" Evaluated %i individuals" % len(eval_pop))
        # Gather all the fitnesses in one list and print the stats
        fits = [ind.fitness.values[0] for ind in pop]
        length = len(pop)
        mean = sum(fits) / length
        sum2 = sum(x*x for x in fits)
        std = abs(sum2 / length - mean**2)**0.5
        print(" Min %s" % min(fits))
        print(" Max %s" % max(fits))
        print(" Avg %s" % mean)
        print(" Std %s" % std)
        # ------- Create the next generation by crossover and mutation -------- #
        if g < traj.NGEN -1: # not necessary for the last generation
            # Select the next generation individuals
            offspring = toolbox.select(pop, len(pop))
            # Clone the selected individuals
            offspring = list(map(toolbox.clone, offspring))
            # Apply crossover and mutation on the offspring
            for child1, child2 in zip(offspring[::2], offspring[1::2]):
                if random.random() < CXPB:
                    toolbox.mate(child1, child2)
                    # Invalidate fitnesses of modified children
                    del child1.fitness.values
                    del child2.fitness.values
            for mutant in offspring:
                if random.random() < MUTPB:
                    toolbox.mutate(mutant)
                    del mutant.fitness.values
            # The population is entirely replaced by the offspring
            pop[:] = offspring
    # Stop the multiprocessing pool
    pool.close()
    pool.join()
    print("-- End of (successful) evolution --")
    best_ind = tools.selBest(pop, 1)[0]
    print("Best individual is %s, %s" % (best_ind, best_ind.fitness.values))
    traj.f_store() # And store all the rest of the data
# Script entry point.
# Fix: the original line had trailing dataset residue ("| [") fused onto
# the main() call, which is a syntax error; it has been removed.
if __name__ == "__main__":
    main()
"[email protected]"
] | |
eb7ea1fa5ef9b6d3b9b41c49fb051d256edeeb0e | 41fd80f9ccc72a17c2db16b7019312a87d3181e8 | /zhang_local/pdep/network3396_1.py | cf88478cfa806d77eb44abbf591e5dc37db88509 | [] | no_license | aberdeendinius/n-heptane | 1510e6704d87283043357aec36317fdb4a2a0c34 | 1806622607f74495477ef3fd772908d94cff04d9 | refs/heads/master | 2020-05-26T02:06:49.084015 | 2019-07-01T15:12:44 | 2019-07-01T15:12:44 | 188,069,618 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 117,871 | py | species(
label = '[CH2]C=COC([CH2])[O](6739)',
structure = SMILES('[CH2]C=COC([CH2])[O]'),
E0 = (167.03,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,1380,1390,370,380,2900,435,2995,3025,975,1000,1300,1375,400,500,1630,1680,345.431,345.433,345.461,345.467],'cm^-1')),
HinderedRotor(inertia=(0.00141228,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.293775,'amu*angstrom^2'), symmetry=1, barrier=(24.8776,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.293655,'amu*angstrom^2'), symmetry=1, barrier=(24.8775,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.293655,'amu*angstrom^2'), symmetry=1, barrier=(24.8768,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.387926,0.0686611,-5.02055e-05,8.33984e-09,3.6305e-12,20228.6,28.3893], Tmin=(100,'K'), Tmax=(1000.22,'K')), NASAPolynomial(coeffs=[18.6967,0.0174656,-6.4574e-06,1.19483e-09,-8.59121e-14,15464.4,-65.4513], Tmin=(1000.22,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(167.03,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(CCOJ) + radical(CJCO) + radical(Allyl_P)"""),
)
species(
label = 'C=C[O](594)',
structure = SMILES('C=C[O]'),
E0 = (-25.1807,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,3010,987.5,1337.5,450,1655,180],'cm^-1')),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (43.0446,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3625.12,'J/mol'), sigma=(3.97,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=2.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.34719,0.00128739,5.39982e-05,-7.84138e-08,3.24083e-11,-2992.85,8.97297], Tmin=(100,'K'), Tmax=(914.213,'K')), NASAPolynomial(coeffs=[11.726,-0.0014735,2.90737e-06,-5.96989e-10,3.70275e-14,-5941.49,-38.4465], Tmin=(914.213,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-25.1807,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(133.032,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(C=COJ)"""),
)
species(
label = 'C=CC=O(5269)',
structure = SMILES('C=CC=O'),
E0 = (-81.3387,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2782.5,750,1395,475,1775,1000,3010,987.5,1337.5,450,1655,2950,3100,1380,975,1025,1650],'cm^-1')),
HinderedRotor(inertia=(0.873408,'amu*angstrom^2'), symmetry=1, barrier=(20.0814,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (56.0633,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3136.31,'J/mol'), sigma=(5.14154,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=489.88 K, Pc=52.36 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.9738,0.0193269,-1.02836e-06,-7.40922e-09,2.6466e-12,-9743.32,12.1361], Tmin=(100,'K'), Tmax=(1315.19,'K')), NASAPolynomial(coeffs=[7.40832,0.0154746,-7.62321e-06,1.50372e-09,-1.06406e-13,-11743,-13.6408], Tmin=(1315.19,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-81.3387,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(178.761,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cd-Cd(CO)H) + group(Cds-O2d(Cds-Cds)H) + group(Cds-CdsHH)"""),
)
species(
label = '[CH2][CH]C1OC([CH2])O1(14763)',
structure = SMILES('[CH2][CH]C1OC([CH2])O1'),
E0 = (267.885,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.33428,0.0452373,2.3349e-06,-3.41814e-08,1.53732e-11,32326.8,28.8875], Tmin=(100,'K'), Tmax=(1036.19,'K')), NASAPolynomial(coeffs=[14.7399,0.0236938,-1.02052e-05,2.01969e-09,-1.48637e-13,27927,-44.0881], Tmin=(1036.19,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(267.885,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(320.107,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + ring(Cyclobutane) + radical(RCCJ) + radical(CCJCO) + radical(CJCO)"""),
)
species(
label = '[CH2][CH]C1CC([O])O1(14764)',
structure = SMILES('[CH2][CH]C1CC([O])O1'),
E0 = (274.1,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.80415,0.0398087,6.90936e-06,-3.65842e-08,1.76597e-11,33053.5,27.1087], Tmin=(100,'K'), Tmax=(911.702,'K')), NASAPolynomial(coeffs=[9.85898,0.0267524,-8.27181e-06,1.32544e-09,-8.69102e-14,30658.7,-16.0855], Tmin=(911.702,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(274.1,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(324.264,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + ring(Oxetane) + radical(CCJCO) + radical(CCOJ) + radical(RCCJ)"""),
)
species(
label = 'H(8)',
structure = SMILES('[H]'),
E0 = (211.805,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (1.00794,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1205.6,'J/mol'), sigma=(2.05,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,25474.2,-0.444973], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,25474.2,-0.444973], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(211.805,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""H""", comment="""Thermo library: primaryThermoLibrary"""),
)
# --- RMG-generated species definitions (continued); machine-written data ---

# Species '[CH2]C(=O)O[CH]C=C(12761)'
species(
    label = '[CH2]C(=O)O[CH]C=C(12761)',
    structure = SMILES('[CH2]C(=O)O[CH]C=C'),
    E0 = (-31.4003,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,3025,407.5,1350,352.5,3010,987.5,1337.5,450,1655,2950,3100,1380,975,1025,1650,200,800,933.333,1066.67,1200,1333.33,1466.67,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (98.0999,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(3501.16,'J/mol'), sigma=(5.80453,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=546.87 K, Pc=40.62 bar (from Joback method)"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.92643,0.045461,-2.48199e-05,6.03209e-09,-5.67048e-13,-3702.22,26.6594], Tmin=(100,'K'), Tmax=(2430.73,'K')), NASAPolynomial(coeffs=[18.7682,0.0177467,-7.71766e-06,1.34157e-09,-8.46359e-14,-11889.9,-69.5513], Tmin=(2430.73,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-31.4003,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-O2d)) + group(Cs-(Cds-Cds)OsHH) + group(Cs-(Cds-O2d)HHH) + group(Cds-CdsCsH) + group(Cds-OdCsOs) + group(Cds-CdsHH) + radical(CJCO) + radical(C=CCJ(O)C)"""),
)

# Species '[CH2]C([O])OC=C=C(14765)'
species(
    label = '[CH2]C([O])OC=C=C(14765)',
    structure = SMILES('[CH2]C([O])OC=C=C'),
    E0 = (192.135,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,540,610,2055,3010,987.5,1337.5,450,1655,1380,1390,370,380,2900,435,3000,3100,440,815,1455,1000,256.466,256.585,256.602,256.733],'cm^-1')),
        HinderedRotor(inertia=(0.471919,'amu*angstrom^2'), symmetry=1, barrier=(22.0371,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.471661,'amu*angstrom^2'), symmetry=1, barrier=(22.0362,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.471818,'amu*angstrom^2'), symmetry=1, barrier=(22.0366,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (98.0999,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.290657,0.0723323,-7.27596e-05,3.59159e-08,-6.87423e-12,23249.9,27.5162], Tmin=(100,'K'), Tmax=(1282.97,'K')), NASAPolynomial(coeffs=[18.733,0.0148328,-5.53274e-06,9.82586e-10,-6.70505e-14,18517.7,-66.0526], Tmin=(1282.97,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(192.135,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + group(Cdd-CdsCds) + radical(CCOJ) + radical(CJCO)"""),
)

# Species 'CH2(T)(28)' (triplet methylene; thermo from primary library)
species(
    label = 'CH2(T)(28)',
    structure = SMILES('[CH2]'),
    E0 = (381.37,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([1066.91,2790.99,3622.37],'cm^-1')),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (14.0266,'amu'),
    collisionModel = TransportData(shapeIndex=2, epsilon=(1197.29,'J/mol'), sigma=(3.8,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[4.01192,-0.000154979,3.26298e-06,-2.40422e-09,5.69497e-13,45867.7,0.5332], Tmin=(100,'K'), Tmax=(1104.58,'K')), NASAPolynomial(coeffs=[3.14983,0.00296674,-9.76056e-07,1.54115e-10,-9.50338e-15,46058.1,4.77808], Tmin=(1104.58,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(381.37,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(58.2013,'J/(mol*K)'), label="""CH2(T)""", comment="""Thermo library: primaryThermoLibrary"""),
)

# Species 'C=C[CH]OC=O(6118)'
species(
    label = 'C=C[CH]OC=O(6118)',
    structure = SMILES('C=C[CH]OC=O'),
    E0 = (-187.12,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2782.5,750,1395,475,1775,1000,3010,987.5,1337.5,450,1655,2950,3100,1380,975,1025,1650,510.201,511.893,512],'cm^-1')),
        HinderedRotor(inertia=(0.000649394,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.000644744,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.273992,'amu*angstrom^2'), symmetry=1, barrier=(50.5657,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (85.0813,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.41277,0.0299172,-1.27966e-06,-1.11624e-08,3.99297e-12,-22444.3,21.2798], Tmin=(100,'K'), Tmax=(1287.52,'K')), NASAPolynomial(coeffs=[7.96996,0.025828,-1.18657e-05,2.267e-09,-1.57909e-13,-24967.3,-11.1758], Tmin=(1287.52,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-187.12,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(245.277,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-O2d)) + group(Cs-(Cds-Cds)OsHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + group(Cds-OdOsH) + radical(C=CCJ(O)C)"""),
)

# Species 'O(T)(63)' (triplet oxygen atom; thermo from primary library)
species(
    label = 'O(T)(63)',
    structure = SMILES('[O]'),
    E0 = (243.034,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (15.9994,'amu'),
    collisionModel = TransportData(shapeIndex=0, epsilon=(665.16,'J/mol'), sigma=(2.75,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,9.24385e-15,-1.3678e-17,6.66185e-21,-1.00107e-24,29230.2,4.09104], Tmin=(100,'K'), Tmax=(3459.6,'K')), NASAPolynomial(coeffs=[2.5,9.20456e-12,-3.58608e-15,6.15199e-19,-3.92042e-23,29230.2,4.09104], Tmin=(3459.6,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(243.034,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""O(T)""", comment="""Thermo library: primaryThermoLibrary"""),
)

# Species 'C=C[CH]OC=C(6503)'
species(
    label = 'C=C[CH]OC=C(6503)',
    structure = SMILES('C=C[CH]OC=C'),
    E0 = (34.9912,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2995,3025,975,1000,1300,1375,400,500,1630,1680,2950,3000,3050,3100,1330,1430,900,1050,1000,1050,1600,1700,370.801,371.2,371.495,371.793],'cm^-1')),
        HinderedRotor(inertia=(0.268082,'amu*angstrom^2'), symmetry=1, barrier=(26.1652,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.267439,'amu*angstrom^2'), symmetry=1, barrier=(26.1658,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.268082,'amu*angstrom^2'), symmetry=1, barrier=(26.1667,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 2,
    opticalIsomers = 1,
    molecularWeight = (83.1085,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.43522,0.0419787,9.71864e-06,-4.81203e-08,2.2894e-11,4313.9,21.927], Tmin=(100,'K'), Tmax=(956.054,'K')), NASAPolynomial(coeffs=[16.1489,0.0158741,-4.95219e-06,8.99655e-10,-6.74629e-14,-119.9,-56.871], Tmin=(956.054,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(34.9912,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(295.164,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-(Cds-Cds)OsHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + group(Cds-CdsHH) + radical(C=CCJ(O)C)"""),
)
# --- RMG-generated species definitions (continued); machine-written data ---

# Species '[CH2]C=C[O](5266)'
species(
    label = '[CH2]C=C[O](5266)',
    structure = SMILES('[CH2]C=C[O]'),
    E0 = (90.2929,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2995,3025,975,1000,1300,1375,400,500,1630,1680,3000,3100,440,815,1455,1000,180],'cm^-1')),
        HinderedRotor(inertia=(1.57685,'amu*angstrom^2'), symmetry=1, barrier=(36.2549,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 3,
    opticalIsomers = 1,
    molecularWeight = (56.0633,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.69019,0.0144913,4.15491e-05,-7.27602e-08,3.14101e-11,10920.2,13.4175], Tmin=(100,'K'), Tmax=(922.751,'K')), NASAPolynomial(coeffs=[14.044,0.00224417,1.35973e-06,-3.04875e-10,1.62832e-14,7250.86,-48.974], Tmin=(922.751,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(90.2929,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(178.761,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(Allyl_P) + radical(C=COJ)"""),
)

# Species '[CH2]C=CO[C]([CH2])O(13880)'
species(
    label = '[CH2]C=CO[C]([CH2])O(13880)',
    structure = SMILES('[CH2]C=CO[C]([CH2])O'),
    E0 = (146.571,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,360,370,350,3615,1277.5,1000,2995,3025,975,1000,1300,1375,400,500,1630,1680,200,800,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 4,
    opticalIsomers = 1,
    molecularWeight = (99.1079,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.122663,0.0714593,-4.61269e-05,-7.08398e-09,1.22239e-11,17780.7,29.861], Tmin=(100,'K'), Tmax=(940.853,'K')), NASAPolynomial(coeffs=[22.7691,0.00944933,-1.90215e-06,2.94372e-10,-2.38937e-14,12002.5,-86.0753], Tmin=(940.853,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(146.571,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(311.793,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(Allyl_P) + radical(Cs_P) + radical(CJCO)"""),
)

# Species '[CH2][CH][CH]OC(C)=O(13711)'
species(
    label = '[CH2][CH][CH]OC(C)=O(13711)',
    structure = SMILES('[CH2][CH][CH]OC(C)=O'),
    E0 = (111.808,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000,3000,3050,390,425,1340,1360,335,370,200,800,933.333,1066.67,1200,1333.33,1466.67,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 4,
    opticalIsomers = 1,
    molecularWeight = (99.1079,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.21494,0.0581538,-4.43402e-05,1.7401e-08,-2.80505e-12,13550,30.5294], Tmin=(100,'K'), Tmax=(1437.1,'K')), NASAPolynomial(coeffs=[12.5612,0.0265727,-1.13767e-05,2.10931e-09,-1.44872e-13,10288.8,-28.3243], Tmin=(1437.1,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(111.808,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(311.793,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-O2d)) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cs-(Cds-O2d)HHH) + group(Cds-OdCsOs) + radical(CCsJOC(O)) + radical(RCCJ) + radical(CCJCO)"""),
)

# Species '[CH2]C([O])OC=[C]C(14766)' (no mode data)
species(
    label = '[CH2]C([O])OC=[C]C(14766)',
    structure = SMILES('[CH2]C([O])OC=[C]C'),
    E0 = (253.372,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (99.1079,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.572198,0.0731758,-7.44235e-05,3.89679e-08,-8.13909e-12,30599,27.9229], Tmin=(100,'K'), Tmax=(1157.98,'K')), NASAPolynomial(coeffs=[14.9725,0.0234336,-9.99037e-06,1.87333e-09,-1.30736e-13,27263.9,-43.6629], Tmin=(1157.98,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(253.372,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(CCOJ) + radical(Cds_S) + radical(CJCO)"""),
)

# Species '[CH2]C([O])O[C]=CC(14767)' (no mode data)
species(
    label = '[CH2]C([O])O[C]=CC(14767)',
    structure = SMILES('[CH2]C([O])O[C]=CC'),
    E0 = (255.275,'kJ/mol'),
    spinMultiplicity = 1,
    opticalIsomers = 1,
    molecularWeight = (99.1079,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.00905,0.0679662,-6.78973e-05,3.68765e-08,-8.27517e-12,30808.4,29.016], Tmin=(100,'K'), Tmax=(1059.77,'K')), NASAPolynomial(coeffs=[11.2191,0.0294291,-1.33517e-05,2.56353e-09,-1.80707e-13,28644.3,-20.8344], Tmin=(1059.77,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(255.275,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(CJCO) + radical(C=CJO) + radical(CCOJ)"""),
)
# --- RMG-generated species definitions (continued); machine-written data.
# NOTE(review): the species entry following this block in the original file
# is truncated and left untouched here. ---

# Species '[CH2]C=[C]OC([CH2])O(13882)'
species(
    label = '[CH2]C=[C]OC([CH2])O(13882)',
    structure = SMILES('[CH2]C=[C]OC([CH2])O'),
    E0 = (181.069,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,1685,370,3010,987.5,1337.5,450,1655,3615,1277.5,1000,1380,1390,370,380,2900,435,200,800,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 4,
    opticalIsomers = 1,
    molecularWeight = (99.1079,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.268225,0.0728027,-6.36654e-05,2.0856e-08,-1.42733e-13,21920.2,31.0859], Tmin=(100,'K'), Tmax=(983.917,'K')), NASAPolynomial(coeffs=[19.0185,0.0155601,-5.34007e-06,9.46972e-10,-6.67773e-14,17311.5,-63.7391], Tmin=(983.917,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(181.069,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(311.793,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(C=CJO) + radical(CJCO) + radical(Allyl_P)"""),
)

# Species '[CH2]C=[C]OC(C)[O](13713)'
species(
    label = '[CH2]C=[C]OC(C)[O](13713)',
    structure = SMILES('[CH2]C=[C]OC(C)[O]'),
    E0 = (195.185,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2750,2800,2850,1350,1500,750,1050,1375,1000,1380,1390,370,380,2900,435,1685,370,3010,987.5,1337.5,450,1655,335.667,335.669,335.67,335.676],'cm^-1')),
        HinderedRotor(inertia=(0.00149611,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.00149611,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.312455,'amu*angstrom^2'), symmetry=1, barrier=(24.9826,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.312455,'amu*angstrom^2'), symmetry=1, barrier=(24.9826,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 4,
    opticalIsomers = 1,
    molecularWeight = (99.1079,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.795547,0.0646031,-5.46214e-05,2.35074e-08,-4.06167e-12,23595.5,29.4259], Tmin=(100,'K'), Tmax=(1378.52,'K')), NASAPolynomial(coeffs=[15.0184,0.0233326,-9.71351e-06,1.78924e-09,-1.22956e-13,19674.2,-43.7569], Tmin=(1378.52,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(195.185,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(CCOJ) + radical(C=CJO) + radical(Allyl_P)"""),
)

# Species '[CH2][C]=COC([CH2])O(13881)'
species(
    label = '[CH2][C]=COC([CH2])O(13881)',
    structure = SMILES('[CH2][C]=COC([CH2])O'),
    E0 = (179.166,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,1685,370,3010,987.5,1337.5,450,1655,3615,1277.5,1000,1380,1390,370,380,2900,435,200,800,1600],'cm^-1')),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 4,
    opticalIsomers = 1,
    molecularWeight = (99.1079,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.616201,0.0830424,-8.67012e-05,4.29567e-08,-7.98602e-12,21730.7,31.6161], Tmin=(100,'K'), Tmax=(1478.21,'K')), NASAPolynomial(coeffs=[23.5091,0.00829358,-1.24465e-06,8.38728e-11,-2.5273e-15,15632.5,-90.7051], Tmin=(1478.21,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(179.166,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(311.793,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(Cds_S) + radical(CJCO) + radical(Allyl_P)"""),
)

# Species '[CH2][C]=COC(C)[O](13712)'
species(
    label = '[CH2][C]=COC(C)[O](13712)',
    structure = SMILES('[CH2][C]=COC(C)[O]'),
    E0 = (193.283,'kJ/mol'),
    modes = [
        HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2750,2800,2850,1350,1500,750,1050,1375,1000,1380,1390,370,380,2900,435,1685,370,3010,987.5,1337.5,450,1655,421.589,421.607,421.608,421.638],'cm^-1')),
        HinderedRotor(inertia=(0.134851,'amu*angstrom^2'), symmetry=1, barrier=(17.0113,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.134875,'amu*angstrom^2'), symmetry=1, barrier=(17.0114,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.134863,'amu*angstrom^2'), symmetry=1, barrier=(17.0111,'kJ/mol'), semiclassical=False),
        HinderedRotor(inertia=(0.948558,'amu*angstrom^2'), symmetry=1, barrier=(119.627,'kJ/mol'), semiclassical=False),
    ],
    spinMultiplicity = 4,
    opticalIsomers = 1,
    molecularWeight = (99.1079,'amu'),
    energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
    thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.467111,0.0685096,-5.65876e-05,1.9878e-08,-1.63265e-12,23381.6,27.947], Tmin=(100,'K'), Tmax=(1072.15,'K')), NASAPolynomial(coeffs=[17.1054,0.0200675,-7.88678e-06,1.45493e-09,-1.02104e-13,19030.3,-57.1363], Tmin=(1072.15,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(193.283,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(CCOJ) + radical(Cds_S) + radical(Allyl_P)"""),
)
species(
label = '[CH2]C(=O)O[CH][CH]C(2373)',
structure = SMILES('[CH2]C(=O)O[CH][CH]C'),
E0 = (118.151,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,3000,3100,440,815,1455,1000,3000,3050,390,425,1340,1360,335,370,200,800,933.333,1066.67,1200,1333.33,1466.67,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.05771,0.0622505,-5.25325e-05,2.31575e-08,-4.17591e-12,14318.1,29.9403], Tmin=(100,'K'), Tmax=(1305.63,'K')), NASAPolynomial(coeffs=[12.7673,0.0263763,-1.13177e-05,2.1128e-09,-1.46306e-13,11260.4,-29.6745], Tmin=(1305.63,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(118.151,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(311.793,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-O2d)) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cs-(Cds-O2d)HHH) + group(Cds-OdCsOs) + radical(CJCO) + radical(CCJCO) + radical(CCsJOC(O))"""),
)
species(
label = '[CH2][CH][O](719)',
structure = SMILES('[CH2][CH][O]'),
E0 = (361.021,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,3000,3100,440,815,1455,1000,1878.99],'cm^-1')),
HinderedRotor(inertia=(0.232981,'amu*angstrom^2'), symmetry=1, barrier=(5.35669,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (43.0446,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.03639,0.0272039,-5.17476e-05,5.40082e-08,-2.05139e-11,43449.8,12.3205], Tmin=(100,'K'), Tmax=(879.689,'K')), NASAPolynomial(coeffs=[2.12305,0.0164211,-7.89343e-06,1.47303e-09,-9.88046e-14,44188.4,19.8945], Tmin=(879.689,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(361.021,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(128.874,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + radical(CCsJOH) + radical(CJCO) + radical(CCOJ)"""),
)
species(
label = '[CH2]C([O])[O](696)',
structure = SMILES('[CH2]C([O])[O]'),
E0 = (206.197,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1380,1390,370,380,2900,435,3000,3100,440,815,1455,1000,1958.04,1961.92],'cm^-1')),
HinderedRotor(inertia=(0.117955,'amu*angstrom^2'), symmetry=1, barrier=(2.71202,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (59.044,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.98521,0.0307914,-6.07535e-05,7.05352e-08,-2.93746e-11,24828.1,16.2791], Tmin=(100,'K'), Tmax=(843.556,'K')), NASAPolynomial(coeffs=[-0.613396,0.0260677,-1.36113e-05,2.66003e-09,-1.84546e-13,26210.4,37.6228], Tmin=(843.556,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(206.197,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(153.818,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + radical(CJCO) + radical(CCOJ) + radical(CCOJ)"""),
)
species(
label = '[CH]C=C(8168)',
structure = SMILES('[CH]C=C'),
E0 = (376.808,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,3010,987.5,1337.5,450,1655,192.655,193.544,193.915],'cm^-1')),
HinderedRotor(inertia=(1.88068,'amu*angstrom^2'), symmetry=1, barrier=(50.3487,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 3,
opticalIsomers = 1,
molecularWeight = (40.0639,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.32096,0.00806329,3.46645e-05,-4.52343e-08,1.64854e-11,45350.1,10.7121], Tmin=(100,'K'), Tmax=(975.253,'K')), NASAPolynomial(coeffs=[5.21066,0.0176207,-6.65616e-06,1.20944e-09,-8.49962e-14,44158.4,-2.57721], Tmin=(975.253,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(376.808,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(153.818,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(AllylJ2_triplet)"""),
)
species(
label = '[CH2][CH][CH]OC([CH2])=O(6733)',
structure = SMILES('[CH2][CH][CH]OC([CH2])=O'),
E0 = (323.397,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3050,390,425,1340,1360,335,370,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,200,800,933.333,1066.67,1200,1333.33,1466.67,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (98.0999,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.21376,0.0615629,-5.79898e-05,2.90132e-08,-5.95561e-12,38995.8,31.8624], Tmin=(100,'K'), Tmax=(1155.91,'K')), NASAPolynomial(coeffs=[11.5721,0.0257181,-1.14747e-05,2.18576e-09,-1.53391e-13,36601.1,-19.6115], Tmin=(1155.91,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(323.397,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(286.849,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-O2d)) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cs-(Cds-O2d)HHH) + group(Cds-OdCsOs) + radical(CJCO) + radical(CCJCO) + radical(CCsJOC(O)) + radical(RCCJ)"""),
)
species(
label = '[CH2][C]=COC([CH2])[O](14446)',
structure = SMILES('[CH2][C]=COC([CH2])[O]'),
E0 = (404.872,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,3010,987.5,1337.5,450,1655,1380,1390,370,380,2900,435,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,361.684,361.685,361.685,361.686],'cm^-1')),
HinderedRotor(inertia=(0.00128862,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.257862,'amu*angstrom^2'), symmetry=1, barrier=(23.9367,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.257852,'amu*angstrom^2'), symmetry=1, barrier=(23.9366,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.257853,'amu*angstrom^2'), symmetry=1, barrier=(23.9367,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (98.0999,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.312231,0.0737246,-7.63938e-05,3.90439e-08,-7.75853e-12,48833.8,29.1369], Tmin=(100,'K'), Tmax=(1234.55,'K')), NASAPolynomial(coeffs=[18.1576,0.0159045,-6.14082e-06,1.10646e-09,-7.60284e-14,44427.7,-60.7165], Tmin=(1234.55,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(404.872,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(Allyl_P) + radical(CCOJ) + radical(CJCO) + radical(Cds_S)"""),
)
species(
label = '[CH2]C=[C]OC([CH2])[O](14444)',
structure = SMILES('[CH2]C=[C]OC([CH2])[O]'),
E0 = (406.774,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,3010,987.5,1337.5,450,1655,1380,1390,370,380,2900,435,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,275.914,955.375,958.201,962.459],'cm^-1')),
HinderedRotor(inertia=(0.108252,'amu*angstrom^2'), symmetry=1, barrier=(5.67964,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.417623,'amu*angstrom^2'), symmetry=1, barrier=(19.0051,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.529444,'amu*angstrom^2'), symmetry=1, barrier=(25.7345,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.416024,'amu*angstrom^2'), symmetry=1, barrier=(19.0025,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (98.0999,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.815125,0.0678236,-6.78165e-05,3.47292e-08,-7.10159e-12,49040.2,29.9867], Tmin=(100,'K'), Tmax=(1180.27,'K')), NASAPolynomial(coeffs=[14.3518,0.0219467,-9.51138e-06,1.79576e-09,-1.25732e-13,45844.8,-37.5637], Tmin=(1180.27,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(406.774,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(Allyl_P) + radical(CCOJ) + radical(C=CJO) + radical(CJCO)"""),
)
species(
label = '[CH2]C([O])OC1[CH]C1(12058)',
structure = SMILES('[CH2]C([O])OC1[CH]C1'),
E0 = (289.766,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2883.33,3016.67,3150,900,966.667,1033.33,1100,1380,1390,370,380,2900,435,3000,3100,440,815,1455,1000,300,800,800,800,800,800,800,1600,1600,1600,1600,1600,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.15566,0.0541371,-3.31902e-05,7.5488e-09,-2.53015e-15,34960.1,29.814], Tmin=(100,'K'), Tmax=(1272.35,'K')), NASAPolynomial(coeffs=[13.6507,0.0249566,-1.06971e-05,2.00275e-09,-1.3879e-13,30962.8,-36.6901], Tmin=(1272.35,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(289.766,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(320.107,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsOsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + ring(Cyclopropane) + radical(CJCO) + radical(CCJCO) + radical(CCOJ)"""),
)
species(
label = '[CH2]C1[CH]OC([CH2])O1(14679)',
structure = SMILES('[CH2]C1[CH]OC([CH2])O1'),
E0 = (194.31,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.640573,0.0799435,-9.49296e-05,5.39865e-08,-1.0948e-11,23556.4,26.2961], Tmin=(100,'K'), Tmax=(1504.04,'K')), NASAPolynomial(coeffs=[18.2218,0.00467628,5.17077e-06,-1.47995e-09,1.16105e-13,20721.7,-62.9639], Tmin=(1504.04,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(194.31,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(324.264,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(Cs-CsCsOsH) + group(Cs-CsOsOsH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + ring(1,3-Dioxolane) + radical(CCsJOCs) + radical(CJCO) + radical(CJC(C)OC)"""),
)
species(
label = '[CH2]C1[CH]OC([O])C1(14768)',
structure = SMILES('[CH2]C1[CH]OC([O])C1'),
E0 = (179.629,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.72997,0.0398491,1.5466e-05,-5.40631e-08,2.70102e-11,21695.7,22.1897], Tmin=(100,'K'), Tmax=(866.457,'K')), NASAPolynomial(coeffs=[11.9439,0.0220722,-4.61331e-06,5.14547e-10,-2.6949e-14,18823.1,-31.9854], Tmin=(866.457,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(179.629,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(328.422,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsCsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + ring(Tetrahydrofuran) + radical(CCOJ) + radical(Isobutyl) + radical(CCsJOCs)"""),
)
species(
label = 'C=C[CH]OC(=C)O(13875)',
structure = SMILES('C=C[CH]OC(=C)O'),
E0 = (-122.146,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2950,3000,3050,3100,1330,1430,900,1050,1000,1050,1600,1700,3010,987.5,1337.5,450,1655,3615,1277.5,1000,350,440,435,1725,267.891,267.892,267.896,267.899],'cm^-1')),
HinderedRotor(inertia=(0.00234882,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.475001,'amu*angstrom^2'), symmetry=1, barrier=(24.1908,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.475002,'amu*angstrom^2'), symmetry=1, barrier=(24.1907,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.475025,'amu*angstrom^2'), symmetry=1, barrier=(24.1907,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.486889,0.0683947,-5.60492e-05,1.75959e-08,-1.71501e-13,-14556.4,25.1179], Tmin=(100,'K'), Tmax=(1012.33,'K')), NASAPolynomial(coeffs=[17.1856,0.0188209,-6.90594e-06,1.24305e-09,-8.69112e-14,-18778.1,-59.8009], Tmin=(1012.33,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-122.146,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-(Cds-Cd)H) + group(Cs-(Cds-Cds)OsHH) + group(Cds-CdsCsH) + group(Cds-CdsCsCs) + group(Cds-CdsHH) + group(Cds-CdsHH) + radical(C=CCJ(O)C)"""),
)
species(
label = 'C=C[CH]OC(C)=O(12663)',
structure = SMILES('C=C[CH]OC(C)=O'),
E0 = (-242.989,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,3010,987.5,1337.5,450,1655,3025,407.5,1350,352.5,200,800,933.333,1066.67,1200,1333.33,1466.67,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.83242,0.0429711,-1.36453e-05,-3.11715e-09,1.76562e-12,-29143.1,25.6846], Tmin=(100,'K'), Tmax=(1451.82,'K')), NASAPolynomial(coeffs=[10.3014,0.0314055,-1.38542e-05,2.56181e-09,-1.73661e-13,-32842.4,-22.6021], Tmin=(1451.82,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-242.989,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-O2d)) + group(Cs-(Cds-Cds)OsHH) + group(Cs-(Cds-O2d)HHH) + group(Cds-CdsCsH) + group(Cds-OdCsOs) + group(Cds-CdsHH) + radical(C=CCJ(O)C)"""),
)
species(
label = '[CH2]C(=O)OC=CC(14769)',
structure = SMILES('[CH2]C(=O)OC=CC'),
E0 = (-174.505,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.654622,0.0639404,-5.28859e-05,2.2034e-08,-3.64334e-12,-20859.7,26.5912], Tmin=(100,'K'), Tmax=(1452.89,'K')), NASAPolynomial(coeffs=[16.4635,0.0204154,-7.94858e-06,1.41387e-09,-9.51311e-14,-25453.3,-55.5828], Tmin=(1452.89,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-174.505,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-(Cds-O2d)(Cds-Cd)) + group(Cs-(Cds-Cds)HHH) + group(Cs-(Cds-O2d)HHH) + group(Cds-CdsCsH) + group(Cds-OdCsOs) + group(Cds-CdsOsH) + radical(CJCO)"""),
)
species(
label = '[CH2]C(O)OC=C=C(13876)',
structure = SMILES('[CH2]C(O)OC=C=C'),
E0 = (-33.5702,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3615,1277.5,1000,540,610,2055,2950,3100,1380,975,1025,1650,3000,3100,440,815,1455,1000,3010,987.5,1337.5,450,1655,1380,1390,370,380,2900,435,180,180,180],'cm^-1')),
HinderedRotor(inertia=(0.92561,'amu*angstrom^2'), symmetry=1, barrier=(21.2816,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.925306,'amu*angstrom^2'), symmetry=1, barrier=(21.2746,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.925681,'amu*angstrom^2'), symmetry=1, barrier=(21.2832,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.925806,'amu*angstrom^2'), symmetry=1, barrier=(21.2861,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[-0.659109,0.0818627,-8.36622e-05,4.04401e-08,-7.3093e-12,-3852.17,30.0751], Tmin=(100,'K'), Tmax=(1538.27,'K')), NASAPolynomial(coeffs=[23.613,0.00791995,-1.00102e-06,4.00513e-11,1.63196e-16,-10038.5,-93.3129], Tmin=(1538.27,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-33.5702,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + group(Cdd-CdsCds) + radical(CJCO)"""),
)
species(
label = 'C=C=COC(C)[O](13704)',
structure = SMILES('C=C=COC(C)[O]'),
E0 = (-19.4542,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,2950,3100,1380,975,1025,1650,1380,1390,370,380,2900,435,540,610,2055,3010,987.5,1337.5,450,1655,198.791,201.392,201.532,203.532],'cm^-1')),
HinderedRotor(inertia=(0.767291,'amu*angstrom^2'), symmetry=1, barrier=(21.3284,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.726889,'amu*angstrom^2'), symmetry=1, barrier=(21.3142,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.746902,'amu*angstrom^2'), symmetry=1, barrier=(21.3235,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.56137,0.0658027,-4.86037e-05,1.14738e-08,1.34223e-12,-2207.48,25.9075], Tmin=(100,'K'), Tmax=(1042.71,'K')), NASAPolynomial(coeffs=[17.037,0.02004,-7.86037e-06,1.46496e-09,-1.04017e-13,-6591.42,-58.813], Tmin=(1042.71,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-19.4542,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(320.107,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + group(Cdd-CdsCds) + radical(CCOJ)"""),
)
species(
label = '[CH2][CH]CO[C]([CH2])[O](2383)',
structure = SMILES('[CH2][CH]CO[C]([CH2])[O]'),
E0 = (550.305,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,360,370,350,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,3025,407.5,1350,352.5,211.509,829.515,1178.27,1554.05,1957.14],'cm^-1')),
HinderedRotor(inertia=(0.113644,'amu*angstrom^2'), symmetry=1, barrier=(3.18827,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.113644,'amu*angstrom^2'), symmetry=1, barrier=(3.18827,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.113644,'amu*angstrom^2'), symmetry=1, barrier=(3.18827,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.113644,'amu*angstrom^2'), symmetry=1, barrier=(3.18827,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.113644,'amu*angstrom^2'), symmetry=1, barrier=(3.18827,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 6,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(3800.62,'J/mol'), sigma=(6.68442,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=593.65 K, Pc=28.87 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.879862,0.076051,-0.000110871,9.89353e-08,-3.59728e-11,66291.6,34.0529], Tmin=(100,'K'), Tmax=(793.721,'K')), NASAPolynomial(coeffs=[5.97549,0.0389327,-1.91066e-05,3.7034e-09,-2.58547e-13,65843,12.9164], Tmin=(793.721,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(550.305,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(311.793,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(CCJCO) + radical(CCOJ) + radical(RCCJ) + radical(CJCO) + radical(Cs_P)"""),
)
species(
label = '[CH2]C[CH]O[C]([CH2])[O](6734)',
structure = SMILES('[CH2]C[CH]O[C]([CH2])[O]'),
E0 = (530.859,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2850,1437.5,1250,1305,750,350,360,370,350,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,3025,407.5,1350,352.5,200,800,1066.67,1333.33,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 6,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.360017,0.0885477,-0.000139438,1.22732e-07,-4.26708e-11,63970.5,32.8686], Tmin=(100,'K'), Tmax=(826.94,'K')), NASAPolynomial(coeffs=[8.34678,0.0355725,-1.733e-05,3.31612e-09,-2.28548e-13,63139.9,-1.18025], Tmin=(826.94,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(530.859,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(311.793,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cs-CsHHH) + radical(RCCJ) + radical(CCOJ) + radical(Cs_P) + radical(CJCO) + radical(CCsJOCs)"""),
)
species(
label = '[CH2]C=COC1CO1(6594)',
structure = SMILES('[CH2]C=COC1CO1'),
E0 = (-80.7007,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.816098,0.049352,2.61097e-05,-9.12505e-08,4.63827e-11,-9571.77,22.4043], Tmin=(100,'K'), Tmax=(884.124,'K')), NASAPolynomial(coeffs=[23.419,0.00465321,4.28543e-06,-1.15428e-09,8.37417e-14,-15818.3,-96.5807], Tmin=(884.124,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-80.7007,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(320.107,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-Cs(Cds-Cd)) + group(Cs-CsOsOsH) + group(Cs-CsOsHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + ring(Ethylene_oxide) + radical(Allyl_P)"""),
)
species(
label = '[CH2]C1OC=CCO1(14722)',
structure = SMILES('[CH2]C1OC=CCO1'),
E0 = (-110.249,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.40996,0.0307789,7.35608e-05,-1.35027e-07,5.96454e-11,-13142.2,19.0172], Tmin=(100,'K'), Tmax=(910.323,'K')), NASAPolynomial(coeffs=[22.9455,0.00476339,3.37117e-06,-8.28296e-10,5.24612e-14,-19906,-98.4706], Tmin=(910.323,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-110.249,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(328.422,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-Cs(Cds-Cd)) + group(Cs-CsOsOsH) + group(Cs-(Cds-Cds)OsHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + ring(24dihydro13dioxin) + radical(CJCO)"""),
)
species(
label = '[O]C1CCC=CO1(14770)',
structure = SMILES('[O]C1CCC=CO1'),
E0 = (-128.912,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.97555,0.0255627,6.22009e-05,-1.01353e-07,4.15606e-11,-15414.3,18.7861], Tmin=(100,'K'), Tmax=(940.553,'K')), NASAPolynomial(coeffs=[14.6001,0.019935,-5.47364e-06,9.44085e-10,-7.09505e-14,-19915,-52.6474], Tmin=(940.553,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-128.912,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(332.579,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsCsHH) + group(Cs-(Cds-Cds)CsHH) + group(Cs-CsOsOsH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + ring(3,4-Dihydro-2H-pyran) + radical(CCOJ)"""),
)
species(
label = '[CH2]C([O])C([CH2])C=O(12644)',
structure = SMILES('[CH2]C([O])C([CH2])C=O'),
E0 = (223.346,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2782.5,750,1395,475,1775,1000,1380,1383.33,1386.67,1390,370,373.333,376.667,380,2800,3000,430,440,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,237.377,2887.88],'cm^-1')),
HinderedRotor(inertia=(0.31931,'amu*angstrom^2'), symmetry=1, barrier=(12.7646,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.00299155,'amu*angstrom^2'), symmetry=1, barrier=(0.119627,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.319235,'amu*angstrom^2'), symmetry=1, barrier=(12.7648,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.788709,'amu*angstrom^2'), symmetry=1, barrier=(31.5423,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(4030.69,'J/mol'), sigma=(6.74566,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with Tc=629.58 K, Pc=29.8 bar (from Joback method)"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.624896,0.0788821,-0.000101868,7.41622e-08,-2.20348e-11,26979.6,29.0184], Tmin=(100,'K'), Tmax=(817.816,'K')), NASAPolynomial(coeffs=[10.399,0.0310773,-1.41882e-05,2.68947e-09,-1.86674e-13,25380.9,-16.1705], Tmin=(817.816,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(223.346,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsH) + group(Cs-(Cds-O2d)CsCsH) + group(Cs-CsCsOsH) + group(Cs-CsHHH) + group(Cs-CsHHH) + group(Cds-OdCsH) + radical(CJCO) + radical(CC(C)OJ) + radical(CJC(C)C=O)"""),
)
species(
label = '[CH2][CH]OC=C[CH2](6363)',
structure = SMILES('[CH2][CH]OC=C[CH2]'),
E0 = (335.483,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3025,407.5,1350,352.5,2995,3025,975,1000,1300,1375,400,500,1630,1680,3000,3033.33,3066.67,3100,415,465,780,850,1435,1475,900,1100,180,180,180],'cm^-1')),
HinderedRotor(inertia=(0.981069,'amu*angstrom^2'), symmetry=1, barrier=(22.5567,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.981067,'amu*angstrom^2'), symmetry=1, barrier=(22.5567,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.981059,'amu*angstrom^2'), symmetry=1, barrier=(22.5565,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.981065,'amu*angstrom^2'), symmetry=1, barrier=(22.5566,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (83.1085,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.374208,0.0653949,-3.60498e-05,-1.81815e-08,1.69684e-11,40493.2,24.5856], Tmin=(100,'K'), Tmax=(920.64,'K')), NASAPolynomial(coeffs=[22.8909,0.00525337,5.31957e-07,-2.04763e-10,1.18042e-14,34750,-90.8571], Tmin=(920.64,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(335.483,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(CCsJOC(O)) + radical(Allyl_P) + radical(CJCO)"""),
)
species(
label = '[CH2][CH][CH]OC=O(6547)',
structure = SMILES('[CH2][CH][CH]OC=O'),
E0 = (168.429,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3050,390,425,1340,1360,335,370,3000,3100,440,815,1455,1000,2782.5,750,1395,475,1775,1000,250.409,1067.4,1067.5],'cm^-1')),
HinderedRotor(inertia=(0.00524154,'amu*angstrom^2'), symmetry=1, barrier=(4.23753,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.0052383,'amu*angstrom^2'), symmetry=1, barrier=(4.23745,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.590347,'amu*angstrom^2'), symmetry=1, barrier=(26.263,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.590471,'amu*angstrom^2'), symmetry=1, barrier=(26.2629,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (85.0813,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.5531,0.04703,-3.6011e-05,1.22399e-08,-1.21273e-12,20351.1,27.0842], Tmin=(100,'K'), Tmax=(1165.4,'K')), NASAPolynomial(coeffs=[12.7055,0.0165408,-6.79333e-06,1.26092e-09,-8.78008e-14,17222.7,-30.6957], Tmin=(1165.4,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(168.429,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(241.12,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-O2d)) + group(Cs-CsCsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + group(Cds-OdOsH) + radical(CCsJOC(O)H) + radical(CCJCO) + radical(RCCJ)"""),
)
species(
label = '[CH]=COC([CH2])[O](4648)',
structure = SMILES('[CH]=COC([CH2])[O]'),
E0 = (298.652,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3120,650,792.5,1650,3010,987.5,1337.5,450,1655,1380,1390,370,380,2900,435,3000,3100,440,815,1455,1000,373.66,375.843,376.452],'cm^-1')),
HinderedRotor(inertia=(0.193374,'amu*angstrom^2'), symmetry=1, barrier=(19.3668,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.194489,'amu*angstrom^2'), symmetry=1, barrier=(19.3527,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.196222,'amu*angstrom^2'), symmetry=1, barrier=(19.3407,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (85.0813,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.979736,0.0588882,-5.41492e-05,1.9832e-08,-1.24517e-12,36035,24.5814], Tmin=(100,'K'), Tmax=(1000.58,'K')), NASAPolynomial(coeffs=[16.534,0.0110573,-3.95674e-06,7.22918e-10,-5.18627e-14,32204,-54.0573], Tmin=(1000.58,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(298.652,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(245.277,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cds-CdsOsH) + group(Cds-CdsHH) + radical(CJCO) + radical(CCOJ) + radical(Cds_P)"""),
)
species(
label = '[CH]C([O])OC=C[CH2](14771)',
structure = SMILES('[CH]C([O])OC=C[CH2]'),
E0 = (403.656,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,1380,1390,370,380,2900,435,2995,3025,975,1000,1300,1375,400,500,1630,1680,200,800,960,1120,1280,1440,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (98.0999,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.496166,0.0664275,-4.96174e-05,8.39362e-09,3.6094e-12,48684.2,28.3712], Tmin=(100,'K'), Tmax=(997.504,'K')), NASAPolynomial(coeffs=[18.7271,0.0152563,-5.65372e-06,1.05631e-09,-7.67575e-14,43955.8,-65.0073], Tmin=(997.504,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(403.656,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(CCJ2_triplet) + radical(CCOJ) + radical(Allyl_P)"""),
)
species(
label = '[CH]C=COC([CH2])[O](14772)',
structure = SMILES('[CH]C=COC([CH2])[O]'),
E0 = (386.215,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,1380,1390,370,380,2900,435,2995,3025,975,1000,1300,1375,400,500,1630,1680,200,800,960,1120,1280,1440,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (98.0999,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.175571,0.0734367,-6.58772e-05,2.97381e-08,-5.29333e-12,46597.6,29.9889], Tmin=(100,'K'), Tmax=(1363.15,'K')), NASAPolynomial(coeffs=[18.1352,0.0207364,-7.886e-06,1.37676e-09,-9.18771e-14,41701.3,-62.2196], Tmin=(1363.15,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(386.215,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(291.007,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(CJCO) + radical(CCOJ) + radical(AllylJ2_triplet)"""),
)
species(
label = '[CH2]C([O])OC[C]=C(14773)',
structure = SMILES('[CH2]C([O])OC[C]=C'),
E0 = (311.091,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1685,370,2750,2850,1437.5,1250,1305,750,350,2950,3100,1380,975,1025,1650,1380,1390,370,380,2900,435,3000,3100,440,815,1455,1000,200,800,1066.67,1333.33,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.978458,0.0753823,-0.000114422,1.07287e-07,-4.02056e-11,37515.7,30.058], Tmin=(100,'K'), Tmax=(811.442,'K')), NASAPolynomial(coeffs=[4.22253,0.0420569,-2.07759e-05,4.0235e-09,-2.8009e-13,37559.9,18.6014], Tmin=(811.442,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(311.091,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-(Cds-Cds)OsHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(CJCO) + radical(CCOJ) + radical(Cds_S)"""),
)
species(
label = '[CH2][C]([O])OCC=C(2374)',
structure = SMILES('[CH2][C]([O])OCC=C'),
E0 = (278.496,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2750,2850,1437.5,1250,1305,750,350,2950,3100,1380,975,1025,1650,3010,987.5,1337.5,450,1655,360,370,350,200,800,1066.67,1333.33,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.08282,0.0710946,-9.85501e-05,8.82697e-08,-3.28719e-11,33593.6,29.8991], Tmin=(100,'K'), Tmax=(776.972,'K')), NASAPolynomial(coeffs=[5.02484,0.0405831,-1.99202e-05,3.87783e-09,-2.72026e-13,33289.5,13.8606], Tmin=(776.972,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(278.496,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-(Cds-Cds)OsHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(CJCO) + radical(Cs_P) + radical(CCOJ)"""),
)
species(
label = '[CH]=CCOC([CH2])[O](14774)',
structure = SMILES('[CH]=CCOC([CH2])[O]'),
E0 = (320.345,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2750,2850,1437.5,1250,1305,750,350,1380,1390,370,380,2900,435,3120,650,792.5,1650,3010,987.5,1337.5,450,1655,304.7,307.307,307.351,314.699],'cm^-1')),
HinderedRotor(inertia=(0.00294434,'amu*angstrom^2'), symmetry=1, barrier=(6.90859,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.106754,'amu*angstrom^2'), symmetry=1, barrier=(6.89096,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.00290334,'amu*angstrom^2'), symmetry=1, barrier=(6.86965,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.425213,'amu*angstrom^2'), symmetry=1, barrier=(28.6042,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.961142,0.0740055,-0.000105143,9.37954e-08,-3.4444e-11,38631.1,30.0512], Tmin=(100,'K'), Tmax=(784.791,'K')), NASAPolynomial(coeffs=[5.56673,0.0398799,-1.95585e-05,3.7988e-09,-2.65859e-13,38236.3,11.0378], Tmin=(784.791,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(320.345,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-(Cds-Cds)OsHH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + radical(CCOJ) + radical(Cds_P) + radical(CJCO)"""),
)
species(
label = '[CH]C=COC([CH2])O(13888)',
structure = SMILES('[CH]C=COC([CH2])O'),
E0 = (160.51,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([3000,3100,440,815,1455,1000,2995,3025,975,1000,1300,1375,400,500,1630,1680,3615,1277.5,1000,1380,1390,370,380,2900,435,200,800,1000,1200,1400,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.0800845,0.0732131,-4.4098e-05,-6.1937e-09,1.07408e-11,19457.8,29.4608], Tmin=(100,'K'), Tmax=(953.249,'K')), NASAPolynomial(coeffs=[20.6461,0.017853,-5.66945e-06,9.78589e-10,-6.96117e-14,14131.2,-76.1469], Tmin=(953.249,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(160.51,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(311.793,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(CJCO) + radical(AllylJ2_triplet)"""),
)
species(
label = '[CH]C=COC(C)[O](13718)',
structure = SMILES('[CH]C=COC(C)[O]'),
E0 = (174.626,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2750,2800,2850,1350,1500,750,1050,1375,1000,1380,1390,370,380,2900,435,2995,3025,975,1000,1300,1375,400,500,1630,1680,200,800,960,1120,1280,1440,1600],'cm^-1')),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.156089,'amu*angstrom^2'), symmetry=1, barrier=(3.5888,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 4,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[0.51617,0.0661435,-3.93276e-05,2.57275e-09,3.92875e-12,21137.1,28.1253], Tmin=(100,'K'), Tmax=(1046.47,'K')), NASAPolynomial(coeffs=[15.8307,0.0269007,-1.07349e-05,1.97711e-09,-1.38314e-13,16875.4,-51.5018], Tmin=(1046.47,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(174.626,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-Cd)) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-(Cds-Cds)HHH) + group(Cds-CdsCsH) + group(Cds-CdsOsH) + radical(CCOJ) + radical(AllylJ2_triplet)"""),
)
species(
label = '[CH2]C1O[CH][CH]CO1(14726)',
structure = SMILES('[CH2]C1O[CH][CH]CO1'),
E0 = (183.754,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.52756,0.0371434,4.44963e-05,-1.01943e-07,4.9584e-11,22206.2,21.3823], Tmin=(100,'K'), Tmax=(855.761,'K')), NASAPolynomial(coeffs=[17.9576,0.0105652,3.05821e-06,-1.08665e-09,8.68723e-14,17555.4,-66.0676], Tmin=(855.761,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(183.754,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(328.422,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsOsHH) + group(Cs-CsOsHH) + group(Cs-CsHHH) + ring(1,3-Dioxane) + radical(CCsJOCs) + radical(CJCO) + radical(CCJCO)"""),
)
species(
label = '[O]C1CC[CH][CH]O1(14775)',
structure = SMILES('[O]C1CC[CH][CH]O1'),
E0 = (161.67,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.8747,0.0340844,3.22358e-05,-6.78274e-08,3.0024e-11,19532.7,22.4052], Tmin=(100,'K'), Tmax=(907.223,'K')), NASAPolynomial(coeffs=[11.6615,0.0241867,-6.37946e-06,9.50597e-10,-6.2256e-14,16388.5,-31.3994], Tmin=(907.223,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(161.67,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(332.579,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cs-CsOsHH) + ring(Oxane) + radical(CCJCO) + radical(CCsJOCs) + radical(CCOJ)"""),
)
species(
label = '[CH2]C(=O)OCC=C(6109)',
structure = SMILES('[CH2]C(=O)OCC=C'),
E0 = (-142.339,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.43117,0.0510405,-3.06177e-05,8.76767e-09,-1.00368e-12,-17022.6,27.5112], Tmin=(100,'K'), Tmax=(1980.67,'K')), NASAPolynomial(coeffs=[15.1892,0.0232561,-9.57623e-06,1.68545e-09,-1.09769e-13,-22472.6,-48.2662], Tmin=(1980.67,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-142.339,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(315.95,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-Cs(Cds-O2d)) + group(Cs-(Cds-Cds)OsHH) + group(Cs-(Cds-O2d)HHH) + group(Cds-CdsCsH) + group(Cds-OdCsOs) + group(Cds-CdsHH) + radical(CJCO)"""),
)
species(
label = '[CH2]C1OC(C=C)O1(12658)',
structure = SMILES('[CH2]C1OC(C=C)O1'),
E0 = (-10.6155,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.35377,0.0455231,9.02785e-09,-2.99542e-08,1.33839e-11,-1170.6,24.3426], Tmin=(100,'K'), Tmax=(1057.31,'K')), NASAPolynomial(coeffs=[14.1266,0.0252917,-1.11403e-05,2.20318e-09,-1.61046e-13,-5441.7,-45.4158], Tmin=(1057.31,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-10.6155,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(324.264,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsCs) + group(Cs-CsOsOsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + ring(Cyclobutane) + radical(CJCO)"""),
)
species(
label = 'C=CC1CC([O])O1(12647)',
structure = SMILES('C=CC1CC([O])O1'),
E0 = (-3.60279,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (99.1079,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.91685,0.0334418,3.01206e-05,-6.31108e-08,2.7444e-11,-346.892,23.8039], Tmin=(100,'K'), Tmax=(922.717,'K')), NASAPolynomial(coeffs=[11.2837,0.0246091,-7.17158e-06,1.15092e-09,-7.7891e-14,-3428.06,-27.9616], Tmin=(922.717,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(-3.60279,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(328.422,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsH) + group(Cs-(Cds-Cds)CsOsH) + group(Cs-CsCsHH) + group(Cs-CsOsOsH) + group(Cds-CdsCsH) + group(Cds-CdsHH) + ring(Oxetane) + radical(CCOJ)"""),
)
species(
label = '[CH]OC([CH2])[O](1022)',
structure = SMILES('[CH]OC([CH2])[O]'),
E0 = (462.226,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([1380,1390,370,380,2900,435,3000,3100,440,815,1455,1000,180,180,1120.97,1123.08,1124.4,3203.45],'cm^-1')),
HinderedRotor(inertia=(0.140235,'amu*angstrom^2'), symmetry=1, barrier=(3.22428,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.140732,'amu*angstrom^2'), symmetry=1, barrier=(3.23572,'kJ/mol'), semiclassical=False),
HinderedRotor(inertia=(0.141736,'amu*angstrom^2'), symmetry=1, barrier=(3.25879,'kJ/mol'), semiclassical=False),
],
spinMultiplicity = 5,
opticalIsomers = 1,
molecularWeight = (72.0627,'amu'),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[1.89406,0.0535552,-9.25983e-05,8.89474e-08,-3.30686e-11,55661.8,21.1964], Tmin=(100,'K'), Tmax=(822.987,'K')), NASAPolynomial(coeffs=[4.79137,0.0246598,-1.29331e-05,2.5429e-09,-1.77544e-13,55686.6,10.8309], Tmin=(822.987,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(462.226,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(195.39,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(O2s-CsCs) + group(O2s-CsH) + group(Cs-CsOsOsH) + group(Cs-CsHHH) + group(Cs-OsHHH) + radical(CCOJ) + radical(CH2_triplet) + radical(CJCO)"""),
)
species(
label = '[CH]=C(64)',
structure = SMILES('[CH]=C'),
E0 = (289.245,'kJ/mol'),
modes = [
HarmonicOscillator(frequencies=([2950,3100,1380,975,1025,1650,826.012,826.012,3240.27],'cm^-1')),
],
spinMultiplicity = 2,
opticalIsomers = 1,
molecularWeight = (27.0452,'amu'),
collisionModel = TransportData(shapeIndex=2, epsilon=(1737.73,'J/mol'), sigma=(4.1,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=1.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.90671,-0.00406241,3.8678e-05,-4.62976e-08,1.729e-11,34797.2,6.09789], Tmin=(100,'K'), Tmax=(931.962,'K')), NASAPolynomial(coeffs=[5.44797,0.00498356,-1.08821e-06,1.79837e-10,-1.45096e-14,33829.8,-4.87808], Tmin=(931.962,'K'), Tmax=(5000,'K'))], Tmin=(100,'K'), Tmax=(5000,'K'), E0=(289.245,'kJ/mol'), Cp0=(33.2579,'J/(mol*K)'), CpInf=(108.088,'J/(mol*K)'), comment="""Thermo group additivity estimation: group(Cds-CdsHH) + group(Cds-CdsHH) + radical(Cds_P)"""),
)
species(
label = 'N2',
structure = SMILES('N#N'),
E0 = (-8.64289,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (28.0135,'amu'),
collisionModel = TransportData(shapeIndex=1, epsilon=(810.913,'J/mol'), sigma=(3.621,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(1.76,'angstroms^3'), rotrelaxcollnum=4.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[3.53101,-0.000123661,-5.02999e-07,2.43531e-09,-1.40881e-12,-1046.98,2.96747], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.95258,0.0013969,-4.92632e-07,7.8601e-11,-4.60755e-15,-923.949,5.87189], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-8.64289,'kJ/mol'), Cp0=(29.1007,'J/(mol*K)'), CpInf=(37.4151,'J/(mol*K)'), label="""N2""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = 'Ne',
structure = SMILES('[Ne]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (20.1797,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1235.53,'J/mol'), sigma=(3.758e-10,'m'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0, comment="""Epsilon & sigma estimated with fixed Lennard Jones Parameters. This is the fallback method! Try improving transport databases!"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,3.35532], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ne""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = 'He',
structure = SMILES('[He]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (4.0026,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(84.8076,'J/mol'), sigma=(2.576,'angstroms'), dipoleMoment=(0,'De'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""NOx2018"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,0.928724], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,0.928724], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""He""", comment="""Thermo library: primaryThermoLibrary"""),
)
species(
label = 'Ar',
structure = SMILES('[Ar]'),
E0 = (-6.19738,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
molecularWeight = (39.348,'amu'),
collisionModel = TransportData(shapeIndex=0, epsilon=(1134.93,'J/mol'), sigma=(3.33,'angstroms'), dipoleMoment=(0,'C*m'), polarizability=(0,'angstroms^3'), rotrelaxcollnum=0.0, comment="""GRI-Mech"""),
energyTransferModel = SingleExponentialDown(alpha0=(3.5886,'kJ/mol'), T0=(300,'K'), n=0.85),
thermo = NASA(polynomials=[NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,4.37967], Tmin=(200,'K'), Tmax=(1000,'K')), NASAPolynomial(coeffs=[2.5,0,0,0,0,-745.375,4.37967], Tmin=(1000,'K'), Tmax=(6000,'K'))], Tmin=(200,'K'), Tmax=(6000,'K'), E0=(-6.19738,'kJ/mol'), Cp0=(20.7862,'J/(mol*K)'), CpInf=(20.7862,'J/(mol*K)'), label="""Ar""", comment="""Thermo library: primaryThermoLibrary"""),
)
transitionState(
label = 'TS1',
E0 = (167.03,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS2',
E0 = (289.073,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS3',
E0 = (274.1,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS4',
E0 = (201.973,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS5',
E0 = (415.739,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS6',
E0 = (250.337,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS7',
E0 = (278.025,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS8',
E0 = (167.03,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS9',
E0 = (281.002,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS10',
E0 = (308.518,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS11',
E0 = (374.761,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS12',
E0 = (422.254,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS13',
E0 = (225.377,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS14',
E0 = (288.185,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS15',
E0 = (223.464,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS16',
E0 = (233.182,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS17',
E0 = (255.312,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS18',
E0 = (451.314,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS19',
E0 = (598.425,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS20',
E0 = (535.202,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS21',
E0 = (616.676,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS22',
E0 = (619.037,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS23',
E0 = (392.966,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS24',
E0 = (224.382,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS25',
E0 = (241.313,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS26',
E0 = (189.891,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS27',
E0 = (189.891,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS28',
E0 = (192.003,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS29',
E0 = (192.003,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS30',
E0 = (192.003,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS31',
E0 = (613.705,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS32',
E0 = (555.832,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS33',
E0 = (172.424,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS34',
E0 = (174.561,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS35',
E0 = (175.23,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS36',
E0 = (480.83,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS37',
E0 = (742.364,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS38',
E0 = (584.114,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS39',
E0 = (714.338,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS40',
E0 = (615.461,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS41',
E0 = (598.02,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS42',
E0 = (300.602,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS43',
E0 = (450.446,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS44',
E0 = (437.906,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS45',
E0 = (465.53,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS46',
E0 = (295.751,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS47',
E0 = (306.903,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS48',
E0 = (227.916,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS49',
E0 = (254.507,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS50',
E0 = (255.998,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS51',
E0 = (174.938,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS52',
E0 = (174.938,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
transitionState(
label = 'TS53',
E0 = (785.788,'kJ/mol'),
spinMultiplicity = 1,
opticalIsomers = 1,
)
reaction(
label = 'reaction1',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['C=C[O](594)', 'C=CC=O(5269)'],
transitionState = 'TS1',
kinetics = Arrhenius(A=(5e+12,'s^-1'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Exact match found for rate rule [RJJ]
Euclidian distance = 0
family: 1,4_Linear_birad_scission"""),
)
reaction(
label = 'reaction2',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2][CH]C1OC([CH2])O1(14763)'],
transitionState = 'TS2',
kinetics = Arrhenius(A=(2.724e+10,'s^-1','*|/',3), n=0.478, Ea=(122.043,'kJ/mol'), T0=(1,'K'), Tmin=(600,'K'), Tmax=(2000,'K'), comment="""Estimated using an average for rate rule [R5_SS_D;doublebond_intra;radadd_intra_O]
Euclidian distance = 0
family: Intra_R_Add_Exocyclic"""),
)
reaction(
label = 'reaction3',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2][CH]C1CC([O])O1(14764)'],
transitionState = 'TS3',
kinetics = Arrhenius(A=(177207,'s^-1'), n=1.88643, Ea=(107.07,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5_SS;multiplebond_intra;radadd_intra_cs2H] for rate rule [R5_SS_D;doublebond_intra;radadd_intra_cs2H]
Euclidian distance = 1.41421356237
family: Intra_R_Add_Exocyclic
Ea raised from 103.0 to 107.1 kJ/mol to match endothermicity of reaction."""),
)
reaction(
label = 'reaction4',
reactants = ['H(8)', '[CH2]C(=O)O[CH]C=C(12761)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS4',
kinetics = Arrhenius(A=(92.1383,'m^3/(mol*s)'), n=1.68375, Ea=(21.5685,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [CO_O;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction5',
reactants = ['H(8)', '[CH2]C([O])OC=C=C(14765)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS5',
kinetics = Arrhenius(A=(4.42e+08,'cm^3/(mol*s)'), n=1.64, Ea=(11.7989,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 2713 used for Ca_Cds-HH;HJ
Exact match found for rate rule [Ca_Cds-HH;HJ]
Euclidian distance = 0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction6',
reactants = ['CH2(T)(28)', 'C=C[CH]OC=O(6118)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS6',
kinetics = Arrhenius(A=(0.0201871,'m^3/(mol*s)'), n=2.2105, Ea=(56.0866,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [CO-NdH_O;YJ] for rate rule [CO-NdH_O;CH2_triplet]
Euclidian distance = 2.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction7',
reactants = ['O(T)(63)', 'C=C[CH]OC=C(6503)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS7',
kinetics = Arrhenius(A=(53.4257,'m^3/(mol*s)'), n=1.6025, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Cds_Cds;O_atom_triplet] for rate rule [Cds-OsH_Cds;O_atom_triplet]
Euclidian distance = 1.0
family: R_Addition_MultipleBond
Ea raised from -5.8 to 0 kJ/mol."""),
)
reaction(
label = 'reaction8',
reactants = ['C=C[O](594)', '[CH2]C=C[O](5266)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS8',
kinetics = Arrhenius(A=(1.3e+11,'cm^3/(mol*s)'), n=0, Ea=(101.918,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using an average for rate rule [CO_O;O_rad/OneDe]
Euclidian distance = 0
family: R_Addition_MultipleBond
Ea raised from 99.5 to 101.9 kJ/mol to match endothermicity of reaction."""),
)
reaction(
label = 'reaction9',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C=CO[C]([CH2])O(13880)'],
transitionState = 'TS9',
kinetics = Arrhenius(A=(2.15e+14,'s^-1','+|-',2), n=-0.27, Ea=(113.972,'kJ/mol'), T0=(1,'K'), Tmin=(700,'K'), Tmax=(1800,'K'), comment="""Estimated using an average for rate rule [R2H_S;O_rad_out;XH_out]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction10',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2][CH][CH]OC(C)=O(13711)'],
transitionState = 'TS10',
kinetics = Arrhenius(A=(17481.2,'s^-1'), n=2.56136, Ea=(141.488,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R2H_S;C_rad_out_2H;XH_out]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction11',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C([O])OC=[C]C(14766)'],
transitionState = 'TS11',
kinetics = Arrhenius(A=(1.63e+08,'s^-1'), n=1.73, Ea=(207.731,'kJ/mol'), T0=(1,'K'), comment="""From training reaction 123 used for R2H_S;C_rad_out_2H;Cd_H_out_doubleC
Exact match found for rate rule [R2H_S;C_rad_out_2H;Cd_H_out_doubleC]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction12',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C([O])O[C]=CC(14767)'],
transitionState = 'TS12',
kinetics = Arrhenius(A=(1.91e+11,'s^-1'), n=0.63, Ea=(255.224,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""From training reaction 199 used for R3H_SD;C_rad_out_2H;Cd_H_out_singleNd
Exact match found for rate rule [R3H_SD;C_rad_out_2H;Cd_H_out_singleNd]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction13',
reactants = ['[CH2]C=[C]OC([CH2])O(13882)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS13',
kinetics = Arrhenius(A=(37100,'s^-1'), n=2.23, Ea=(44.3086,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4H_RSS;Cd_rad_out;XH_out] for rate rule [R4H_SSS_OCs;Cd_rad_out_Cd;O_H_out]
Euclidian distance = 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction14',
reactants = ['[CH2]C=[C]OC(C)[O](13713)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS14',
kinetics = Arrhenius(A=(2.74832e+07,'s^-1'), n=1.435, Ea=(93,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [R4H_SSS_OCs;Y_rad_out;Cs_H_out_2H] + [R4H_RSS;Cd_rad_out;Cs_H_out] for rate rule [R4H_SSS_OCs;Cd_rad_out_Cd;Cs_H_out_2H]
Euclidian distance = 3.0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction15',
reactants = ['[CH2][C]=COC([CH2])O(13881)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS15',
kinetics = Arrhenius(A=(380071,'s^-1'), n=1.62386, Ea=(44.2978,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5H_RSSR;Y_rad_out;XH_out] for rate rule [R5H_DSSS;Cd_rad_out;O_H_out]
Euclidian distance = 2.44948974278
family: intra_H_migration"""),
)
reaction(
label = 'reaction16',
reactants = ['[CH2][C]=COC(C)[O](13712)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS16',
kinetics = Arrhenius(A=(263079,'s^-1'), n=1.73643, Ea=(39.8993,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5H_RSSR;Y_rad_out;Cs_H_out_2H] for rate rule [R5H_DSSS;Cd_rad_out;Cs_H_out_2H]
Euclidian distance = 2.2360679775
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction17',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C(=O)O[CH][CH]C(2373)'],
transitionState = 'TS17',
kinetics = Arrhenius(A=(126000,'s^-1'), n=1.85, Ea=(88.2824,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R5H_SMSS;C_rad_out_2H;XH_out]
Euclidian distance = 0
family: intra_H_migration"""),
)
reaction(
label = 'reaction18',
reactants = ['[CH2][CH][O](719)', '[CH2]C=C[O](5266)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS18',
kinetics = Arrhenius(A=(1.63841e+06,'m^3/(mol*s)'), n=0.151, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [O_rad/OneDe;Y_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -0.7 to 0 kJ/mol."""),
)
reaction(
label = 'reaction19',
reactants = ['[CH2]C([O])[O](696)', '[CH]C=C(8168)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS19',
kinetics = Arrhenius(A=(7.15767e+07,'m^3/(mol*s)'), n=0.0716491, Ea=(15.4197,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;O_rad/NonDe] for rate rule [Cd_rad;O_rad/NonDe]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: R_Recombination"""),
)
reaction(
label = 'reaction20',
reactants = ['H(8)', '[CH2][CH][CH]OC([CH2])=O(6733)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS20',
kinetics = Arrhenius(A=(4.34078e+06,'m^3/(mol*s)'), n=0.278577, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;H_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -1.4 to 0 kJ/mol."""),
)
reaction(
label = 'reaction21',
reactants = ['H(8)', '[CH2][C]=COC([CH2])[O](14446)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS21',
kinetics = Arrhenius(A=(4.34078e+06,'m^3/(mol*s)'), n=0.278577, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;H_rad]
Euclidian distance = 0
family: R_Recombination
Ea raised from -1.4 to 0 kJ/mol."""),
)
reaction(
label = 'reaction22',
reactants = ['H(8)', '[CH2]C=[C]OC([CH2])[O](14444)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS22',
kinetics = Arrhenius(A=(5.78711e+07,'m^3/(mol*s)'), n=0.0433333, Ea=(0.458029,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Cd_rad;H_rad]
Euclidian distance = 0
family: R_Recombination"""),
)
reaction(
label = 'reaction23',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C([O])OC1[CH]C1(12058)'],
transitionState = 'TS23',
kinetics = Arrhenius(A=(1.05e+08,'s^-1'), n=1.192, Ea=(225.936,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1600,'K'), comment="""Estimated using template [R3_D;doublebond_intra_pri;radadd_intra_cs2H] for rate rule [R3_D;doublebond_intra_pri_HNd_O;radadd_intra_cs2H]
Euclidian distance = 2.0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction24',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C1[CH]OC([CH2])O1(14679)'],
transitionState = 'TS24',
kinetics = Arrhenius(A=(1.66591e+07,'s^-1'), n=1.01661, Ea=(57.3526,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5_SS_D;doublebond_intra_pri;radadd_intra] for rate rule [R5_SS_D;doublebond_intra_pri;radadd_intra_O]
Euclidian distance = 1.0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction25',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C1[CH]OC([O])C1(14768)'],
transitionState = 'TS25',
kinetics = Arrhenius(A=(4.47079e+07,'s^-1'), n=0.909323, Ea=(74.2834,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R5_SS_D;doublebond_intra_pri;radadd_intra_cs2H]
Euclidian distance = 0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction26',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['C=C[CH]OC(=C)O(13875)'],
transitionState = 'TS26',
kinetics = Arrhenius(A=(1.949e+11,'s^-1'), n=0.486, Ea=(22.8614,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R2radExo;Y_rad;XH_Rrad]
Euclidian distance = 0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction27',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['C=C[CH]OC(C)=O(12663)'],
transitionState = 'TS27',
kinetics = Arrhenius(A=(1.949e+11,'s^-1'), n=0.486, Ea=(22.8614,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R2radExo;Y_rad;XH_Rrad]
Euclidian distance = 0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction28',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C(=O)OC=CC(14769)'],
transitionState = 'TS28',
kinetics = Arrhenius(A=(2.1261e+09,'s^-1'), n=0.137, Ea=(24.9733,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5;Y_rad;XH_Rrad] for rate rule [R5radExo;Y_rad;XH_Rrad]
Euclidian distance = 1.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction29',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C(O)OC=C=C(13876)'],
transitionState = 'TS29',
kinetics = Arrhenius(A=(2.1261e+09,'s^-1'), n=0.137, Ea=(24.9733,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5;Y_rad;XH_Rrad] for rate rule [R5radExo;Y_rad;XH_Rrad]
Euclidian distance = 1.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction30',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['C=C=COC(C)[O](13704)'],
transitionState = 'TS30',
kinetics = Arrhenius(A=(2.1261e+09,'s^-1'), n=0.137, Ea=(24.9733,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R5;Y_rad;XH_Rrad] for rate rule [R5radExo;Y_rad;XH_Rrad]
Euclidian distance = 1.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction11',
reactants = ['[CH2][CH]CO[C]([CH2])[O](2383)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS31',
kinetics = Arrhenius(A=(1.4874e+09,'s^-1'), n=1.045, Ea=(63.4002,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R3radExo;Y_rad;XH_Rrad]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction32',
reactants = ['[CH2]C[CH]O[C]([CH2])[O](6734)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS32',
kinetics = Arrhenius(A=(1.02844e+09,'s^-1'), n=0.311, Ea=(24.9733,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R4;Y_rad;XH_Rrad] for rate rule [R4radEndo;Y_rad;XH_Rrad]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction33',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C=COC1CO1(6594)'],
transitionState = 'TS33',
kinetics = Arrhenius(A=(5.94212e+13,'s^-1'), n=0.0123667, Ea=(5.39457,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [Rn;Y_rad_out;Cpri_rad_out_2H] + [R3_SS;Y_rad_out;Ypri_rad_out] for rate rule [R3_SS;O_rad;Cpri_rad_out_2H]
Euclidian distance = 2.2360679775
family: Birad_recombination"""),
)
reaction(
label = 'reaction34',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C1OC=CCO1(14722)'],
transitionState = 'TS34',
kinetics = Arrhenius(A=(2e+12,'s^-1'), n=0, Ea=(7.5312,'kJ/mol'), T0=(1,'K'), Tmin=(550,'K'), Tmax=(650,'K'), comment="""Estimated using template [R6_SSSDS;Y_rad_out;Cpri_rad_out_2H] for rate rule [R6_SSSDS;O_rad;Cpri_rad_out_2H]
Euclidian distance = 1.0
family: Birad_recombination"""),
)
reaction(
label = 'reaction35',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[O]C1CCC=CO1(14770)'],
transitionState = 'TS35',
kinetics = Arrhenius(A=(2.53377e+11,'s^-1'), n=0.0685, Ea=(8.20064,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [R6;C_rad_out_2H;Cpri_rad_out_2H] + [R6_SSSDS;C_rad_out_single;Cpri_rad_out_2H] for rate rule [R6_SSSDS;C_rad_out_2H;Cpri_rad_out_2H]
Euclidian distance = 1.0
family: Birad_recombination"""),
)
reaction(
label = 'reaction44',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C([O])C([CH2])C=O(12644)'],
transitionState = 'TS36',
kinetics = Arrhenius(A=(7040,'s^-1'), n=2.66, Ea=(313.8,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using an average for rate rule [R_ROR;R1_doublebond;R2_doublebond_H;R_O_C]
Euclidian distance = 0
family: ketoenol"""),
)
reaction(
label = 'reaction37',
reactants = ['O(T)(63)', '[CH2][CH]OC=C[CH2](6363)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS37',
kinetics = Arrhenius(A=(93609.6,'m^3/(mol*s)'), n=1.13083, Ea=(163.847,'kJ/mol'), T0=(1,'K'), Tmin=(303.03,'K'), Tmax=(2000,'K'), comment="""From training reaction 2 used for Y_rad;O_birad
Exact match found for rate rule [Y_rad;O_birad]
Euclidian distance = 0
family: Birad_R_Recombination"""),
)
reaction(
label = 'reaction38',
reactants = ['CH2(T)(28)', '[CH2][CH][CH]OC=O(6547)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS38',
kinetics = Arrhenius(A=(1.14854e+06,'m^3/(mol*s)'), n=0.575199, Ea=(34.3157,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [Y_rad;Birad]
Euclidian distance = 0
family: Birad_R_Recombination"""),
)
reaction(
label = 'reaction39',
reactants = ['CH2(T)(28)', '[CH]=COC([CH2])[O](4648)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS39',
kinetics = Arrhenius(A=(1.14854e+06,'m^3/(mol*s)'), n=0.575199, Ea=(34.3157,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;Birad] for rate rule [Cd_pri_rad;Birad]
Euclidian distance = 2.0
family: Birad_R_Recombination"""),
)
reaction(
label = 'reaction40',
reactants = ['H(8)', '[CH]C([O])OC=C[CH2](14771)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS40',
kinetics = Arrhenius(A=(1e+07,'m^3/(mol*s)'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [H_rad;Birad]
Euclidian distance = 0
family: Birad_R_Recombination"""),
)
reaction(
label = 'reaction41',
reactants = ['H(8)', '[CH]C=COC([CH2])[O](14772)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS41',
kinetics = Arrhenius(A=(1e+07,'m^3/(mol*s)'), n=0, Ea=(0,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [H_rad;Birad]
Euclidian distance = 0
family: Birad_R_Recombination"""),
)
reaction(
label = 'reaction42',
reactants = ['[CH2][CH][O](719)', 'C=CC=O(5269)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS42',
kinetics = Arrhenius(A=(373000,'cm^3/(mol*s)'), n=2.53, Ea=(20.92,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [Od_CO-CdH;YJ] for rate rule [Od_CO-CdH;CJ]
Euclidian distance = 1.0
family: R_Addition_MultipleBond"""),
)
reaction(
label = 'reaction43',
reactants = ['[CH2]C([O])OC[C]=C(14773)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS43',
kinetics = Arrhenius(A=(1.89098e+10,'s^-1'), n=0.9884, Ea=(139.355,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R2H_S;Cd_rad_out_Cd;Cs_H_out_1H] for rate rule [R2H_S;Cd_rad_out_Cd;Cs_H_out_H/NonDeO]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction44',
reactants = ['[CH2][C]([O])OCC=C(2374)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS44',
kinetics = Arrhenius(A=(3.32e+07,'s^-1'), n=1.69, Ea=(159.41,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using an average for rate rule [R3H_SS_O;Y_rad_out;Cs_H_out_H/Cd]
Euclidian distance = 0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction45',
reactants = ['[CH]=CCOC([CH2])[O](14774)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS45',
kinetics = Arrhenius(A=(1.846e+10,'s^-1'), n=0.74, Ea=(145.185,'kJ/mol'), T0=(1,'K'), Tmin=(300,'K'), Tmax=(1500,'K'), comment="""Estimated using template [R3H_DS;Cd_rad_out_singleH;Cs_H_out_1H] for rate rule [R3H_DS;Cd_rad_out_singleH;Cs_H_out_H/NonDeO]
Euclidian distance = 1.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction46',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH]C=COC([CH2])O(13888)'],
transitionState = 'TS46',
kinetics = Arrhenius(A=(3.427,'s^-1'), n=3.311, Ea=(128.721,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [RnH;O_rad_out;Cd_H_out_singleH] for rate rule [R6HJ_3;O_rad_out;Cd_H_out_singleH]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 2.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction47',
reactants = ['[CH]C=COC(C)[O](13718)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS47',
kinetics = Arrhenius(A=(22.7193,'s^-1'), n=3.21897, Ea=(132.277,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [RnH;Cd_rad_out_singleH;Cs_H_out_2H] for rate rule [R6HJ_2;Cd_rad_out_singleH;Cs_H_out_2H]
Euclidian distance = 2.0
Multiplied by reaction path degeneracy 3.0
family: intra_H_migration"""),
)
reaction(
label = 'reaction48',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C1O[CH][CH]CO1(14726)'],
transitionState = 'TS48',
kinetics = Arrhenius(A=(9.91671e+09,'s^-1'), n=0.30082, Ea=(60.8864,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [R6_linear;doublebond_intra_pri_2H;radadd_intra] for rate rule [R6_linear;doublebond_intra_pri_2H;radadd_intra_O]
Euclidian distance = 1.0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction49',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[O]C1CC[CH][CH]O1(14775)'],
transitionState = 'TS49',
kinetics = Arrhenius(A=(9.63396e+08,'s^-1'), n=0.483333, Ea=(87.4777,'kJ/mol'), T0=(1,'K'), comment="""Estimated using an average for rate rule [R6_linear;doublebond_intra_pri_2H;radadd_intra_cs2H]
Euclidian distance = 0
family: Intra_R_Add_Endocyclic"""),
)
reaction(
label = 'reaction50',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C(=O)OCC=C(6109)'],
transitionState = 'TS50',
kinetics = Arrhenius(A=(2.6374e+09,'s^-1'), n=0.37, Ea=(88.9686,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [R3;Y_rad_De;XH_Rrad] + [R3radExo;Y_rad;XH_Rrad] for rate rule [R3radExo;Y_rad_De;XH_Rrad]
Euclidian distance = 1.0
family: Intra_Disproportionation"""),
)
reaction(
label = 'reaction51',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['[CH2]C1OC(C=C)O1(12658)'],
transitionState = 'TS51',
kinetics = Arrhenius(A=(1.8e+12,'s^-1'), n=-0.1525, Ea=(7.90776,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [Rn;C_rad_out_H/OneDe;Ypri_rad_out] + [R4_SSS;C_rad_out_single;Ypri_rad_out] for rate rule [R4_SSS;C_rad_out_H/OneDe;Opri_rad]
Euclidian distance = 2.2360679775
family: Birad_recombination"""),
)
reaction(
label = 'reaction52',
reactants = ['[CH2]C=COC([CH2])[O](6739)'],
products = ['C=CC1CC([O])O1(12647)'],
transitionState = 'TS52',
kinetics = Arrhenius(A=(1.8e+12,'s^-1'), n=-0.1525, Ea=(7.90776,'kJ/mol'), T0=(1,'K'), comment="""Estimated using average of templates [Rn;C_rad_out_H/OneDe;Cpri_rad_out_2H] + [R4_SSS;C_rad_out_single;Cpri_rad_out_2H] for rate rule [R4_SSS;C_rad_out_H/OneDe;Cpri_rad_out_2H]
Euclidian distance = 2.0
family: Birad_recombination"""),
)
reaction(
label = 'reaction53',
reactants = ['[CH]OC([CH2])[O](1022)', '[CH]=C(64)'],
products = ['[CH2]C=COC([CH2])[O](6739)'],
transitionState = 'TS53',
kinetics = Arrhenius(A=(1.14854e+06,'m^3/(mol*s)'), n=0.575199, Ea=(34.3157,'kJ/mol'), T0=(1,'K'), comment="""Estimated using template [Y_rad;Birad] for rate rule [Cd_pri_rad;Birad]
Euclidian distance = 2.0
family: Birad_R_Recombination"""),
)
# Pressure-dependent reaction network (auto-generated RMG-style input):
# the unimolecular isomer [CH2]C=COC([CH2])[O] reachable from the
# C=C[O] + C=CC=O bimolecular channel, in an equimolar four-component bath gas.
network(
    label = '3396',
    isomers = [
        '[CH2]C=COC([CH2])[O](6739)',
    ],
    reactants = [
        ('C=C[O](594)', 'C=CC=O(5269)'),
    ],
    bathGas = {
        'N2': 0.25,
        'Ne': 0.25,
        'He': 0.25,
        'Ar': 0.25,
    },
)
# Solver settings for network '3396' above: 10x10 temperature/pressure grid
# (1200-1500 K, 1-10 atm), modified-strong-collision approximation, results
# fitted to a 6x4 Chebyshev interpolation model.
pressureDependence(
    label = '3396',
    Tmin = (1200,'K'),
    Tmax = (1500,'K'),
    Tcount = 10,
    Tlist = ([1201.48,1213.22,1236.21,1269.31,1310.55,1356.92,1404.16,1447.02,1479.84,1497.7],'K'),
    Pmin = (1,'atm'),
    Pmax = (10,'atm'),
    Pcount = 10,
    Plist = ([1.02771,1.14872,1.41959,1.89986,2.67608,3.83649,5.40396,7.23219,8.93758,9.98989],'bar'),
    maximumGrainSize = (0.5,'kcal/mol'),
    minimumGrainCount = 250,
    method = 'modified strong collision',
    interpolationModel = ('Chebyshev', 6, 4),
    activeKRotor = True,
    activeJRotor = True,
    rmgmode = True,
)
| [
"[email protected]"
] | |
a607cfd0c82f0951367c489f47dad4d25eb49d58 | 00829e1ff78f73dab073a201d68139960c1d1922 | /tools/toolset/tool/rigging/pipline_tool/ui/his/ui_create_character.py | 42c99cc1874dedb0e213f77abca01d3414e1a31e | [] | no_license | liangyongg/Beam_Tools | a021ceb4187107508536c46726da5b9629ffd1cf | 21b5d06e660f058434e589ae4f672f96296b7540 | refs/heads/master | 2018-11-04T04:43:02.523654 | 2018-08-26T12:33:09 | 2018-08-26T12:33:09 | 115,005,481 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,908 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'ui_create_character.ui'
#
# Created: Thu Apr 26 11:29:46 2018
# by: pyside-uic 0.2.15 running on PySide 1.2.4
#
# WARNING! All changes made in this file will be lost!
from PySide import QtCore, QtGui
class Ui_Form(object):
    """pyside-uic generated builder for the 'create character' form.

    setupUi constructs the widget tree on *Form*; retranslateUi (re)applies
    all translatable strings.  This class is regenerated from the .ui file
    (see the header warning), so hand edits will be lost.
    """
    def setupUi(self, Form):
        """Build the 430x262 form: a labeled name field above a create button."""
        Form.setObjectName("Form")
        Form.resize(430, 262)
        # Outer vertical layout holding the name row and the button row.
        self.verticalLayout = QtGui.QVBoxLayout(Form)
        self.verticalLayout.setObjectName("verticalLayout")
        # Row 1: "name:" label + line edit.
        self.horizontalLayout = QtGui.QHBoxLayout()
        self.horizontalLayout.setContentsMargins(-1, -1, 50, -1)
        self.horizontalLayout.setObjectName("horizontalLayout")
        self.label = QtGui.QLabel(Form)
        self.label.setObjectName("label")
        self.horizontalLayout.addWidget(self.label)
        self.lineEdit = QtGui.QLineEdit(Form)
        self.lineEdit.setObjectName("lineEdit")
        self.horizontalLayout.addWidget(self.lineEdit)
        self.verticalLayout.addLayout(self.horizontalLayout)
        # Row 2: the "create" push button.
        self.verticalLayout_2 = QtGui.QVBoxLayout()
        self.verticalLayout_2.setContentsMargins(50, -1, 50, -1)
        self.verticalLayout_2.setObjectName("verticalLayout_2")
        self.pushButton = QtGui.QPushButton(Form)
        self.pushButton.setObjectName("pushButton")
        self.verticalLayout_2.addWidget(self.pushButton)
        self.verticalLayout.addLayout(self.verticalLayout_2)
        self.retranslateUi(Form)
        QtCore.QMetaObject.connectSlotsByName(Form)
    def retranslateUi(self, Form):
        """Apply (or re-apply after a locale change) all user-visible strings."""
        Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("Form", "name:", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton.setText(QtGui.QApplication.translate("Form", "create", None, QtGui.QApplication.UnicodeUTF8))
| [
"hhhh"
] | hhhh |
ce74a238c917af6de5cfc93964163002750f06d8 | 59de7788673ade984b9c9fbc33664a7cbdba67d3 | /res/scripts/client_common/shared_utils/__init__.py | 9b417b171ce7cf3e50a53c5b6006973b705af2f6 | [] | no_license | webiumsk/WOT-0.9.15-CT | 3fa24ab37a6c91b7073034afb2f355efa5b7fe36 | fbd194fbaa6bdece51c7a68fc35bbb5257948341 | refs/heads/master | 2020-12-24T21:27:23.175774 | 2016-05-01T13:47:44 | 2016-05-01T13:47:44 | 57,600,180 | 0 | 0 | null | null | null | null | WINDOWS-1250 | Python | false | false | 4,366 | py | # 2016.05.01 15:25:53 Střední Evropa (letní čas)
# Embedded file name: scripts/client_common/shared_utils/__init__.py
import weakref
import itertools
import types
import BigWorld
from debug_utils import LOG_ERROR, LOG_WARNING
# Python 2 scalar types recognised by CONST_CONTAINER.getIterator and friends:
# ints, longs, floats, bools plus str/unicode (types.StringTypes).
ScalarTypes = (types.IntType,
 types.LongType,
 types.FloatType,
 types.BooleanType) + types.StringTypes
# Integer-only subset of the above.
IntegralTypes = (types.IntType, types.LongType)
def makeTupleByDict(ntClass, data):
    """Build an *ntClass* namedtuple from *data*, dropping unknown keys.

    Keys of *data* that are not fields of *ntClass* are silently ignored,
    so callers may pass mappings that carry extra entries.

    :param ntClass: a namedtuple class (anything exposing ``_fields``).
    :param data: mapping of field name -> value.
    :return: a new *ntClass* instance.
    """
    # One pass instead of the original set-difference + filter loop;
    # ``items()`` keeps this working on both Python 2.7 and 3.
    supported = {k: v for k, v in data.items() if k in ntClass._fields}
    return ntClass(**supported)
class BoundMethodWeakref(object):
    """Callable weak reference to a bound method.

    Stores a weak reference to the method's instance plus the method name,
    so holding this wrapper does not keep the instance alive.  Calling the
    wrapper re-binds the method on the (still alive) instance and calls it.
    """

    def __init__(self, func):
        self.methodName = func.__name__
        # The decompiled original read ``raise not ... or AssertionError(...)``,
        # which executes ``raise True`` (a TypeError) for every valid method
        # name; restore the intended assertion.
        assert not self.methodName.startswith('__'), 'BoundMethodWeakref: private methods are not supported'
        self.wrefCls = weakref.ref(func.__self__)

    def __call__(self, *args, **kwargs):
        # NOTE: if the referent was collected, wrefCls() is None and this
        # raises AttributeError — same contract as the original.
        return getattr(self.wrefCls(), self.methodName)(*args, **kwargs)
def forEach(function, sequence):
    """Invoke *function* on every item of *sequence*, discarding results.

    Exists for its side effects only (map() would build a result list).
    """
    for item in sequence:
        function(item)
def isEmpty(sequence):
    """Tell whether the iterator *sequence* yields no items.

    WARNING: probing consumes the first element of a non-empty iterator,
    so only call this when that element may be discarded.

    :param sequence: an iterator (must support ``next()``).
    :return: True if exhausted, False otherwise.
    """
    try:
        next(sequence)
    except StopIteration:
        return True
    # The original fell off the end here and returned None; return a real
    # boolean so callers testing ``is False`` behave as expected.
    return False
def safeCancelCallback(callbackID):
    """Cancel a BigWorld callback without propagating a bad-ID error.

    BigWorld.cancelCallback raises ValueError for an unknown/stale ID
    (e.g. the callback already fired); log it instead of crashing.
    """
    try:
        BigWorld.cancelCallback(callbackID)
    except ValueError:
        LOG_ERROR('Cannot cancel BigWorld callback: incorrect callback ID.')
def prettyPrint(dict, sort_keys = True, indent = 4):
    """Render a mapping as human-readable, indented JSON text."""
    # Local import kept: json is only needed by this debugging helper.
    import json
    formatted = json.dumps(dict, sort_keys=sort_keys, indent=indent)
    return formatted
def findFirst(function_or_None, sequence, default = None):
    """Return the first item of *sequence* satisfying the predicate.

    :param function_or_None: predicate; None selects truthy items
        (``itertools.ifilter`` semantics).
    :param sequence: iterable to scan.
    :param default: value returned when no item matches.
    """
    try:
        return next(itertools.ifilter(function_or_None, sequence))
    except StopIteration:
        return default
def first(sequence, default = None):
    # First *truthy* item of the sequence (ifilter(None, ...) semantics),
    # or *default* when there is none.
    return findFirst(None, sequence, default)
class CONST_CONTAINER(object):
    """Enum-like base: subclasses declare constants as scalar class
    attributes and inherit iteration plus value -> key reverse lookup."""
    __keyByValue = None
    @classmethod
    def getIterator(cls):
        # Yields (name, value) for every public, scalar class attribute.
        for k, v in cls.__dict__.iteritems():
            if not k.startswith('_') and type(v) in ScalarTypes:
                yield (k, v)
    @classmethod
    def getKeyByValue(cls, value):
        cls.__doInit()
        return cls.__keyByValue.get(value)
    @classmethod
    def hasKey(cls, key):
        return key in cls.__dict__
    @classmethod
    def hasValue(cls, value):
        cls.__doInit()
        return value in cls.__keyByValue
    @classmethod
    def ALL(cls):
        # Tuple of all constant values (declaration order not guaranteed:
        # backed by the class __dict__).
        return tuple([ v for k, v in cls.getIterator() ])
    @classmethod
    def __doInit(cls):
        # Lazily build the value -> key cache; the (name-mangled) cache
        # attribute lands on the subclass that first triggers this.
        if cls.__keyByValue is None:
            cls.__keyByValue = dict(((v, k) for k, v in cls.getIterator()))
        return
class BitmaskHelper(object):
    """Flag manipulation helpers for integer bitmasks.

    ``add``/``remove`` return -1 when the operation is a no-op (flag already
    present / already absent) so callers can detect "nothing changed".
    ``addIfNot``/``removeIfHas`` always return a valid mask; the decompiled
    originals could fall off the end and return None on the no-op path,
    which contradicts what an "...IfNot/...IfHas" helper promises.
    """

    @classmethod
    def add(cls, mask, flag):
        """Set *flag*; return the new mask, or -1 if it was already set."""
        if not mask & flag:
            return mask | flag
        return -1

    @classmethod
    def addIfNot(cls, mask, flag):
        """Set *flag* if missing; always return the (possibly updated) mask."""
        if not mask & flag:
            mask |= flag
        return mask

    @classmethod
    def remove(cls, mask, flag):
        """Clear *flag*; return the new mask, or -1 if it was not set."""
        if mask & flag > 0:
            return mask ^ flag
        return -1

    @classmethod
    def removeIfHas(cls, mask, flag):
        """Clear *flag* if present; always return the (possibly updated) mask."""
        if mask & flag > 0:
            mask ^= flag
        return mask
class AlwaysValidObject(object):
    """Null-object that tolerates any attribute access or call.

    Unknown attribute lookups produce child AlwaysValidObject instances
    whose names record the access path (``parent/child``); calling an
    instance produces a fresh, unnamed one.
    """

    def __init__(self, name = ''):
        self.__name = name

    def __getattr__(self, item):
        # Only reached when normal lookup fails; real instance attributes
        # are served from __dict__, everything else becomes a child node.
        attrs = self.__dict__
        if item in attrs:
            return attrs[item]
        return AlwaysValidObject(self._makeName(self.__name, item))

    def __call__(self, *args, **kwargs):
        return AlwaysValidObject()

    def getName(self):
        """Return the accumulated access-path name of this node."""
        return self.__name

    @classmethod
    def _makeName(cls, parentName, nodeName):
        return '%s/%s' % (parentName, nodeName)
def isDefaultDict(sourceDict, defaultDict):
    """Check that *sourceDict* holds every entry of *defaultDict* unchanged.

    :param sourceDict: mapping to inspect.
    :param defaultDict: mapping of required key -> expected value.
    :return: True if every default key is present with an equal value.
    """
    # all() with a short-circuiting generator replaces the manual loop;
    # ``items()`` keeps this working on both Python 2.7 and 3.
    return all(k in sourceDict and sourceDict[k] == v
               for k, v in defaultDict.items())
def nextTick(func):
    """
    Moves function calling to the next frame
    """
    from functools import wraps

    @wraps(func)  # preserve __name__/__doc__ of the wrapped function
    def wrapper(*args, **kwargs):
        # Schedule on the BigWorld loop instead of calling synchronously;
        # the wrapped function's return value is therefore discarded.
        BigWorld.callback(0.01, lambda : func(*args, **kwargs))

    return wrapper
# okay decompyling c:\Users\PC\wotsources\files\originals\res\scripts\client_common\shared_utils\__init__.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2016.05.01 15:25:53 Střední Evropa (letní čas)
| [
"[email protected]"
] | |
4cec80ef5706409c00bd637115c1cd8fac68858c | aab3df1e96ef417a6f85837eca1d640b787e1276 | /sklearn_porter/estimator/classifier/KNeighborsClassifier/__init__.py | 401a6844995e4c7e01075a246cbf79d7bff3a43d | [
"MIT"
] | permissive | apasanen/sklearn-porter | 88634b6f4985e1cd1ee09869495a8a3a3f6c1134 | 43a403c45c7538876df5dbe3c2f79e4aca965d47 | refs/heads/master | 2020-04-10T21:28:19.501112 | 2018-12-11T08:39:40 | 2018-12-11T09:21:05 | 161,297,343 | 0 | 0 | MIT | 2018-12-11T07:52:21 | 2018-12-11T07:52:21 | null | UTF-8 | Python | false | false | 8,456 | py | # -*- coding: utf-8 -*-
import os
import json
from json import encoder
from sklearn_porter.estimator.classifier.Classifier import Classifier
class KNeighborsClassifier(Classifier):
"""
See also
--------
sklearn.neighbors.KNeighborsClassifier
http://scikit-learn.org/stable/modules/generated/
sklearn.neighbors.KNeighborsClassifier.html
"""
SUPPORTED_METHODS = ['predict']
# @formatter:off
TEMPLATES = {
'java': {
'type': '{0}',
'arr': '{{{0}}}',
'arr[]': '{type}[] {name} = {{{values}}};',
'arr[][]': '{type}[][] {name} = {{{values}}};',
'indent': ' ',
},
'js': {
'type': '{0}',
'arr': '[{0}]',
'arr[]': 'var {name} = [{values}];',
'arr[][]': 'var {name} = [{values}];',
'indent': ' ',
},
}
# @formatter:on
def __init__(self, estimator, target_language='java',
target_method='predict', **kwargs):
"""
Port a trained estimator to the syntax of a chosen programming
language.
Parameters
----------
:param estimator : KNeighborsClassifier
An instance of a trained KNeighborsClassifier estimator.
:param target_language : string, default: 'java'
The target programming language.
:param target_method : string, default: 'predict'
The target method of the estimator.
"""
super(KNeighborsClassifier, self).__init__(
estimator, target_language=target_language,
target_method=target_method, **kwargs)
if estimator.weights != 'uniform':
msg = "Only 'uniform' weights are supported for this classifier."
raise NotImplementedError(msg)
self.estimator = estimator
def export(self, class_name, method_name, export_data=False,
export_dir='.', export_filename='data.json',
export_append_checksum=False, **kwargs):
"""
Port a trained estimator to the syntax of a chosen programming language.
Parameters
----------
:param class_name : string
The name of the class in the returned result.
:param method_name : string
The name of the method in the returned result.
:param export_data : bool, default: False
Whether the model data should be saved or not.
:param export_dir : string, default: '.' (current directory)
The directory where the model data should be saved.
:param export_filename : string, default: 'data.json'
The filename of the exported model data.
:param export_append_checksum : bool, default: False
Whether to append the checksum to the filename or not.
Returns
-------
:return : string
The transpiled algorithm with the defined placeholders.
"""
# Arguments:
self.class_name = class_name
self.method_name = method_name
# Estimator:
est = self.estimator
# Basic parameters:
self.metric = est.metric
self.n_classes = len(est.classes_)
self.n_templates = len(est._fit_X) # pylint: disable=W0212
self.n_features = len(est._fit_X[0]) # pylint: disable=W0212
self.n_neighbors = est.n_neighbors
self.algorithm = est.algorithm
self.power_param = est.p
if self.algorithm != 'brute':
from sklearn.neighbors.kd_tree import KDTree # pylint: disable-msg=E0611
from sklearn.neighbors.ball_tree import BallTree # pylint: disable-msg=E0611
tree = est._tree # pylint: disable=W0212
if isinstance(tree, (KDTree, BallTree)):
self.tree = tree
if self.target_method == 'predict':
# Exported:
if export_data and os.path.isdir(export_dir):
self.export_data(export_dir, export_filename,
export_append_checksum)
return self.predict('exported')
# Separated:
return self.predict('separated')
def export_data(self, directory, filename, with_md5_hash=False):
"""
Save model data in a JSON file.
Parameters
----------
:param directory : string
The directory.
:param filename : string
The filename.
:param with_md5_hash : bool, default: False
Whether to append the checksum to the filename or not.
"""
model_data = {
'X': self.estimator._fit_X.tolist(), # pylint: disable=W0212
'y': self.estimator._y.tolist(), # pylint: disable=W0212
'kNeighbors': self.n_neighbors,
'nClasses': self.n_classes,
'power': self.power_param
}
encoder.FLOAT_REPR = lambda o: self.repr(o)
json_data = json.dumps(model_data, sort_keys=True)
if with_md5_hash:
import hashlib
json_hash = hashlib.md5(json_data).hexdigest()
filename = filename.split('.json')[0] + '_' + json_hash + '.json'
path = os.path.join(directory, filename)
with open(path, 'w') as fp:
fp.write(json_data)
def predict(self, temp_type):
"""
Transpile the predict method.
Parameters
----------
:param temp_type : string
The kind of export type (embedded, separated, exported).
Returns
-------
:return : string
The transpiled predict method as string.
"""
# Exported:
if temp_type == 'exported':
temp = self.temp('exported.class')
return temp.format(class_name=self.class_name,
method_name=self.method_name,
n_features=self.n_features)
# Separated:
if temp_type == 'separated':
meth = self.create_method()
return self.create_class(meth)
def create_method(self):
"""
Build the estimator method or function.
Returns
-------
:return : string
The built method as string.
"""
# Distance computation
metric_name = '.'.join(['separated', 'metric', self.metric])
distance_comp = self.temp(metric_name, n_indents=1, skipping=True)
temp_method = self.temp('separated.method.predict', n_indents=1,
skipping=True)
return temp_method.format(class_name=self.class_name,
method_name=self.method_name,
distance_computation=distance_comp)
    def create_class(self, method):
        """
        Build the estimator class.

        Parameters
        ----------
        :param method : string
            The transpiled predict method to embed in the class body.

        Returns
        -------
        :return : string
            The built class as string.
        """
        # Templates for a scalar value, a 1-D array and a 2-D array literal.
        temp_type = self.temp('type')
        temp_arr = self.temp('arr')
        temp_arr_ = self.temp('arr[]')
        temp_arr__ = self.temp('arr[][]')
        # Samples: serialize the training matrix X as a 2-D array literal.
        temps = []
        for atts in enumerate(self.estimator._fit_X): # pylint: disable=W0212
            # atts is (index, row); only the row values are used.
            tmp = [temp_type.format(self.repr(a)) for a in atts[1]]
            tmp = temp_arr.format(', '.join(tmp))
            temps.append(tmp)
        temps = ', '.join(temps)
        temps = temp_arr__.format(type='double', name='X', values=temps,
                                  n=self.n_templates, m=self.n_features)
        # Classes: serialize the label vector y as a 1-D int array literal.
        classes = self.estimator._y # pylint: disable=W0212
        classes = [temp_type.format(int(c)) for c in classes]
        classes = ', '.join(classes)
        classes = temp_arr_.format(type='int', name='y', values=classes,
                                   n=self.n_templates)
        temp_class = self.temp('separated.class')
        return temp_class.format(class_name=self.class_name,
                                 method_name=self.method_name, method=method,
                                 n_features=self.n_features, X=temps, y=classes,
                                 n_neighbors=self.n_neighbors,
                                 n_templates=self.n_templates,
                                 n_classes=self.n_classes,
                                 power=self.power_param)
| [
"[email protected]"
] | |
01bc8dd81cafcbbf52dd9b8525c0fd40f828b6f4 | 274521d5ccfbaebb97cdfbfa340d951eee7c9efa | /Python/PythonProgrammingLanguage/Encapsulation/encap_env/bin/jsonschema | 116515a0218c94456db568d63ab738fffe5c5f5e | [
"MIT"
] | permissive | nitin-cherian/LifeLongLearning | ef8e1ed61e4bf8b6ae4a0ae642c559ab47be84b4 | 84084792058358365162c645742c70064a2d5fd6 | refs/heads/master | 2021-01-21T10:38:41.797326 | 2018-08-23T01:28:10 | 2018-08-23T01:28:10 | 91,701,351 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 323 | #!/home/nitin/Learn/Repositories/Github/LifeLongLearning/Python/PythonProgrammingLanguage/Encapsulation/encap_env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from jsonschema.cli import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
76ba202c34534ea332d0d9c2b7c22175514cb943 | bf331831c2c532d76b91c11127cc4c76cf9f0031 | /166/D/ans_errorneous.py | eac398768204dd7b212a8fb9e6f37ee62331d50c | [] | no_license | mugenen/Codeforces-Solution | 519899d658a52dc87bfdba81110e9851ccf3b6de | f69874ad46acc511f4485dc29249f7010f562ea9 | refs/heads/master | 2021-01-22T04:49:48.986989 | 2013-02-25T12:36:10 | 2013-02-25T12:36:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,288 | py | import sys
import collections
import bisect
import math
class Trie:
    # Suffix-trie used to enumerate distinct substrings; siblings are kept
    # as a singly linked list (bros) rather than a dict to save memory.
    class Node:
        def __init__(self, x, bros = None, child = None):
            # data: the character stored at this node ('$' marks a leaf).
            self.data = x
            # bros: next sibling in the linked list of children.
            self.bros = bros
            # child: first child node.
            self.child = child
        def get_child(self, x):
            """Return the child holding x, or None (linear sibling scan)."""
            child = self.child
            while child:
                if child.data == x: break
                child = child.bros
            return child
        def set_child(self, x):
            """Prepend a new child holding x and return it."""
            child = Trie.Node(x, self.child)
            self.child = child
            return child
        def traverse(self, leaf, filter, count, k):
            # Yield every stored sequence under this node whose number of
            # characters contained in `filter` does not exceed k.
            if self.data == '$':
                yield []
            else:
                child = self.child
                while child:
                    temp = count
                    if self.data in filter:
                        temp += 1
                    if temp > k:
                        # Prune: too many filtered characters on this path.
                        child = child.bros
                        continue
                    for x in child.traverse(leaf, filter, temp, k):
                        yield [self.data] + x
                    child = child.bros
    def __init__(self, x = None):
        # NOTE(review): `leaf` is stored but traverse() hard-codes '$' as
        # the terminator, so insert() is expected to append '$' itself.
        self.root = Trie.Node(None)
        self.leaf = x
    def insert(self, seq):
        """Insert a sequence, creating nodes along the path as needed."""
        node = self.root
        for x in seq:
            child = node.get_child(x)
            if not child:
                child = node.set_child(x)
            node = child
        if not node.get_child(self.leaf):
            node.set_child(self.leaf)
    def traverse(self, filter, k):
        """Yield all stored sequences with at most k characters in filter."""
        node = self.root.child
        while node:
            for x in node.traverse(self.leaf, filter, 0, k):
                yield x
            node = node.bros
string = raw_input()
filter_txt = raw_input()
k = int(raw_input())
filter = set()
A = ord('a')
for i in xrange(len(filter_txt)):
if filter_txt[i] == '0':
filter.add(chr(A + i))
trie = Trie()
for i in xrange(len(string)):
for j in xrange(i + 1, len(string) + 1):
trie.insert(string[i:j] + '$')
# print string[i:j] + '$', i, j
result = 0
check = set()
for s in trie.traverse(filter, k):
if s != []:
# print s
check.add(''.join(s))
# result += 1
#print result
print len(check)
| [
"[email protected]"
] | |
627649476ff37a030466b373ef750b7e153b0eb0 | 498fcf34fa4482be5c9fefc488666e60edcf46c7 | /supervised_learning/0x01-classification/17-deep_neural_network.py~ | 90473c634dabec13840cc70707d19fee907312fb | [] | no_license | MansourKef/holbertonschool-machine_learning | 7dbc465def04c311c1afb0e8b8903cbe34c72ad3 | 19f78fc09f0ebeb9f27f3f76b98e7a0e9212fd22 | refs/heads/main | 2023-03-12T16:18:08.919099 | 2021-03-05T09:42:09 | 2021-03-05T09:42:09 | 317,303,125 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,201 | #!/usr/bin/env python3
"""module"""
import numpy as np
class DeepNeuralNetwork:
    """Deep neural network scaffold: validates the architecture and builds
    He-initialized weight matrices for each layer."""

    def __init__(self, nx, layers):
        """Create the network.

        nx: number of input features (positive int)
        layers: list with the number of nodes in each layer (positive ints)
        Raises TypeError / ValueError on invalid arguments.
        """
        if type(nx) is not int:
            raise TypeError("nx must be an integer")
        if nx < 1:
            raise ValueError("nx must be a positive integer")
        if type(layers) is not list or len(layers) == 0:
            raise TypeError("layers must be a list of positive integers")
        self.L = len(layers)   # number of layers
        self.cache = {}        # forward-propagation intermediaries
        self.weights = {}      # W{l} and b{l} per layer l (1-based)
        fan_in = nx            # width of the previous layer
        for depth, width in enumerate(layers, start=1):
            # Same check order as before: comparison first, then type test.
            if width <= 0 or type(width) is not int:
                raise TypeError("layers must be a list of positive integers")
            # He et al. initialization: N(0, 1) scaled by sqrt(2 / fan_in).
            self.weights['W{}'.format(depth)] = (
                np.random.randn(width, fan_in) * np.sqrt(2 / fan_in))
            self.weights['b{}'.format(depth)] = np.zeros([width, 1])
            fan_in = width
| [
"[email protected]"
] | ||
0c98c3fa06970c85f3b2a81e02355551274fcf41 | 5b22437902bffa0f62b375d56bfb2b4485ef43f0 | /src/video_inpainting/padded_masked_video_tar_dataset.py | 93491de023c768894243ad89932a5aa1d0875600 | [
"MIT",
"CC-BY-SA-3.0",
"CC-BY-SA-4.0"
] | permissive | JohnsonzxChang/devil | eafa09f5258b4f33eda9564077814c6e63473a0f | 296115cd5f4952c7dc65bbcaaf2d1d5c55ef5d35 | refs/heads/public | 2023-07-03T12:07:58.917440 | 2021-08-10T00:06:38 | 2021-08-10T00:06:38 | 555,846,483 | 1 | 0 | MIT | 2022-10-22T13:22:43 | 2022-10-22T13:22:42 | null | UTF-8 | Python | false | false | 1,437 | py | import tarfile
from itertools import cycle
from .padded_masked_video_dataset import PaddedMaskedVideoDataset
class PaddedMaskedVideoTarDataset(PaddedMaskedVideoDataset):
    """Masked-video dataset whose frames and masks live inside tar archives.

    Each top-level directory inside an archive is treated as one video.
    """
    def __init__(self, frames_dataset_path, masks_dataset_path):
        # Archives stay open for the dataset's lifetime; members are read lazily.
        self._frames_dataset_tar = tarfile.open(frames_dataset_path, 'r')
        self._masks_dataset_tar = tarfile.open(masks_dataset_path, 'r')
        frame_video_names = sorted([info.name for info in self._frames_dataset_tar.getmembers() if info.isdir()])
        mask_video_names = sorted([info.name for info in self._masks_dataset_tar.getmembers() if info.isdir()])
        super().__init__(frame_video_names, mask_video_names)
    def video_frame_files_iter(self, frame_video_name):
        """Yield file objects for the frames of one video, in sorted order."""
        frame_paths = sorted([info.name for info in self._frames_dataset_tar.getmembers()
                              if info.name.startswith(frame_video_name) and info.isfile()])
        for frame_path in frame_paths:
            yield self._frames_dataset_tar.extractfile(frame_path)
    def video_mask_files_iter(self, mask_video_name):
        """Yield mask file objects forever, ping-ponging through the sequence.

        The mask list is extended with its own reverse (excluding both
        endpoints) and cycled, so masks play forward-backward-forward...
        to cover frame sequences longer than the mask sequence.
        """
        mask_paths = sorted([info.name for info in self._masks_dataset_tar.getmembers()
                             if info.name.startswith(mask_video_name) and info.isfile()])
        mask_paths_c = cycle(mask_paths + mask_paths[len(mask_paths)-2:0:-1])
        for mask_path in mask_paths_c:
            yield self._masks_dataset_tar.extractfile(mask_path)
| [
"[email protected]"
] | |
6579e872948f51bf21e7c2ea85957dcf238da3da | 1b862f34c125ce200244dd79e4fda4b5b605ce2e | /.history/ML_T2_Validation_20210612133304.py | f4916063cc50283bddabf46d8c1a1f2ed57ad41b | [] | no_license | edwino26/CoreImages | 26085a49cf1cb79442ae563a88354b2fdceace87 | 6bf6e68cac8ab36c87b1e6ea702bfe6882b0f40e | refs/heads/master | 2023-06-22T12:53:37.344895 | 2021-07-21T04:31:44 | 2021-07-21T04:31:44 | 309,553,247 | 0 | 4 | null | 2021-04-29T23:23:15 | 2020-11-03T02:45:07 | Lasso | UTF-8 | Python | false | false | 9,517 | py | #T2 TEST DATA
# %%
import pandas as pd
import numpy as np
import matplotlib.pyplot as plt
import pickle
from scipy import interpolate
from scipy.integrate import simps
from numpy import trapz
from sklearn.metrics import mean_squared_error
# %%
#Load Stack
UVStack = pd.read_excel('./ML_Results/T2_test/ImgStack.xls')
ImgStackk = UVStack.copy().to_numpy()
# %%
def integrate(y_vals, h):
    """Composite Simpson's-rule integral of equally spaced samples.

    y_vals: sequence of function samples (a textbook-correct Simpson
        estimate needs an odd number of points, but any length >= 2 works)
    h: spacing between consecutive samples
    Returns the approximate integral as a float.
    """
    # Interior samples alternate weights 4, 2, 4, 2, ... starting at 4.
    interior = y_vals[1:-1]
    weighted = sum(4 * y if k % 2 else 2 * y
                   for k, y in enumerate(interior, start=1))
    total = y_vals[0] + y_vals[-1] + weighted
    return total * (h / 3.0)
# %% Load and resample "results" (res) file
sub = pd.read_excel('./ML_Results/T2_test/sub.xls')
res = pd.read_excel('./ML_Results/T2_test/Results.xls')
res = res[res.Well == 'T2']
res.sort_values(by=['DEPT'])
res.drop(['Unnamed: 0', 'Set'], axis=1, inplace=True)
res.reset_index(inplace=True, drop=True)
dep = np.arange(min(res.DEPT), max(res.DEPT),0.5) #res is not at 0.5 thanks to balancing
res_rs = pd.DataFrame(columns=[res.columns])
res_rs.DEPT = dep
for i in range(len(res.columns)):
if i != 8:
f = interpolate.interp1d(res.DEPT, res.iloc[:,i])
res_rs.iloc[:,i] =f(dep)
else:
res_rs.iloc[:,i] = res.Well[0]
#T2_rs.dropna(inplace=True)
res = res_rs.copy()
difference = res.DEPT.diff()
difference.describe()
# %%
TT = pd.read_excel('./ML_Results/Train_Test_Results.xls')
istr = 0
iend = 42344
dplot_o = 3671
dplot_n = 3750
shading = 'bone'
# %% Load Log Calculations
T2_x = pd.read_excel('./Excel_Files/T2.xls',sheet_name='T2_data')
T2_x = T2_x[['DEPTH','GR_EDTC','RHOZ','AT90','NPHI','Vsh','Vclay','grain_density','porosity',
'RW2','Sw_a','Sw_a1','Sw_p','Sw_p1','SwWS','Swsim','Swsim1','PAY_archie',
'PAY_poupon','PAY_waxman','PAY_simandoux']]
# %%
T2_rs = pd.DataFrame(columns=[T2_x.columns])
T2_rs.iloc[:,0] = dep
for i in range(len(T2_x.columns)):
f = interpolate.interp1d(T2_x.DEPTH, T2_x.iloc[:,i])
T2_rs.iloc[:,i] =f(dep)
#T2_rs.dropna(inplace=True)
T2_x = T2_rs.copy()
difference_T2 = T2_x.DEPTH.diff()
difference.describe()
# %%
plt.figure()
plt.subplot2grid((1, 10), (0, 0), colspan=3)
plt.plot(sub['GRAY'], sub['DEPTH'], 'mediumseagreen', linewidth=0.5);
plt.axis([50, 250, dplot_o, dplot_n]);
plt.gca().invert_yaxis();
plt.fill_between(sub['GRAY'], 0, sub['DEPTH'], facecolor='green', alpha=0.5)
plt.xlabel('Gray Scale RGB')
plt.subplot2grid((1, 10), (0, 3), colspan=7)
plt.imshow(ImgStackk[istr:iend,80:120], aspect='auto', origin='upper', extent=[0,1,dplot_n,dplot_o], cmap=shading);
plt.axis([0, 1, dplot_o, dplot_n]);
plt.gca().invert_yaxis()
plt.xlabel('Processed Image')
plt.colorbar()
p_50 = np.percentile(sub['DEPTH'], 50)
plt.yticks([]); plt.xticks([])
plt.subplots_adjust(wspace = 20, left = 0.1, right = 0.9, bottom = 0.1, top = 0.9)
plt.show()
# %%
CORE =pd.read_excel('./CORE/CORE.xlsx',sheet_name='XRD')
mask = CORE.Well.isin(['T2'])
T2_Core = CORE[mask]
prof=T2_Core['Depth']
clays=T2_Core['Clays']
xls1 = pd.read_excel ('./CORE/CORE.xlsx', sheet_name='Saturation')
mask = xls1.Well.isin(['T2'])
T2_sat = xls1[mask]
long=T2_sat ['Depth']
poro=T2_sat ['PHIT']
grain=T2_sat ['RHOG']
sw_core=T2_sat ['Sw']
klinkenberg = T2_sat ['K']
minimo=grain.min()
maximo=grain.max()
c=2.65
d=2.75
norm=(((grain-minimo)*(d-c)/(maximo-minimo))+c)
xls2 = pd.read_excel ('./CORE/CORE.xlsx', sheet_name='Gamma')
mask = xls2.Well.isin(['T2'])
T2_GR = xls2[mask]
h=T2_GR['Depth']
cg1=T2_GR['GR_Scaled']
# %%
# ~~~~~~~~~~~~~~~~~~ Plot Results ~~~~~~~~~~~~~~~~~~~~~~
ct = 0
top= dplot_o
bottom= dplot_n
no_plots = 9
ct+=1
plt.figure(figsize=(13,9))
plt.subplot(1,no_plots,ct)
plt.plot (T2_x.GR_EDTC,T2_x.DEPTH,'g', lw=3)
#plt.fill_between(T2_x.GR_EDTC.values.reshape(-1), T2_x.DEPTH.values.reshape(-1), y2=0,color='g', alpha=0.8)
plt.title('$Gamma Ray$',fontsize=8)
plt.axis([40,130,top,bottom])
plt.xticks(fontsize=8)
plt.yticks(fontsize=8)
plt.xlabel('Gamma Ray ',fontsize=6)
plt.gca().invert_yaxis()
plt.grid(True)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
ct+=1
plt.subplot(1,no_plots,ct)
plt.plot (T2_x.PAY_poupon,T2_x.DEPTH,'r',lw=0.5)
h_P = integrate(T2_x.PAY_poupon.values, 0.5)
plt.title('$PAY Poupon$',fontsize=8)
plt.fill_between(T2_x.PAY_poupon.values.reshape(-1),T2_x.DEPTH.values.reshape(-1), color='r', alpha=0.8)
plt.axis([0.01,0.0101,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
#Waxman-Smits
ct+=1
plt.subplot(1,no_plots,ct)
plt.plot (T2_x.PAY_waxman,T2_x.DEPTH,'g',lw=0.5)
h_WS = integrate(T2_x.PAY_waxman.values, 0.5)
plt.title('$PAY Waxman$',fontsize=8)
plt.fill_between(T2_x.PAY_waxman.values.reshape(-1),T2_x.DEPTH.values.reshape(-1), color='g', alpha=0.8)
plt.axis([0.01,0.0101,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
#Simandoux
ct+=1
plt.subplot(1,no_plots,ct)
plt.plot (T2_x.PAY_simandoux,T2_x.DEPTH,'y',lw=0.5)
h_S = integrate(T2_x.PAY_simandoux.values, 0.5)
plt.title('$PAY Simandoux$',fontsize=8)
plt.fill_between(T2_x.PAY_simandoux.values.reshape(-1),T2_x.DEPTH.values.reshape(-1), color='y', alpha=0.8)
plt.axis([0.01,0.0101,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
ct+=1 #RGB Gray from Image
plt.subplot(1,no_plots,ct)
plt.plot(sub['GRAY'], sub['DEPTH'], 'mediumseagreen', linewidth=0.5);
plt.axis([50, 250, dplot_o, dplot_n]);
plt.xticks(fontsize=8)
#plt.title('$Core Img$',fontsize=8)
plt.gca().invert_yaxis();
plt.gca().yaxis.set_visible(False)
plt.fill_between(sub['GRAY'], 0, sub['DEPTH'], facecolor='green', alpha=0.5)
plt.xlabel('Gray Scale RGB', fontsize=7)
ct+=1 # True UV from Image
plt.subplot(1,no_plots,ct, facecolor='#302f43')
corte= 170
PAY_Gray_scale = res['GRAY'].copy()
PAY_Gray_scale.GRAY[PAY_Gray_scale.GRAY<corte] = 0
PAY_Gray_scale.GRAY[PAY_Gray_scale.GRAY>=corte] = 1
h_TRUE_UV = integrate(PAY_Gray_scale.values, 0.5)
plt.plot (PAY_Gray_scale,res.DEPT,'#7d8d9c',lw=0.5)
plt.title('$OBJETIVO (suavizado-a-2.5ft)$',fontsize=10)
plt.fill_between(PAY_Gray_scale.values.reshape(-1),res.DEPT.values.reshape(-1), color='#7d8d9c', alpha=0.8)
plt.axis([0.01,0.0101,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
ct+=1
plt.subplot(1,no_plots,ct)
plt.imshow(ImgStackk[istr:iend,80:120], aspect='auto', origin='upper', extent=[0,1,dplot_n,dplot_o], cmap=shading);
plt.axis([0, 1, dplot_o, dplot_n]);
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.xlabel('Stacked UV Photos', fontsize=7)
plt.colorbar()
p_50 = np.percentile(sub['DEPTH'], 50)
plt.yticks([]); plt.xticks([])
ct+=1
plt.subplot(1,no_plots,ct)
plt.plot (res['RandomForest'],res.DEPT,'r',lw=1)
plt.plot (res.GRAY,res.DEPT,'k',lw=0.5)
plt.title('ML: GRIS',fontsize=12)
plt.axis([0,2,top,bottom])
plt.xticks(fontsize=8)
plt.xlabel('RandomForest',fontsize=7)
plt.gca().invert_yaxis()
plt.gca().invert_xaxis()
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.xlim(0, 255)
plt.hlines(y=3665.65, xmin=0, xmax=130)
plt.hlines(y=3889.5, xmin=0, xmax=130)
ct+=1
plt.subplot(1,no_plots,ct, facecolor='#302f43')
PAY_Gray_scale2 = res['RandomForest'].copy().rename(columns={'RandomForest':'GRAY'})
PAY_Gray_scale2.GRAY[PAY_Gray_scale2.GRAY<corte] = 0
PAY_Gray_scale2.GRAY[PAY_Gray_scale2.GRAY>=corte] = 1
h_ML = integrate(PAY_Gray_scale2.values, 0.5)
plt.plot (PAY_Gray_scale2, res.DEPT,'#7d8d9c',lw=0.5)
plt.title('$RESULTADO$',fontsize=8)
plt.fill_between(PAY_Gray_scale2.values.reshape(-1),res.DEPT.values.reshape(-1), color='#7d8d9c', alpha=0.8)
plt.axis([0.01,0.0101,top,bottom])
plt.xticks(fontsize=8)
plt.gca().invert_yaxis()
plt.gca().xaxis.set_visible(False)
plt.gca().yaxis.set_visible(False)
plt.grid(True)
plt.suptitle('Pozo T2: Comparación Final')
plt.show()
# %%
plt.figure(figsize=(10,9))
plt.subplot(1,1,1)
plt.plot(res.GRAY, res['RandomForest'], 'ko')
plt.plot(res.GRAY, res.GRAY, 'r')
plt.xlim(0, 255)
plt.ylim(0, 255)
plt.xlabel('Valor en Escala de Gris Suavizado a res. de Registros',fontsize=17)
plt.ylabel('Predicción de Escala de Gris usando Random Forest',fontsize=17)
plt.show()
# %% Erro Calculation
# T2_x.PAY_poupon,T2_x.DEPTH
# T2_x.PAY_waxman
# T2_x.PAY_simandoux
# %%
pay = pd.DataFrame(columns=['Poupon', 'Waxman_Smits', 'Simandoux', 'Machine_L', 'True_UV'], index=['ft','RMSE'])
pay.loc['ft', 'Poupon'] = h_P.round(2)
pay.loc['ft', 'Waxman_Smits'] = h_WS.round(2)
pay.loc['ft', 'Simandoux'] = h_S.round(2)
pay.loc['ft', 'Machine_L'] = h_ML.round(2)
pay.loc['ft', 'True_UV'] = h_TRUE_UV.round(2)
pay.loc['RMSE', 'Poupon'] = pay.iloc[0,0] - pay.iloc[0,4]
pay.loc['RMSE', 'Waxman_Smits'] = pay.iloc[0,1] - pay.iloc[0,4]
pay.loc['RMSE', 'Simandoux'] = (pay.iloc[0,2] - pay.iloc[0,4]).round(2)
pay.loc['RMSE', 'Machine_L'] = pay.iloc[0,3] - pay.iloc[0,4]
pay.loc['RMSE', 'True_UV'] = pay.iloc[0,4] - pay.iloc[0,4]
pay.head()
# %%
payN = pay.T.copy()
payN.reset_index(inplace=True)
plt.figure()
ax = payN.plot.bar(x='index', y='RMSE', rot=0)
# %%
| [
"[email protected]"
] | |
dbe01cfd78374273c1c4be47f16e8c86a9962fcb | 13d222bc3332378d433835914da26ed16b583c8b | /src/pemjh/challenge52/main.py | b1407abc5c0d32694b4aaf0241a641dcaad75fcd | [] | no_license | mattjhussey/pemjh | c27a09bab09cd2ade31dc23fffac07374bea9366 | 2ebb0a525d2d1c0ee28e83fdc2638c2bec97ac99 | refs/heads/master | 2023-04-16T03:08:59.390698 | 2023-04-08T10:54:00 | 2023-04-08T10:54:00 | 204,912,926 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 681 | py | """ Challenge052 """
def get_sorted_string(unsorted):
    """Return the characters of ``str(unsorted)`` sorted ascending.

    >>> get_sorted_string(54326)
    '23456'
    >>> get_sorted_string("aBayU")
    'BUaay'
    """
    characters = list(str(unsorted))
    characters.sort()
    return "".join(characters)
def main():
    """challenge052: find the smallest positive integer x such that
    2x, 3x, 4x, 5x and 6x all contain exactly the same digits as x."""
    candidate = 0
    while True:
        candidate += 1
        # Digit multiset of the candidate, as a sorted character list.
        signature = sorted(str(candidate))
        if all(sorted(str(candidate * m)) == signature
               for m in range(2, 7)):
            return candidate
| [
"[email protected]"
] | |
fcf6d5b203f22c6e42690390171431383fde3627 | 9b328903c7ce1ddfc957c6db4a5fef265bce1dad | /preprocess.py | 2d04c659dfe88bfdce2082cc1a99285c36834611 | [] | no_license | matatabinoneko/viral_tweet_generation | 4a610b0327d7ce0e8e2b94eec0f82aa9f1c35ca1 | 1e26de293420dbed6f50f161b3210c9d14e3b2d4 | refs/heads/main | 2023-03-12T16:11:14.187622 | 2021-03-02T00:11:47 | 2021-03-02T00:11:47 | 330,305,509 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,466 | py | '''
ツイートの前処理を行う
'''
import argparse
import logzero
from logzero import logger
import logging
from os import path
from typing import List
from filtering_type import EmoticonFilter
import json
import MeCab
from collections import defaultdict
import re
logger.setLevel(logging.INFO)
mecabTagger = MeCab.Tagger("-Ochasen")
hiragana = re.compile('[ぁ-ゟ]+')
def parse_args():
    """Parse command-line arguments.

    -i/--input   : input file path (resolved to an absolute path)
    -o/--output  : output file path (resolved to an absolute path)
    --tokenizer  : 'char' (default) for character tokenization,
                   'mecab' for MeCab morphological tokenization
    """
    parser = argparse.ArgumentParser()
    parser.add_argument(
        '-i', '--input', type=path.abspath, help='input file path')
    parser.add_argument(
        '-o', '--output', type=path.abspath, help='output file path')
    parser.add_argument(
        "--tokenizer", type=str, default="char", help="tokenizer. Select mecab if you want to use mecab"
    )
    args = parser.parse_args()
    return args
def full_width2half_width(text: str) -> str:
    """Convert full-width ASCII characters to their half-width forms.

    Maps the 94 code points U+FF01..U+FF5E onto U+0021..U+007E; every
    other character is left unchanged.
    """
    # Build the translation table once per call: full-width -> half-width.
    table = str.maketrans(
        {chr(0xFF01 + offset): chr(0x21 + offset) for offset in range(94)})
    return text.translate(table)
def test_full_width2half_width():
text = "!"#$%&'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\]^_`>?@abcdefghijklmnopqrstuvwxyz{|}~"
trans_text = full_width2half_width(text)
answer = '!"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ[\\]^_`>?@abcdefghijklmnopqrstuvwxyz{|}~'
assert trans_text == answer, f"{trans_text}\n{answer}"
def is_char_length(text: str, max_length=140) -> bool:
    """Return True if ``text`` is at most ``max_length`` characters long.

    Bug fix: the original compared against a hard-coded 140 and silently
    ignored the ``max_length`` parameter.  The default (140, the classic
    tweet limit) keeps existing callers' behavior unchanged.
    """
    return len(text) <= max_length
def test_is_char_length():
    """Boundary test for is_char_length around the default 140-char limit."""
    # Lengths 0, 139, 140 pass; 141 fails.
    text_list = ["", ''.join(['a' for _ in range(139)]), ''.join(
        ['a' for _ in range(140)]), ''.join(['a' for _ in range(141)])]
    answer_list = [True, True, True, False]
    for text, answer in zip(text_list, answer_list):
        assert is_char_length(text) == answer
def get_keywords(text: str) -> List[str]:
    """
    Extract keyword candidates from a tweet using MeCab.

    Parameters
    ----------
    text : str
        The tweet text.

    Returns
    -------
    keywords : List[str]
        Deduplicated keywords; order is arbitrary (set-based dedup).
    """
    keywords = []
    node = mecabTagger.parseToNode(text)
    while node:
        word = node.surface
        # feature is a CSV string; field 0 is the POS ("名詞" = noun),
        # field 1 the sub-POS ("代名詞" = pronoun).
        hinshi = node.feature.split(",")
        # Keep non-pronoun nouns that are not written purely in hiragana
        # (pure-hiragana nouns tend to be too generic to be keywords).
        if hinshi[0] == "名詞" and hinshi[1] != "代名詞" and not hiragana.fullmatch(word):
            keywords.append(word)
        node = node.next
    keywords = list(set(keywords))
    return keywords
def test_get_keywords():
    """Spot-check keyword extraction on short Japanese phrases."""
    # "私のご飯" should yield only the noun "ご飯"; the other phrases are
    # pronouns / pure-hiragana words and should yield nothing.
    queries = ["私のご飯", 'あれとこれ', 'ももとすもも']
    answers = [["ご飯"], [], []]
    for q, a in zip(queries, answers):
        q = get_keywords(q)
        assert set(q) == set(a), f"{q},{a}"
def main():
    """Preprocess a JSON-lines tweet file.

    Drops tweets with emoticons or more than 140 characters, strips user
    names, normalizes the text, tokenizes it (chars or MeCab), and writes
    one JSON object per line: {"keywords": [...], "tweet": "..."}.
    """
    args = parse_args()
    logger.info(args)

    def tokenize(text):
        # Bug fix: the original closure returned ``self.mecab.parse(...)``,
        # which raised NameError whenever --tokenizer mecab was selected
        # (there is no ``self`` here). Use the module-level tagger instead.
        if args.tokenizer == 'mecab':
            return mecabTagger.parse(text).split()
        return ' '.join(list(text))

    # Renamed from ``filter`` to avoid shadowing the builtin.
    emoticon_filter = EmoticonFilter()
    cnt_dic = defaultdict(int)
    with open(args.input, 'r') as fin, open(args.output, 'w') as fout:
        for line in fin:
            try:
                line = json.loads(line)
                text = line["text"]
                # Exclude tweets containing emoticons.
                if emoticon_filter._has_emoticon(text):
                    cnt_dic['emoji'] += 1
                    continue
                if not is_char_length(text):
                    logger.debug(f"this tweet is exceed 140 chars. \n{text}")
                    cnt_dic["more_than_140"] += 1
                    continue
                # Remove @user mentions.
                text = emoticon_filter._username_filter(text)
                # Normalize spaces etc.
                text = emoticon_filter._normalization(text)
                keywords = list(map(tokenize, get_keywords(text)))
                text = tokenize(text)
                print(json.dumps(
                    {"keywords": keywords, "tweet": text}, ensure_ascii=False), file=fout)
            except Exception:
                # Narrowed from a bare ``except:`` so KeyboardInterrupt and
                # SystemExit still propagate; malformed records are skipped.
                cnt_dic['error'] += 1
                logger.error(f"this data is skipped {line}")
    logger.info(
        f"emoji tweet: {cnt_dic['emoji']}\nmore than 140 tweet:{cnt_dic['more_than_140']}\nerror:{cnt_dic['error']}")
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
caa4886993b2a6034b738129474f78353d70e2af | c427d9142df033af2b509412153dae35706ede61 | /recognition/pytorch_crnn/models/layers.py | fbaa09d9385382391ff58e1b8a380ebc4e74d249 | [] | no_license | brahimbellahcen/ocr_toolkit | 0b68776fe20b05f48807f856fffac752e3e08e66 | b4516d4193132eb48f821926dd6ef5d368f53899 | refs/heads/master | 2022-11-13T10:21:14.083497 | 2020-06-26T15:31:38 | 2020-06-26T15:31:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,032 | py | import torch
import torch.nn as nn
import torch.nn.functional as F
class blockCNN(nn.Module):
    """Conv2d building block with optional BatchNorm / ReLU / MaxPool."""

    def __init__(self, in_nc, out_nc, kernel_size, padding, stride=1):
        super().__init__()
        # Remember the configuration for later inspection.
        self.in_nc = in_nc
        self.out_nc = out_nc
        self.kernel_size = kernel_size
        self.padding = padding
        # Trainable layers.
        self.conv = nn.Conv2d(
            in_nc, out_nc,
            kernel_size=kernel_size, stride=stride, padding=padding)
        self.bn = nn.BatchNorm2d(out_nc)

    def forward(self, batch, use_bn=False, use_relu=False,
                use_maxpool=False, maxpool_kernelsize=None):
        """Apply conv, then the optional post-processing stages in order.

        in:  batch - [batch_size, in_nc, H, W]
        out: batch - [batch_size, out_nc, H', W']
        """
        out = self.conv(batch)
        if use_bn:
            out = self.bn(out)
        if use_relu:
            out = F.relu(out)
        if use_maxpool:
            assert maxpool_kernelsize is not None
            out = F.max_pool2d(out, kernel_size=maxpool_kernelsize, stride=2)
        return out
class blockRNN(nn.Module):
    """GRU building block; can sum the two directions of a bi-GRU."""

    def __init__(self, in_size, hidden_size, out_size, bidirectional, dropout=0):
        super().__init__()
        # Configuration is kept for inspection. ``dropout`` is accepted for
        # API compatibility but (as in the original) is not used.
        self.in_size = in_size
        self.hidden_size = hidden_size
        self.out_size = out_size
        self.bidirectional = bidirectional
        self.gru = nn.GRU(in_size, hidden_size, bidirectional=bidirectional)

    def forward(self, batch, add_output=False):
        """Run the GRU over a sequence.

        in:  batch - [seq_len, batch_size, in_size]
        out: out   - [seq_len, batch_size, out_size]
        When ``add_output`` is True the first and second halves of the
        feature dimension (forward/backward directions) are summed.
        """
        outputs, _hidden = self.gru(batch)
        half = int(outputs.size(2) / 2)
        if add_output:
            outputs = outputs[:, :, :half] + outputs[:, :, half:]
        return outputs
| [
"[email protected]"
] | |
3e4af5c3428191b0f79157993cb4dc07ac9263b8 | bb983b38f9be7b6fd4ab1a651484db37c1aeff39 | /1122/my_library.py | 4e67830a25ee13ef2bb56196db5322f8047d4396 | [] | no_license | nakanishi-akitaka/python2018_backup | c214df78372cca993d69f8001010ec2f6dcaf1be | 45766d3c3777de2a91b3e2cf50c6bfedca8627da | refs/heads/master | 2023-02-18T08:04:28.625532 | 2022-06-07T01:02:53 | 2022-06-07T01:02:53 | 201,399,236 | 5 | 30 | null | 2023-02-10T21:06:51 | 2019-08-09T05:48:22 | Jupyter Notebook | UTF-8 | Python | false | false | 20,812 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Aug 8 10:29:27 2018
@author: Akitaka
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn.metrics import mean_absolute_error
from sklearn.metrics import mean_squared_error
from sklearn.metrics import r2_score
from sklearn.model_selection import KFold
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import cross_val_predict
from sklearn.metrics import confusion_matrix, accuracy_score
from sklearn.neighbors import NearestNeighbors
from sklearn.svm import OneClassSVM
from scipy.spatial.distance import cdist
def print_gscv_score(gscv):
    """
    Print the best hyper-parameters found by a GridSearchCV run.

    Parameters
    ----------
    gscv :
        A fitted GridSearchCV (scikit-learn) instance.

    Returns
    -------
    None
    """
    # Emit the report line by line; blank entries produce empty lines.
    for chunk in ("Best parameters set found on development set:",
                  "",
                  gscv.best_params_,
                  "",
                  "Grid scores on development set:",
                  ""):
        print(chunk)
def print_gscv_score_rgr(gscv, X_train, X_test, y_train, y_test, cv):
    """
    print score of results of GridSearchCV (regression)

    Prints RMSE, MAE and R^2 for three evaluations:
    C   - training data (refit best model)
    CV  - cross-validated predictions on the training data
    TST - held-out test data

    Parameters
    ----------
    gscv :
        GridSearchCV (scikit-learn), already fitted
    X_train : array-like, shape = [n_samples, n_features]
        X training data
    X_test : array-like, sparse matrix, shape = [n_samples, n_features]
        X test data
    y_train : array-like, shape = [n_samples]
        y training data
    y_test : array-like, shape = [n_samples]
        y test data
    cv : int, cross-validation generator or an iterable
        ex: 3, 5, KFold(n_splits=5, shuffle=True)
    Returns
    -------
    None
    """
    # Flip to True to also show yy-plots for each evaluation.
    lgraph = False
    print()
    print("Best parameters set found on development set:")
    print(gscv.best_params_)
    # C: score on the data the best model was refit on.
    y_calc = gscv.predict(X_train)
    rmse = np.sqrt(mean_squared_error (y_train, y_calc))
    mae  = mean_absolute_error(y_train, y_calc)
    r2   = r2_score           (y_train, y_calc)
    print('C: RMSE, MAE, R^2 = {:6.3f}, {:6.3f}, {:6.3f}'\
          .format(rmse, mae, r2))
    if(lgraph):
        yyplot(y_train, y_calc)
    # CV: out-of-fold predictions on the training set.
    y_incv = cross_val_predict(gscv, X_train, y_train, cv=cv)
    rmse = np.sqrt(mean_squared_error (y_train, y_incv))
    mae  = mean_absolute_error(y_train, y_incv)
    r2   = r2_score           (y_train, y_incv)
    print('CV: RMSE, MAE, R^2 = {:6.3f}, {:6.3f}, {:6.3f}'\
          .format(rmse, mae, r2))
    if(lgraph):
        yyplot(y_train, y_incv)
    # TST: score on the held-out test set.
    y_pred = gscv.predict(X_test)
    rmse = np.sqrt(mean_squared_error (y_test, y_pred))
    mae  = mean_absolute_error(y_test, y_pred)
    r2   = r2_score           (y_test, y_pred)
    print('TST:RMSE, MAE, R^2 = {:6.3f}, {:6.3f}, {:6.3f}'\
          .format(rmse, mae, r2))
    if(lgraph):
        yyplot(y_test, y_pred)
    # y_calc = gscv.predict(X_train)
    # gscv.fit(X_train, y_train, cv=3)
    # -> split X_train, y_train & optimize hyper parameters
    # -> finally, learn with all X_train, y_train
    # C:  RMSE, MAE, R^2 = score for training data
    # CV: RMSE, MAE, R^2 = score for validation data
    #     Validation data is not used, but CV is used.
    # TST:RMSE, MAE, R^2 = score for test data
    # In dcv_rgr,
    # DCV:RMSE, MAE, R^2 = average and standard deviation of score for test data
    print()
def print_gscv_score_clf(gscv, X_train, X_test, y_train, y_test, cv):
    """
    print score of results of GridSearchCV (classification)

    Prints the binary confusion-matrix counts (TP, FP, FN, TN) and the
    accuracy for training data (C), cross-validated predictions (CV) and
    held-out test data (TST).

    Parameters
    ----------
    gscv :
        GridSearchCV (scikit-learn), already fitted
    X_train : array-like, shape = [n_samples, n_features]
        X training data
    X_test : array-like, sparse matrix, shape = [n_samples, n_features]
        X test data
    y_train : array-like, shape = [n_samples]
        y training data
    y_test : array-like, shape = [n_samples]
        y test data
    cv : int, cross-validation generator or an iterable
        ex: 3, 5, KFold(n_splits=5, shuffle=True)
    Returns
    -------
    None
    """
    print()
    print("Best parameters set found on development set:")
    print(gscv.best_params_)
    # C: refit best model evaluated on the training data.
    y_calc = gscv.predict(X_train)
    tn, fp, fn, tp = confusion_matrix(y_train, y_calc).ravel()
    print('C: TP, FP, FN, TN, Acc. = {0}, {1}, {2}, {3}, {4:.3f}'.\
          format(tp, fp, fn, tn, accuracy_score(y_train, y_calc)))
    # CV: out-of-fold predictions on the training data.
    y_incv = cross_val_predict(gscv, X_train, y_train, cv=cv)
    tn, fp, fn, tp = confusion_matrix(y_train, y_incv).ravel()
    print('CV: TP, FP, FN, TN, Acc. = {0}, {1}, {2}, {3}, {4:.3f}'.\
          format(tp, fp, fn, tn, accuracy_score(y_train, y_incv)))
    # TST: held-out test data.
    y_pred = gscv.predict(X_test)
    tn, fp, fn, tp = confusion_matrix(y_test, y_pred).ravel()
    print('TST:TP, FP, FN, TN, Acc. = {0}, {1}, {2}, {3}, {4:.3f}'.\
          format(tp, fp, fn, tn, accuracy_score(y_test, y_pred)))
    print()
def print_score_rgr(y_test, y_pred):
    """
    Print regression metrics: RMSE, MAE, RMSE/MAE and R^2.

    The RMSE/MAE ratio hints at the error distribution: it equals
    sqrt(pi/2) (~1.253) for normally distributed errors, is larger when a
    few samples dominate the error, and smaller when all samples have a
    similar error magnitude.

    Parameters
    ----------
    y_test : array-like, shape = [n_samples]
        observed target values
    y_pred : array-like, shape = [n_samples]
        predicted target values

    Returns
    -------
    None
    """
    y_test = np.asarray(y_test, dtype=float)
    y_pred = np.asarray(y_pred, dtype=float)
    error = y_pred - y_test
    rmse = np.sqrt(np.mean(error ** 2))
    mae = np.mean(np.abs(error))
    # Fix: the original recomputed the RMSE from scratch for the ratio;
    # reuse the value computed above. rmae falls back to 0 when MAE is 0
    # (perfect prediction), as before.
    rmae = rmse / mae if mae > 0 else 0.0
    # R^2 = 1 - SS_res / SS_tot (matches sklearn.r2_score for the
    # non-degenerate case; a constant y_test is reported as 0.0 here).
    ss_res = np.sum(error ** 2)
    ss_tot = np.sum((y_test - np.mean(y_test)) ** 2)
    r2 = 1.0 - ss_res / ss_tot if ss_tot > 0 else 0.0
    print('RMSE, MAE, RMSE/MAE, R^2 = {:.3f}, {:.3f}, {:.3f}, {:.3f}'\
          .format(rmse, mae, rmae, r2))
    sqrt_half_pi = np.sqrt(np.pi / 2.0)
    if rmae > sqrt_half_pi:
        print("RMSE/MAE = {:.3f} > sqrt(pi/2), some sample have large error?"\
              .format(rmae))
    elif rmae < sqrt_half_pi:
        print("RMSE/MAE = {:.3f} < sqrt(pi/2), each sample have same error?"\
              .format(rmae))
    else:
        print("RMSE/MAE = {:.3f} = sqrt(pi/2), normal distribution error?"\
              .format(rmae))
def yyplot(y_obs, y_pred):
    """
    Show a yy-plot (observed vs. predicted) plus an error histogram.

    Note: calls plt.show(), which may block in non-interactive backends.

    Parameters
    ----------
    y_obs : array-like, shape = [n_samples]
        y observed data
    y_pred : array-like, shape = [n_samples]
        y predicted data
    Returns
    -------
    Figure object
    """
    fig = plt.figure(figsize=(9,4))
    # Left panel: scatter of observed vs. predicted with the y=x line.
    plt.subplot(1,2,1)
    plt.title("yy-plot")
    plt.scatter(y_obs, y_pred)
    # Pad the axis limits by 5% of the combined data range.
    y_all = np.concatenate([y_obs, y_pred])
    ylowlim = np.amin(y_all) - 0.05 * np.ptp(y_all)
    yupplim = np.amax(y_all) + 0.05 * np.ptp(y_all)
    plt.plot([ylowlim, yupplim],
             [ylowlim, yupplim],'k-')
    plt.ylim( ylowlim, yupplim)
    plt.xlim( ylowlim, yupplim)
    plt.xlabel("y_observed")
    plt.ylabel("y_predicted")
    # Right panel: histogram of prediction errors.
    plt.subplot(1,2,2)
    error = np.array(y_pred-y_obs)
    plt.hist(error)
    plt.title("Error histogram")
    plt.xlabel('prediction error')
    plt.ylabel('Frequency')
    plt.tight_layout()
    plt.show()
    return fig
def dcv(X,y,mod,param_grid):
    """
    Double cross validation

    Outer loop estimates the generalization error; the inner loop
    (inside GridSearchCV) optimizes the hyper-parameters on each
    outer-training fold. Prints the mean and 2*std of the outer scores.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]
        X training+test data
    y : array-like, shape = [n_samples]
        y training+test data
    mod :
        machine learning model (scikit-learn)
    param_grid : dict or list of dictionaries
        Dictionary with parameters names (string) as keys and lists of
        parameter settings to try as values, or a list of such dictionaries,
        in which case the grids spanned by each dictionary in the list are
        explored.
    Returns
    -------
    None
    """
    # parameters
    ns_in = 3 # n_splits for inner loop
    ns_ou = 3 # n_splits for outer loop
    i = 1 # index of loop
    scores = np.array([]) # list of test scores in outer loop
    kf_ou = KFold(n_splits=ns_ou, shuffle=True)
    # [start] outer loop for test of the generalization error
    for train_index, test_index in kf_ou.split(X):
        X_train, X_test = X[train_index], X[test_index] # inner loop CV
        y_train, y_test = y[train_index], y[test_index] # outer loop
        # [start] inner loop CV for hyper parameter optimization
        kf_in = KFold(n_splits=ns_in, shuffle=True)
        gscv = GridSearchCV(mod, param_grid, cv=kf_in)
        gscv.fit(X_train, y_train)
        # [end] inner loop CV for hyper parameter optimization
        # test of the generalization error (score uses the refit best model)
        score = gscv.score(X_test, y_test)
        scores = np.append(scores, score)
        # print('dataset: {}/{} accuracy of inner CV: {:.3f} time: {:.3f} s'.\
        #     format(i,ns_ou,score,(time() - start)))
        i+=1
    # [end] outer loop for test of the generalization error
    print('  ave, std of accuracy of inner CV: {:.3f} (+/-{:.3f})'\
          .format(scores.mean(), scores.std()*2 ))
def dcv_rgr(X, y, model, param_grid, niter):
    """
    Double cross validation (regression).

    Repeats a full double cross validation ``niter`` times.  In every
    repetition the out-of-fold predictions from the outer loop are pooled
    and scored with RMSE, MAE and R^2; the averages and standard
    deviations over the repetitions are printed.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]
        X training+test data
    y : array-like, shape = [n_samples]
        y training+test data
    model:
        machine learning model (scikit-learn)
    param_grid : dict or list of dictionaries
        Dictionary with parameters names (string) as keys and lists of
        parameter settings to try as values, or a list of such dictionaries,
        in which case the grids spanned by each dictionary in the list are
        explored.
    niter : int
        number of DCV iteration

    Returns
    -------
    None
    """
    inner_splits = 3  # n_splits for inner loop
    outer_splits = 3  # n_splits for outer loop
    metrics = np.zeros((niter, 3))  # one (rmse, mae, r2) row per repetition
    for rep in range(niter):
        pooled_pred = np.array([])  # predictions collected over outer folds
        pooled_true = np.array([])  # matching ground-truth values
        outer_cv = KFold(n_splits=outer_splits, shuffle=True)
        # Outer loop: estimate the generalization error.
        for idx_tr, idx_te in outer_cv.split(X):
            # Inner loop: tune hyper parameters on the training split only.
            searcher = GridSearchCV(model, param_grid,
                                    cv=KFold(n_splits=inner_splits, shuffle=True))
            searcher.fit(X[idx_tr], y[idx_tr])
            # Collect out-of-fold predictions for pooled scoring.
            pooled_pred = np.append(pooled_pred, searcher.predict(X[idx_te]))
            pooled_true = np.append(pooled_true, y[idx_te])
        metrics[rep, :] = np.array([
            np.sqrt(mean_squared_error(pooled_true, pooled_pred)),
            mean_absolute_error(pooled_true, pooled_pred),
            r2_score(pooled_true, pooled_pred)])
    means, stds = np.mean(metrics, axis=0), np.std(metrics, axis=0)
    print()
    print('Double Cross Validation')
    print('In {:} iterations, average +/- standard deviation'.format(niter))
    print('DCV:RMSE, MAE, R^2 = {:6.3f}, {:6.3f}, {:6.3f} (ave)'\
    .format(means[0], means[1], means[2]))
    print('DCV:RMSE, MAE, R^2 = {:6.3f}, {:6.3f}, {:6.3f} (std)'\
    .format(stds[0], stds[1], stds[2]))
def dcv_clf(X, y, model, param_grid, niter):
    """
    Double cross validation (classification).

    Repeats a full double cross validation ``niter`` times.  In every
    repetition the out-of-fold predictions are pooled and summarized as a
    confusion matrix plus accuracy; averages and standard deviations over
    the repetitions are printed.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]
        X training+test data
    y : array-like, shape = [n_samples]
        y training+test data
    model: estimator object.
        This is assumed to implement the scikit-learn estimator interface.
    param_grid : dict or list of dictionaries
        Dictionary with parameters names (string) as keys and lists of
        parameter settings to try as values, or a list of such dictionaries,
        in which case the grids spanned by each dictionary in the list are
        explored.
    niter : int
        number of DCV iteration

    Returns
    -------
    None
    """
    inner_splits = 3  # n_splits for inner loop
    outer_splits = 3  # n_splits for outer loop
    results = np.zeros((niter, 5))  # rows: (tp, fp, fn, tn, accuracy)
    for rep in range(niter):
        pooled_pred = np.array([])  # predictions collected over outer folds
        pooled_true = np.array([])  # matching ground-truth labels
        outer_cv = KFold(n_splits=outer_splits, shuffle=True)
        # Outer loop: estimate the generalization error.
        for idx_tr, idx_te in outer_cv.split(X):
            # Inner loop: tune hyper parameters on the training split only.
            searcher = GridSearchCV(model, param_grid,
                                    cv=KFold(n_splits=inner_splits, shuffle=True))
            searcher.fit(X[idx_tr], y[idx_tr])
            pooled_pred = np.append(pooled_pred, searcher.predict(X[idx_te]))
            pooled_true = np.append(pooled_true, y[idx_te])
        tn, fp, fn, tp = confusion_matrix(pooled_true, pooled_pred).ravel()
        acc = accuracy_score(pooled_true, pooled_pred)
        results[rep, :] = np.array([tp, fp, fn, tn, acc])
    means, stds = np.mean(results, axis=0), np.std(results, axis=0)
    print()
    print('Double Cross Validation')
    print('In {:} iterations, average +/- standard deviation'.format(niter))
    print('TP DCV: {:.3f} (+/-{:.3f})'.format(means[0], stds[0]))
    print('FP DCV: {:.3f} (+/-{:.3f})'.format(means[1], stds[1]))
    print('FN DCV: {:.3f} (+/-{:.3f})'.format(means[2], stds[2]))
    print('TN DCV: {:.3f} (+/-{:.3f})'.format(means[3], stds[3]))
    print('Acc. DCV: {:.3f} (+/-{:.3f})'.format(means[4], stds[4]))
def optimize_gamma(X, gammas):
    """
    Optimize gamma by maximizing variance in Gram matrix.

    For every candidate gamma the RBF Gram matrix
    exp(-gamma * ||x_i - x_j||^2) is computed and the candidate whose
    matrix has the largest sample variance is returned.  Ties resolve to
    the first maximizing candidate, as before.

    Parameters
    ----------
    X : array-like, shape = [n_samples, n_features]
        X training+test data
    gammas : list
        list of gammas

    Returns
    -------
    real
        optimized gamma
    """
    variances = []
    for gamma in gammas:
        # Pairwise squared Euclidean distances -> RBF Gram matrix.
        gram_matrix = np.exp(-gamma*((X[:, np.newaxis] - X)**2).sum(axis=2))
        # ddof=1: unbiased sample variance, matching the original code.
        variances.append(gram_matrix.var(ddof=1))
    # np.argmax returns the first maximizing index, equivalent to the old
    # np.where(var == max)[0][0] lookup but in a single pass.
    return gammas[int(np.argmax(variances))]
def ad_knn(X_train, X_test):
    """
    Determination of Applicability Domain (k-Nearest Neighbor).

    The AD threshold is the mean distance to the k nearest training
    neighbours below which a fraction ``r_ad`` of the training samples
    fall; a test sample whose mean k-NN distance to the training set is
    under that threshold is considered inside the domain.

    Parameters
    ----------
    X_train : array-like, shape = [n_samples, n_features]
        X training data
    X_test : array-like, shape = [n_samples, n_features]
        X test data

    Returns
    -------
    array-like, shape = [n_samples]
        -1 (outer of AD) or 1 (inner of AD)
    """
    n_neighbors = 5  # number of neighbors
    r_ad = 0.9       # ratio of X_train inside AD / all X_train
    # Mean distance of each training point to its k nearest neighbours.
    # Column 0 of a self-query is the point itself (distance 0), hence
    # one extra neighbour is requested and the [:,1:] slice drops it.
    neigh = NearestNeighbors(n_neighbors=n_neighbors+1)
    neigh.fit(X_train)
    dist_list = np.mean(neigh.kneighbors(X_train)[0][:,1:], axis=1)
    dist_list.sort()
    # Threshold chosen so a fraction r_ad of the training set lies inside.
    ad_thr = dist_list[round(X_train.shape[0] * r_ad) - 1]
    # Mean distance of each test point to its k nearest training points.
    neigh = NearestNeighbors(n_neighbors=n_neighbors)
    neigh.fit(X_train)
    dist = np.mean(neigh.kneighbors(X_test)[0], axis=1)
    # Map the boolean "inside" flag to +1 / -1.  (The cdist-based
    # cross-check previously guarded by `if(False):` was unreachable dead
    # code and has been removed.)
    y_appd = 2 * (dist < ad_thr) - 1
    return y_appd
def ad_knn_list(X_train, X_test, max_neighbors):
    """
    Determination of Applicability Domain (k-Nearest Neighbor).

    Same criterion as ``ad_knn`` but evaluated for every neighbour count
    k = 1, ..., max_neighbors, with a 99.7 % training coverage ratio.

    Parameters
    ----------
    X_train : array-like, shape = [n_samples, n_features]
        X training data
    X_test : array-like, shape = [n_samples, n_features]
        X test data
    max_neighbors : maximum of neighbors

    Returns
    -------
    array-like, shape = [n_samples, max_neighbors]
        -1 (outer of AD) or 1 (inner of AD) for k=1, ..., max_neighbors
    """
    # ref
    # https://datachemeng.com/wp-content/uploads/assignment15.py
    r_ad = 0.997  # ratio of X_train inside AD / all X_train
    y_appd = np.zeros((X_test.shape[0], max_neighbors))
    for i in range(max_neighbors):
        n_neighbors = i + 1  # number of neighbors
        # kneighbors()[0] = distances, kneighbors()[1] = indices; the first
        # column of a self-query is the sample itself, so one extra
        # neighbour is requested and column 0 is dropped via [:,1:].
        neigh = NearestNeighbors(n_neighbors=n_neighbors+1)
        neigh.fit(X_train)
        dist_list = np.mean(neigh.kneighbors(X_train)[0][:,1:], axis=1)
        dist_list.sort()
        # Threshold so a fraction r_ad of the training set lies inside.
        ad_thr = dist_list[round(X_train.shape[0] * r_ad) - 1]
        neigh = NearestNeighbors(n_neighbors=n_neighbors)
        neigh.fit(X_train)
        dist = np.mean(neigh.kneighbors(X_test)[0], axis=1)
        # (The cdist-based cross-check previously guarded by `if(False):`
        # was unreachable dead code and has been removed.)
        y_appd[:, i] = 2 * (dist < ad_thr) - 1
    return y_appd
def ad_ocsvm(X_train, X_test):
    """
    Determination of Applicability Domains (One-Class Support Vector Machine).

    gamma is selected with ``optimize_gamma`` over 2^-20 ... 2^10, then a
    one-class SVM trained on X_train labels each test sample as inside
    (+1) or outside (-1) the applicability domain.

    Parameters
    ----------
    X_train : array-like, shape = [n_samples, n_features]
        X training data
    X_test : array-like, shape = [n_samples, n_features]
        X test data

    Returns
    -------
    array-like, shape = [n_samples]
        -1 (outer of AD) or 1 (inner of AD)
    """
    candidate_gammas = 2**np.arange( -20, 11, dtype=float)
    best_gamma = optimize_gamma(X_train, candidate_gammas)
    detector = OneClassSVM(nu=0.003, gamma=best_gamma)
    detector.fit(X_train)
    # predict() marks outliers with -1.
    return detector.predict(X_test)
def y_randamization_rgr(X,y,model,param_grid,niter):
    """
    y-randomization test (regression).

    Shuffles the target, refits a grid-searched model on the shuffled
    target and scores it on the same (shuffled) data, repeated ``niter``
    times.  A model that still scores well here is fitting chance
    correlations rather than real structure.
    """
    stats = np.zeros((niter, 3))  # rows: (rmse, mae, r2)
    for rep in range(niter):
        y_shuffled = np.random.permutation(y)
        searcher = GridSearchCV(model, param_grid,
                                cv=KFold(n_splits=3, shuffle=True))
        searcher.fit(X, y_shuffled)
        fitted = searcher.predict(X)
        stats[rep, :] = np.array([
            np.sqrt(mean_squared_error(y_shuffled, fitted)),
            mean_absolute_error(y_shuffled, fitted),
            r2_score(y_shuffled, fitted)])
    means, stds = np.mean(stats, axis=0), np.std(stats, axis=0)
    print()
    print("y-randomization")
    print('In {:} iterations, average +/- standard deviation'.format(niter))
    print('rnd:RMSE, MAE, R^2 = {:6.3f}, {:6.3f}, {:6.3f} (ave)'\
    .format(means[0], means[1], means[2]))
    print('rnd:RMSE, MAE, R^2 = {:6.3f}, {:6.3f}, {:6.3f} (std)'\
    .format(stds[0], stds[1], stds[2]))
    return
if __name__ == '__main__':
    # Smoke-test entry point; the real functionality lives in the functions above.
    print('Hello world')
| [
"[email protected]"
] | |
39aeb6e973594342d29c7e0c62856e9bdb055bea | e5ee4f343d9523129298e1cd989b52a142028cfe | /samples/contrib/azure-samples/databricks-pipelines/databricks_cluster_pipeline.py | 4cbd45e50b4cab9d62b7e153d21fb6660ecc37ea | [
"Apache-2.0"
] | permissive | joaoalvarenga/pipelines | 67c5b2c906134be8d4814a0851e4b60dfa4adf44 | 493c3d4e980b94a963a257247c6eb2d970b3dafa | refs/heads/master | 2020-12-08T18:22:52.433779 | 2020-01-10T05:00:34 | 2020-01-10T05:00:34 | 233,059,234 | 1 | 0 | Apache-2.0 | 2020-01-10T14:03:36 | 2020-01-10T14:03:35 | null | UTF-8 | Python | false | false | 1,780 | py | """Create a cluster in Databricks. Then submit a one-time Run to that cluster."""
import kfp.dsl as dsl
import kfp.compiler as compiler
import databricks
def create_cluster(cluster_name):
    """Return a pipeline op that creates a 2-worker Databricks cluster."""
    cluster_spec = dict(
        cluster_name=cluster_name,
        spark_version="5.3.x-scala2.11",
        node_type_id="Standard_D3_v2",
        spark_conf={"spark.speculation": "true"},
        num_workers=2,
    )
    return databricks.CreateClusterOp(name="createcluster", **cluster_spec)
def submit_run(run_name, cluster_id, parameter):
    """Return a pipeline op that submits a one-time SparkPi run to ``cluster_id``."""
    spark_task = {
        "main_class_name": "org.apache.spark.examples.SparkPi",
        "parameters": [parameter],
    }
    return databricks.SubmitRunOp(
        name="submitrun",
        run_name=run_name,
        existing_cluster_id=cluster_id,
        libraries=[{"jar": "dbfs:/docs/sparkpi.jar"}],
        spark_jar_task=spark_task,
    )
def delete_run(run_name):
    """Return a pipeline op that deletes the Databricks run called ``run_name``."""
    return databricks.DeleteRunOp(name="deleterun", run_name=run_name)
def delete_cluster(cluster_name):
    """Return a pipeline op that deletes the Databricks cluster ``cluster_name``."""
    return databricks.DeleteClusterOp(name="deletecluster",
                                      cluster_name=cluster_name)
@dsl.pipeline(
    name="DatabricksCluster",
    description="A toy pipeline that computes an approximation to pi with Azure Databricks."
)
def calc_pipeline(cluster_name="test-cluster", run_name="test-run", parameter="10"):
    """Create a cluster, run SparkPi on it, then tear the run and cluster down."""
    cluster_op = create_cluster(cluster_name)
    run_op = submit_run(run_name, cluster_op.outputs["cluster_id"], parameter)
    cleanup_run_op = delete_run(run_name)
    cleanup_run_op.after(run_op)          # delete the run only after it finishes
    cleanup_cluster_op = delete_cluster(cluster_name)
    cleanup_cluster_op.after(cleanup_run_op)  # cluster goes last
if __name__ == "__main__":
compiler.Compiler().compile(calc_pipeline, __file__ + ".tar.gz")
| [
"[email protected]"
] | |
5b46996c84345b6406c48c03241e97ddfdbd1ac8 | d066f7fe739fb78f74ec2de8ccbfefdd4270f60f | /appimagebuilder/__init__.py | 5b4148a08e2776e617d6700c316eba5fc4629b93 | [
"MIT"
] | permissive | AppImageCrafters/appimage-builder | 666e75363a74f615cdb3673b3ca9d51a6d292a49 | f38699ef3644fa5409a5a262b7b6d99d6fb85db9 | refs/heads/main | 2023-08-17T06:34:54.029664 | 2023-06-03T17:51:04 | 2023-06-03T17:51:04 | 218,847,680 | 270 | 54 | MIT | 2023-09-06T17:04:18 | 2019-10-31T19:44:17 | Python | UTF-8 | Python | false | false | 651 | py | #!/usr/bin/env python3
# Copyright 2020 Alexis Lopez Zubieta
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
| [
"[email protected]"
] | |
15d2b575651bdea86b38c0e958fcaf83eaae4442 | 760fbf0e4675212a89dbba28ef771bf7ff7a0d91 | /Leetcode2019/145. 二叉树的后序遍历.py | 06d8fc7c4c1096758d26866d0f867745ac54876d | [] | no_license | chixujohnny/Leetcode | 1a420e318005140a2be036ab7c3fcd054b4ae011 | 3faa41556f13f45a08b49d4dcd371ed590f9cb14 | refs/heads/master | 2021-06-19T14:44:28.464335 | 2021-01-11T08:16:26 | 2021-01-11T08:16:26 | 155,142,704 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 563 | py | # coding: utf-8
# Definition for a binary tree node.
# class TreeNode(object):
# def __init__(self, x):
# self.val = x
# self.left = None
# self.right = None
class Solution(object):
    def postorderTraversal(self, root):
        """
        Iterative post-order traversal (left, right, root) of a binary tree.

        :type root: TreeNode
        :rtype: List[int]
        """
        if root is None:
            return []
        stack = [root]
        reversed_post = []
        # Visit nodes in root-right-left order (push left before right so
        # right pops first), then reverse to get left-right-root.
        while stack:
            node = stack.pop()
            reversed_post.append(node.val)
            if node.left is not None:
                stack.append(node.left)
            if node.right is not None:
                stack.append(node.right)
        return reversed_post[::-1]
"[email protected]"
] | |
7dcbcaa847c475cb4d1f139f4cd8eb41abab09cb | 2f989d067213e7a1e19904d482a8f9c15590804c | /lib/python3.4/site-packages/django/contrib/contenttypes/apps.py | e5708adc99840e3b4a4bc3212d26d134a8462203 | [
"MIT"
] | permissive | levabd/smart4-portal | beb1cf8847134fdf169ab01c38eed7e874c66473 | 2c18ba593ce7e9a1e17c3559e6343a14a13ab88c | refs/heads/master | 2023-02-18T05:49:40.612697 | 2022-08-02T09:35:34 | 2022-08-02T09:35:34 | 116,001,098 | 0 | 1 | MIT | 2023-02-15T21:34:01 | 2018-01-02T10:00:07 | Roff | UTF-8 | Python | false | false | 693 | py | from django.apps import AppConfig
from django.contrib.contenttypes.checks import check_generic_foreign_keys
from django.core import checks
from django.db.models.signals import post_migrate, pre_migrate
from django.utils.translation import ugettext_lazy as _
from .management import (
inject_rename_contenttypes_operations, update_contenttypes,
)
class ContentTypesConfig(AppConfig):
name = 'django.contrib.contenttypes'
verbose_name = _("Content Types")
def ready(self):
pre_migrate.connect(inject_rename_contenttypes_operations, sender=self)
post_migrate.connect(update_contenttypes)
checks.register(check_generic_foreign_keys, checks.Tags.models)
| [
"[email protected]"
] | |
59e22f98d350ea5b45fcfb9fc47ea110043bdec0 | 9556f7e1d81a305d71a66b9768eba199e396d733 | /Thread/venv/bin/pip | 698fcef12df25482e9901a7aecb433703142f6b8 | [] | no_license | gitgaoqian/Python | 301a2823b50ec754a2c1a3f47c39ae8b0b8e6890 | 164f5271044b235d256a9bbe0a34caacf1e81fc8 | refs/heads/master | 2023-01-08T21:23:59.640828 | 2020-11-01T13:06:21 | 2020-11-01T13:06:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 394 | #!/home/ros/pycharm/MyFiles/thread/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==9.0.1','console_scripts','pip'
__requires__ = 'pip==9.0.1'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip setuptools' "-script.py"/".exe" suffix so pip sees its real program name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    # Exit with whatever the pip console-script entry point returns.
    sys.exit(
        load_entry_point('pip==9.0.1', 'console_scripts', 'pip')()
    )
| [
"[email protected]"
] | ||
61d841a540a06f74ec8d73ed015cc090e6283cd5 | 55c250525bd7198ac905b1f2f86d16a44f73e03a | /Python/Apps/Riddle Game App/temp.py | f841eb3bfa477531b1280da74ffd298624106bfc | [] | no_license | NateWeiler/Resources | 213d18ba86f7cc9d845741b8571b9e2c2c6be916 | bd4a8a82a3e83a381c97d19e5df42cbababfc66c | refs/heads/master | 2023-09-03T17:50:31.937137 | 2023-08-28T23:50:57 | 2023-08-28T23:50:57 | 267,368,545 | 2 | 1 | null | 2022-09-08T15:20:18 | 2020-05-27T16:18:17 | null | UTF-8 | Python | false | false | 127 | py | version https://git-lfs.github.com/spec/v1
oid sha256:dfddd312c5f04ffd65ec672ab6d6d80e68a8e6e69b98e472123679a87dc1e254
size 52
| [
"[email protected]"
] | |
01b2be76c7a39b05d6db36b19dba8018456848d3 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_125/ch22_2020_03_02_20_28_19_768242.py | 6c3da9295a0c587ec5b4d1b65f83ce4882ba77e4 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 143 | py | dias = int(input('quantos cigarros voce fuma por dia ? '))
anos = int(input('há quantos anos voce fuma?' ))
print ((anos*365*24*60)*dias*144) | [
"[email protected]"
] | |
dba2f8e1a2489ee8595497efbce2fbe54822fbb2 | c8a04384030c3af88a8e16de4cedc4ef8aebfae5 | /stubs/pandas/tests/indexes/test_setops.pyi | 3493fb22582660788efeadee60d6a7bc60cb4307 | [
"MIT"
] | permissive | Accern/accern-xyme | f61fce4b426262b4f67c722e563bb4297cfc4235 | 6ed6c52671d02745efabe7e6b8bdf0ad21f8762c | refs/heads/master | 2023-08-17T04:29:00.904122 | 2023-05-23T09:18:09 | 2023-05-23T09:18:09 | 226,960,272 | 3 | 2 | MIT | 2023-07-19T02:13:18 | 2019-12-09T20:21:59 | Python | UTF-8 | Python | false | false | 660 | pyi | # Stubs for pandas.tests.indexes.test_setops (Python 3)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
# pylint: disable=unused-argument,redefined-outer-name,no-self-use,invalid-name
# pylint: disable=relative-beyond-top-level
from typing import Any
# Module attribute re-declared for type checkers; the value lives in the real module.
COMPATIBLE_INCONSISTENT_PAIRS: Any
# Signature-only stubs below: the ellipsis bodies are the standard .pyi
# convention for "implementation elsewhere".
def index_pair(request: Any) -> Any:
    ...
def test_union_same_types(indices: Any) -> None:
    ...
def test_union_different_types(index_pair: Any) -> None:
    ...
def test_compatible_inconsistent_pairs(idx_fact1: Any, idx_fact2: Any) -> None:
    ...
def test_union_dtypes(left: Any, right: Any, expected: Any) -> None:
    ...
| [
"[email protected]"
] | |
cc6016e65e7b3e125b87d996e95b98ff5f2a1e52 | c17ca7a7824056f7ad58d0f71abc25670b20c1fc | /spirit/urls/admin/__init__.py | 1bc65e9f0daf9e9e944ad49a9811b0f1d6942c43 | [
"Apache-2.0"
] | permissive | Si-elegans/Web-based_GUI_Tools | cd35b72e80aa400105593c5c819355437e204a81 | 58a9b7a76bc46467554192a38ff5329a94e2b627 | refs/heads/master | 2023-01-11T09:11:21.896172 | 2017-07-18T11:10:31 | 2017-07-18T11:10:31 | 97,445,306 | 3 | 1 | Apache-2.0 | 2022-12-26T20:14:59 | 2017-07-17T07:03:13 | JavaScript | UTF-8 | Python | false | false | 536 | py | #-*- coding: utf-8 -*-
from django.conf.urls import patterns, include, url
urlpatterns = patterns('',
    # Admin dashboard landing page.
    url(r'^$', 'spirit.views.admin.index.dashboard', name='admin'),
    # Section-specific admin URL modules, one include per area.
    url(r'^index/', include('spirit.urls.admin.index')),
    url(r'^category/', include('spirit.urls.admin.category')),
    url(r'^comment/flag/', include('spirit.urls.admin.comment_flag')),
    url(r'^config/', include('spirit.urls.admin.config')),
    url(r'^topic/', include('spirit.urls.admin.topic')),
    url(r'^user/', include('spirit.urls.admin.user')),
)
"[email protected]"
] | |
1d8542da9fe05431ce0785b0c97e19b60e7aec39 | e15ec378360536d5215bf0f0a8fa9ab8a41ff6cc | /ch06/p6-3-1.py | 344f657d4d90d4170ca1d407f4091c37e6166324 | [] | no_license | michlin0825/book-mP21926 | 2ece5685ded2c913f51c830fd6f3280b8394646f | 5c4ebd828e593efd5fc7ba40bbcf606babd52640 | refs/heads/main | 2023-01-02T05:48:49.832014 | 2020-10-12T02:22:21 | 2020-10-12T02:22:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 433 | py | from selenium import webdriver
from bs4 import BeautifulSoup
import time
# Scrape the county list from the Central Weather Bureau observation page.
url = 'https://www.cwb.gov.tw/V8/C/W/OBS_County.html?ID=menu'
web = webdriver.Chrome('chromedriver.exe')
web.implicitly_wait(60)  # wait up to 60 s for elements to appear
web.get(url)
html = web.page_source  # page HTML after JavaScript has rendered
web.quit()
soup = BeautifulSoup(html, 'html.parser')
# All <option> entries of the county <select id="County"> element.
target = soup.select('#County option')
counties = list()
for item in target:
    # Collect (display name, option value) pairs.
    counties.append((item.text,item['value']))
print(counties)
"[email protected]"
] | |
0c826b71c27b17e526b9807cbca19ce674539404 | b57d337ddbe946c113b2228a0c167db787fd69a1 | /scr/py00468notices.py | c8f23c8ddfd5182e7ed018cb580fa0c212a0c0c1 | [] | no_license | aademchenko/ToEE | ebf6432a75538ae95803b61c6624e65b5cdc53a1 | dcfd5d2de48b9d9031021d9e04819b309d71c59e | refs/heads/master | 2020-04-06T13:56:27.443772 | 2018-11-14T09:35:57 | 2018-11-14T09:35:57 | 157,520,715 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,612 | py | from toee import *
from utilities import *
import _include
from co8Util.TimedEvent import *
from combat_standard_routines import *
from py00439script_daemon import get_f, set_f, get_v, set_v, tpsts, record_time_stamp
def san_use( attachee, triggerer ):
    # "Use" handler for notice objects: the attachee's prototype name
    # selects which quest/global state to update.
    # NOTE(review): game.new_sid = 0 presumably detaches this script so the
    # notice fires only once — confirm against the engine's script API.
    if (attachee.name == 11063):
        game.quests[110].state = qs_mentioned
        game.new_sid = 0
    elif (attachee.name == 11064):
        game.quests[90].state = qs_mentioned
        game.new_sid = 0
    elif (attachee.name == 11065):
        game.quests[111].state = qs_mentioned
        game.new_sid = 0
    elif (attachee.name == 11066):
        game.quests[112].state = qs_mentioned
        game.new_sid = 0
    elif (attachee.name == 11067):
        game.quests[108].state = qs_mentioned
        game.global_vars[939] = 1
        game.new_sid = 0
    elif (attachee.name == 11068):
        # Botch quest 97 and grant reputation 53, each at most once.
        if (game.quests[97].state != qs_botched):
            game.quests[97].state = qs_botched
        if (game.party[0].reputation_has(53) == 0):
            game.party[0].reputation_add( 53 )
        game.global_vars[510] = 2
        game.global_flags[504] = 1
        game.new_sid = 0
    elif (attachee.name == 11069):
        # Deduct 10000 money units from the user and consume the object.
        # NOTE(review): denomination (copper vs gold) not visible here.
        triggerer.money_adj(-10000)
        attachee.destroy()
    elif (attachee.name == 11070):
        game.quests[106].state = qs_mentioned
        game.new_sid = 0
    elif (attachee.name == 11071):
        game.quests[95].state = qs_completed
        game.new_sid = 0
    elif (attachee.name == 11072):
        game.quests[105].state = qs_mentioned
        set_bethany()
        game.new_sid = 0
    elif (attachee.name == 11073):
        game.quests[105].state = qs_mentioned
        set_bethany()
        game.new_sid = 0
    return RUN_DEFAULT
def set_bethany():
    # Queue random encounter 3447 and record that it has been scheduled
    # (the 's_bethany_scheduled' flag is read elsewhere — not visible here).
    game.encounter_queue.append(3447)
    set_f('s_bethany_scheduled')
    return RUN_DEFAULT
"[email protected]"
] | |
4981a7e806be3173914d83131e900e93b70cefac | c89f5856fe74cff49a9d96dde9ed0117109e3845 | /A-bit-of-py/exceptions_raise.py | 18c685701366068a4acd2f51c1194d541cbb8930 | [] | no_license | sinoclover/python | b2b3f435d15840ec16a34c62d50308bdfb9d6c3e | 02f5347bc8219f1df52486077adf0017fe6d5211 | refs/heads/master | 2020-08-27T22:14:20.672846 | 2019-12-01T13:27:42 | 2019-12-01T13:27:42 | 158,791,898 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 647 | py | # encoding=UTF-8
class ShortInputException(Exception):
    """A user-defined exception raised when input is shorter than required.

    Attributes:
        length: actual length of the input received.
        atleast: minimum length that was expected.
    """
    def __init__(self, length, atleast):
        # super(...) replaces the legacy explicit Exception.__init__(self)
        # call; no message args are passed, so str() output is unchanged.
        super(ShortInputException, self).__init__()
        self.length = length
        self.atleast = atleast
# Demonstrates raising and catching the user-defined exception above.
try:
    text = input('Enter something --> ')
    if len(text) < 3:
        raise ShortInputException(len(text), 3)
    # Other work can continue normally at this point.
except EOFError:
    # Raised when the user ends input (Ctrl-D / Ctrl-Z).
    print('Why did you do an EOF on me?')
except ShortInputException as ex:
    print(('ShortInputException: The input was {0} long, expected at least {1}').format(ex.length, ex.atleast))
else:
    # Runs only when the try block raised nothing.
    print('No exception was raised')
"[email protected]"
] | |
4fc31a6ff5a7263512d12b5b20ad20f35c45dff3 | 2c510687bdc03fbb8383130e68cc796bfef1088c | /3_basic_ds/exercises.py | 328dff7335a429dab792cb7b5a6f8adc62aeda37 | [] | no_license | jimjshields/pswads | 59758a0972fe71ca6f77305ff8ab86673d9b5d46 | 9568622805e24416f4a227cbecc1ef4927fa7ba3 | refs/heads/master | 2016-09-06T12:37:53.254464 | 2015-02-17T02:13:00 | 2015-02-17T02:13:00 | 30,149,564 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,772 | py | # Chapter 3 Programming Exercises
# Skip all pre-/post-/infix questions; not worth the time.
# Also skip any 'experiment' questions. Maybe come back to them.
# 5. Implement the Queue ADT, using a list such that the rear of the queue is at the end of the list.
class Queue(object):
    """Queue ADT backed by a Python list; the list's tail is the queue's rear.

    Supports enqueue, dequeue, size and is_empty.
    """

    def __init__(self):
        """Create an empty queue."""
        self.items = []

    def enqueue(self, item):
        """Append ``item`` at the rear (end of the underlying list)."""
        self.items.append(item)

    def dequeue(self):
        """Remove and return the front item (index 0 of the list)."""
        return self.items.pop(0)

    def size(self):
        """Return how many items are queued."""
        return len(self.items)

    def is_empty(self):
        """Return True when no items are queued."""
        return len(self.items) == 0
# q.enqueue(1)
# q.enqueue(2)
# q.enqueue(3)
# q.enqueue(4)
# q.enqueue(5)
# print q.items
# print q.dequeue()
# print q.dequeue()
# print q.dequeue()
# print q.dequeue()
# print q.dequeue()
# print q.is_empty()
# 7. It is possible to implement a queue such that both enqueue and dequeue have O(1) performance on average. In this case it means that most of the time enqueue and dequeue will be O(1) except in one particular circumstance where dequeue will be O(n).
class Queue_2(object):
    """Represents a queue ADT with O(1) enqueue and dequeue time on average."""
    def __init__(self):
        """Initializes an empty queue with a list.
        Also initializes the dequeue variable for O(1) access time."""
        self.items = []
        # Cached front element for O(1) reads; '' marks "nothing cached".
        self.to_be_dequeued = ''
    def enqueue(self, item):
        # NOTE(review): the class appears truncated here (no dequeue method
        # is visible in this excerpt); confirm against the full file.
        self.items.append(item)
        self.to_be_dequeued = self.items[0]
"[email protected]"
] | |
e103e52c8d1db2ceda089eb62bb3d134391fee80 | 102a33464fd3a16ceedd134e9c64fea554ca5273 | /apps/config/models.py | 39260020ed66b83fc8e77d4bf08ecae9ee053a6b | [] | no_license | pythonguru101/django-ecommerce | b688bbe2b1a53c906aa80f86f764cf9787e6c2fe | f94de9c21223716db5ffcb86ba87219da88d2ff4 | refs/heads/master | 2020-07-24T14:57:02.047702 | 2020-06-10T06:06:23 | 2020-06-10T06:06:23 | 207,961,132 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,658 | py | # encoding: utf-8
from django.db import models
from django.utils.translation import ugettext as _
class ConfigAbstractManager(models.Manager):
    # Manager that always serves the singleton config row (pk=1).
    def get_config(self):
        """Return the singleton config instance (pk=1), or {} when missing.

        NOTE(review): the empty-dict fallback means callers receive either
        a model instance or a plain dict; template-style lookups tolerate
        both, attribute access does not — confirm call sites before relying
        on attributes of the result.
        """
        try:
            return self.get(pk=1)
        except self.model.DoesNotExist:
            return {}
class ConfigAbstract(models.Model):
    """Abstract base model holding site-wide settings as a single row (pk=1)."""
    # Free-form text shown at the bottom of the main page.
    text_main_bot = models.TextField(_(u'текст на главной внизу'), blank=True)
    # Contact details displayed on the site.
    phone = models.CharField(_(u'номер телефона'), max_length=32, blank=True)
    email = models.EmailField(_(u'email'), blank=True)
    # SEO / page-head fields.
    title_page = models.CharField(_(u'заголовок страницы'), max_length=140,
        blank=True)
    meta_keywords = models.CharField(_(u'meta keywords'), max_length=200,
        blank=True)
    meta_description = models.TextField(_(u'meta description'), blank=True)
    yandex_verification = models.CharField(_(u'Yandex Verification'),
        max_length=100, blank=True)
    # Fields for the YML (Yandex Market) export feed; note these are required.
    yml_name = models.CharField(_(u'YML: name'), max_length=250)
    yml_email = models.EmailField(_(u'YML: email'))
    yml_company = models.CharField(_(u'YML: company'), max_length=250)
    objects = ConfigAbstractManager()
    class Meta:
        abstract = True
        verbose_name = _(u'настройки')
        verbose_name_plural = _(u'настройки')
    def __unicode__(self):
        return u'настройки'
    def save(self, *args, **kwargs):
        # Force pk=1 so the table can only ever contain one settings row.
        self.pk = 1
        return super(ConfigAbstract, self).save(*args, **kwargs)
class ConfigManagerManager(models.Manager):
    def get_emails(self):
        """Return the e-mail addresses of all managers as a plain list."""
        return [m['email'] for m in self.values('email')]
class Config(ConfigAbstract):
    """Concrete site configuration; adds blog and third-party integration fields."""
    # Blog page title.
    title_blog = models.CharField(_(u'заголовок блога'), max_length=140,
        blank=True)
    # Third-party integration identifiers.
    facebook_app_id = models.CharField(_(u'FaceBook App ID'), max_length=100,
        blank=True)
    afrek_id = models.CharField(_(u'Партнёрка afrek.ru'), max_length=100,
        blank=True)
class ConfigManager(models.Model):
    """A site manager (name + e-mail) attached to the Config singleton."""
    config = models.ForeignKey(Config,
        verbose_name=_(u'менеджер'), on_delete=models.CASCADE)
    name = models.CharField(_(u'имя'), max_length=100)
    email = models.EmailField(_(u'email'))
    objects = ConfigManagerManager()
    class Meta:
        verbose_name = _(u'менеджер')
        verbose_name_plural = _(u'менеджеры')
    def __unicode__(self):
        # "Name <email>" format, as used in e-mail headers.
        return "%s <%s>" % (self.name, self.email)
| [
"[email protected]"
] | |
fa27bce37bd4a31c0734171d2f4b56a9949bad56 | d52413173437ba73ecdf822ca895e659f00a8ce7 | /kiwibackend/application/module/artifact/migrations/0003_auto_20151209_1606.py | 66ae6b692b0125f17dbbc5170a7fce62dbc9ac44 | [] | no_license | whiteprism/mywork | 2329b3459c967c079d6185c5acabd6df80cab8ea | a8e568e89744ca7acbc59e4744aff2a0756d7252 | refs/heads/master | 2021-01-21T11:15:49.090408 | 2017-03-31T03:28:13 | 2017-03-31T03:28:13 | 83,540,646 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 946 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
    # Auto-generated migration: adds three CharFields to the artifact model
    # (verbose names are stored as escaped unicode by makemigrations).
    dependencies = [
        ('artifact', '0002_auto_20150914_0908'),
    ]
    operations = [
        migrations.AddField(
            model_name='artifact',
            name='heroTypeList_int',
            field=models.CharField(default=b'', max_length=200, verbose_name='\u88c5\u5907\u7684\u82f1\u96c4\u7684\u7c7b\u578b'),
        ),
        migrations.AddField(
            model_name='artifact',
            name='searchDifficuty_int',
            field=models.CharField(default=b'', max_length=200, verbose_name='\u6389\u843d\u5173\u5361\u96be\u5ea6'),
        ),
        migrations.AddField(
            model_name='artifact',
            name='searchInstances_int',
            field=models.CharField(default=b'', max_length=200, verbose_name='\u6389\u843d\u5173\u5361'),
        ),
    ]
| [
"[email protected]"
] | |
cb1b755acd76f9db92cf7cb4a054a194126f2c56 | 2cf87feeebfe128d6c60067e82e5b28b3a84ae45 | /aracle/data/make3dslices.py | a5b16c308b96da0c083019c4adf28e64496bd654 | [
"MIT"
] | permissive | jiwoncpark/aracle | b536fbea39480b7af96daff1a9c51d2a7f131866 | 20aabe27ce65b738b77192242dc89eda612f945e | refs/heads/master | 2020-06-03T15:21:35.386628 | 2019-11-12T17:49:34 | 2019-11-12T17:49:34 | 191,626,657 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 4,320 | py | import drms #pip install drms, astropy, sunpy , skvideo
import numpy as np
import astropy.units as u
import shutil
import os
import datetime
import matplotlib.pyplot as plt
import skvideo.io
from astropy.io import fits
from matplotlib.pyplot import imshow
from PIL import Image
from sunpy.map import Map
from datetime import date, time, datetime, timedelta
# --- User configuration for the HMI magnetogram download -------------------
workdir = 'C:/Users/alexf/Desktop/HMI_Data/'
fits_dir = workdir + 'fits/'
if not os.path.exists(workdir):
    os.mkdir(workdir)
    print("Directory " + workdir + "does not exist. Creating...")
start = datetime(2010,5,1,1,0,0)#date time object format is year, month, day, hour, minute, second
end = datetime(2018,5,1,0,0,0)
time_interval = timedelta(minutes = 60) #timedelta will accept weeks,days,hours,minutes and seconds as input
chunk_size = 480 #chunk size is the number of hmi files downloaded in each export call. must be at least 1
export_protocol = 'fits'#using as-is instead of fits will result in important metadata not being downloaded
email = '[email protected]'#use a group email
series = 'hmi.M_720s'
if (end < start):
    print("The end date is before the start date. Please select an end date after the start date")
    #sys.exit()
if not os.path.exists(fits_dir):
    os.mkdir(fits_dir)
    print("Directory " + fits_dir + "does not exist. Creating...")
# drms client used by download() below; verbose prints export progress.
c = drms.Client(email=email, verbose = True)
# Expected file count over the requested range at the chosen cadence.
total = (end-start) // time_interval + 1
print('Downloading ' + str(total) + ' files')
# Timestamps of files that could not be downloaded; filled by download().
missing_files = []
def download(start, end, chunk_size, time_interval):
    """Export and download `series` files from JSOC between `start` and `end`.

    Frames are requested at `time_interval` cadence, `chunk_size` frames per
    export call.  When an export call fails (typically because files are
    missing on the server), the failing range is bisected and retried with
    half the chunk size; once a failing range is down to a single frame its
    timestring is appended to the module-level `missing_files` list.

    Relies on module globals: `c` (drms client), `series`,
    `export_protocol`, `fits_dir`, `missing_files`.
    """
    time_fmt = '%Y.%m.%d_%X'  # same output as the original piecewise concatenation
    current_time = start
    while current_time < end:
        # Clamp the chunk so the request never extends past `end`.
        if end - current_time > time_interval * chunk_size:
            time_chunk = time_interval * chunk_size
        else:
            time_chunk = end - current_time
        end_time = current_time + time_chunk
        current_timestring = current_time.strftime(time_fmt) + '_UT'
        end_timestring = end_time.strftime(time_fmt) + '_UT'
        # JSOC query syntax: series[start-end@cadence_in_seconds s]
        query = series + '[' + current_timestring + '-' + end_timestring + '@' + str(time_interval.total_seconds()) + 's]'
        print('Query string: ' + query)
        try:
            r = c.export(query, protocol=export_protocol)
            r.download(fits_dir)
            # A file that vanished server-side is delivered as an empty '.1'
            # file; delete it and treat this chunk as failed.
            if os.path.isfile(fits_dir + '.1'):
                os.remove(fits_dir + '.1')
                raise ValueError('Fits file no longer exists. Deleting downloaded file...')
        except Exception:
            # Was a bare `except:`, which also swallowed KeyboardInterrupt /
            # SystemExit and made this long-running loop uninterruptible.
            if chunk_size == 1:
                missing_files.append(current_timestring)
            else:
                # Bisect the failing range to isolate the missing frame(s).
                download(current_time, end_time, chunk_size // 2, time_interval)
        current_time = end_time
# Kick off the full download, report frames that could not be exported,
# then sweep out any leftover '.1' duplicate/empty files.
download(start, end, chunk_size, time_interval)
print(missing_files)
for entry in os.listdir(fits_dir):
    if entry.endswith(".1"):
        os.remove(os.path.join(fits_dir, entry))
# ---- Post-processing: convert downloaded FITS files into .npy arrays ----
Xdata_dir = workdir + 'Xdata/'
if not os.path.exists(Xdata_dir):
    os.mkdir(Xdata_dir)
    print("Directory " + Xdata_dir + "does not exist. Creating...")

fits_filenames = os.listdir(fits_dir)
resizing = [256]  # output resolutions (pixels per side)

# Recreate one clean output directory per requested resolution.
for resize in resizing:
    resize_dir = Xdata_dir + str(resize)
    if os.path.exists(resize_dir):
        shutil.rmtree(resize_dir)
    os.makedirs(resize_dir)

# Load each FITS file, rotate it 180 degrees (as two 90-degree steps,
# matching the original), zero out NaN pixels, and save one resized
# .npy per requested resolution.
for filename in fits_filenames:
    hmi_map = Map(fits_dir + filename)
    rotated = hmi_map.rotate(angle=90*u.deg, order = 0).rotate(angle=90*u.deg, order = 0)
    data = rotated.data
    data[np.isnan(data)] = 0.0  # NaNs become zeros
    print('saving '+filename +' in sizes'+ str(resizing))
    for resize in resizing:
        resized_image = np.array(Image.fromarray(data).resize((resize,resize),Image.LANCZOS))
        # filename[:26] -- presumably the series+timestamp prefix of the
        # FITS name (original comment: "saves series,time,and resize").
        np.save(Xdata_dir + str(resize) + '/' + filename[:26] + '_'+ str(resize), resized_image)
"[email protected]"
] | |
97176f4b2cf2a2706ba0989eee781b449a4cf6b0 | 14cdc1353affd01ec9f96c31cd51549d82364b2c | /test/IECore/OptionsTest.py | f257f594de42cc75781eb2db60bfa267e5f96a44 | [] | no_license | dsparrow27/cortex | f787cdcc271388986cd24ee27b48999ae71ef194 | 5e985efa860aec22a0c8ec6cebf9e682f65eca73 | refs/heads/master | 2021-08-19T06:30:36.881268 | 2017-11-23T08:26:13 | 2017-11-23T08:26:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,627 | py | ##########################################################################
#
# Copyright (c) 2012, John Haddon. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# * Neither the name of Image Engine Design nor the names of any
# other contributors to this software may be used to endorse or
# promote products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO,
# THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
##########################################################################
import unittest
import IECore
class OptionsTest(unittest.TestCase):
    """Unit tests for IECore.Options."""

    def testCopy(self):
        """A copy must compare equal to the original."""
        original = IECore.Options()
        original.options["test"] = IECore.FloatData(10)
        duplicate = original.copy()
        self.assertEqual(original, duplicate)

    def testConstructFromDict(self):
        """Constructing from a dict must populate .options verbatim."""
        options = IECore.Options({
            "a": IECore.StringData("a"),
            "b": IECore.IntData(10),
        })
        self.assertEqual(len(options.options), 2)
        self.assertEqual(options.options["a"], IECore.StringData("a"))
        self.assertEqual(options.options["b"], IECore.IntData(10))

    def testHash(self):
        """Hashes agree exactly when the option sets agree."""
        first = IECore.Options()
        second = IECore.Options()
        self.assertEqual(first.hash(), second.hash())
        first.options["a"] = IECore.StringData("a")
        self.assertNotEqual(first.hash(), second.hash())
        second.options["a"] = IECore.StringData("a")
        self.assertEqual(first.hash(), second.hash())
# Run the suite when this file is executed directly.
if __name__ == "__main__":
    unittest.main()
| [
"[email protected]"
] | |
49716fffe1d3d747365e83abe99f48f2a62222a2 | be5c86e8fe3f5836b7d2097dd5272c72b5b28f15 | /binary-search/Python/0374-guess-number-higher-or-lower.py | bca7b49cffd223314f15f7da64b8be501150cf07 | [
"Apache-2.0"
] | permissive | lemonnader/LeetCode-Solution-Well-Formed | d24674898ceb5441c036016dc30afc58e4a1247a | baabdb1990fd49ab82a712e121f49c4f68b29459 | refs/heads/master | 2021-04-23T18:49:40.337569 | 2020-03-24T04:50:27 | 2020-03-24T04:50:27 | 249,972,064 | 1 | 0 | Apache-2.0 | 2020-03-25T12:26:25 | 2020-03-25T12:26:24 | null | UTF-8 | Python | false | false | 523 | py | # The guess API is already defined for you.
# @param num, your guess
# @return -1 if my number is lower, 1 if my number is higher, otherwise return 0
def guess(num):
    """Judge API stub -- the platform injects the real implementation.

    Contract (from the comments above): returns -1 if the picked number is
    lower than `num`, 1 if it is higher, 0 if `num` is correct.  This stub
    body just returns None.
    """
    pass

class Solution(object):
    def guessNumber(self, n):
        """Binary-search for the picked number in [1, n].

        Invariant: the answer always lies in [lo, hi]; when the interval
        collapses to one value, that value is the answer, so no
        post-processing is needed after the loop.
        """
        lo, hi = 1, n
        while lo < hi:
            mid = (lo + hi) // 2
            if guess(mid) == 1:
                # Picked number is strictly above mid.
                lo = mid + 1
            else:
                # guess(mid) is 0 or -1: answer is mid or below.
                hi = mid
        return lo
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.