ext | sha | content
---|---|---|
py | b402b0fb0c01a47c93347563499ba75952b5b88d | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
from ._enums import *
from ._inputs import *
__all__ = ['StorageAccountCredential']
class StorageAccountCredential(pulumi.CustomResource):
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_key: Optional[pulumi.Input[pulumi.InputType['AsymmetricEncryptedSecretArgs']]] = None,
account_type: Optional[pulumi.Input[Union[str, 'AccountType']]] = None,
alias: Optional[pulumi.Input[str]] = None,
blob_domain_name: Optional[pulumi.Input[str]] = None,
connection_string: Optional[pulumi.Input[str]] = None,
device_name: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
ssl_status: Optional[pulumi.Input[Union[str, 'SSLStatus']]] = None,
storage_account_id: Optional[pulumi.Input[str]] = None,
user_name: Optional[pulumi.Input[str]] = None,
__props__=None,
__name__=None,
__opts__=None):
"""
The storage account credential.
API Version: 2020-12-01.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[pulumi.InputType['AsymmetricEncryptedSecretArgs']] account_key: Encrypted storage key.
:param pulumi.Input[Union[str, 'AccountType']] account_type: Type of storage accessed on the storage account.
:param pulumi.Input[str] alias: Alias for the storage account.
:param pulumi.Input[str] blob_domain_name: Blob end point for private clouds.
:param pulumi.Input[str] connection_string: Connection string for the storage account. Use this string if username and account key are not specified.
:param pulumi.Input[str] device_name: The device name.
:param pulumi.Input[str] name: The storage account credential name.
:param pulumi.Input[str] resource_group_name: The resource group name.
:param pulumi.Input[Union[str, 'SSLStatus']] ssl_status: Signifies whether SSL needs to be enabled or not.
:param pulumi.Input[str] storage_account_id: Id of the storage account.
:param pulumi.Input[str] user_name: Username for the storage account.
"""
if __name__ is not None:
warnings.warn("explicit use of __name__ is deprecated", DeprecationWarning)
resource_name = __name__
if __opts__ is not None:
warnings.warn("explicit use of __opts__ is deprecated, use 'opts' instead", DeprecationWarning)
opts = __opts__
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = dict()
__props__['account_key'] = account_key
if account_type is None and not opts.urn:
raise TypeError("Missing required property 'account_type'")
__props__['account_type'] = account_type
if alias is None and not opts.urn:
raise TypeError("Missing required property 'alias'")
__props__['alias'] = alias
__props__['blob_domain_name'] = blob_domain_name
__props__['connection_string'] = connection_string
if device_name is None and not opts.urn:
raise TypeError("Missing required property 'device_name'")
__props__['device_name'] = device_name
__props__['name'] = name
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__['resource_group_name'] = resource_group_name
if ssl_status is None and not opts.urn:
raise TypeError("Missing required property 'ssl_status'")
__props__['ssl_status'] = ssl_status
__props__['storage_account_id'] = storage_account_id
__props__['user_name'] = user_name
__props__['system_data'] = None
__props__['type'] = None
alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-nextgen:databoxedge:StorageAccountCredential"), pulumi.Alias(type_="azure-native:databoxedge/latest:StorageAccountCredential"), pulumi.Alias(type_="azure-nextgen:databoxedge/latest:StorageAccountCredential"), pulumi.Alias(type_="azure-native:databoxedge/v20190301:StorageAccountCredential"), pulumi.Alias(type_="azure-nextgen:databoxedge/v20190301:StorageAccountCredential"), pulumi.Alias(type_="azure-native:databoxedge/v20190701:StorageAccountCredential"), pulumi.Alias(type_="azure-nextgen:databoxedge/v20190701:StorageAccountCredential"), pulumi.Alias(type_="azure-native:databoxedge/v20190801:StorageAccountCredential"), pulumi.Alias(type_="azure-nextgen:databoxedge/v20190801:StorageAccountCredential"), pulumi.Alias(type_="azure-native:databoxedge/v20200501preview:StorageAccountCredential"), pulumi.Alias(type_="azure-nextgen:databoxedge/v20200501preview:StorageAccountCredential"), pulumi.Alias(type_="azure-native:databoxedge/v20200901:StorageAccountCredential"), pulumi.Alias(type_="azure-nextgen:databoxedge/v20200901:StorageAccountCredential"), pulumi.Alias(type_="azure-native:databoxedge/v20200901preview:StorageAccountCredential"), pulumi.Alias(type_="azure-nextgen:databoxedge/v20200901preview:StorageAccountCredential"), pulumi.Alias(type_="azure-native:databoxedge/v20201201:StorageAccountCredential"), pulumi.Alias(type_="azure-nextgen:databoxedge/v20201201:StorageAccountCredential")])
opts = pulumi.ResourceOptions.merge(opts, alias_opts)
super(StorageAccountCredential, __self__).__init__(
'azure-native:databoxedge:StorageAccountCredential',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'StorageAccountCredential':
"""
Get an existing StorageAccountCredential resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = dict()
__props__["account_key"] = None
__props__["account_type"] = None
__props__["alias"] = None
__props__["blob_domain_name"] = None
__props__["connection_string"] = None
__props__["name"] = None
__props__["ssl_status"] = None
__props__["storage_account_id"] = None
__props__["system_data"] = None
__props__["type"] = None
__props__["user_name"] = None
return StorageAccountCredential(resource_name, opts=opts, __props__=__props__)
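# Illustrative only: an existing credential could be adopted into Pulumi state via
#   StorageAccountCredential.get("imported-credential", id=existing_resource_id)
# where "imported-credential" and existing_resource_id are hypothetical placeholder values.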
@property
@pulumi.getter(name="accountKey")
def account_key(self) -> pulumi.Output[Optional['outputs.AsymmetricEncryptedSecretResponse']]:
"""
Encrypted storage key.
"""
return pulumi.get(self, "account_key")
@property
@pulumi.getter(name="accountType")
def account_type(self) -> pulumi.Output[str]:
"""
Type of storage accessed on the storage account.
"""
return pulumi.get(self, "account_type")
@property
@pulumi.getter
def alias(self) -> pulumi.Output[str]:
"""
Alias for the storage account.
"""
return pulumi.get(self, "alias")
@property
@pulumi.getter(name="blobDomainName")
def blob_domain_name(self) -> pulumi.Output[Optional[str]]:
"""
Blob end point for private clouds.
"""
return pulumi.get(self, "blob_domain_name")
@property
@pulumi.getter(name="connectionString")
def connection_string(self) -> pulumi.Output[Optional[str]]:
"""
Connection string for the storage account. Use this string if username and account key are not specified.
"""
return pulumi.get(self, "connection_string")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
The object name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="sslStatus")
def ssl_status(self) -> pulumi.Output[str]:
"""
Signifies whether SSL needs to be enabled or not.
"""
return pulumi.get(self, "ssl_status")
@property
@pulumi.getter(name="storageAccountId")
def storage_account_id(self) -> pulumi.Output[Optional[str]]:
"""
Id of the storage account.
"""
return pulumi.get(self, "storage_account_id")
@property
@pulumi.getter(name="systemData")
def system_data(self) -> pulumi.Output['outputs.SystemDataResponse']:
"""
Metadata pertaining to the creation and last modification of the StorageAccountCredential.
"""
return pulumi.get(self, "system_data")
@property
@pulumi.getter
def type(self) -> pulumi.Output[str]:
"""
The hierarchical type of the object.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter(name="userName")
def user_name(self) -> pulumi.Output[Optional[str]]:
"""
Username for the storage account.
"""
return pulumi.get(self, "user_name")
def translate_output_property(self, prop):
return _tables.CAMEL_TO_SNAKE_CASE_TABLE.get(prop) or prop
def translate_input_property(self, prop):
return _tables.SNAKE_TO_CAMEL_CASE_TABLE.get(prop) or prop
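# --- Usage sketch (illustrative only, not part of the generated SDK) ---
# A minimal Pulumi program creating this resource; the resource name and all
# argument values below are hypothetical placeholders.
#
#   import pulumi
#   import pulumi_azure_native as azure_native
#
#   credential = azure_native.databoxedge.StorageAccountCredential(
#       "example-credential",
#       account_type="GeneralPurposeStorage",
#       alias="examplestorageaccount",
#       device_name="example-device",
#       resource_group_name="example-rg",
#       ssl_status="Enabled")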
|
py | b402b0fbe8c9d35b784e966c25d196d6ebf449ff | from flask import Flask, render_template, request, redirect
from datetime import datetime
import random
app = Flask(__name__)
@app.route('/')
def forms():
return render_template('forms.html')
@app.route('/', methods=['POST'])
def my_form():
text = request.form['text']
print(f'Got data: "{text}"')
return redirect('/')
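# Illustrative only: with the app running locally on the default port 5000, the POST
# handler above can be exercised without the HTML form, e.g.:
#   curl -X POST -d "text=hello" http://127.0.0.1:5000/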
if __name__ == '__main__':
# Start application on default port 5000
app.run() |
py | b402b220ed3d3d076eede4ed0aa113ea214a34f3 | """JobControl
Revision ID: f2d512a7ccdd
Revises: 9e71ecc2708a
Create Date: 2021-06-01 15:32:32.996776
"""
# revision identifiers, used by Alembic.
revision = 'f2d512a7ccdd'
down_revision = '9e71ecc2708a'
from alembic import op
import sqlalchemy as sa
import sqlalchemy_utils
import app
import app.extensions
def upgrade():
"""
Upgrade Semantic Description:
Add the job_control table and a JSON 'jobs' column to the annotation table.
"""
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('job_control',
sa.Column('created', sa.DateTime(), nullable=False),
sa.Column('updated', sa.DateTime(), nullable=False),
sa.Column('viewed', sa.DateTime(), nullable=False),
sa.Column('guid', app.extensions.GUID(), nullable=False),
sa.Column('asset_group_sighting_uuid', app.extensions.GUID(), nullable=True),
sa.Column('annotation_uuid', app.extensions.GUID(), nullable=True),
sa.PrimaryKeyConstraint('guid', name=op.f('pk_job_control'))
)
with op.batch_alter_table('annotation', schema=None) as batch_op:
batch_op.add_column(sa.Column('jobs', sa.JSON(), nullable=True))
# ### end Alembic commands ###
def downgrade():
"""
Downgrade Semantic Description:
Drop the 'jobs' column from the annotation table and remove the job_control table.
"""
# ### commands auto generated by Alembic - please adjust! ###
with op.batch_alter_table('annotation', schema=None) as batch_op:
batch_op.drop_column('jobs')
op.drop_table('job_control')
# ### end Alembic commands ###
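# Illustrative only, assuming a standard Alembic setup: this migration would be
# applied with `alembic upgrade f2d512a7ccdd` and reverted with
# `alembic downgrade 9e71ecc2708a`.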
|
py | b402b23a57d926d0b6a3c42fcd5c4304d72a19f1 | from django.test import TestCase
from tests.utils import TestConfiguration
from core.models import RecordGroup, Job, Record
class OrganizationModelTestCase(TestCase):
def setUp(self) -> None:
self.config = TestConfiguration()
def test_all_jobs(self):
other_rg = RecordGroup.objects.create(organization=self.config.org,
name="Other Record Group")
Job.objects.create(record_group=other_rg,
user=self.config.user,
job_type='MergeJob',
job_details='{"test_key": "test value"}',
name="Other Job")
all_jobs = self.config.org.all_jobs()
self.assertEqual(len(all_jobs), 3)
assert(all_jobs[0].id < all_jobs[1].id)
assert(all_jobs[1].id < all_jobs[2].id)
def test_str(self):
self.assertEqual(str(self.config.org), "Organization: Test Organization")
def test_total_record_count(self):
self.assertEqual(self.config.org.total_record_count(), 1)
Record.objects.create(job_id=self.config.job.id,
record_id='testrecord2',
document='test document2')
self.assertEqual(self.config.org.total_record_count(), 1)
self.config.job.update_record_count()
self.assertEqual(self.config.org.total_record_count(), 2)
|
py | b402b3b5fddb6fa626e7de597ce6e8dc8c712326 | #!/usr/bin/env python3
import time
import unittest
from tests import UserscriptsTC
RELEASE_MBID = '61167171-2556-45a6-85c6-e08e33fd7e5f'
RECORDING_MBID = '4b86f0bf-be35-4c78-a554-80f795c020aa'
class EditUserscriptsTC(UserscriptsTC):
def test_script_seed_event(self):
self.login('recording', RECORDING_MBID)
self.load_userscript('mb-edit-seed_event_from_recording.user.js')
self.driver.find_element_by_id('createConcert').click()
time.sleep(8)
self.driver.switch_to.window(self.driver.window_handles[-1])
assert 'event/create' in self.driver.current_url
assert self.driver.find_element_by_id('id-edit-event.setlist').text
assert 'held at:' in self.driver.page_source
assert 'main performers:' in self.driver.page_source
assert 'recording location for:' in self.driver.page_source
def test_script_set_video(self):
self.login('release', RELEASE_MBID)
self.load_userscript('mb-edit-set_video_recordings.user.js')
self.driver.find_element_by_id('video_script_toggle').click()
time.sleep(2)
assert len(self.driver.find_elements_by_class_name('replacevideo')) > 1
self.driver.find_element_by_id('video-b8557e27-38d2-41de-a7f4-f970c11a4ba5').click()
# self.driver.set_network_conditions(offline=True, latency=1000, throughput=500 * 1024)
# self.driver.find_element_by_id('batch_video').click()
# time.sleep(1)
# assert 'Fetching required data' in self.driver.page_source
if __name__ == "__main__":
unittest.main()
|
py | b402b45a372cd0d2683534c7bb46b388e9ea2896 | import tinder
class V3Exception(tinder.APIException):
pass
URL = tinder.URL / 'v3' |
py | b402b539d978f14596a88a445a30b66b55f8eda1 | from django.urls import path
from . import views
urlpatterns = [
path("", views.index, name='index'),
] |
py | b402b6c30bd40cf69e25b007c5d9719fdf7d4842 | from rest_framework.decorators import api_view
from rest_framework.response import Response
@api_view(['GET'])
def its_alive(request):
return Response({'message': 'its_alive'})
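# Illustrative only: once this view is routed (the URL path is not shown here),
# a GET request to it returns the JSON body {"message": "its_alive"}.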
|
py | b402b6e87d3e4b6dcad1c855fcce9913ca7e43e9 | import Pyro.core
import Pyro.naming
import sys
# server based on using caller object on the TLS to store session data
print """
This is the storage server that depends on the caller object in the TLS to
keep track of what the resource is for that given session. Because that object
is always equal to the current active client connection, it will work with
or without multithreading enabled. You can check this by looking at the
output on the screen and the contents of the datafiles."""
print
Pyro.config.PYRO_MULTITHREADED=raw_input("Enable multithreading y/n? ") in ('y','Y')
# The datastore.
# It will store lines of text in a file named after the 'user'.
# The resource that is owned by this user session (the file handle) is stored
# on the caller object on the TLS.
class DataStore(Pyro.core.ObjBase):
def init(self, username):
caller=self.getLocalStorage().caller
caller.datastore=open("datastorage_%s.txt"%username,"w")
def addline(self, textline):
caller=self.getLocalStorage().caller
sys.stdout.write("adding line to "+caller.datastore.name+"\n")
sys.stdout.flush()
caller.datastore.write(textline+" | came from "+str(caller)+"\n")
def close(self):
caller=self.getLocalStorage().caller
caller.datastore.close()
daemon=Pyro.core.Daemon()
ns=Pyro.naming.NameServerLocator().getNS()
daemon.useNameServer(ns)
try:
ns.createGroup(":test")
except Exception:
pass
try:
ns.unregister(":test.datastorage")
except Exception:
pass
daemon.connect(DataStore(), ":test.datastorage")
print "Server (caller version) is running."
daemon.requestLoop()
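# Illustrative client sketch (not part of this server; Pyro 3 / Python 2 style):
# a client would look up ":test.datastorage" via the name server and drive the
# session, e.g.:
#   proxy = Pyro.core.getProxyForURI("PYRONAME://:test.datastorage")
#   proxy.init("alice")          # opens datastorage_alice.txt on the server
#   proxy.addline("hello world")
#   proxy.close()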
|
py | b402b72d9bcb9f955a9a57490beaa3d64b861b81 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import copy
import itertools
import json
import logging
import math
import os
import pkgutil
import socket
import traceback
from collections import defaultdict
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Tuple
from urllib.parse import quote, unquote
import lazy_object_proxy
import markdown
import sqlalchemy as sqla
from flask import (
Markup, Response, escape, flash, jsonify, make_response, redirect, render_template, request,
session as flask_session, url_for,
)
from flask_appbuilder import BaseView, ModelView, expose, has_access, permission_name
from flask_appbuilder.actions import action
from flask_appbuilder.models.sqla.filters import BaseFilter
from flask_babel import lazy_gettext
from jinja2.utils import htmlsafe_json_dumps # type: ignore
from pygments import highlight, lexers
from pygments.formatters import HtmlFormatter
from sqlalchemy import and_, desc, func, or_, union_all
from sqlalchemy.orm import joinedload
from wtforms import SelectField, validators
import airflow
from airflow import models, settings
from airflow._vendor import nvd3
from airflow.api.common.experimental.mark_tasks import (
set_dag_run_state_to_failed, set_dag_run_state_to_success,
)
from airflow.configuration import AIRFLOW_CONFIG, conf
from airflow.exceptions import AirflowException
from airflow.executors.executor_loader import ExecutorLoader
from airflow.jobs.base_job import BaseJob
from airflow.jobs.scheduler_job import SchedulerJob
from airflow.models import Connection, DagModel, DagTag, Log, SlaMiss, TaskFail, XCom, errors
from airflow.models.dagcode import DagCode
from airflow.models.dagrun import DagRun, DagRunType
from airflow.settings import STORE_SERIALIZED_DAGS
from airflow.ti_deps.dep_context import DepContext
from airflow.ti_deps.dependencies_deps import RUNNING_DEPS, SCHEDULER_QUEUED_DEPS
from airflow.utils import timezone
from airflow.utils.dates import infer_time_unit, scale_time_units
from airflow.utils.helpers import alchemy_to_dict, render_log_filename
from airflow.utils.session import create_session, provide_session
from airflow.utils.state import State
from airflow.www import utils as wwwutils
from airflow.www.app import appbuilder
from airflow.www.decorators import action_logging, gzipped, has_dag_access
from airflow.www.forms import (
ConnectionForm, DagRunForm, DateTimeForm, DateTimeWithNumRunsForm, DateTimeWithNumRunsWithDagRunsForm,
)
from airflow.www.utils import get_dag
from airflow.www.widgets import AirflowModelListWidget
PAGE_SIZE = conf.getint('webserver', 'page_size')
FILTER_TAGS_COOKIE = 'tags_filter'
FILTER_STATUS_COOKIE = 'dag_status_filter'
if os.environ.get('SKIP_DAGS_PARSING') != 'True':
dagbag = models.DagBag(settings.DAGS_FOLDER, store_serialized_dags=STORE_SERIALIZED_DAGS)
else:
dagbag = models.DagBag(os.devnull, include_examples=False)
def get_date_time_num_runs_dag_runs_form_data(request, session, dag):
dttm = request.args.get('execution_date')
if dttm:
dttm = timezone.parse(dttm)
else:
dttm = dag.latest_execution_date or timezone.utcnow()
base_date = request.args.get('base_date')
if base_date:
base_date = timezone.parse(base_date)
else:
# The DateTimeField widget truncates milliseconds and would lose
# the first dag run. Round to next second.
base_date = (dttm + timedelta(seconds=1)).replace(microsecond=0)
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
num_runs = request.args.get('num_runs')
num_runs = int(num_runs) if num_runs else default_dag_run
DR = models.DagRun
drs = (
session.query(DR)
.filter(
DR.dag_id == dag.dag_id,
DR.execution_date <= base_date)
.order_by(desc(DR.execution_date))
.limit(num_runs)
.all()
)
dr_choices = []
dr_state = None
for dr in drs:
dr_choices.append((dr.execution_date.isoformat(), dr.run_id))
if dttm == dr.execution_date:
dr_state = dr.state
# Happens if base_date was changed and the selected dag run is not in result
if not dr_state and drs:
dr = drs[0]
dttm = dr.execution_date
dr_state = dr.state
return {
'dttm': dttm,
'base_date': base_date,
'num_runs': num_runs,
'execution_date': dttm.isoformat(),
'dr_choices': dr_choices,
'dr_state': dr_state,
}
######################################################################################
# Error handlers
######################################################################################
def circles(error):
return render_template(
'airflow/circles.html', hostname=socket.getfqdn() if conf.getboolean(
'webserver',
'EXPOSE_HOSTNAME',
fallback=True) else 'redact'), 404
def show_traceback(error):
from airflow.utils import asciiart as ascii_
return render_template(
'airflow/traceback.html',
hostname=socket.getfqdn() if conf.getboolean(
'webserver',
'EXPOSE_HOSTNAME',
fallback=True) else 'redact',
nukular=ascii_.nukular,
info=traceback.format_exc() if conf.getboolean(
'webserver',
'EXPOSE_STACKTRACE',
fallback=True) else 'Error! Please contact server admin'), 500
######################################################################################
# BaseViews
######################################################################################
class AirflowBaseView(BaseView):
from airflow import macros
route_base = ''
# Make our macros available to our UI templates too.
extra_args = {
'macros': macros,
}
def render_template(self, *args, **kwargs):
return super().render_template(
*args,
# Cache this at most once per request, not for the lifetime of the view instance
scheduler_job=lazy_object_proxy.Proxy(SchedulerJob.most_recent_job),
**kwargs
)
class Airflow(AirflowBaseView):
@expose('/health')
def health(self):
"""
An endpoint helping check the health status of the Airflow instance,
including metadatabase and scheduler.
"""
payload = {
'metadatabase': {'status': 'unhealthy'}
}
latest_scheduler_heartbeat = None
scheduler_status = 'unhealthy'
payload['metadatabase'] = {'status': 'healthy'}
try:
scheduler_job = SchedulerJob.most_recent_job()
if scheduler_job:
latest_scheduler_heartbeat = scheduler_job.latest_heartbeat.isoformat()
if scheduler_job.is_alive():
scheduler_status = 'healthy'
except Exception:
payload['metadatabase']['status'] = 'unhealthy'
payload['scheduler'] = {'status': scheduler_status,
'latest_scheduler_heartbeat': latest_scheduler_heartbeat}
return wwwutils.json_response(payload)
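# Illustrative response shape (not part of the view): a GET on /health returns
# JSON such as
#   {"metadatabase": {"status": "healthy"},
#    "scheduler": {"status": "healthy",
#                  "latest_scheduler_heartbeat": "2020-01-01T00:00:00+00:00"}}
# where the timestamp value is a hypothetical example.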
@expose('/home')
@has_access
def index(self):
hide_paused_dags_by_default = conf.getboolean('webserver',
'hide_paused_dags_by_default')
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
num_runs = request.args.get('num_runs')
num_runs = int(num_runs) if num_runs else default_dag_run
def get_int_arg(value, default=0):
try:
return int(value)
except ValueError:
return default
arg_current_page = request.args.get('page', '0')
arg_search_query = request.args.get('search', None)
arg_tags_filter = request.args.getlist('tags', None)
arg_status_filter = request.args.get('status', None)
if request.args.get('reset_tags') is not None:
flask_session[FILTER_TAGS_COOKIE] = None
arg_tags_filter = None
else:
cookie_val = flask_session.get(FILTER_TAGS_COOKIE)
if arg_tags_filter:
flask_session[FILTER_TAGS_COOKIE] = ','.join(arg_tags_filter)
elif cookie_val:
arg_tags_filter = cookie_val.split(',')
if arg_status_filter is None:
cookie_val = flask_session.get(FILTER_STATUS_COOKIE)
if cookie_val:
arg_status_filter = cookie_val
else:
arg_status_filter = 'active' if hide_paused_dags_by_default else 'all'
flask_session[FILTER_STATUS_COOKIE] = arg_status_filter
else:
status = arg_status_filter.strip().lower()
flask_session[FILTER_STATUS_COOKIE] = status
arg_status_filter = status
dags_per_page = PAGE_SIZE
current_page = get_int_arg(arg_current_page, default=0)
start = current_page * dags_per_page
end = start + dags_per_page
# Get all the dag id the user could access
filter_dag_ids = appbuilder.sm.get_accessible_dag_ids()
with create_session() as session:
# read orm_dags from the db
dags_query = session.query(DagModel).filter(
~DagModel.is_subdag, DagModel.is_active
)
if arg_search_query:
dags_query = dags_query.filter(
DagModel.dag_id.ilike('%' + arg_search_query + '%') |
DagModel.owners.ilike('%' + arg_search_query + '%')
)
if arg_tags_filter:
dags_query = dags_query.filter(DagModel.tags.any(DagTag.name.in_(arg_tags_filter)))
if 'all_dags' not in filter_dag_ids:
dags_query = dags_query.filter(DagModel.dag_id.in_(filter_dag_ids))
all_dags = dags_query
active_dags = dags_query.filter(~DagModel.is_paused)
paused_dags = dags_query.filter(DagModel.is_paused)
if arg_status_filter == 'active':
current_dags = active_dags
elif arg_status_filter == 'paused':
current_dags = paused_dags
else:
current_dags = all_dags
dags = current_dags.order_by(DagModel.dag_id).options(
joinedload(DagModel.tags)).offset(start).limit(dags_per_page).all()
dagtags = session.query(DagTag.name).distinct(DagTag.name).all()
tags = [
{"name": name, "selected": bool(arg_tags_filter and name in arg_tags_filter)}
for name, in dagtags
]
import_errors = session.query(errors.ImportError).all()
for ie in import_errors:
flash(
"Broken DAG: [{ie.filename}] {ie.stacktrace}".format(ie=ie),
"dag_import_error")
from airflow.plugins_manager import import_errors as plugin_import_errors
for filename, stacktrace in plugin_import_errors.items():
flash(
"Broken plugin: [{filename}] {stacktrace}".format(
stacktrace=stacktrace,
filename=filename),
"error")
num_of_all_dags = current_dags.count()
num_of_pages = int(math.ceil(num_of_all_dags / float(dags_per_page)))
status_count_active = active_dags.count()
status_count_paused = paused_dags.count()
status_count_all = status_count_active + status_count_paused
return self.render_template(
'airflow/dags.html',
dags=dags,
current_page=current_page,
search_query=arg_search_query if arg_search_query else '',
page_size=dags_per_page,
num_of_pages=num_of_pages,
num_dag_from=min(start + 1, num_of_all_dags),
num_dag_to=min(end, num_of_all_dags),
num_of_all_dags=num_of_all_dags,
paging=wwwutils.generate_pages(current_page,
num_of_pages,
search=escape(arg_search_query) if arg_search_query else None,
status=arg_status_filter if arg_status_filter else None),
num_runs=num_runs,
tags=tags,
status_filter=arg_status_filter,
status_count_all=status_count_all,
status_count_active=status_count_active,
status_count_paused=status_count_paused)
@expose('/dag_stats', methods=['POST'])
@has_access
@provide_session
def dag_stats(self, session=None):
dr = models.DagRun
allowed_dag_ids = appbuilder.sm.get_accessible_dag_ids()
if 'all_dags' in allowed_dag_ids:
allowed_dag_ids = [dag_id for dag_id, in session.query(models.DagModel.dag_id)]
dag_state_stats = session.query(dr.dag_id, dr.state, sqla.func.count(dr.state))\
.group_by(dr.dag_id, dr.state)
# Filter by post parameters
selected_dag_ids = {
unquote(dag_id) for dag_id in request.form.getlist('dag_ids') if dag_id
}
if selected_dag_ids:
filter_dag_ids = selected_dag_ids.intersection(allowed_dag_ids)
else:
filter_dag_ids = allowed_dag_ids
if not filter_dag_ids:
return wwwutils.json_response({})
payload = {}
dag_state_stats = dag_state_stats.filter(dr.dag_id.in_(filter_dag_ids))
data = {}
for dag_id, state, count in dag_state_stats:
if dag_id not in data:
data[dag_id] = {}
data[dag_id][state] = count
for dag_id in filter_dag_ids:
payload[dag_id] = []
for state in State.dag_states:
count = data.get(dag_id, {}).get(state, 0)
payload[dag_id].append({
'state': state,
'count': count,
'dag_id': dag_id,
'color': State.color(state)
})
return wwwutils.json_response(payload)
@expose('/task_stats', methods=['POST'])
@has_access
@provide_session
def task_stats(self, session=None):
TI = models.TaskInstance
DagRun = models.DagRun
Dag = models.DagModel
allowed_dag_ids = set(appbuilder.sm.get_accessible_dag_ids())
if not allowed_dag_ids:
return wwwutils.json_response({})
if 'all_dags' in allowed_dag_ids:
allowed_dag_ids = {dag_id for dag_id, in session.query(models.DagModel.dag_id)}
# Filter by post parameters
selected_dag_ids = {
unquote(dag_id) for dag_id in request.form.getlist('dag_ids') if dag_id
}
if selected_dag_ids:
filter_dag_ids = selected_dag_ids.intersection(allowed_dag_ids)
else:
filter_dag_ids = allowed_dag_ids
RunningDagRun = (
session.query(DagRun.dag_id, DagRun.execution_date)
.join(Dag, Dag.dag_id == DagRun.dag_id)
.filter(DagRun.state == State.RUNNING, Dag.is_active)
)
if selected_dag_ids:
RunningDagRun = RunningDagRun.filter(DagRun.dag_id.in_(filter_dag_ids))
RunningDagRun = RunningDagRun.subquery('running_dag_run')
# Select all task_instances from active dag_runs.
RunningTI = (
session.query(TI.dag_id.label('dag_id'), TI.state.label('state'))
.join(RunningDagRun,
and_(RunningDagRun.c.dag_id == TI.dag_id,
RunningDagRun.c.execution_date == TI.execution_date))
)
if selected_dag_ids:
RunningTI = RunningTI.filter(TI.dag_id.in_(filter_dag_ids))
if conf.getboolean('webserver', 'SHOW_RECENT_STATS_FOR_COMPLETED_RUNS', fallback=True):
LastDagRun = (
session.query(
DagRun.dag_id,
sqla.func.max(DagRun.execution_date).label('execution_date')
)
.join(Dag, Dag.dag_id == DagRun.dag_id)
.filter(DagRun.state != State.RUNNING, Dag.is_active)
.group_by(DagRun.dag_id)
)
if selected_dag_ids:
LastDagRun = LastDagRun.filter(DagRun.dag_id.in_(filter_dag_ids))
LastDagRun = LastDagRun.subquery('last_dag_run')
# Select all task_instances from active dag_runs.
# If no dag_run is active, return task instances from most recent dag_run.
LastTI = (
session.query(TI.dag_id.label('dag_id'), TI.state.label('state'))
.join(LastDagRun,
and_(LastDagRun.c.dag_id == TI.dag_id,
LastDagRun.c.execution_date == TI.execution_date))
)
if selected_dag_ids:
LastTI = LastTI.filter(TI.dag_id.in_(filter_dag_ids))
FinalTI = union_all(LastTI, RunningTI).alias('final_ti')
else:
FinalTI = RunningTI.subquery('final_ti')
qry = (
session.query(FinalTI.c.dag_id, FinalTI.c.state, sqla.func.count())
.group_by(FinalTI.c.dag_id, FinalTI.c.state)
)
data = {}
for dag_id, state, count in qry:
if dag_id not in data:
data[dag_id] = {}
data[dag_id][state] = count
payload = {}
for dag_id in filter_dag_ids:
payload[dag_id] = []
for state in State.task_states:
count = data.get(dag_id, {}).get(state, 0)
payload[dag_id].append({
'state': state,
'count': count,
'dag_id': dag_id,
'color': State.color(state)
})
return wwwutils.json_response(payload)
@expose('/last_dagruns', methods=['POST'])
@has_access
@provide_session
def last_dagruns(self, session=None):
DagRun = models.DagRun
allowed_dag_ids = appbuilder.sm.get_accessible_dag_ids()
if 'all_dags' in allowed_dag_ids:
allowed_dag_ids = [dag_id for dag_id, in session.query(models.DagModel.dag_id)]
# Filter by post parameters
selected_dag_ids = {
unquote(dag_id) for dag_id in request.form.getlist('dag_ids') if dag_id
}
if selected_dag_ids:
filter_dag_ids = selected_dag_ids.intersection(allowed_dag_ids)
else:
filter_dag_ids = allowed_dag_ids
if not filter_dag_ids:
return wwwutils.json_response({})
query = session.query(
DagRun.dag_id, sqla.func.max(DagRun.execution_date).label('last_run')
).group_by(DagRun.dag_id)
# Filter to only ask for accessible and selected dags
query = query.filter(DagRun.dag_id.in_(filter_dag_ids))
resp = {
r.dag_id.replace('.', '__dot__'): {
'dag_id': r.dag_id,
'last_run': r.last_run.isoformat(),
} for r in query
}
return wwwutils.json_response(resp)
@expose('/code')
@has_dag_access(can_dag_read=True)
@has_access
@provide_session
def code(self, session=None):
all_errors = ""
try:
dag_id = request.args.get('dag_id')
dag_orm = DagModel.get_dagmodel(dag_id, session=session)
code = DagCode.get_code_by_fileloc(dag_orm.fileloc)
html_code = highlight(
code, lexers.PythonLexer(), HtmlFormatter(linenos=True))
except Exception as e:
all_errors += (
"Exception encountered during " +
"dag_id retrieval/dag retrieval fallback/code highlighting:\n\n{}\n".format(e)
)
html_code = '<p>Failed to load file.</p><p>Details: {}</p>'.format(
escape(all_errors))
return self.render_template(
'airflow/dag_code.html', html_code=html_code, dag=dag_orm, title=dag_id,
root=request.args.get('root'),
demo_mode=conf.getboolean('webserver', 'demo_mode'),
wrapped=conf.getboolean('webserver', 'default_wrap'))
@expose('/dag_details')
@has_dag_access(can_dag_read=True)
@has_access
@provide_session
def dag_details(self, session=None):
dag_id = request.args.get('dag_id')
dag_orm = DagModel.get_dagmodel(dag_id, session=session)
# FIXME: items needed for this view should move to the database
dag = get_dag(dag_orm, STORE_SERIALIZED_DAGS)
title = "DAG details"
root = request.args.get('root', '')
TI = models.TaskInstance
states = (
session.query(TI.state, sqla.func.count(TI.dag_id))
.filter(TI.dag_id == dag_id)
.group_by(TI.state)
.all()
)
active_runs = models.DagRun.find(
dag_id=dag_id,
state=State.RUNNING,
external_trigger=False
)
return self.render_template(
'airflow/dag_details.html',
dag=dag, title=title, root=root, states=states, State=State, active_runs=active_runs)
@expose('/rendered')
@has_dag_access(can_dag_read=True)
@has_access
@action_logging
@provide_session
def rendered(self, session=None):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
dttm = timezone.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
root = request.args.get('root', '')
logging.info("Retrieving rendered templates.")
dag = dagbag.get_dag(dag_id)
task = copy.copy(dag.get_task(task_id))
ti = models.TaskInstance(task=task, execution_date=dttm)
try:
ti.get_rendered_template_fields()
except AirflowException as e:
msg = "Error rendering template: " + escape(e)
if e.__cause__:
msg += Markup("<br/><br/>OriginalError: ") + escape(e.__cause__)
flash(msg, "error")
except Exception as e:
flash("Error rendering template: " + str(e), "error")
title = "Rendered Template"
html_dict = {}
for template_field in task.template_fields:
content = getattr(task, template_field)
if template_field in wwwutils.get_attr_renderer():
html_dict[template_field] = \
wwwutils.get_attr_renderer()[template_field](content)
else:
html_dict[template_field] = (
"<pre><code>" + str(content) + "</pre></code>")
return self.render_template(
'airflow/ti_code.html',
html_dict=html_dict,
dag=dag,
task_id=task_id,
execution_date=execution_date,
form=form,
root=root,
title=title)
@expose('/get_logs_with_metadata')
@has_dag_access(can_dag_read=True)
@has_access
@action_logging
@provide_session
def get_logs_with_metadata(self, session=None):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
dttm = timezone.parse(execution_date)
if request.args.get('try_number') is not None:
try_number = int(request.args.get('try_number'))
else:
try_number = None
metadata = request.args.get('metadata')
metadata = json.loads(metadata)
response_format = request.args.get('format', 'json')
# metadata may be null
if not metadata:
metadata = {}
# Convert string datetime into actual datetime
try:
execution_date = timezone.parse(execution_date)
except ValueError:
error_message = (
'Given execution date, {}, could not be identified '
'as a date. Example date format: 2015-11-16T14:34:15+00:00'.format(
execution_date))
response = jsonify({'error': error_message})
response.status_code = 400
return response
logger = logging.getLogger('airflow.task')
task_log_reader = conf.get('logging', 'task_log_reader')
handler = next((handler for handler in logger.handlers
if handler.name == task_log_reader), None)
ti = session.query(models.TaskInstance).filter(
models.TaskInstance.dag_id == dag_id,
models.TaskInstance.task_id == task_id,
models.TaskInstance.execution_date == dttm).first()
def _get_logs_with_metadata(try_number, metadata):
if ti is None:
logs = ["*** Task instance did not exist in the DB\n"]
metadata['end_of_log'] = True
else:
logs, metadatas = handler.read(ti, try_number, metadata=metadata)
metadata = metadatas[0]
return logs, metadata
try:
if ti is not None:
dag = dagbag.get_dag(dag_id)
if dag:
ti.task = dag.get_task(ti.task_id)
if response_format == 'json':
logs, metadata = _get_logs_with_metadata(try_number, metadata)
message = logs[0] if try_number is not None else logs
return jsonify(message=message, metadata=metadata)
filename_template = conf.get('logging', 'LOG_FILENAME_TEMPLATE')
attachment_filename = render_log_filename(
ti=ti,
try_number="all" if try_number is None else try_number,
filename_template=filename_template)
metadata['download_logs'] = True
def _generate_log_stream(try_number, metadata):
if try_number is None and ti is not None:
next_try = ti.next_try_number
try_numbers = list(range(1, next_try))
else:
try_numbers = [try_number]
for try_number in try_numbers:
metadata.pop('end_of_log', None)
metadata.pop('max_offset', None)
metadata.pop('offset', None)
while 'end_of_log' not in metadata or not metadata['end_of_log']:
logs, metadata = _get_logs_with_metadata(try_number, metadata)
yield "\n".join(logs) + "\n"
return Response(_generate_log_stream(try_number, metadata),
mimetype="text/plain",
headers={"Content-Disposition": "attachment; filename={}".format(
attachment_filename)})
except AttributeError as e:
error_message = ["Task log handler {} does not support read logs.\n{}\n"
.format(task_log_reader, str(e))]
metadata['end_of_log'] = True
return jsonify(message=error_message, error=True, metadata=metadata)
@expose('/log')
@has_dag_access(can_dag_read=True)
@has_access
@action_logging
@provide_session
def log(self, session=None):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
dttm = timezone.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
dag_model = DagModel.get_dagmodel(dag_id)
ti = session.query(models.TaskInstance).filter(
models.TaskInstance.dag_id == dag_id,
models.TaskInstance.task_id == task_id,
models.TaskInstance.execution_date == dttm).first()
num_logs = 0
if ti is not None:
num_logs = ti.next_try_number - 1
if ti.state == State.UP_FOR_RESCHEDULE:
# Tasks in reschedule state decremented the try number
num_logs += 1
logs = [''] * num_logs
root = request.args.get('root', '')
return self.render_template(
'airflow/ti_log.html',
logs=logs, dag=dag_model, title="Log by attempts",
dag_id=dag_id, task_id=task_id,
execution_date=execution_date, form=form,
root=root, wrapped=conf.getboolean('webserver', 'default_wrap'))
@expose('/elasticsearch')
@has_dag_access(can_dag_read=True)
@has_access
@action_logging
@provide_session
def elasticsearch(self, session=None):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
try_number = request.args.get('try_number', 1)
elasticsearch_frontend = conf.get('elasticsearch', 'frontend')
log_id_template = conf.get('elasticsearch', 'log_id_template')
log_id = log_id_template.format(
dag_id=dag_id, task_id=task_id,
execution_date=execution_date, try_number=try_number)
url = 'https://' + elasticsearch_frontend.format(log_id=quote(log_id))
return redirect(url)
@expose('/task')
@has_dag_access(can_dag_read=True)
@has_access
@action_logging
def task(self):
TI = models.TaskInstance
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
# Carrying execution_date through, even though it's irrelevant for
# this context
execution_date = request.args.get('execution_date')
dttm = timezone.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
root = request.args.get('root', '')
dag = dagbag.get_dag(dag_id)
if not dag or task_id not in dag.task_ids:
flash(
"Task [{}.{}] doesn't seem to exist"
" at the moment".format(dag_id, task_id),
"error")
return redirect(url_for('Airflow.index'))
task = copy.copy(dag.get_task(task_id))
task.resolve_template_files()
ti = TI(task=task, execution_date=dttm)
ti.refresh_from_db()
ti_attrs = []
for attr_name in dir(ti):
if not attr_name.startswith('_'):
attr = getattr(ti, attr_name)
if type(attr) != type(self.task): # noqa
ti_attrs.append((attr_name, str(attr)))
task_attrs = []
for attr_name in dir(task):
if not attr_name.startswith('_'):
attr = getattr(task, attr_name)
if type(attr) != type(self.task) and \
attr_name not in wwwutils.get_attr_renderer(): # noqa
task_attrs.append((attr_name, str(attr)))
# Color coding the special attributes that are code
special_attrs_rendered = {}
for attr_name in wwwutils.get_attr_renderer():
if hasattr(task, attr_name):
source = getattr(task, attr_name)
special_attrs_rendered[attr_name] = \
wwwutils.get_attr_renderer()[attr_name](source)
no_failed_deps_result = [(
"Unknown",
"All dependencies are met but the task instance is not running. In most "
"cases this just means that the task will probably be scheduled soon "
"unless:<br/>\n- The scheduler is down or under heavy load<br/>\n{}\n"
"<br/>\nIf this task instance does not start soon please contact your "
"Airflow administrator for assistance.".format(
"- This task instance already ran and had it's state changed manually "
"(e.g. cleared in the UI)<br/>" if ti.state == State.NONE else ""))]
# Use the scheduler's context to figure out which dependencies are not met
dep_context = DepContext(SCHEDULER_QUEUED_DEPS)
failed_dep_reasons = [(dep.dep_name, dep.reason) for dep in
ti.get_failed_dep_statuses(
dep_context=dep_context)]
title = "Task Instance Details"
return self.render_template(
'airflow/task.html',
task_attrs=task_attrs,
ti_attrs=ti_attrs,
failed_dep_reasons=failed_dep_reasons or no_failed_deps_result,
task_id=task_id,
execution_date=execution_date,
special_attrs_rendered=special_attrs_rendered,
form=form,
root=root,
dag=dag, title=title)
@expose('/xcom')
@has_dag_access(can_dag_read=True)
@has_access
@action_logging
@provide_session
def xcom(self, session=None):
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
# Carrying execution_date through, even though it's irrelevant for
# this context
execution_date = request.args.get('execution_date')
dttm = timezone.parse(execution_date)
form = DateTimeForm(data={'execution_date': dttm})
root = request.args.get('root', '')
dm_db = models.DagModel
ti_db = models.TaskInstance
dag = session.query(dm_db).filter(dm_db.dag_id == dag_id).first()
ti = session.query(ti_db).filter(ti_db.dag_id == dag_id, ti_db.task_id == task_id).first()
if not ti:
flash(
"Task [{}.{}] doesn't seem to exist"
" at the moment".format(dag_id, task_id),
"error")
return redirect(url_for('Airflow.index'))
xcomlist = session.query(XCom).filter(
XCom.dag_id == dag_id, XCom.task_id == task_id,
XCom.execution_date == dttm).all()
attributes = []
for xcom in xcomlist:
if not xcom.key.startswith('_'):
attributes.append((xcom.key, xcom.value))
title = "XCom"
return self.render_template(
'airflow/xcom.html',
attributes=attributes,
task_id=task_id,
execution_date=execution_date,
form=form,
root=root,
dag=dag, title=title)
@expose('/run', methods=['POST'])
@has_dag_access(can_dag_edit=True)
@has_access
@action_logging
def run(self):
dag_id = request.form.get('dag_id')
task_id = request.form.get('task_id')
origin = request.form.get('origin')
dag = dagbag.get_dag(dag_id)
task = dag.get_task(task_id)
execution_date = request.form.get('execution_date')
execution_date = timezone.parse(execution_date)
ignore_all_deps = request.form.get('ignore_all_deps') == "true"
ignore_task_deps = request.form.get('ignore_task_deps') == "true"
ignore_ti_state = request.form.get('ignore_ti_state') == "true"
executor = ExecutorLoader.get_default_executor()
valid_celery_config = False
valid_kubernetes_config = False
try:
from airflow.executors.celery_executor import CeleryExecutor
valid_celery_config = isinstance(executor, CeleryExecutor)
except ImportError:
pass
try:
from airflow.executors.kubernetes_executor import KubernetesExecutor
valid_kubernetes_config = isinstance(executor, KubernetesExecutor)
except ImportError:
pass
if not valid_celery_config and not valid_kubernetes_config:
flash("Only works with the Celery or Kubernetes executors, sorry", "error")
return redirect(origin)
ti = models.TaskInstance(task=task, execution_date=execution_date)
ti.refresh_from_db()
# Make sure the task instance can be run
dep_context = DepContext(
deps=RUNNING_DEPS,
ignore_all_deps=ignore_all_deps,
ignore_task_deps=ignore_task_deps,
ignore_ti_state=ignore_ti_state)
failed_deps = list(ti.get_failed_dep_statuses(dep_context=dep_context))
if failed_deps:
failed_deps_str = ", ".join(
["{}: {}".format(dep.dep_name, dep.reason) for dep in failed_deps])
flash("Could not queue task instance for execution, dependencies not met: "
"{}".format(failed_deps_str),
"error")
return redirect(origin)
executor.start()
executor.queue_task_instance(
ti,
ignore_all_deps=ignore_all_deps,
ignore_task_deps=ignore_task_deps,
ignore_ti_state=ignore_ti_state)
executor.heartbeat()
flash(
"Sent {} to the message queue, "
"it should start any moment now.".format(ti))
return redirect(origin)
@expose('/delete', methods=['POST'])
@has_dag_access(can_dag_edit=True)
@has_access
@action_logging
def delete(self):
from airflow.api.common.experimental import delete_dag
from airflow.exceptions import DagNotFound, DagFileExists
dag_id = request.values.get('dag_id')
origin = request.values.get('origin') or url_for('Airflow.index')
try:
delete_dag.delete_dag(dag_id)
except DagNotFound:
flash("DAG with id {} not found. Cannot delete".format(dag_id), 'error')
return redirect(request.referrer)
except DagFileExists:
flash("Dag id {} is still in DagBag. "
"Remove the DAG file first.".format(dag_id),
'error')
return redirect(request.referrer)
flash("Deleting DAG with id {}. May take a couple minutes to fully"
" disappear.".format(dag_id))
# Upon success return to origin.
return redirect(origin)
@expose('/trigger', methods=['POST', 'GET'])
@has_dag_access(can_dag_edit=True)
@has_access
@action_logging
@provide_session
def trigger(self, session=None):
dag_id = request.values.get('dag_id')
origin = request.values.get('origin') or url_for('Airflow.index')
if request.method == 'GET':
return self.render_template(
'airflow/trigger.html',
dag_id=dag_id,
origin=origin,
conf=''
)
dag_orm = session.query(models.DagModel).filter(models.DagModel.dag_id == dag_id).first()
if not dag_orm:
flash("Cannot find dag {}".format(dag_id))
return redirect(origin)
execution_date = timezone.utcnow()
run_id = f"{DagRunType.MANUAL.value}__{execution_date.isoformat()}"
dr = DagRun.find(dag_id=dag_id, run_id=run_id)
if dr:
flash("This run_id {} already exists".format(run_id))
return redirect(origin)
run_conf = {}
conf = request.values.get('conf')
if conf:
try:
run_conf = json.loads(conf)
except json.decoder.JSONDecodeError:
flash("Invalid JSON configuration", "error")
return self.render_template(
'airflow/trigger.html',
dag_id=dag_id,
origin=origin,
conf=conf
)
dag = get_dag(dag_orm, STORE_SERIALIZED_DAGS)
dag.create_dagrun(
run_id=run_id,
execution_date=execution_date,
state=State.RUNNING,
conf=run_conf,
external_trigger=True
)
flash(
"Triggered {}, "
"it should start any moment now.".format(dag_id))
return redirect(origin)
def _clear_dag_tis(self, dag, start_date, end_date, origin,
recursive=False, confirmed=False, only_failed=False):
from airflow.exceptions import AirflowException
if confirmed:
count = dag.clear(
start_date=start_date,
end_date=end_date,
include_subdags=recursive,
include_parentdag=recursive,
only_failed=only_failed,
)
flash("{0} task instances have been cleared".format(count))
return redirect(origin)
try:
tis = dag.clear(
start_date=start_date,
end_date=end_date,
include_subdags=recursive,
include_parentdag=recursive,
only_failed=only_failed,
dry_run=True,
)
except AirflowException as ex:
flash(str(ex), 'error')
return redirect(origin)
if not tis:
flash("No task instances to clear", 'error')
response = redirect(origin)
else:
details = "\n".join([str(t) for t in tis])
response = self.render_template(
'airflow/confirm.html',
message=("Here's the list of task instances you are about "
"to clear:"),
details=details)
return response
@expose('/clear', methods=['POST'])
@has_dag_access(can_dag_edit=True)
@has_access
@action_logging
def clear(self):
dag_id = request.form.get('dag_id')
task_id = request.form.get('task_id')
origin = request.form.get('origin')
dag = dagbag.get_dag(dag_id)
execution_date = request.form.get('execution_date')
execution_date = timezone.parse(execution_date)
confirmed = request.form.get('confirmed') == "true"
upstream = request.form.get('upstream') == "true"
downstream = request.form.get('downstream') == "true"
future = request.form.get('future') == "true"
past = request.form.get('past') == "true"
recursive = request.form.get('recursive') == "true"
only_failed = request.form.get('only_failed') == "true"
dag = dag.sub_dag(
task_regex=r"^{0}$".format(task_id),
include_downstream=downstream,
include_upstream=upstream)
end_date = execution_date if not future else None
start_date = execution_date if not past else None
return self._clear_dag_tis(dag, start_date, end_date, origin,
recursive=recursive, confirmed=confirmed, only_failed=only_failed)
@expose('/dagrun_clear', methods=['POST'])
@has_dag_access(can_dag_edit=True)
@has_access
@action_logging
def dagrun_clear(self):
dag_id = request.form.get('dag_id')
origin = request.form.get('origin')
execution_date = request.form.get('execution_date')
confirmed = request.form.get('confirmed') == "true"
dag = dagbag.get_dag(dag_id)
execution_date = timezone.parse(execution_date)
start_date = execution_date
end_date = execution_date
return self._clear_dag_tis(dag, start_date, end_date, origin,
recursive=True, confirmed=confirmed)
@expose('/blocked', methods=['POST'])
@has_access
@provide_session
def blocked(self, session=None):
allowed_dag_ids = appbuilder.sm.get_accessible_dag_ids()
if 'all_dags' in allowed_dag_ids:
allowed_dag_ids = [dag_id for dag_id, in session.query(models.DagModel.dag_id)]
# Filter by post parameters
selected_dag_ids = {
unquote(dag_id) for dag_id in request.form.getlist('dag_ids') if dag_id
}
if selected_dag_ids:
filter_dag_ids = selected_dag_ids.intersection(allowed_dag_ids)
else:
filter_dag_ids = allowed_dag_ids
if not filter_dag_ids:
return wwwutils.json_response([])
DR = models.DagRun
dags = (
session.query(DR.dag_id, sqla.func.count(DR.id))
.filter(DR.state == State.RUNNING)
.filter(DR.dag_id.in_(filter_dag_ids))
.group_by(DR.dag_id)
)
payload = []
for dag_id, active_dag_runs in dags:
max_active_runs = 0
dag = dagbag.get_dag(dag_id)
if dag:
# TODO: Make max_active_runs a column so we can query for it directly
max_active_runs = dag.max_active_runs
payload.append({
'dag_id': dag_id,
'active_dag_run': active_dag_runs,
'max_active_runs': max_active_runs,
})
return wwwutils.json_response(payload)
def _mark_dagrun_state_as_failed(self, dag_id, execution_date, confirmed, origin):
if not execution_date:
flash('Invalid execution date', 'error')
return redirect(origin)
execution_date = timezone.parse(execution_date)
dag = dagbag.get_dag(dag_id)
if not dag:
flash('Cannot find DAG: {}'.format(dag_id), 'error')
return redirect(origin)
new_dag_state = set_dag_run_state_to_failed(dag, execution_date, commit=confirmed)
if confirmed:
flash('Marked failed on {} task instances'.format(len(new_dag_state)))
return redirect(origin)
else:
details = '\n'.join([str(t) for t in new_dag_state])
response = self.render_template(
'airflow/confirm.html',
message="Here's the list of task instances you are about to mark as failed",
details=details)
return response
def _mark_dagrun_state_as_success(self, dag_id, execution_date, confirmed, origin):
if not execution_date:
flash('Invalid execution date', 'error')
return redirect(origin)
execution_date = timezone.parse(execution_date)
dag = dagbag.get_dag(dag_id)
if not dag:
flash('Cannot find DAG: {}'.format(dag_id), 'error')
return redirect(origin)
new_dag_state = set_dag_run_state_to_success(dag, execution_date,
commit=confirmed)
if confirmed:
flash('Marked success on {} task instances'.format(len(new_dag_state)))
return redirect(origin)
else:
details = '\n'.join([str(t) for t in new_dag_state])
response = self.render_template(
'airflow/confirm.html',
message="Here's the list of task instances you are about to mark as success",
details=details)
return response
@expose('/dagrun_failed', methods=['POST'])
@has_dag_access(can_dag_edit=True)
@has_access
@action_logging
def dagrun_failed(self):
dag_id = request.form.get('dag_id')
execution_date = request.form.get('execution_date')
confirmed = request.form.get('confirmed') == 'true'
origin = request.form.get('origin')
return self._mark_dagrun_state_as_failed(dag_id, execution_date,
confirmed, origin)
@expose('/dagrun_success', methods=['POST'])
@has_dag_access(can_dag_edit=True)
@has_access
@action_logging
def dagrun_success(self):
dag_id = request.form.get('dag_id')
execution_date = request.form.get('execution_date')
confirmed = request.form.get('confirmed') == 'true'
origin = request.form.get('origin')
return self._mark_dagrun_state_as_success(dag_id, execution_date,
confirmed, origin)
def _mark_task_instance_state(self, dag_id, task_id, origin, execution_date,
confirmed, upstream, downstream,
future, past, state):
dag = dagbag.get_dag(dag_id)
task = dag.get_task(task_id)
task.dag = dag
latest_execution_date = dag.latest_execution_date
if not latest_execution_date:
flash(f"Cannot make {state}, seem that dag {dag_id} has never run", "error")
return redirect(origin)
execution_date = timezone.parse(execution_date)
from airflow.api.common.experimental.mark_tasks import set_state
if confirmed:
altered = set_state(tasks=[task], execution_date=execution_date,
upstream=upstream, downstream=downstream,
future=future, past=past, state=state,
commit=True)
flash("Marked {} on {} task instances".format(state, len(altered)))
return redirect(origin)
to_be_altered = set_state(tasks=[task], execution_date=execution_date,
upstream=upstream, downstream=downstream,
future=future, past=past, state=state,
commit=False)
details = "\n".join([str(t) for t in to_be_altered])
response = self.render_template(
"airflow/confirm.html",
message=("Here's the list of task instances you are about to mark as {}:".format(state)),
details=details)
return response
@expose('/failed', methods=['POST'])
@has_dag_access(can_dag_edit=True)
@has_access
@action_logging
def failed(self):
dag_id = request.form.get('dag_id')
task_id = request.form.get('task_id')
origin = request.form.get('origin')
execution_date = request.form.get('execution_date')
confirmed = request.form.get('confirmed') == "true"
upstream = request.form.get('failed_upstream') == "true"
downstream = request.form.get('failed_downstream') == "true"
future = request.form.get('failed_future') == "true"
past = request.form.get('failed_past') == "true"
return self._mark_task_instance_state(dag_id, task_id, origin, execution_date,
confirmed, upstream, downstream,
future, past, State.FAILED)
@expose('/success', methods=['POST'])
@has_dag_access(can_dag_edit=True)
@has_access
@action_logging
def success(self):
dag_id = request.form.get('dag_id')
task_id = request.form.get('task_id')
origin = request.form.get('origin')
execution_date = request.form.get('execution_date')
confirmed = request.form.get('confirmed') == "true"
upstream = request.form.get('success_upstream') == "true"
downstream = request.form.get('success_downstream') == "true"
future = request.form.get('success_future') == "true"
past = request.form.get('success_past') == "true"
return self._mark_task_instance_state(dag_id, task_id, origin, execution_date,
confirmed, upstream, downstream,
future, past, State.SUCCESS)
@expose('/tree')
@has_dag_access(can_dag_read=True)
@has_access
@gzipped
@action_logging
def tree(self):
dag_id = request.args.get('dag_id')
blur = conf.getboolean('webserver', 'demo_mode')
dag = dagbag.get_dag(dag_id)
if not dag:
flash('DAG "{0}" seems to be missing from DagBag.'.format(dag_id), "error")
return redirect(url_for('Airflow.index'))
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_downstream=False,
include_upstream=True)
base_date = request.args.get('base_date')
num_runs = request.args.get('num_runs')
if num_runs:
num_runs = int(num_runs)
else:
num_runs = conf.getint('webserver', 'default_dag_run_display_number')
if base_date:
base_date = timezone.parse(base_date)
else:
base_date = dag.latest_execution_date or timezone.utcnow()
with create_session() as session:
dag_runs = (
session.query(DagRun)
.filter(
DagRun.dag_id == dag.dag_id,
DagRun.execution_date <= base_date)
.order_by(DagRun.execution_date.desc())
.limit(num_runs)
.all()
)
dag_runs = {
dr.execution_date: alchemy_to_dict(dr) for dr in dag_runs
}
dates = sorted(list(dag_runs.keys()))
max_date = max(dates) if dates else None
min_date = min(dates) if dates else None
tis = dag.get_task_instances(start_date=min_date, end_date=base_date)
task_instances: Dict[Tuple[str, datetime], models.TaskInstance] = {}
for ti in tis:
task_instances[(ti.task_id, ti.execution_date)] = ti
expanded = set()
# The default recursion traces every path so that tree view has full
# expand/collapse functionality. After 5,000 nodes we stop and fall
# back on a quick DFS search for performance. See PR #320.
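# Illustration (hypothetical DAG shape): with 10 leaf tasks the cap below
# works out to node_limit = 5000 / 10 = 500 fully traced nodes.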
node_count = 0
node_limit = 5000 / max(1, len(dag.leaves))
def encode_ti(ti: Optional[models.TaskInstance]) -> Optional[List]:
if not ti:
return None
# NOTE: order of entry is important here because client JS relies on it for
# tree node reconstruction. Remember to change JS code in tree.html
# whenever order is altered.
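# Illustrative encoding (made-up values): a successful first try that started
# at epoch 1609459200 and ran for 42s is emitted as ['success', 1, 1609459200, 42].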
data = [
ti.state,
ti.try_number,
None, # start_ts
None, # duration
]
if ti.start_date:
# round to seconds to reduce payload size
data[2] = int(ti.start_date.timestamp())
if ti.duration is not None:
data[3] = int(ti.duration)
return data
def recurse_nodes(task, visited):
nonlocal node_count
node_count += 1
visited.add(task)
task_id = task.task_id
node = {
'name': task.task_id,
'instances': [
encode_ti(task_instances.get((task_id, d)))
for d in dates
],
'num_dep': len(task.downstream_list),
'operator': task.task_type,
'retries': task.retries,
'owner': task.owner,
'ui_color': task.ui_color,
}
if task.downstream_list:
children = [
recurse_nodes(t, visited) for t in task.downstream_list
if node_count < node_limit or t not in visited]
# D3 tree uses children vs _children to define what is
# expanded or not. The following block makes it such that
# repeated nodes are collapsed by default.
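# Illustration (hypothetical DAG shape): a task reachable from two upstream
# branches gets 'children' under the first parent visited and '_children'
# (collapsed) under every later parent.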
if task.task_id not in expanded:
children_key = 'children'
expanded.add(task.task_id)
else:
children_key = "_children"
node[children_key] = children
if task.depends_on_past:
node['depends_on_past'] = task.depends_on_past
if task.start_date:
# round to seconds to reduce payload size
node['start_ts'] = int(task.start_date.timestamp())
if task.end_date:
# round to seconds to reduce payload size
node['end_ts'] = int(task.end_date.timestamp())
if task.extra_links:
node['extra_links'] = task.extra_links
return node
data = {
'name': '[DAG]',
'children': [recurse_nodes(t, set()) for t in dag.roots],
'instances': [
dag_runs.get(d) or {'execution_date': d.isoformat()}
for d in dates
],
}
form = DateTimeWithNumRunsForm(data={'base_date': max_date,
'num_runs': num_runs})
external_logs = conf.get('elasticsearch', 'frontend')
return self.render_template(
'airflow/tree.html',
operators=sorted({op.task_type: op for op in dag.tasks}.values(), key=lambda x: x.task_type),
root=root,
form=form,
dag=dag,
# avoid spaces to reduce payload size
data=htmlsafe_json_dumps(data, separators=(',', ':')),
blur=blur, num_runs=num_runs,
show_external_logs=bool(external_logs))
@expose('/graph')
@has_dag_access(can_dag_read=True)
@has_access
@gzipped
@action_logging
@provide_session
def graph(self, session=None):
dag_id = request.args.get('dag_id')
blur = conf.getboolean('webserver', 'demo_mode')
dag = dagbag.get_dag(dag_id)
if not dag:
flash('DAG "{0}" seems to be missing.'.format(dag_id), "error")
return redirect(url_for('Airflow.index'))
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_upstream=True,
include_downstream=False)
arrange = request.args.get('arrange', dag.orientation)
nodes = []
edges = []
for task in dag.tasks:
nodes.append({
'id': task.task_id,
'value': {
'label': task.task_id,
'labelStyle': "fill:{0};".format(task.ui_fgcolor),
'style': "fill:{0};".format(task.ui_color),
'rx': 5,
'ry': 5,
}
})
def get_downstream(task):
for t in task.downstream_list:
edge = {
'source_id': task.task_id,
'target_id': t.task_id,
}
if edge not in edges:
edges.append(edge)
get_downstream(t)
for t in dag.roots:
get_downstream(t)
dt_nr_dr_data = get_date_time_num_runs_dag_runs_form_data(request, session, dag)
dt_nr_dr_data['arrange'] = arrange
dttm = dt_nr_dr_data['dttm']
class GraphForm(DateTimeWithNumRunsWithDagRunsForm):
arrange = SelectField("Layout", choices=(
('LR', "Left->Right"),
('RL', "Right->Left"),
('TB', "Top->Bottom"),
('BT', "Bottom->Top"),
))
form = GraphForm(data=dt_nr_dr_data)
form.execution_date.choices = dt_nr_dr_data['dr_choices']
task_instances = {
ti.task_id: alchemy_to_dict(ti)
for ti in dag.get_task_instances(dttm, dttm)}
tasks = {
t.task_id: {
'dag_id': t.dag_id,
'task_type': t.task_type,
'extra_links': t.extra_links,
}
for t in dag.tasks}
if not tasks:
flash("No tasks found", "error")
session.commit()
doc_md = markdown.markdown(dag.doc_md) \
if hasattr(dag, 'doc_md') and dag.doc_md else ''
external_logs = conf.get('elasticsearch', 'frontend')
return self.render_template(
'airflow/graph.html',
dag=dag,
form=form,
width=request.args.get('width', "100%"),
height=request.args.get('height', "800"),
execution_date=dttm.isoformat(),
state_token=wwwutils.state_token(dt_nr_dr_data['dr_state']),
doc_md=doc_md,
arrange=arrange,
operators=sorted({op.task_type: op for op in dag.tasks}.values(), key=lambda x: x.task_type),
blur=blur,
root=root or '',
task_instances=task_instances,
tasks=tasks,
nodes=nodes,
edges=edges,
show_external_logs=bool(external_logs))
@expose('/duration')
@has_dag_access(can_dag_read=True)
@has_access
@action_logging
@provide_session
def duration(self, session=None):
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
base_date = request.args.get('base_date')
num_runs = request.args.get('num_runs')
num_runs = int(num_runs) if num_runs else default_dag_run
if dag is None:
flash('DAG "{0}" seems to be missing.'.format(dag_id), "error")
return redirect(url_for('Airflow.index'))
if base_date:
base_date = timezone.parse(base_date)
else:
base_date = dag.latest_execution_date or timezone.utcnow()
dates = dag.date_range(base_date, num=-abs(num_runs))
min_date = dates[0] if dates else timezone.utc_epoch()
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_upstream=True,
include_downstream=False)
chart_height = wwwutils.get_chart_height(dag)
chart = nvd3.lineChart(
name="lineChart", x_is_date=True, height=chart_height, width="1200")
cum_chart = nvd3.lineChart(
name="cumLineChart", x_is_date=True, height=chart_height, width="1200")
y = defaultdict(list)
x = defaultdict(list)
cum_y = defaultdict(list)
tis = dag.get_task_instances(start_date=min_date, end_date=base_date)
TF = TaskFail
ti_fails = (
session.query(TF)
.filter(TF.dag_id == dag.dag_id,
TF.execution_date >= min_date,
TF.execution_date <= base_date,
TF.task_id.in_([t.task_id for t in dag.tasks]))
.all() # noqa
)
fails_totals = defaultdict(int)
for tf in ti_fails:
dict_key = (tf.dag_id, tf.task_id, tf.execution_date)
if tf.duration:
fails_totals[dict_key] += tf.duration
for ti in tis:
if ti.duration:
dttm = wwwutils.epoch(ti.execution_date)
x[ti.task_id].append(dttm)
y[ti.task_id].append(float(ti.duration))
fails_dict_key = (ti.dag_id, ti.task_id, ti.execution_date)
fails_total = fails_totals[fails_dict_key]
cum_y[ti.task_id].append(float(ti.duration + fails_total))
# determine the most relevant time unit for the set of task instance
# durations for the DAG
y_unit = infer_time_unit([d for t in y.values() for d in t])
cum_y_unit = infer_time_unit([d for t in cum_y.values() for d in t])
# update the y Axis on both charts to have the correct time units
chart.create_y_axis('yAxis', format='.02f', custom_format=False,
label='Duration ({})'.format(y_unit))
chart.axislist['yAxis']['axisLabelDistance'] = '-15'
cum_chart.create_y_axis('yAxis', format='.02f', custom_format=False,
label='Duration ({})'.format(cum_y_unit))
cum_chart.axislist['yAxis']['axisLabelDistance'] = '-15'
for task in dag.tasks:
if x[task.task_id]:
chart.add_serie(name=task.task_id, x=x[task.task_id],
y=scale_time_units(y[task.task_id], y_unit))
cum_chart.add_serie(name=task.task_id, x=x[task.task_id],
y=scale_time_units(cum_y[task.task_id],
cum_y_unit))
dates = sorted(list({ti.execution_date for ti in tis}))
max_date = max([ti.execution_date for ti in tis]) if dates else None
session.commit()
form = DateTimeWithNumRunsForm(data={'base_date': max_date,
'num_runs': num_runs})
chart.buildcontent()
cum_chart.buildcontent()
s_index = cum_chart.htmlcontent.rfind('});')
cum_chart.htmlcontent = (cum_chart.htmlcontent[:s_index] +
"$( document ).trigger('chartload')" +
cum_chart.htmlcontent[s_index:])
return self.render_template(
'airflow/duration_chart.html',
dag=dag,
demo_mode=conf.getboolean('webserver', 'demo_mode'),
root=root,
form=form,
chart=chart.htmlcontent,
cum_chart=cum_chart.htmlcontent
)
@expose('/tries')
@has_dag_access(can_dag_read=True)
@has_access
@action_logging
@provide_session
def tries(self, session=None):
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
base_date = request.args.get('base_date')
num_runs = request.args.get('num_runs')
num_runs = int(num_runs) if num_runs else default_dag_run
if base_date:
base_date = timezone.parse(base_date)
else:
base_date = dag.latest_execution_date or timezone.utcnow()
dates = dag.date_range(base_date, num=-abs(num_runs))
min_date = dates[0] if dates else timezone.utc_epoch()
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_upstream=True,
include_downstream=False)
chart_height = wwwutils.get_chart_height(dag)
chart = nvd3.lineChart(
name="lineChart", x_is_date=True, y_axis_format='d', height=chart_height,
width="1200")
for task in dag.tasks:
y = []
x = []
for ti in task.get_task_instances(start_date=min_date, end_date=base_date):
dttm = wwwutils.epoch(ti.execution_date)
x.append(dttm)
# y value should reflect completed tries to have a 0 baseline.
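# Hedged illustration (based on TaskInstance internals): an instance that has
# never been attempted contributes 0 here, keeping the series on the baseline.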
y.append(ti.prev_attempted_tries)
if x:
chart.add_serie(name=task.task_id, x=x, y=y)
tis = dag.get_task_instances(start_date=min_date, end_date=base_date)
tries = sorted(list({ti.try_number for ti in tis}))
max_date = max([ti.execution_date for ti in tis]) if tries else None
session.commit()
form = DateTimeWithNumRunsForm(data={'base_date': max_date,
'num_runs': num_runs})
chart.buildcontent()
return self.render_template(
'airflow/chart.html',
dag=dag,
demo_mode=conf.getboolean('webserver', 'demo_mode'),
root=root,
form=form,
chart=chart.htmlcontent,
tab_title='Tries',
)
@expose('/landing_times')
@has_dag_access(can_dag_read=True)
@has_access
@action_logging
@provide_session
def landing_times(self, session=None):
default_dag_run = conf.getint('webserver', 'default_dag_run_display_number')
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
base_date = request.args.get('base_date')
num_runs = request.args.get('num_runs')
num_runs = int(num_runs) if num_runs else default_dag_run
if base_date:
base_date = timezone.parse(base_date)
else:
base_date = dag.latest_execution_date or timezone.utcnow()
dates = dag.date_range(base_date, num=-abs(num_runs))
min_date = dates[0] if dates else timezone.utc_epoch()
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_upstream=True,
include_downstream=False)
chart_height = wwwutils.get_chart_height(dag)
chart = nvd3.lineChart(
name="lineChart", x_is_date=True, height=chart_height, width="1200")
y = {}
x = {}
for task in dag.tasks:
task_id = task.task_id
y[task_id] = []
x[task_id] = []
for ti in task.get_task_instances(start_date=min_date, end_date=base_date):
ts = ti.execution_date
if dag.schedule_interval and dag.following_schedule(ts):
ts = dag.following_schedule(ts)
if ti.end_date:
dttm = wwwutils.epoch(ti.execution_date)
secs = (ti.end_date - ts).total_seconds()
x[task_id].append(dttm)
y[task_id].append(secs)
# determine the most relevant time unit for the set of landing times
# for the DAG
y_unit = infer_time_unit([d for t in y.values() for d in t])
# update the y Axis to have the correct time units
chart.create_y_axis('yAxis', format='.02f', custom_format=False,
label='Landing Time ({})'.format(y_unit))
chart.axislist['yAxis']['axisLabelDistance'] = '-15'
for task in dag.tasks:
if x[task.task_id]:
chart.add_serie(name=task.task_id, x=x[task.task_id],
y=scale_time_units(y[task.task_id], y_unit))
tis = dag.get_task_instances(start_date=min_date, end_date=base_date)
dates = sorted(list({ti.execution_date for ti in tis}))
max_date = max([ti.execution_date for ti in tis]) if dates else None
session.commit()
form = DateTimeWithNumRunsForm(data={'base_date': max_date,
'num_runs': num_runs})
chart.buildcontent()
return self.render_template(
'airflow/chart.html',
dag=dag,
chart=chart.htmlcontent,
height=str(chart_height + 100) + "px",
demo_mode=conf.getboolean('webserver', 'demo_mode'),
root=root,
form=form,
tab_title='Landing times',
)
@expose('/paused', methods=['POST'])
@has_dag_access(can_dag_edit=True)
@has_access
@action_logging
def paused(self):
dag_id = request.args.get('dag_id')
# the request carries the DAG's current paused state, so the new value is its inverse
is_paused = request.args.get('is_paused') == 'false'
models.DagModel.get_dagmodel(dag_id).set_is_paused(
is_paused=is_paused)
return "OK"
@expose('/refresh', methods=['POST'])
@has_dag_access(can_dag_edit=True)
@has_access
@action_logging
@provide_session
def refresh(self, session=None):
DagModel = models.DagModel
dag_id = request.values.get('dag_id')
orm_dag = session.query(
DagModel).filter(DagModel.dag_id == dag_id).first()
if orm_dag:
orm_dag.last_expired = timezone.utcnow()
session.merge(orm_dag)
session.commit()
dag = dagbag.get_dag(dag_id)
# sync dag permission
appbuilder.sm.sync_perm_for_dag(dag_id, dag.access_control)
flash("DAG [{}] is now fresh as a daisy".format(dag_id))
return redirect(request.referrer)
@expose('/gantt')
@has_dag_access(can_dag_read=True)
@has_access
@action_logging
@provide_session
def gantt(self, session=None):
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
demo_mode = conf.getboolean('webserver', 'demo_mode')
root = request.args.get('root')
if root:
dag = dag.sub_dag(
task_regex=root,
include_upstream=True,
include_downstream=False)
dt_nr_dr_data = get_date_time_num_runs_dag_runs_form_data(request, session, dag)
dttm = dt_nr_dr_data['dttm']
form = DateTimeWithNumRunsWithDagRunsForm(data=dt_nr_dr_data)
form.execution_date.choices = dt_nr_dr_data['dr_choices']
tis = [
ti for ti in dag.get_task_instances(dttm, dttm)
if ti.start_date and ti.state]
tis = sorted(tis, key=lambda ti: ti.start_date)
TF = TaskFail
ti_fails = list(itertools.chain(*[(
session
.query(TF)
.filter(TF.dag_id == ti.dag_id,
TF.task_id == ti.task_id,
TF.execution_date == ti.execution_date)
.all()
) for ti in tis]))
# determine bars to show in the gantt chart
gantt_bar_items = []
tasks = []
for ti in tis:
end_date = ti.end_date or timezone.utcnow()
# prev_attempted_tries will reflect the currently running try_number
# or the try_number of the last complete run
# https://issues.apache.org/jira/browse/AIRFLOW-2143
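# Hedged illustration: an instance currently running its third attempt would
# contribute try_count == 3 for its gantt bar.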
try_count = ti.prev_attempted_tries
gantt_bar_items.append((ti.task_id, ti.start_date, end_date, ti.state, try_count))
tasks.append(alchemy_to_dict(ti))
tf_count = 0
try_count = 1
prev_task_id = ""
for tf in ti_fails:
end_date = tf.end_date or timezone.utcnow()
start_date = tf.start_date or end_date
if tf_count != 0 and tf.task_id == prev_task_id:
try_count = try_count + 1
else:
try_count = 1
prev_task_id = tf.task_id
gantt_bar_items.append((tf.task_id, start_date, end_date, State.FAILED, try_count))
tf_count = tf_count + 1
d = alchemy_to_dict(tf)
d['state'] = State.FAILED
d['operator'] = dag.get_task(tf.task_id).task_type
d['try_number'] = try_count
tasks.append(d)
task_types = {}
extra_links = {}
for t in dag.tasks:
task_types[t.task_id] = t.task_type
extra_links[t.task_id] = t.extra_links
data = {
'taskNames': [ti.task_id for ti in tis],
'tasks': tasks,
'height': len(tis) * 25 + 25,
}
session.commit()
return self.render_template(
'airflow/gantt.html',
dag=dag,
execution_date=dttm.isoformat(),
form=form,
data=data,
base_date='',
demo_mode=demo_mode,
root=root,
)
@expose('/extra_links')
@has_dag_access(can_dag_read=True)
@has_access
@action_logging
def extra_links(self):
"""
A restful endpoint that returns external links for a given Operator.
It queries the operator that sent the request for the links it wishes
to provide for a given external link name.
API: GET
Args: dag_id: The id of the dag containing the task in question
task_id: The id of the task in question
execution_date: The date of execution of the task
link_name: The name of the link reference to find the actual URL for
Returns:
200: {url: <url of link>, error: None} - returned when there was no problem
finding the URL
404: {url: None, error: <error message>} - returned when the operator does
not return a URL
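Example (illustrative only; the dag_id, task_id, link_name and URL are made-up values):
GET /extra_links?dag_id=example_dag&task_id=my_task&execution_date=2020-01-01T00:00:00+00:00&link_name=Monitoring
returns {"url": "https://example.com/monitoring", "error": null} on success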
"""
dag_id = request.args.get('dag_id')
task_id = request.args.get('task_id')
execution_date = request.args.get('execution_date')
link_name = request.args.get('link_name')
dttm = timezone.parse(execution_date)
dag = dagbag.get_dag(dag_id)
if not dag or task_id not in dag.task_ids:
response = jsonify(
{'url': None,
'error': "can't find dag {dag} or task_id {task_id}".format(
dag=dag,
task_id=task_id
)}
)
response.status_code = 404
return response
task = dag.get_task(task_id)
try:
url = task.get_extra_links(dttm, link_name)
except ValueError as err:
response = jsonify({'url': None, 'error': str(err)})
response.status_code = 404
return response
if url:
response = jsonify({'error': None, 'url': url})
response.status_code = 200
return response
else:
response = jsonify(
{'url': None, 'error': 'No URL found for {dest}'.format(dest=link_name)})
response.status_code = 404
return response
@expose('/object/task_instances')
@has_dag_access(can_dag_read=True)
@has_access
@action_logging
@provide_session
def task_instances(self, session=None):
dag_id = request.args.get('dag_id')
dag = dagbag.get_dag(dag_id)
dttm = request.args.get('execution_date')
if dttm:
dttm = timezone.parse(dttm)
else:
return "Error: Invalid execution_date"
task_instances = {
ti.task_id: alchemy_to_dict(ti)
for ti in dag.get_task_instances(dttm, dttm)}
return json.dumps(task_instances)
class VersionView(AirflowBaseView):
default_view = 'version'
@expose('/version')
@has_access
def version(self):
try:
airflow_version = airflow.__version__
except Exception as e:
airflow_version = None
logging.error(e)
# Get the Git repo and git hash
git_version = None
try:
git_version = str(pkgutil.get_data('airflow', 'git_version'), encoding="UTF-8")
except Exception as e:
logging.error(e)
# Render information
title = "Version Info"
return self.render_template(
'airflow/version.html',
title=title,
airflow_version=airflow_version,
git_version=git_version)
class ConfigurationView(AirflowBaseView):
default_view = 'conf'
@expose('/configuration')
@has_access
def conf(self):
raw = request.args.get('raw') == "true"
title = "Airflow Configuration"
subtitle = AIRFLOW_CONFIG
# Don't show config when expose_config variable is False in airflow config
if conf.getboolean("webserver", "expose_config"):
with open(AIRFLOW_CONFIG, 'r') as file:
config = file.read()
table = [(section, key, value, source)
for section, parameters in conf.as_dict(True, True).items()
for key, (value, source) in parameters.items()]
else:
config = (
"# Your Airflow administrator chose not to expose the "
"configuration, most likely for security reasons.")
table = None
if raw:
return Response(
response=config,
status=200,
mimetype="application/text")
else:
code_html = Markup(highlight(
config,
lexers.IniLexer(), # Lexer call
HtmlFormatter(noclasses=True))
)
return self.render_template(
'airflow/config.html',
pre_subtitle=settings.HEADER + " v" + airflow.__version__,
code_html=code_html, title=title, subtitle=subtitle,
table=table)
######################################################################################
# ModelViews
######################################################################################
class DagFilter(BaseFilter):
def apply(self, query, func): # noqa
if appbuilder.sm.has_all_dags_access():
return query
filter_dag_ids = appbuilder.sm.get_accessible_dag_ids()
return query.filter(self.model.dag_id.in_(filter_dag_ids))
class AirflowModelView(ModelView):
list_widget = AirflowModelListWidget
page_size = PAGE_SIZE
CustomSQLAInterface = wwwutils.CustomSQLAInterface
class SlaMissModelView(AirflowModelView):
route_base = '/slamiss'
datamodel = AirflowModelView.CustomSQLAInterface(SlaMiss)
base_permissions = ['can_list']
list_columns = ['dag_id', 'task_id', 'execution_date', 'email_sent', 'timestamp']
add_columns = ['dag_id', 'task_id', 'execution_date', 'email_sent', 'timestamp']
edit_columns = ['dag_id', 'task_id', 'execution_date', 'email_sent', 'timestamp']
search_columns = ['dag_id', 'task_id', 'email_sent', 'timestamp', 'execution_date']
base_order = ('execution_date', 'desc')
base_filters = [['dag_id', DagFilter, lambda: []]]
formatters_columns = {
'task_id': wwwutils.task_instance_link,
'execution_date': wwwutils.datetime_f('execution_date'),
'timestamp': wwwutils.datetime_f('timestamp'),
'dag_id': wwwutils.dag_link,
}
class XComModelView(AirflowModelView):
route_base = '/xcom'
datamodel = AirflowModelView.CustomSQLAInterface(XCom)
base_permissions = ['can_add', 'can_list', 'can_edit', 'can_delete']
search_columns = ['key', 'value', 'timestamp', 'execution_date', 'task_id', 'dag_id']
list_columns = ['key', 'value', 'timestamp', 'execution_date', 'task_id', 'dag_id']
add_columns = ['key', 'value', 'execution_date', 'task_id', 'dag_id']
edit_columns = ['key', 'value', 'execution_date', 'task_id', 'dag_id']
base_order = ('execution_date', 'desc')
base_filters = [['dag_id', DagFilter, lambda: []]]
formatters_columns = {
'task_id': wwwutils.task_instance_link,
'execution_date': wwwutils.datetime_f('execution_date'),
'timestamp': wwwutils.datetime_f('timestamp'),
'dag_id': wwwutils.dag_link,
}
@action('muldelete', 'Delete', "Are you sure you want to delete selected records?",
single=False)
def action_muldelete(self, items):
self.datamodel.delete_all(items)
self.update_redirect()
return redirect(self.get_redirect())
def pre_add(self, item):
item.execution_date = timezone.make_aware(item.execution_date)
item.value = XCom.serialize_value(item.value)
def pre_update(self, item):
item.execution_date = timezone.make_aware(item.execution_date)
item.value = XCom.serialize_value(item.value)
class ConnectionModelView(AirflowModelView):
route_base = '/connection'
datamodel = AirflowModelView.CustomSQLAInterface(Connection)
base_permissions = ['can_add', 'can_list', 'can_edit', 'can_delete']
extra_fields = ['extra__jdbc__drv_path', 'extra__jdbc__drv_clsname',
'extra__google_cloud_platform__project',
'extra__google_cloud_platform__key_path',
'extra__google_cloud_platform__keyfile_dict',
'extra__google_cloud_platform__scope',
'extra__google_cloud_platform__num_retries',
'extra__grpc__auth_type',
'extra__grpc__credential_pem_file',
'extra__grpc__scopes',
'extra__yandexcloud__service_account_json',
'extra__yandexcloud__service_account_json_path',
'extra__yandexcloud__oauth',
'extra__yandexcloud__public_ssh_key',
'extra__yandexcloud__folder_id',
'extra__kubernetes__in_cluster',
'extra__kubernetes__kube_config',
'extra__kubernetes__namespace']
list_columns = ['conn_id', 'conn_type', 'host', 'port', 'is_encrypted',
'is_extra_encrypted']
add_columns = edit_columns = ['conn_id', 'conn_type', 'host', 'schema',
'login', 'password', 'port', 'extra'] + extra_fields
add_form = edit_form = ConnectionForm
add_template = 'airflow/conn_create.html'
edit_template = 'airflow/conn_edit.html'
base_order = ('conn_id', 'asc')
@action('muldelete', 'Delete', 'Are you sure you want to delete selected records?',
single=False)
@has_dag_access(can_dag_edit=True)
def action_muldelete(self, items):
self.datamodel.delete_all(items)
self.update_redirect()
return redirect(self.get_redirect())
def process_form(self, form, is_created):
formdata = form.data
if formdata['conn_type'] in ['jdbc', 'google_cloud_platform', 'grpc', 'yandexcloud', 'kubernetes']:
extra = {
key: formdata[key]
for key in self.extra_fields if key in formdata}
form.extra.data = json.dumps(extra)
def prefill_form(self, form, pk):
try:
d = json.loads(form.data.get('extra', '{}'))
except Exception:
d = {}
if not hasattr(d, 'get'):
logging.warning('extra field for {} is not a dictionary'.format(
form.data.get('conn_id', '<unknown>')))
return
for field in self.extra_fields:
value = d.get(field, '')
if value:
field = getattr(form, field)
field.data = value
class PoolModelView(AirflowModelView):
route_base = '/pool'
datamodel = AirflowModelView.CustomSQLAInterface(models.Pool)
base_permissions = ['can_add', 'can_list', 'can_edit', 'can_delete']
list_columns = ['pool', 'slots', 'running_slots', 'queued_slots']
add_columns = ['pool', 'slots', 'description']
edit_columns = ['pool', 'slots', 'description']
base_order = ('pool', 'asc')
@action('muldelete', 'Delete', 'Are you sure you want to delete selected records?',
single=False)
def action_muldelete(self, items):
if any(item.pool == models.Pool.DEFAULT_POOL_NAME for item in items):
flash("default_pool cannot be deleted", 'error')
self.update_redirect()
return redirect(self.get_redirect())
self.datamodel.delete_all(items)
self.update_redirect()
return redirect(self.get_redirect())
def pool_link(attr):
pool_id = attr.get('pool')
if pool_id is not None:
url = url_for('TaskInstanceModelView.list', _flt_3_pool=pool_id)
return Markup("<a href='{url}'>{pool_id}</a>").format(url=url, pool_id=pool_id)
else:
return Markup('<span class="label label-danger">Invalid</span>')
def frunning_slots(attr):
pool_id = attr.get('pool')
running_slots = attr.get('running_slots')
if pool_id is not None and running_slots is not None:
url = url_for('TaskInstanceModelView.list', _flt_3_pool=pool_id, _flt_3_state='running')
return Markup("<a href='{url}'>{running_slots}</a>").format(url=url, running_slots=running_slots)
else:
return Markup('<span class="label label-danger">Invalid</span>')
def fqueued_slots(attr):
pool_id = attr.get('pool')
queued_slots = attr.get('queued_slots')
if pool_id is not None and queued_slots is not None:
url = url_for('TaskInstanceModelView.list', _flt_3_pool=pool_id, _flt_3_state='queued')
return Markup("<a href='{url}'>{queued_slots}</a>").format(url=url, queued_slots=queued_slots)
else:
return Markup('<span class="label label-danger">Invalid</span>')
formatters_columns = {
'pool': pool_link,
'running_slots': frunning_slots,
'queued_slots': fqueued_slots
}
validators_columns = {
'pool': [validators.DataRequired()],
'slots': [validators.NumberRange(min=-1)]
}
class VariableModelView(AirflowModelView):
route_base = '/variable'
list_template = 'airflow/variable_list.html'
edit_template = 'airflow/variable_edit.html'
datamodel = AirflowModelView.CustomSQLAInterface(models.Variable)
base_permissions = ['can_add', 'can_list', 'can_edit', 'can_delete', 'can_varimport']
list_columns = ['key', 'val', 'is_encrypted']
add_columns = ['key', 'val']
edit_columns = ['key', 'val']
search_columns = ['key', 'val']
base_order = ('key', 'asc')
def hidden_field_formatter(attr):
key = attr.get('key')
val = attr.get('val')
if wwwutils.should_hide_value_for_key(key):
return Markup('*' * 8)
if val:
return val
else:
return Markup('<span class="label label-danger">Invalid</span>')
formatters_columns = {
'val': hidden_field_formatter,
}
validators_columns = {
'key': [validators.DataRequired()]
}
def prefill_form(self, form, id):
if wwwutils.should_hide_value_for_key(form.key.data):
form.val.data = '*' * 8
@action('muldelete', 'Delete', 'Are you sure you want to delete selected records?',
single=False)
def action_muldelete(self, items):
self.datamodel.delete_all(items)
self.update_redirect()
return redirect(self.get_redirect())
@action('varexport', 'Export', '', single=False)
def action_varexport(self, items):
var_dict = {}
d = json.JSONDecoder()
for var in items:
try:
val = d.decode(var.val)
except Exception:
val = var.val
var_dict[var.key] = val
response = make_response(json.dumps(var_dict, sort_keys=True, indent=4))
response.headers["Content-Disposition"] = "attachment; filename=variables.json"
response.headers["Content-Type"] = "application/json; charset=utf-8"
return response
@expose('/varimport', methods=["POST"])
@has_access
@action_logging
def varimport(self):
try:
out = request.files['file'].read()
if isinstance(out, bytes):
d = json.loads(out.decode('utf-8'))
else:
d = json.loads(out)
except Exception:
self.update_redirect()
flash("Missing file or syntax error.", 'error')
return redirect(self.get_redirect())
else:
suc_count = fail_count = 0
for k, v in d.items():
try:
models.Variable.set(k, v, serialize_json=not isinstance(v, str))
except Exception as e:
logging.info('Variable import failed: {}'.format(repr(e)))
fail_count += 1
else:
suc_count += 1
flash("{} variable(s) successfully updated.".format(suc_count))
if fail_count:
flash("{} variable(s) failed to be updated.".format(fail_count), 'error')
self.update_redirect()
return redirect(self.get_redirect())
class JobModelView(AirflowModelView):
route_base = '/job'
datamodel = AirflowModelView.CustomSQLAInterface(BaseJob)
base_permissions = ['can_list']
list_columns = ['id', 'dag_id', 'state', 'job_type', 'start_date',
'end_date', 'latest_heartbeat',
'executor_class', 'hostname', 'unixname']
search_columns = ['id', 'dag_id', 'state', 'job_type', 'start_date',
'end_date', 'latest_heartbeat', 'executor_class',
'hostname', 'unixname']
base_order = ('start_date', 'desc')
base_filters = [['dag_id', DagFilter, lambda: []]]
formatters_columns = {
'start_date': wwwutils.datetime_f('start_date'),
'end_date': wwwutils.datetime_f('end_date'),
'hostname': wwwutils.nobr_f('hostname'),
'state': wwwutils.state_f,
'latest_heartbeat': wwwutils.datetime_f('latest_heartbeat'),
}
class DagRunModelView(AirflowModelView):
route_base = '/dagrun'
datamodel = AirflowModelView.CustomSQLAInterface(models.DagRun)
base_permissions = ['can_list', 'can_add']
add_columns = ['state', 'dag_id', 'execution_date', 'run_id', 'external_trigger', 'conf']
list_columns = ['state', 'dag_id', 'execution_date', 'run_id', 'external_trigger']
search_columns = ['state', 'dag_id', 'execution_date', 'run_id', 'external_trigger']
base_order = ('execution_date', 'desc')
base_filters = [['dag_id', DagFilter, lambda: []]]
add_form = edit_form = DagRunForm
formatters_columns = {
'execution_date': wwwutils.datetime_f('execution_date'),
'state': wwwutils.state_f,
'start_date': wwwutils.datetime_f('start_date'),
'dag_id': wwwutils.dag_link,
'run_id': wwwutils.dag_run_link,
}
@action('muldelete', "Delete", "Are you sure you want to delete selected records?",
single=False)
@has_dag_access(can_dag_edit=True)
@provide_session
def action_muldelete(self, items, session=None):
self.datamodel.delete_all(items)
self.update_redirect()
dirty_ids = []
for item in items:
dirty_ids.append(item.dag_id)
return redirect(self.get_redirect())
@action('set_running', "Set state to 'running'", '', single=False)
@provide_session
def action_set_running(self, drs, session=None):
try:
DR = models.DagRun
count = 0
dirty_ids = []
for dr in session.query(DR).filter(
DR.id.in_([dagrun.id for dagrun in drs])).all():
dirty_ids.append(dr.dag_id)
count += 1
dr.start_date = timezone.utcnow()
dr.state = State.RUNNING
session.commit()
flash("{count} dag runs were set to running".format(count=count))
except Exception as ex:
flash(str(ex), 'error')
flash('Failed to set state', 'error')
return redirect(self.get_default_url())
@action('set_failed', "Set state to 'failed'",
"All running task instances would also be marked as failed, are you sure?",
single=False)
@provide_session
def action_set_failed(self, drs, session=None):
try:
DR = models.DagRun
count = 0
dirty_ids = []
altered_tis = []
for dr in session.query(DR).filter(
DR.id.in_([dagrun.id for dagrun in drs])).all():
dirty_ids.append(dr.dag_id)
count += 1
altered_tis += \
set_dag_run_state_to_failed(dagbag.get_dag(dr.dag_id),
dr.execution_date,
commit=True,
session=session)
altered_ti_count = len(altered_tis)
flash(
"{count} dag runs and {altered_ti_count} task instances "
"were set to failed".format(count=count, altered_ti_count=altered_ti_count))
except Exception:
flash('Failed to set state', 'error')
return redirect(self.get_default_url())
@action('set_success', "Set state to 'success'",
"All task instances would also be marked as success, are you sure?",
single=False)
@provide_session
def action_set_success(self, drs, session=None):
try:
DR = models.DagRun
count = 0
dirty_ids = []
altered_tis = []
for dr in session.query(DR).filter(
DR.id.in_([dagrun.id for dagrun in drs])).all():
dirty_ids.append(dr.dag_id)
count += 1
altered_tis += \
set_dag_run_state_to_success(dagbag.get_dag(dr.dag_id),
dr.execution_date,
commit=True,
session=session)
altered_ti_count = len(altered_tis)
flash(
"{count} dag runs and {altered_ti_count} task instances "
"were set to success".format(count=count, altered_ti_count=altered_ti_count))
except Exception:
flash('Failed to set state', 'error')
return redirect(self.get_default_url())
class LogModelView(AirflowModelView):
route_base = '/log'
datamodel = AirflowModelView.CustomSQLAInterface(Log)
base_permissions = ['can_list']
list_columns = ['id', 'dttm', 'dag_id', 'task_id', 'event', 'execution_date',
'owner', 'extra']
search_columns = ['dag_id', 'task_id', 'event', 'execution_date', 'owner', 'extra']
base_order = ('dttm', 'desc')
base_filters = [['dag_id', DagFilter, lambda: []]]
formatters_columns = {
'dttm': wwwutils.datetime_f('dttm'),
'execution_date': wwwutils.datetime_f('execution_date'),
'dag_id': wwwutils.dag_link,
}
class TaskInstanceModelView(AirflowModelView):
route_base = '/taskinstance'
datamodel = AirflowModelView.CustomSQLAInterface(models.TaskInstance)
base_permissions = ['can_list']
page_size = PAGE_SIZE
list_columns = ['state', 'dag_id', 'task_id', 'execution_date', 'operator',
'start_date', 'end_date', 'duration', 'job_id', 'hostname',
'unixname', 'priority_weight', 'queue', 'queued_dttm', 'try_number',
'pool', 'log_url']
search_columns = ['state', 'dag_id', 'task_id', 'execution_date', 'hostname',
'queue', 'pool', 'operator', 'start_date', 'end_date']
base_order = ('job_id', 'asc')
base_filters = [['dag_id', DagFilter, lambda: []]]
def log_url_formatter(attr):
log_url = attr.get('log_url')
return Markup(
'<a href="{log_url}">'
' <span class="glyphicon glyphicon-book" aria-hidden="true">'
'</span></a>').format(log_url=log_url)
def duration_f(attr):
end_date = attr.get('end_date')
duration = attr.get('duration')
if end_date and duration:
return timedelta(seconds=duration)
formatters_columns = {
'log_url': log_url_formatter,
'task_id': wwwutils.task_instance_link,
'hostname': wwwutils.nobr_f('hostname'),
'state': wwwutils.state_f,
'execution_date': wwwutils.datetime_f('execution_date'),
'start_date': wwwutils.datetime_f('start_date'),
'end_date': wwwutils.datetime_f('end_date'),
'queued_dttm': wwwutils.datetime_f('queued_dttm'),
'dag_id': wwwutils.dag_link,
'duration': duration_f,
}
@provide_session
@action('clear', lazy_gettext('Clear'),
lazy_gettext('Are you sure you want to clear the state of the selected task'
' instance(s) and set their dagruns to the running state?'),
single=False)
def action_clear(self, tis, session=None):
try:
dag_to_tis = {}
for ti in tis:
dag = dagbag.get_dag(ti.dag_id)
# use a distinct name so the outer ``tis`` (used for the flash count below) is not shadowed
dag_tis = dag_to_tis.setdefault(dag, [])
dag_tis.append(ti)
for dag, dag_tis in dag_to_tis.items():
models.clear_task_instances(dag_tis, session, dag=dag)
session.commit()
flash("{0} task instances have been cleared".format(len(tis)))
self.update_redirect()
return redirect(self.get_redirect())
except Exception:
flash('Failed to clear task instances', 'error')
@provide_session
def set_task_instance_state(self, tis, target_state, session=None):
try:
count = len(tis)
for ti in tis:
ti.set_state(target_state, session)
session.commit()
flash("{count} task instances were set to '{target_state}'".format(
count=count, target_state=target_state))
except Exception:
flash('Failed to set state', 'error')
@action('set_running', "Set state to 'running'", '', single=False)
@has_dag_access(can_dag_edit=True)
def action_set_running(self, tis):
self.set_task_instance_state(tis, State.RUNNING)
self.update_redirect()
return redirect(self.get_redirect())
@action('set_failed', "Set state to 'failed'", '', single=False)
@has_dag_access(can_dag_edit=True)
def action_set_failed(self, tis):
self.set_task_instance_state(tis, State.FAILED)
self.update_redirect()
return redirect(self.get_redirect())
@action('set_success', "Set state to 'success'", '', single=False)
@has_dag_access(can_dag_edit=True)
def action_set_success(self, tis):
self.set_task_instance_state(tis, State.SUCCESS)
self.update_redirect()
return redirect(self.get_redirect())
@action('set_retry', "Set state to 'up_for_retry'", '', single=False)
@has_dag_access(can_dag_edit=True)
def action_set_retry(self, tis):
self.set_task_instance_state(tis, State.UP_FOR_RETRY)
self.update_redirect()
return redirect(self.get_redirect())
class DagModelView(AirflowModelView):
route_base = '/dagmodel'
datamodel = AirflowModelView.CustomSQLAInterface(models.DagModel)
base_permissions = ['can_list', 'can_show']
list_columns = ['dag_id', 'is_paused', 'last_scheduler_run',
'last_expired', 'scheduler_lock', 'fileloc', 'owners']
formatters_columns = {
'dag_id': wwwutils.dag_link
}
base_filters = [['dag_id', DagFilter, lambda: []]]
def get_query(self):
"""
Default filters for model
"""
return (
super().get_query()
.filter(or_(models.DagModel.is_active,
models.DagModel.is_paused))
.filter(~models.DagModel.is_subdag)
)
def get_count_query(self):
"""
Default filters for model
"""
return (
super().get_count_query()
.filter(models.DagModel.is_active)
.filter(~models.DagModel.is_subdag)
)
@has_access
@permission_name("list")
@provide_session
@expose('/autocomplete')
def autocomplete(self, session=None):
query = unquote(request.args.get('query', ''))
if not query:
return wwwutils.json_response([])
# Provide suggestions of dag_ids and owners
dag_ids_query = session.query(DagModel.dag_id.label('item')).filter(
~DagModel.is_subdag, DagModel.is_active,
DagModel.dag_id.ilike('%' + query + '%'))
owners_query = session.query(func.distinct(DagModel.owners).label('item')).filter(
~DagModel.is_subdag, DagModel.is_active,
DagModel.owners.ilike('%' + query + '%'))
# Hide DAGs if not showing status: "all"
status = flask_session.get(FILTER_STATUS_COOKIE)
if status == 'active':
dag_ids_query = dag_ids_query.filter(~DagModel.is_paused)
owners_query = owners_query.filter(~DagModel.is_paused)
elif status == 'paused':
dag_ids_query = dag_ids_query.filter(DagModel.is_paused)
owners_query = owners_query.filter(DagModel.is_paused)
filter_dag_ids = appbuilder.sm.get_accessible_dag_ids()
if 'all_dags' not in filter_dag_ids:
dag_ids_query = dag_ids_query.filter(DagModel.dag_id.in_(filter_dag_ids))
owners_query = owners_query.filter(DagModel.dag_id.in_(filter_dag_ids))
payload = [row[0] for row in dag_ids_query.union(owners_query).limit(10).all()]
return wwwutils.json_response(payload)
|
py | b402b8681868bbc123d9ad9178f2b374811001c8 | """This module contains HeAT's version information."""
major: int = 1
"""Indicates HeAT's main version."""
minor: int = 2
"""Indicates feature extension."""
micro: int = 0
"""Indicates revisions for bugfixes."""
extension: str = "dev"
"""Indicates special builds, e.g. for specific hardware."""
if not extension:
__version__: str = "{}.{}.{}".format(major, minor, micro)
"""The combined version string, consisting out of major, minor, micro and possibly extension."""
else:
__version__: str = "{}.{}.{}-{}".format(major, minor, micro, extension)
|
py | b402b8ba48585c0185c7d82a3468616735381401 | from insights.parsers.httpd_conf import HttpdConf, ParsedData
from insights.combiners.httpd_conf import HttpdConfAll
from insights.tests import context_wrap
HTTPD_CONF_1 = '''
JustFotTest_NoSec "/var/www/cgi"
# prefork MPM
<IfModule prefork.c>
ServerLimit 256
ThreadsPerChild 16
JustForTest "AB"
MaxClients 256
</IfModule>
IncludeOptional conf.d/*.conf
'''.strip()
HTTPD_CONF_2 = '''
JustForTest_NoSec "/var/www/cgi"
# prefork MPM
<IfModule prefork.c>
ServerLimit 1024
JustForTest "ABC"
MaxClients 1024
</IfModule>
'''.strip()
HTTPD_CONF_3 = '''
# prefork MPM
<IfModule prefork.c>
ServerLimit 256
MaxClients 512
</IfModule>
'''.strip()
HTTPD_CONF_SHADOWTEST_1 = '''
Foo 1A
Foo 1B
Foo 1C
<IfModule prefork.c>
Foo 1xA
Foo 1xB
Foo 1xC
Bar 1A
Bar 1B
Bar 1C
</IfModule>
IncludeOptional conf.d/*.conf
'''.strip()
HTTPD_CONF_SHADOWTEST_2 = '''
Foo 2A
Foo 2B
Foo 2C
<IfModule ASDF.prefork.c.ASDF>
Foo 2xA
Foo 2xB
Foo 2xC
Bar 2A
Bar 2B
Bar 2C
</IfModule>
'''.strip()
HTTPD_CONF_SHADOWTEST_3 = '''
Foo 3A
Foo 3B
Foo 3C
<IfModule prefork.c>
Foo 3xA
Foo 3xB
Foo 3xC
Bar 3A
Bar 3B
Bar 3C
</IfModule>
'''.strip()
HTTPD_CONF_MAIN_1 = '''
ServerRoot "/etc/httpd"
Listen 80
# Load config files in the "/etc/httpd/conf.d" directory, if any.
IncludeOptional conf.d/*.conf
'''.strip()
HTTPD_CONF_MAIN_2 = '''
# Load config files in the "/etc/httpd/conf.d" directory, if any.
IncludeOptional conf.d/*.conf
ServerRoot "/etc/httpd"
Listen 80
'''.strip()
HTTPD_CONF_MAIN_3 = '''
ServerRoot "/etc/httpd"
# Load config files in the "/etc/httpd/conf.d" directory, if any.
IncludeOptional conf.d/*.conf
Listen 80
'''.strip()
HTTPD_CONF_FILE_1 = '''
ServerRoot "/home/skontar/httpd"
Listen 8080
'''.strip()
HTTPD_CONF_FILE_2 = '''
ServerRoot "/home/skontar/www"
'''.strip()
HTTPD_CONF_MORE = '''
UserDir disable
UserDir enable bob
'''.strip()
HTTPD_CONF_NEST_1 = """
<VirtualHost 128.39.140.28>
<Directory /var/www/example>
Options FollowSymLinks
AllowOverride None
</Directory>
<IfModule mod_php4.c>
php_admin_flag safe_mode Off
php_admin_value register_globals 0
</IfModule>
DirectoryIndex index.php
<IfModule mod_rewrite.c>
RewriteEngine On
RewriteRule .* /index.php
</IfModule>
<IfModule mod_rewrite.c>
RewriteEngine Off
</IfModule>
<IfModule !php5_module>
<IfModule !php4_module>
<FilesMatch ".php[45]?$">
Order allow,deny
Deny from all
</FilesMatch>
<FilesMatch ".php[45]?$">
Order deny,allow
</FilesMatch>
</IfModule>
</IfModule>
DocumentRoot /var/www/example
ServerName www.example.com
ServerAlias admin.example.com
</VirtualHost>
<IfModule !php5_module>
<IfModule !php4_module>
<Location />
<FilesMatch ".php[45]">
Order allow,deny
Deny from all
</FilesMatch>
</Location>
</IfModule>
</IfModule>
<IfModule mod_rewrite.c>
RewriteEngine Off
</IfModule>
LogLevel warn
DocumentRoot "/var/www/html_cgi"
IncludeOptional conf.d/*.conf
EnableSendfile on
""".strip()
HTTPD_CONF_NEST_2 = """
DocumentRoot "/var/www/html"
<VirtualHost 128.39.140.30>
<IfModule !php5_module>
<IfModule !php4_module>
<FilesMatch ".php[45]?$">
Order allow,deny
Deny from all
</FilesMatch>
<FilesMatch ".php[45]?$">
Order deny,allow
</FilesMatch>
</IfModule>
</IfModule>
DocumentRoot /var/www/example1
ServerName www.example1.com
ServerAlias admin.example1.com
</VirtualHost>
<IfModule !php5_module>
<IfModule !php4_module>
<Location />
<FilesMatch test>
Order deny,allow
Allow from all
</FilesMatch>
<FilesMatch ".php[45]">
Order deny,allow
</FilesMatch>
</Location>
</IfModule>
</IfModule>
<IfModule mod_rewrite.c>
RewriteEngine On
</IfModule>
EnableSendfile off
""".strip()
HTTPD_CONF_NEST_3 = """
<VirtualHost 128.39.140.28>
<IfModule !php5_module>
Testphp php5_v3_1
<IfModule !php4_module>
Testphp php4_v3_1
</IfModule>
Testphp php5_v3_2
</IfModule>
</VirtualHost>
<IfModule !php5_module>
Testphp php5_3_a
<IfModule !php4_module>
Testphp php4_3_a
</IfModule>
</IfModule>
""".strip()
HTTPD_CONF_NEST_4 = """
<VirtualHost 128.39.140.30>
<IfModule !php5_module>
Testphp php5_v4_1
<IfModule !php4_module>
Testphp php4_v4_1
</IfModule>
Testphp php5_v4_2
</IfModule>
</VirtualHost>
<IfModule !php5_module>
Testphp php5_4_b
<IfModule !php4_module>
Testphp php4_4_b
</IfModule>
</IfModule>
""".strip()
def test_active_httpd_directory():
httpd1 = HttpdConf(context_wrap(HTTPD_CONF_NEST_1, path='/etc/httpd/conf/httpd.conf'))
httpd2 = HttpdConf(context_wrap(HTTPD_CONF_NEST_2, path='/etc/httpd/conf.d/00-z.conf'))
result = HttpdConfAll([httpd1, httpd2])
assert result.get_section_list("Directory") == [(('Directory', '/var/www/example'), 'httpd.conf', '/etc/httpd/conf/httpd.conf')]
assert result.get_section_list("asdf") == []
assert result.get_section_list(123456) == []
def test_active_httpd_nest_1():
httpd1 = HttpdConf(context_wrap(HTTPD_CONF_NEST_1, path='/etc/httpd/conf/httpd.conf'))
httpd2 = HttpdConf(context_wrap(HTTPD_CONF_NEST_2, path='/etc/httpd/conf.d/00-z.conf'))
result = HttpdConfAll([httpd1, httpd2])
assert result.get_setting_list('Order1', ('FilesMatch', 'php')) == []
assert result.get_setting_list('Order', ('FilesMatch', 'pdf')) == []
php_fm_order = result.get_setting_list('Order', section=('FilesMatch', 'php'))
assert {
('FilesMatch', '".php[45]?$"'): [
('allow,deny', 'Order allow,deny', 'FilesMatch', '".php[45]?$"', '00-z.conf', '/etc/httpd/conf.d/00-z.conf'),
('deny,allow', 'Order deny,allow', 'FilesMatch', '".php[45]?$"', '00-z.conf', '/etc/httpd/conf.d/00-z.conf')]
} in php_fm_order
assert {
('FilesMatch', '".php[45]"'): [
('allow,deny', 'Order allow,deny', 'FilesMatch', '".php[45]"', 'httpd.conf', '/etc/httpd/conf/httpd.conf'),
('deny,allow', 'Order deny,allow', 'FilesMatch', '".php[45]"', '00-z.conf', '/etc/httpd/conf.d/00-z.conf')],
} in php_fm_order
assert {
('FilesMatch', '".php[45]?$"'): [
('allow,deny', 'Order allow,deny', 'FilesMatch', '".php[45]?$"', 'httpd.conf', '/etc/httpd/conf/httpd.conf'),
('deny,allow', 'Order deny,allow', 'FilesMatch', '".php[45]?$"', 'httpd.conf', '/etc/httpd/conf/httpd.conf')]
} in php_fm_order
re_im = result.get_setting_list('RewriteEngine', 'IfModule')
assert {
('IfModule', 'mod_rewrite.c'): [
('On', 'RewriteEngine On', 'IfModule', 'mod_rewrite.c', 'httpd.conf', '/etc/httpd/conf/httpd.conf'),
('Off', 'RewriteEngine Off', 'IfModule', 'mod_rewrite.c', 'httpd.conf', '/etc/httpd/conf/httpd.conf')]
} in re_im
assert {
('IfModule', 'mod_rewrite.c'): [
('Off', 'RewriteEngine Off', 'IfModule', 'mod_rewrite.c', 'httpd.conf', '/etc/httpd/conf/httpd.conf'),
('On', 'RewriteEngine On', 'IfModule', 'mod_rewrite.c', '00-z.conf', '/etc/httpd/conf.d/00-z.conf')]
} in re_im
assert sorted(result.get_setting_list('EnableSendfile')) == sorted([
('off', 'EnableSendfile off', None, None, '00-z.conf', '/etc/httpd/conf.d/00-z.conf'),
('on', 'EnableSendfile on', None, None, 'httpd.conf', '/etc/httpd/conf/httpd.conf')])
assert result.get_setting_list('LogLevel') == [
('warn', 'LogLevel warn', None, None, 'httpd.conf', '/etc/httpd/conf/httpd.conf')]
assert result.get_setting_list('LogLevel1') == []
assert result.get_active_setting('Order1', ('FilesMatch', 'php')) == []
assert result.get_active_setting('Order', ('FilesMatch', 'pdf')) == []
assert len(result.get_active_setting('Order', ('FilesMatch', '.php[45]?$'))) == 2
assert len(result.get_active_setting('Order', ('FilesMatch',))) == 4
assert len(result.get_active_setting('Order', ('FilesMatch', '.php[45]'))) == 3
assert sorted(result.get_active_setting('Order', section=('FilesMatch', 'php'))) == sorted([
('deny,allow', 'Order deny,allow', 'FilesMatch', '".php[45]?$"', '00-z.conf', '/etc/httpd/conf.d/00-z.conf'),
('deny,allow', 'Order deny,allow', 'FilesMatch', '".php[45]"', '00-z.conf', '/etc/httpd/conf.d/00-z.conf'),
('deny,allow', 'Order deny,allow', 'FilesMatch', '".php[45]?$"', 'httpd.conf', '/etc/httpd/conf/httpd.conf')])
assert sorted(result.get_active_setting('RewriteEngine', section='IfModule')) == sorted([
('Off', 'RewriteEngine Off', 'IfModule', 'mod_rewrite.c', 'httpd.conf', '/etc/httpd/conf/httpd.conf'),
('On', 'RewriteEngine On', 'IfModule', 'mod_rewrite.c', '00-z.conf', '/etc/httpd/conf.d/00-z.conf')])
assert result.get_active_setting('EnableSendfile').line == 'EnableSendfile on'
assert result.get_active_setting('Deny', ('FilesMatch', 'test')) == []
assert result.get_active_setting('Allow', ('FilesMatch', 'test'))[0].value == 'from all'
assert result.get_active_setting('Deny', section=('IfModule',)) == []
assert result.get_active_setting('MaxClients', section=('IfModule', 'prefork')) == []
assert result.get_active_setting('RewriteRule', section=('IfModule', 'mod_rewrite.c'))[0].line == "RewriteRule .* /index.php"
assert result.get_active_setting("DocumentRoot").value == '/var/www/html'
assert result.get_active_setting('RewriteRule', section=('IfModule', 'mod_rewrite.c', 'invalid_test')) == []
assert result.get_active_setting('LogLevel') == ('warn', 'LogLevel warn', None, None, 'httpd.conf', '/etc/httpd/conf/httpd.conf')
assert result.get_active_setting('LogLevel1') is None
def test_active_httpd_nest_2():
httpd1 = HttpdConf(context_wrap(HTTPD_CONF_NEST_3, path='/etc/httpd/conf/httpd.conf'))
httpd2 = HttpdConf(context_wrap(HTTPD_CONF_NEST_4, path='/etc/httpd/conf.d/00-z.conf'))
result = HttpdConfAll([httpd1, httpd2])
testphp_im = result.get_setting_list('Testphp', 'IfModule')
assert {('IfModule', '!php5_module'): [
('php5_v3_1', 'Testphp php5_v3_1', 'IfModule', '!php5_module', 'httpd.conf', '/etc/httpd/conf/httpd.conf'),
('php5_v3_2', 'Testphp php5_v3_2', 'IfModule', '!php5_module', 'httpd.conf', '/etc/httpd/conf/httpd.conf')
]} in testphp_im
assert {('IfModule', '!php4_module'): [
('php4_v3_1', 'Testphp php4_v3_1', 'IfModule', '!php4_module', 'httpd.conf', '/etc/httpd/conf/httpd.conf')
]} in testphp_im
assert {('IfModule', '!php5_module'): [
('php5_v4_1', 'Testphp php5_v4_1', 'IfModule', '!php5_module', '00-z.conf', '/etc/httpd/conf.d/00-z.conf'),
('php5_v4_2', 'Testphp php5_v4_2', 'IfModule', '!php5_module', '00-z.conf', '/etc/httpd/conf.d/00-z.conf')
]} in testphp_im
assert {('IfModule', '!php4_module'): [
('php4_v4_1', 'Testphp php4_v4_1', 'IfModule', '!php4_module', '00-z.conf', '/etc/httpd/conf.d/00-z.conf')
]} in testphp_im
assert {('IfModule', '!php5_module'): [
('php5_3_a', 'Testphp php5_3_a', 'IfModule', '!php5_module', 'httpd.conf', '/etc/httpd/conf/httpd.conf'),
('php5_4_b', 'Testphp php5_4_b', 'IfModule', '!php5_module', '00-z.conf', '/etc/httpd/conf.d/00-z.conf')
]} in testphp_im
assert {('IfModule', '!php4_module'): [
('php4_3_a', 'Testphp php4_3_a', 'IfModule', '!php4_module', 'httpd.conf', '/etc/httpd/conf/httpd.conf'),
('php4_4_b', 'Testphp php4_4_b', 'IfModule', '!php4_module', '00-z.conf', '/etc/httpd/conf.d/00-z.conf')
]} in testphp_im
def test_active_httpd():
httpd1 = HttpdConf(context_wrap(HTTPD_CONF_1, path='/etc/httpd/conf/httpd.conf'))
httpd2 = HttpdConf(context_wrap(HTTPD_CONF_2, path='/etc/httpd/conf.d/00-z.conf'))
httpd3 = HttpdConf(context_wrap(HTTPD_CONF_3, path='/etc/httpd/conf.d/z-z.conf'))
result = HttpdConfAll([httpd1, httpd2, httpd3])
assert result.get_active_setting('MaxClients', section=('IfModule', 'prefork.c'))[0].value == '512'
assert result.get_active_setting('MaxClients', section=('IfModule', 'prefork.c'))[0].file_path == '/etc/httpd/conf.d/z-z.conf'
assert result.get_active_setting('ThreadsPerChild', section=('IfModule',
'prefork.c'))[0].value == '16'
assert result.get_active_setting("MaxClients", ("IfModule", "prefork")) == [
ParsedData(value='512', line='MaxClients 512',
section='IfModule', section_name='prefork.c',
file_name='z-z.conf', file_path='/etc/httpd/conf.d/z-z.conf')]
assert result.get_active_setting('ServerLimit', section=('IfModule', 'prefork.c'))[0].value == '256'
assert result.get_active_setting('JustForTest', section=('IfModule', 'prefork.c'))[-1].file_name == '00-z.conf'
assert result.get_active_setting('JustForTest_NoSec').line == 'JustForTest_NoSec "/var/www/cgi"'
def test_shadowing():
httpd1 = HttpdConf(context_wrap(HTTPD_CONF_SHADOWTEST_1, path='/etc/httpd/conf/httpd.conf'))
httpd2 = HttpdConf(context_wrap(HTTPD_CONF_SHADOWTEST_2, path='/etc/httpd/conf.d/00-z.conf'))
httpd3 = HttpdConf(context_wrap(HTTPD_CONF_SHADOWTEST_3, path='/etc/httpd/conf.d/z-z.conf'))
result = HttpdConfAll([httpd1, httpd2, httpd3])
# get_setting_list returns ALL matching data
assert result.get_setting_list('Foo') == [
ParsedData('1A', 'Foo 1A', None, None, 'httpd.conf', '/etc/httpd/conf/httpd.conf'),
ParsedData('1B', 'Foo 1B', None, None, 'httpd.conf', '/etc/httpd/conf/httpd.conf'),
ParsedData('1C', 'Foo 1C', None, None, 'httpd.conf', '/etc/httpd/conf/httpd.conf'),
ParsedData('2A', 'Foo 2A', None, None, '00-z.conf', '/etc/httpd/conf.d/00-z.conf'),
ParsedData('2B', 'Foo 2B', None, None, '00-z.conf', '/etc/httpd/conf.d/00-z.conf'),
ParsedData('2C', 'Foo 2C', None, None, '00-z.conf', '/etc/httpd/conf.d/00-z.conf'),
ParsedData('3A', 'Foo 3A', None, None, 'z-z.conf', '/etc/httpd/conf.d/z-z.conf'),
ParsedData('3B', 'Foo 3B', None, None, 'z-z.conf', '/etc/httpd/conf.d/z-z.conf'),
ParsedData('3C', 'Foo 3C', None, None, 'z-z.conf', '/etc/httpd/conf.d/z-z.conf'),
]
assert result.get_setting_list('Bar', section=('IfModule', 'prefork.c')) == [
{('IfModule', 'prefork.c'): [
ParsedData('1A', 'Bar 1A', 'IfModule', 'prefork.c', 'httpd.conf', '/etc/httpd/conf/httpd.conf'),
ParsedData('1B', 'Bar 1B', 'IfModule', 'prefork.c', 'httpd.conf', '/etc/httpd/conf/httpd.conf'),
ParsedData('1C', 'Bar 1C', 'IfModule', 'prefork.c', 'httpd.conf', '/etc/httpd/conf/httpd.conf'),
ParsedData('3A', 'Bar 3A', 'IfModule', 'prefork.c', 'z-z.conf', '/etc/httpd/conf.d/z-z.conf'),
ParsedData('3B', 'Bar 3B', 'IfModule', 'prefork.c', 'z-z.conf', '/etc/httpd/conf.d/z-z.conf'),
ParsedData('3C', 'Bar 3C', 'IfModule', 'prefork.c', 'z-z.conf', '/etc/httpd/conf.d/z-z.conf'),
],
},
{('IfModule', 'ASDF.prefork.c.ASDF'): [
ParsedData('2A', 'Bar 2A', 'IfModule', 'ASDF.prefork.c.ASDF', '00-z.conf', '/etc/httpd/conf.d/00-z.conf'),
ParsedData('2B', 'Bar 2B', 'IfModule', 'ASDF.prefork.c.ASDF', '00-z.conf', '/etc/httpd/conf.d/00-z.conf'),
ParsedData('2C', 'Bar 2C', 'IfModule', 'ASDF.prefork.c.ASDF', '00-z.conf', '/etc/httpd/conf.d/00-z.conf'),
],
},
]
assert result.get_setting_list('Bar') == []
# get_active_setting returns the last value
assert result.get_active_setting('Foo') == ('3C', 'Foo 3C', None, None, 'z-z.conf', '/etc/httpd/conf.d/z-z.conf')
assert result.get_active_setting('Bar', section=('IfModule', 'prefork.c')) == [
('3C', 'Bar 3C', 'IfModule', 'prefork.c', 'z-z.conf', '/etc/httpd/conf.d/z-z.conf'),
('2C', 'Bar 2C', 'IfModule', 'ASDF.prefork.c.ASDF', '00-z.conf', '/etc/httpd/conf.d/00-z.conf'),
]
assert result.get_active_setting('Bar') is None
def test_httpd_splits():
httpd1 = HttpdConf(context_wrap(HTTPD_CONF_MAIN_1, path='/etc/httpd/conf/httpd.conf'))
httpd2 = HttpdConf(context_wrap(HTTPD_CONF_FILE_1, path='/etc/httpd/conf.d/00-a.conf'))
httpd3 = HttpdConf(context_wrap(HTTPD_CONF_FILE_2, path='/etc/httpd/conf.d/01-b.conf'))
result = HttpdConfAll([httpd1, httpd2, httpd3])
assert result.get_active_setting('ServerRoot').value == '/home/skontar/www'
assert result.get_active_setting('ServerRoot').line == 'ServerRoot "/home/skontar/www"'
assert result.get_active_setting('ServerRoot').file_name == '01-b.conf'
assert result.get_active_setting('ServerRoot').file_path == '/etc/httpd/conf.d/01-b.conf'
assert result.get_active_setting('Listen').value == '8080'
assert result.get_active_setting('Listen').line == 'Listen 8080'
assert result.get_active_setting('Listen').file_name == '00-a.conf'
assert result.get_active_setting('Listen').file_path == '/etc/httpd/conf.d/00-a.conf'
httpd1 = HttpdConf(context_wrap(HTTPD_CONF_MAIN_2, path='/etc/httpd/conf/httpd.conf'))
httpd2 = HttpdConf(context_wrap(HTTPD_CONF_FILE_1, path='/etc/httpd/conf.d/00-a.conf'))
httpd3 = HttpdConf(context_wrap(HTTPD_CONF_FILE_2, path='/etc/httpd/conf.d/01-b.conf'))
result = HttpdConfAll([httpd1, httpd2, httpd3])
assert result.get_active_setting('ServerRoot').value == '/etc/httpd'
assert result.get_active_setting('ServerRoot').line == 'ServerRoot "/etc/httpd"'
assert result.get_active_setting('ServerRoot').file_name == 'httpd.conf'
assert result.get_active_setting('ServerRoot').file_path == '/etc/httpd/conf/httpd.conf'
assert result.get_active_setting('Listen').value == '80'
assert result.get_active_setting('Listen').line == 'Listen 80'
assert result.get_active_setting('Listen').file_name == 'httpd.conf'
assert result.get_active_setting('Listen').file_path == '/etc/httpd/conf/httpd.conf'
httpd1 = HttpdConf(context_wrap(HTTPD_CONF_MAIN_3, path='/etc/httpd/conf/httpd.conf'))
httpd2 = HttpdConf(context_wrap(HTTPD_CONF_FILE_1, path='/etc/httpd/conf.d/00-a.conf'))
httpd3 = HttpdConf(context_wrap(HTTPD_CONF_FILE_2, path='/etc/httpd/conf.d/01-b.conf'))
result = HttpdConfAll([httpd1, httpd2, httpd3])
assert result.get_active_setting('ServerRoot').value == '/home/skontar/www'
assert result.get_active_setting('ServerRoot').line == 'ServerRoot "/home/skontar/www"'
assert result.get_active_setting('ServerRoot').file_name == '01-b.conf'
assert result.get_active_setting('ServerRoot').file_path == '/etc/httpd/conf.d/01-b.conf'
assert result.get_active_setting('Listen').value == '80'
assert result.get_active_setting('Listen').line == 'Listen 80'
assert result.get_active_setting('Listen').file_name == 'httpd.conf'
assert result.get_active_setting('Listen').file_path == '/etc/httpd/conf/httpd.conf'
# Test whether data from inactive configs is also stored
assert [a.file_name for a in result.config_data] == ['httpd.conf', '00-a.conf', '01-b.conf', 'httpd.conf']
assert result.config_data[1].file_name == '00-a.conf'
assert result.config_data[1].file_path == '/etc/httpd/conf.d/00-a.conf'
assert result.config_data[1].full_data_dict['Listen'][0].value == '8080'
assert result.config_data[1].full_data_dict['Listen'][0].line == 'Listen 8080'
def test_httpd_no_main_config():
httpd2 = HttpdConf(context_wrap(HTTPD_CONF_FILE_1, path='/etc/httpd/conf.d/00-a.conf'))
httpd3 = HttpdConf(context_wrap(HTTPD_CONF_FILE_2, path='/etc/httpd/conf.d/01-b.conf'))
result = HttpdConfAll([httpd2, httpd3])
assert [a.file_name for a in result.config_data] == ['00-a.conf', '01-b.conf']
def test_httpd_one_file_overwrites():
httpd = HttpdConf(context_wrap(HTTPD_CONF_MORE, path='/etc/httpd/conf/httpd.conf'))
result = HttpdConfAll([httpd])
active_setting = result.get_active_setting('UserDir')
assert active_setting.value == 'enable bob'
assert active_setting.line == 'UserDir enable bob'
assert active_setting.file_path == '/etc/httpd/conf/httpd.conf'
assert active_setting.file_name == 'httpd.conf'
setting_list = result.get_setting_list('UserDir')
assert len(setting_list) == 2
assert setting_list[0].value == 'disable'
assert setting_list[0].line == 'UserDir disable'
assert setting_list[0].file_path == '/etc/httpd/conf/httpd.conf'
assert setting_list[0].file_name == 'httpd.conf'
assert setting_list[0].section is None
assert setting_list[1].value == 'enable bob'
assert setting_list[1].line == 'UserDir enable bob'
assert setting_list[1].file_path == '/etc/httpd/conf/httpd.conf'
assert setting_list[1].file_name == 'httpd.conf'
assert setting_list[1].section_name is None
|
py | b402b8fce597a6e8119641e1d00d2ca24b535963 | from textwrap import dedent
from unittest import TestCase
from pyVHDLParser.Token import WordToken, StartOfDocumentToken, SpaceToken, CharacterToken, EndOfDocumentToken, LinebreakToken, IndentationToken
from pyVHDLParser.Blocks import StartOfDocumentBlock, EndOfDocumentBlock
from pyVHDLParser.Blocks.Common import WhitespaceBlock, LinebreakBlock, IndentationBlock
from pyVHDLParser.Blocks.List import GenericList
from pyVHDLParser.Blocks.Sequential import PackageBody
from tests.unit.Common import Initializer, ExpectedDataMixin, LinkingTests, TokenSequence, BlockSequence, ExpectedTokenStream, ExpectedBlockStream, TokenLinking, BlockSequenceWithParserError
if __name__ == "__main__":
print("ERROR: you called a testcase declaration file as an executable module.")
print("Use: 'python -m unitest <testcase module>'")
exit(1)
def setUpModule():
i = Initializer()
class SimplePackageBody_OneLine_OnlyEnd(TestCase, ExpectedDataMixin, LinkingTests, TokenSequence, BlockSequence):
code = "package body p is end;"
tokenStream = ExpectedTokenStream(
[ (StartOfDocumentToken, None),
(WordToken, "package"),
(SpaceToken, " "),
(WordToken, "body"),
(SpaceToken, " "),
(WordToken, "p"),
(SpaceToken, " "),
(WordToken, "is"),
(SpaceToken, " "),
(WordToken, "end"),
(CharacterToken, ";"),
(EndOfDocumentToken, None)
]
)
blockStream = ExpectedBlockStream(
[ (StartOfDocumentBlock, None), #
(PackageBody.NameBlock, "package body p is"), # package body p is
(WhitespaceBlock, " "), #
(PackageBody.EndBlock, "end;"), # end;
(EndOfDocumentBlock, None) #
]
)
class SimplePackageBody_OneLine_EndWithKeyword(TestCase, ExpectedDataMixin, LinkingTests, TokenSequence, BlockSequence):
code = "package body p is end package body;"
tokenStream = ExpectedTokenStream(
[ (StartOfDocumentToken, None), #
(WordToken, "package"), # package
(SpaceToken, " "), #
(WordToken, "body"), # package
(SpaceToken, " "), #
(WordToken, "p"), # p
(SpaceToken, " "), #
(WordToken, "is"), # is
(SpaceToken, " "), #
(WordToken, "end"), # end
(SpaceToken, " "), #
(WordToken, "package"), # package
(SpaceToken, " "), #
(WordToken, "body"), # body
(CharacterToken, ";"), # ;
(EndOfDocumentToken, None) #
]
)
blockStream = ExpectedBlockStream(
[ (StartOfDocumentBlock, None), #
(PackageBody.NameBlock, "package body p is"), # package body p is
(WhitespaceBlock, " "), #
(PackageBody.EndBlock, "end package body;"), # end package body;
(EndOfDocumentBlock, None) #
],
)
class SimplePackageBody_OneLine_EndWithName(TestCase, ExpectedDataMixin, LinkingTests, TokenSequence, BlockSequence):
code = "package body p is end p;"
tokenStream = ExpectedTokenStream(
[ (StartOfDocumentToken, None), #
(WordToken, "package"), # package
(SpaceToken, " "), #
(WordToken, "body"), # body
(SpaceToken, " "), #
(WordToken, "p"), # p
(SpaceToken, " "), #
(WordToken, "is"), # is
(SpaceToken, " "), #
(WordToken, "end"), # end
(SpaceToken, " "), #
(WordToken, "p"), # p
(CharacterToken, ";"), # ;
(EndOfDocumentToken, None) #
]
)
blockStream = ExpectedBlockStream(
[ (StartOfDocumentBlock, None), #
(PackageBody.NameBlock, "package body p is"), # package body p is
(WhitespaceBlock, " "), #
(PackageBody.EndBlock, "end p;"), # end p;
(EndOfDocumentBlock, None) #
]
)
class SimplePackageBody_OneLine_EndWithKeywordAndName(TestCase, ExpectedDataMixin, LinkingTests, TokenSequence, BlockSequence):
code = "package body p is end package body p;"
tokenStream = ExpectedTokenStream(
[ (StartOfDocumentToken, None), #
(WordToken, "package"), # package
(SpaceToken, " "), #
(WordToken, "body"), # body
(SpaceToken, " "), #
(WordToken, "p"), # p
(SpaceToken, " "), #
(WordToken, "is"), # is
(SpaceToken, " "), #
(WordToken, "end"), # end
(SpaceToken, " "), #
(WordToken, "package"), # package
(SpaceToken, " "), #
(WordToken, "body"), # body
(SpaceToken, " "), #
(WordToken, "p"), # p
(CharacterToken, ";"), # ;
(EndOfDocumentToken, None) #
]
)
blockStream = ExpectedBlockStream(
[ (StartOfDocumentBlock, None), #
(PackageBody.NameBlock, "package body p is"), # package body p is
(WhitespaceBlock, " "), #
(PackageBody.EndBlock, "end package body p;"), # end package body p;
(EndOfDocumentBlock, None) #
]
)
class SimplePackageBody_OneLine_NoName_EndWithKeywordAndName(TestCase, ExpectedDataMixin, TokenLinking, TokenSequence, BlockSequenceWithParserError):
code = "package body is end package body p;"
tokenStream = ExpectedTokenStream(
[ (StartOfDocumentToken, None), #
(WordToken, "package"), # package
(SpaceToken, " "), #
(WordToken, "body"), # body
(SpaceToken, " "), #
(WordToken, "is"), # is
(SpaceToken, " "), #
(WordToken, "end"), # end
(SpaceToken, " "), #
(WordToken, "package"), # package
(SpaceToken, " "), #
(WordToken, "body"), # body
(SpaceToken, " "), #
(WordToken, "p"), # p
(CharacterToken, ";"), # ;
(EndOfDocumentToken, None) #
]
)
blockStream = ExpectedBlockStream(
[ (StartOfDocumentBlock, None), #
(PackageBody.NameBlock, "package body is"), # package body is
(WhitespaceBlock, " "), #
(PackageBody.EndBlock, "end package body p;"), # end package body p;
(EndOfDocumentBlock, None) #
]
)
class SimplePackageBody_OneLine_NoIs_EndWithKeywordAndName(TestCase, ExpectedDataMixin, TokenLinking, TokenSequence, BlockSequenceWithParserError):
code = "package body p end package body p;"
tokenStream = ExpectedTokenStream(
[ (StartOfDocumentToken, None), #
(WordToken, "package"), # package
(SpaceToken, " "), #
(WordToken, "body"), # body
(SpaceToken, " "), #
(WordToken, "p"), # p
(SpaceToken, " "), #
(WordToken, "end"), # end
(SpaceToken, " "), #
(WordToken, "package"), # package
(SpaceToken, " "), #
(WordToken, "body"), # body
(SpaceToken, " "), #
(WordToken, "p"), # p
(CharacterToken, ";"), # ;
(EndOfDocumentToken, None) #
]
)
blockStream = ExpectedBlockStream(
[ (StartOfDocumentBlock, None), #
(PackageBody.NameBlock, "package body p"), # package body p
(WhitespaceBlock, " "), #
(PackageBody.EndBlock, "end package body p;"), # end package body p;
(EndOfDocumentBlock, None) #
]
)
class SimplePackageBody_OneLine_NoEnd_EndWithKeywordAndName(TestCase, ExpectedDataMixin, TokenLinking, TokenSequence, BlockSequenceWithParserError):
code = "package body p is package body p;"
tokenStream = ExpectedTokenStream(
[ (StartOfDocumentToken, None), #
(WordToken, "package"), # package
(SpaceToken, " "), #
(WordToken, "body"), # body
(SpaceToken, " "), #
(WordToken, "p"), # p
(SpaceToken, " "), #
(WordToken, "is"), # is
(SpaceToken, " "), #
(WordToken, "package"), # package
(SpaceToken, " "), #
(WordToken, "body"), # body
(SpaceToken, " "), #
(WordToken, "p"), # p
(CharacterToken, ";"), # ;
(EndOfDocumentToken, None) #
]
)
blockStream = ExpectedBlockStream(
[ (StartOfDocumentBlock, None), #
(PackageBody.NameBlock, "package body p is"), # package body p is
(WhitespaceBlock, " "), #
(PackageBody.EndBlock, "package body p;"), # package body p;
(EndOfDocumentBlock, None) #
]
)
class SimplePackageBody_OneLine_EndWithKeywordAndName_WrongName(TestCase, ExpectedDataMixin, LinkingTests, TokenSequence, BlockSequence):
code = "package body p is end package body a;"
tokenStream = ExpectedTokenStream(
[ (StartOfDocumentToken, None), #
(WordToken, "package"), # package
(SpaceToken, " "), #
(WordToken, "body"), # body
(SpaceToken, " "), #
(WordToken, "p"), # p
(SpaceToken, " "), #
(WordToken, "is"), # is
(SpaceToken, " "), #
(WordToken, "end"), # end
(SpaceToken, " "), #
(WordToken, "package"), # package
(SpaceToken, " "), #
(WordToken, "body"), # body
(SpaceToken, " "), #
(WordToken, "a"), # a
(CharacterToken, ";"), # ;
(EndOfDocumentToken, None) #
]
)
blockStream = ExpectedBlockStream(
[ (StartOfDocumentBlock, None), #
(PackageBody.NameBlock, "package body p is"), # package body p is
(WhitespaceBlock, " "), #
(PackageBody.EndBlock, "end package body a;"), # end package body a;
(EndOfDocumentBlock, None) #
]
)
class SimplePackageBody_MultiLine_LongForm(TestCase, ExpectedDataMixin, LinkingTests, TokenSequence, BlockSequence):
code = dedent("""\
package body p is
end package body p ;
""")
tokenStream = ExpectedTokenStream(
[ (StartOfDocumentToken, None),
(WordToken, "package"),
(SpaceToken, " "),
(WordToken, "body"),
(SpaceToken, " "),
(WordToken, "p"),
(SpaceToken, " "),
(WordToken, "is"),
(LinebreakToken, "\n"),
(WordToken, "end"),
(SpaceToken, " "),
(WordToken, "package"),
(SpaceToken, " "),
(WordToken, "body"),
(SpaceToken, " "),
(WordToken, "p"),
(SpaceToken, " "),
(CharacterToken, ";"),
(LinebreakToken, "\n"),
(EndOfDocumentToken, None)
]
)
blockStream = ExpectedBlockStream(
[ (StartOfDocumentBlock, None),
(PackageBody.NameBlock, "package body p is"),
(LinebreakBlock, "\n"),
(PackageBody.EndBlock, "end package body p ;"),
(LinebreakBlock, "\n"),
(EndOfDocumentBlock, None)
]
)
class SimplePackageBody_AllLine_LongForm(TestCase, ExpectedDataMixin, TokenLinking, TokenSequence):
code = "package\nbody\np\nis\nend\npackage\nbody\np\n;\n"
tokenStream = ExpectedTokenStream(
[ (StartOfDocumentToken, None),
(WordToken, "package"),
(LinebreakToken, "\n"),
(WordToken, "body"),
(LinebreakToken, "\n"),
(WordToken, "p"),
(LinebreakToken, "\n"),
(WordToken, "is"),
(LinebreakToken, "\n"),
(WordToken, "end"),
(LinebreakToken, "\n"),
(WordToken, "package"),
(LinebreakToken, "\n"),
(WordToken, "body"),
(LinebreakToken, "\n"),
(WordToken, "p"),
(LinebreakToken, "\n"),
(CharacterToken, ";"),
(LinebreakToken, "\n"),
(EndOfDocumentToken, None)
]
)
blockStream = ExpectedBlockStream(
[ (StartOfDocumentBlock, None),
(PackageBody.NameBlock, "package"),
(LinebreakBlock, "\n"),
(PackageBody.NameBlock, "body"),
(LinebreakBlock, "\n"),
# (IndentationBlock, "\t"),
(PackageBody.NameBlock, "p"),
(LinebreakBlock, "\n"),
(PackageBody.NameBlock, "is"),
(LinebreakBlock, "\n"),
(PackageBody.EndBlock, "end\n"),
# (LinebreakBlock, "\n"),
(PackageBody.EndBlock, "package\n"),
# (LinebreakBlock, "\n"),
(PackageBody.EndBlock, "body\n"),
# (LinebreakBlock, "\n"),
(PackageBody.EndBlock, "p\n"),
# (LinebreakBlock, "\n"),
(PackageBody.EndBlock, ";"),
(LinebreakBlock, "\n"),
(EndOfDocumentBlock, None)
]
)
class SimplePackageBody_MultiLine_LongForm_WithSingleGeneric(TestCase, ExpectedDataMixin, TokenLinking, TokenSequence, BlockSequenceWithParserError):
code = dedent("""\
package body p is
generic (
G : integer
);
end package body p;
""")
tokenStream = ExpectedTokenStream(
[ (StartOfDocumentToken, None),
(WordToken, "package"),
(SpaceToken, " "),
(WordToken, "body"),
(SpaceToken, " "),
(WordToken, "p"),
(SpaceToken, " "),
(WordToken, "is"),
(LinebreakToken, None),
(IndentationToken, "\t"),
(WordToken, "generic"),
(SpaceToken, " "),
(CharacterToken, "("),
(LinebreakToken, None),
(IndentationToken, "\t\t"),
(WordToken, "G"),
(SpaceToken, " "),
(CharacterToken, ":"),
(SpaceToken, " "),
(WordToken, "integer"),
(LinebreakToken, None),
(IndentationToken, "\t"),
(CharacterToken, ")"),
(CharacterToken, ";"),
(LinebreakToken, None),
(WordToken, "end"),
(SpaceToken, " "),
(WordToken, "package"),
(SpaceToken, " "),
(WordToken, "body"),
(SpaceToken, " "),
(WordToken, "p"),
(CharacterToken, ";"),
(LinebreakToken, None),
(EndOfDocumentToken, None)
]
)
blockStream = ExpectedBlockStream(
[ (StartOfDocumentBlock, None),
(PackageBody.NameBlock, "package body p is"),
(LinebreakBlock, "\n"),
(IndentationBlock, "\t"),
(GenericList.OpenBlock, "generic ("),
(LinebreakBlock, "\n"),
(IndentationBlock, "\t\t"),
(GenericList.GenericListInterfaceConstantBlock, "G : integer"),
(LinebreakBlock, "\n"),
(GenericList.GenericListInterfaceConstantBlock, "\t"),
(GenericList.CloseBlock, ");"),
(LinebreakBlock, "\n"),
(PackageBody.EndBlock, "end package body p;"),
(LinebreakBlock, "\n"),
(EndOfDocumentBlock, None)
]
)
|
py | b402b92968f4a64c7a452b6d0ecd1ceadfe853b1 | import numpy as np
from time import time
import random
import os
from sklearn.metrics import roc_auc_score
import torch
from torch import log
from torch import nn, optim
import world
from dataloader import BasicDataset
try:
from cppimport import imp_from_filepath
from os.path import join, dirname
path = join(dirname(__file__), "./sources/sampling.cpp")
sampling = imp_from_filepath(path)
sampling.seed(world.seed)
sample_ext = True
except:
print("Cpp extension not loaded")
sample_ext = False
class BPRLoss:
def __init__(self,
model):
self.model = model
self.weight_decay = world.decay
self.lr = world.lr
self.opt = optim.Adam(self.model.parameters(), lr=self.lr)
def stageOne(self, epoch, users, pos, neg, gum_temp, hard):
loss, reg_loss = self.model.bpr_loss(epoch, users, pos, neg, gum_temp, 1, hard)
reg_loss = reg_loss*self.weight_decay
loss = loss + reg_loss
self.opt.zero_grad()
loss.backward()
self.opt.step()
return loss.cpu().item()
def UniformSample_original(dataset, neg_ratio = 1):
dataset : BasicDataset
allPos = dataset.allPos
start = time()
if sample_ext:
S = sampling.sample_negative(dataset.n_users, dataset.m_items,
dataset.trainDataSize, allPos, neg_ratio)
else:
S = UniformSample_original_python(dataset)
return S
def UniformSample_original_python(dataset):
"""
the original implementation of BPR Sampling in LightGCN
:return:
np.array
"""
total_start = time()
dataset : BasicDataset
user_num = dataset.trainDataSize
users = np.random.randint(0, dataset.n_users, user_num)
allPos = dataset.allPos
S = []
sample_time1 = 0.
sample_time2 = 0.
for i, user in enumerate(users):
start = time()
posForUser = allPos[user]
if len(posForUser) == 0:
continue
sample_time2 += time() - start
posindex = np.random.randint(0, len(posForUser))
positem = posForUser[posindex]
while True:
negitem = np.random.randint(0, dataset.m_items)
if negitem in posForUser:
continue
else:
break
S.append([user, positem, negitem])
end = time()
sample_time1 += end - start
total = time() - total_start
return np.array(S)
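# Added note (illustration only): each row of the array returned by the
# samplers above is a BPR triple [user, pos_item, neg_item], so a typical
# consumer unpacks it column-wise, e.g.
#
#     S = UniformSample_original(dataset)
#     users, pos_items, neg_items = S[:, 0], S[:, 1], S[:, 2]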
# ===================end samplers==========================
# =====================utils====================================
def set_seed(seed):
np.random.seed(seed)
if torch.cuda.is_available():
torch.cuda.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
torch.manual_seed(seed)
def minibatch(*tensors, **kwargs):
batch_size = kwargs.get('batch_size', world.bpr_batch_size)
if len(tensors) == 1:
tensor = tensors[0]
for i in range(0, len(tensor), batch_size):
yield tensor[i:i + batch_size]
else:
for i in range(0, len(tensors[0]), batch_size):
yield tuple(x[i:i + batch_size] for x in tensors)
def shuffle(*arrays, **kwargs):
require_indices = kwargs.get('indices', False)
if len(set(len(x) for x in arrays)) != 1:
raise ValueError('All inputs to shuffle must have '
'the same length.')
shuffle_indices = np.arange(len(arrays[0]))
np.random.shuffle(shuffle_indices)
if len(arrays) == 1:
result = arrays[0][shuffle_indices]
else:
result = tuple(x[shuffle_indices] for x in arrays)
if require_indices:
return result, shuffle_indices
else:
return result
class timer:
"""
Time context manager for code block
with timer():
do something
timer.get()
"""
from time import time
TAPE = [-1] # global time record
NAMED_TAPE = {}
@staticmethod
def get():
if len(timer.TAPE) > 1:
return timer.TAPE.pop()
else:
return -1
@staticmethod
def dict(select_keys=None):
hint = "|"
if select_keys is None:
for key, value in timer.NAMED_TAPE.items():
hint = hint + f"{key}:{value:.2f}|"
else:
for key in select_keys:
value = timer.NAMED_TAPE[key]
hint = hint + f"{key}:{value:.2f}|"
return hint
@staticmethod
def zero(select_keys=None):
if select_keys is None:
for key, value in timer.NAMED_TAPE.items():
timer.NAMED_TAPE[key] = 0
else:
for key in select_keys:
timer.NAMED_TAPE[key] = 0
def __init__(self, tape=None, **kwargs):
if kwargs.get('name'):
timer.NAMED_TAPE[kwargs['name']] = timer.NAMED_TAPE[
kwargs['name']] if timer.NAMED_TAPE.get(kwargs['name']) else 0.
self.named = kwargs['name']
if kwargs.get("group"):
#TODO: add group function
pass
else:
self.named = False
self.tape = tape or timer.TAPE
def __enter__(self):
self.start = timer.time()
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self.named:
timer.NAMED_TAPE[self.named] += timer.time() - self.start
else:
self.tape.append(timer.time() - self.start)
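# Usage sketch for the timer context manager above (added illustration, not
# part of the original module); named timers accumulate into timer.NAMED_TAPE:
#
#     with timer(name="sampling"):
#         S = UniformSample_original(dataset)        # any timed block
#     print(timer.dict(select_keys=["sampling"]))    # -> e.g. "|sampling:0.12|"
#     timer.zero(["sampling"])                       # reset the named tape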
# ====================Metrics==============================
# =========================================================
def RecallPrecision_ATk(test_data, r, k):
"""
test_data should be a list because users may have different numbers of positive items; shape (test_batch, k)
pred_data : shape (test_batch, k) NOTE: pred_data should be pre-sorted
k : top-k
"""
right_pred = r[:, :k].sum(1)
precis_n = k
recall_n = np.array([len(test_data[i]) for i in range(len(test_data))])
recall = np.sum(right_pred/recall_n)
precis = np.sum(right_pred)/precis_n
return {'recall': recall, 'precision': precis}
def MRRatK_r(r, k):
"""
Mean Reciprocal Rank
"""
pred_data = r[:, :k]
scores = np.log2(1./np.arange(1, k+1))
pred_data = pred_data/scores
pred_data = pred_data.sum(1)
return np.sum(pred_data)
def NDCGatK_r(test_data,r,k):
"""
Normalized Discounted Cumulative Gain
rel_i = 1 or 0, so 2^{rel_i} - 1 = 1 or 0
"""
assert len(r) == len(test_data)
pred_data = r[:, :k]
test_matrix = np.zeros((len(pred_data), k))
for i, items in enumerate(test_data):
length = k if k <= len(items) else len(items)
test_matrix[i, :length] = 1
max_r = test_matrix
idcg = np.sum(max_r * 1./np.log2(np.arange(2, k + 2)), axis=1)
dcg = pred_data*(1./np.log2(np.arange(2, k + 2)))
dcg = np.sum(dcg, axis=1)
idcg[idcg == 0.] = 1.
ndcg = dcg/idcg
ndcg[np.isnan(ndcg)] = 0.
return np.sum(ndcg)
def AUC(all_item_scores, dataset, test_data):
"""
designed for a single user
"""
dataset : BasicDataset
r_all = np.zeros((dataset.m_items, ))
r_all[test_data] = 1
r = r_all[all_item_scores >= 0]
test_item_scores = all_item_scores[all_item_scores >= 0]
return roc_auc_score(r, test_item_scores)
def getLabel(test_data, pred_data):
r = []
for i in range(len(test_data)):
groundTrue = test_data[i]
predictTopK = pred_data[i]
pred = list(map(lambda x: x in groundTrue, predictTopK))
pred = np.array(pred).astype("float")
r.append(pred)
return np.array(r).astype('float')
# ====================end Metrics=============================
# =========================================================
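# ---------------------------------------------------------------------------
# Added illustrative sketch (not part of the original module): a tiny,
# self-contained demo of the ranking metrics above on a toy batch, guarded so
# it only runs when this file is executed directly.
# ---------------------------------------------------------------------------
if __name__ == "__main__":
    toy_test_data = [[1, 3], [2]]           # ground-truth positives per user
    toy_pred_data = [[3, 0, 1], [5, 2, 4]]  # top-3 ranked items per user (pre-sorted)
    r = getLabel(toy_test_data, toy_pred_data)          # binary hit matrix, shape (2, 3)
    print(RecallPrecision_ATk(toy_test_data, r, k=3))   # {'recall': 2.0, 'precision': 1.0}
    print(NDCGatK_r(toy_test_data, r, k=3))             # ~1.55, summed over the two users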
|
py | b402b9de277b36f0d621122c8abf7f1e6a037a8d | import HydrusConstants as HC
import HydrusSerialisable
# Service object that has bandwidth tracking and rules
# file repos also have max storage
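# ---------------------------------------------------------------------------
# Added sketch only -- this is NOT the real Hydrus implementation and the
# names below are hypothetical; it merely illustrates the shape the comments
# above describe: a serialisable service object carrying bandwidth tracking
# and rules, with an optional max-storage figure for file repositories.
# ---------------------------------------------------------------------------
class _ServiceSketch(object):
    def __init__(self, service_key, service_type, name, max_storage=None):
        self._service_key = service_key
        self._service_type = service_type
        self._name = name
        self._bandwidth_rules = []   # e.g. (bandwidth_type, time_delta, max_allowed) tuples
        self._bytes_used = 0         # stand-in for a proper bandwidth tracker
        self._max_storage = max_storage  # only meaningful for file repositories
    def CanStartRequest(self, num_bytes):
        # a real implementation would consult its tracker against every rule
        return all(self._bytes_used + num_bytes <= max_allowed
                   for (_, _, max_allowed) in self._bandwidth_rules)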
|
py | b402b9fed912128f320e52e9b0afe80d2b42691e | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack.package import *
class Cppzmq(CMakePackage):
"""C++ binding for 0MQ"""
homepage = "https://www.zeromq.org"
url = "https://github.com/zeromq/cppzmq/archive/v4.2.2.tar.gz"
git = "https://github.com/zeromq/cppzmq.git"
version('master', branch='master')
version('4.7.1', sha256='9853e0437d834cbed5d3c223bf1d755cadee70e7c964c6e42c4c6783dee5d02c')
version('4.6.0', sha256='e9203391a0b913576153a2ad22a2dc1479b1ec325beb6c46a3237c669aef5a52')
version('4.5.0', sha256='64eb4e58eaf0c77505391c6c9a606cffcb57c6086f3431567a1ef4a25b01fa36')
version('4.4.1', sha256='117fc1ca24d98dbe1a60c072cde13be863d429134907797f8e03f654ce679385')
version('4.4.0', sha256='118b9ff117f07d1aabadfb905d7227362049d7940d16b7863b3dd3cebd28be85')
version('4.3.0', sha256='27d1f56406ba94ee779e639203218820975cf68174f92fbeae0f645df0fcada4')
version('4.2.3', sha256='3e6b57bf49115f4ae893b1ff7848ead7267013087dc7be1ab27636a97144d373')
version('4.2.2', sha256='3ef50070ac5877c06c6bb25091028465020e181bbfd08f110294ed6bc419737d')
variant("drafts", default=False,
description="Build and install draft classes and methods")
depends_on('[email protected]:', type='build')
depends_on('libzmq')
depends_on('[email protected]', when='@4.2.2:4.2.3')
depends_on('libzmq+drafts', when='+drafts')
def cmake_args(self):
args = []
args.append(self.define_from_variant("ENABLE_DRAFTS", "drafts"))
# https://github.com/zeromq/cppzmq/issues/422
# https://github.com/zeromq/cppzmq/pull/288
args.append('-DCPPZMQ_BUILD_TESTS=OFF')
return args
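# Example (added illustration): a typical Spack spec for this package with the
# draft API enabled; the `when='+drafts'` dependency above then pulls in a
# libzmq built with drafts automatically:
#
#     spack install cppzmq@4.7.1 +drafts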
|
py | b402baf6c066c3ff354e0014abfb0669bf7aee71 | # -*- coding: utf-8 -*- {{{
# vim: set fenc=utf-8 ft=python sw=4 ts=4 sts=4 et:
# Copyright (c) 2013, Battelle Memorial Institute
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in
# the documentation and/or other materials provided with the
# distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation
# are those of the authors and should not be interpreted as representing
# official policies, either expressed or implied, of the FreeBSD
# Project.
#
# This material was prepared as an account of work sponsored by an
# agency of the United States Government. Neither the United States
# Government nor the United States Department of Energy, nor Battelle,
# nor any of their employees, nor any jurisdiction or organization that
# has cooperated in the development of these materials, makes any
# warranty, express or implied, or assumes any legal liability or
# responsibility for the accuracy, completeness, or usefulness or any
# information, apparatus, product, software, or process disclosed, or
# represents that its use would not infringe privately owned rights.
#
# Reference herein to any specific commercial product, process, or
# service by trade name, trademark, manufacturer, or otherwise does not
# necessarily constitute or imply its endorsement, recommendation, or
# favoring by the United States Government or any agency thereof, or
# Battelle Memorial Institute. The views and opinions of authors
# expressed herein do not necessarily state or reflect those of the
# United States Government or any agency thereof.
#
# PACIFIC NORTHWEST NATIONAL LABORATORY
# operated by BATTELLE for the UNITED STATES DEPARTMENT OF ENERGY
# under Contract DE-AC05-76RL01830
#}}}
import datetime
import sys
import time
from volttron.lite.agent import BaseAgent, PublishMixin, periodic
from volttron.lite.agent import utils, matching, sched
from volttron.lite.agent.utils import jsonapi
from volttron.lite.messaging import headers as headers_mod
from volttron.lite.messaging import topics
import settings
def DemandResponseAgent(config_path, **kwargs):
config = utils.load_config(config_path)
def get_config(name):
try:
return kwargs.pop(name)
except KeyError:
return config[name]
agent_id = get_config('agentid')
rtu_path = {
'campus': get_config('campus'),
'building': get_config('building'),
'unit': get_config('unit'),
}
class Agent(PublishMixin, BaseAgent):
def setup(self):
super(Agent, self).setup()
headers = {
'Content-Type': 'text/plain',
'requesterID': agent_id,
}
#DT=datetime.datetime.now().replace(hour=0,minute=0,second=0, microsecond=0)
#signal=settings.signal
#self.schedule(DT,sched.Event(self.check_signal,[signal]))
self.start_timer=self.periodic_timer(10,self.get_signal)
@matching.match_exact(topics.RTU_VALUE(point='MixedAirTemperature', **rtu_path))
def on_new_data(self, topic, headers, message, match):
data = jsonapi.loads(message[0])
mixed_air_temperature=data
print(mixed_air_temperature)
def __init__(self, **kwargs):
super(Agent,self).__init__(**kwargs)
self.after_timer = None
def pre_cpp_timer(self,csp_normal):
print("Pre-cooling for CPP Event") #pre-cool change cooling set point
self.pre_timer = self.periodic_timer(5, self.pre_cpp_cooling,{'csp':settings.csp_norm})
def pre_cpp_cooling(self,csp):
if csp['csp']> settings.csp_pre:
csp['csp']=csp['csp']-1
print(csp)
elif csp['csp']<=settings.csp_pre:
csp['csp']=settings.csp_pre
self.pre_timer.cancel()
print(csp)
def accelerated_pre_cooling_timer(self,pre_slope, csp):
print("Accelerated pre-cooling for CPP Event")
self.pre_timer = self.periodic_timer(5, self.accelerated_pre_cooling,pre_slope,{'csp':csp})
def accelerated_pre_cooling(self,pre_slope,csp):
if csp['csp']> settings.csp_pre:
csp['csp']=csp['csp']-1*pre_slope
print(csp)
elif csp['csp']<=settings.csp_pre:
csp['csp']=settings.csp_pre
print(csp)
self.pre_timer.cancel()
def during_cpp(self):
print("During CPP Event")
def after_cpp_timer(self,csp_normal):
#Pull current cooling setpoint from controller CSP
#CSP= PULL FROM CONTROLLER (GET NEW DATA)
print(csp_normal)
print("After CPP Event, returning to normal operations")
self.after_timer = self.periodic_timer(3, self.after_cpp_cooling, csp_normal,{'csp':80})
#set cooling setpoint down by 1 degree every 30 minutes until it reaches normal
def after_cpp_cooling(self,csp_normal,csp):
print("After_CPP_COOLING")
if csp['csp'] > csp_normal:
csp['csp']=csp['csp']-1
print(csp)
print(datetime.datetime.now())
elif csp['csp'] <= csp_normal:
self.after_timer.cancel()
csp['csp']=csp_normal
print(csp)
self.setup()
def get_signal(self):
#Pull signal from source
time_now=time.mktime(datetime.datetime.now().timetuple())
time_pre=time.mktime(datetime.datetime.now().replace(hour=settings.pre_cpp_hour,minute=0,second=0, microsecond=0).timetuple())
time_event=time.mktime(datetime.datetime.now().replace(hour=settings.during_cpp_hour,minute=51,second=0, microsecond=0).timetuple())
time_after=time.mktime(datetime.datetime.now().replace(hour=settings.after_cpp_hour,minute=54,second=0, microsecond=0).timetuple())
print(time_now)
print(time_event)
#PULL NORMAL COOLING SETPOINT
csp_normal=settings.csp_norm
if (settings.signal and time_now<time_pre):
print ("Scheduling")
pre_cpp_time=datetime.datetime.now().replace(hour=settings.pre_cpp_hour,minute=25,second=10, microsecond=0)
self.schedule(pre_cpp_time,sched.Event(self.pre_cpp_timer, (csp_normal,)))
during_cpp_time=datetime.datetime.now().replace(hour=settings.during_cpp_hour,minute=26,second=20, microsecond=0)
self.schedule(during_cpp_time,sched.Event(self.during_cpp))
after_cpp_time=datetime.datetime.now().replace(hour=settings.after_cpp_hour,minute=27,second=30, microsecond=0)
self.schedule(after_cpp_time,sched.Event(self.after_cpp_timer, (csp_normal,)))
self.start_timer.cancel()
elif(settings.signal and time_now>time_pre and time_now<time_event):
print("Scheduling")
self.start_timer.cancel()
pre_slope=(time_event-time_now)/(3600)
during_cpp_time=datetime.datetime.now().replace(hour=settings.during_cpp_hour,minute=46,second=20, microsecond=0)
self.schedule(during_cpp_time,sched.Event(self.during_cpp))
after_cpp_time=datetime.datetime.now().replace(hour=settings.after_cpp_hour,minute=47,second=10, microsecond=0)
self.schedule(after_cpp_time,sched.Event(self.after_cpp_timer, (csp_normal,)))
self.accelerated_pre_cooling_timer(pre_slope,csp_normal)
elif(settings.signal and time_now>time_event and time_now<time_after):
print("Too late to pre-cool!")
self.start_timer.cancel()
after_cpp_time=datetime.datetime.now().replace(hour=settings.after_cpp_hour,minute=54,second=10, microsecond=0)
self.schedule(after_cpp_time,sched.Event(self.after_cpp_timer, (csp_normal,)))
self.during_cpp()
print("CPP Event Missed")
self.setup()
Agent.__name__ = 'DemandResponseAgent'
return Agent(**kwargs)
def main(argv=sys.argv):
'''Main method called by the eggsecutable.'''
utils.default_main(DemandResponseAgent,
description = 'VOLTTRON Lite grid response agent',
argv=argv)
if __name__ == '__main__':
# Entry point for script
try:
sys.exit(main())
except KeyboardInterrupt:
pass
|
py | b402bb21c1beba515548577712288774734d9612 | # Standard library imports
import json
import logging
# Third party imports
from kafka import KafkaProducer
# Local application imports
from Config.config import KafkaTopicNames, KafkaConfig
from . import kafkaProducer
class CVESeachScanProducers():
def __init__(self):
self.logger = logging.getLogger(__name__)
def sendDataToQueue(self, data):
kafkaProducer.send(KafkaTopicNames.CVESEARCHSCAN, value = data)
kafkaProducer.flush()
self.logger.info('Produce {}_{}'.format(data.get("target"), data.get("hostname"))) |
py | b402bb4f2a184e187bf5f438d6438604d3b2c5e0 | from mpegdash.utils import (
parse_attr_value, parse_child_nodes, parse_node_value,
write_attr_value, write_child_node, write_node_value
)
class XMLNode(object):
def parse(self, xmlnode):
raise NotImplementedError('Should have implemented this')
def write(self, xmlnode):
raise NotImplementedError('Should have implemented this')
class Subset(XMLNode):
def __init__(self):
self.id = None # xs:string
self.contains = [] # UIntVectorType (required)
def parse(self, xmlnode):
self.id = parse_attr_value(xmlnode, 'id', str)
self.contains = parse_attr_value(xmlnode, 'contains', [int])
def write(self, xmlnode):
write_attr_value(xmlnode, 'id', self.id)
write_attr_value(xmlnode, 'contains', self.contains)
class URL(XMLNode):
def __init__(self):
self.source_url = None # xs:anyURI
self.range = None # xs:string
def parse(self, xmlnode):
self.source_url = parse_attr_value(xmlnode, 'sourceURL', str)
self.range = parse_attr_value(xmlnode, 'range', str)
def write(self, xmlnode):
write_attr_value(xmlnode, 'sourceURL', self.source_url)
write_attr_value(xmlnode, 'range', self.range)
class BaseURL(XMLNode):
def __init__(self):
self.base_url_value = None # xs:anyURI
self.service_location = None # xs:string
self.byte_range = None # xs:string
self.availability_time_offset = None # xs:double
self.availability_time_complete = None # xs:boolean
def parse(self, xmlnode):
self.base_url_value = parse_node_value(xmlnode, str)
self.service_location = parse_attr_value(xmlnode, 'serviceLocation', str)
self.byte_range = parse_attr_value(xmlnode, 'byteRange', str)
self.availability_time_offset = parse_attr_value(xmlnode, 'availabilityTimeOffset', float)
self.availability_time_complete = parse_attr_value(xmlnode, 'availabilityTimeComplete', bool)
def write(self, xmlnode):
write_node_value(xmlnode, self.base_url_value)
write_attr_value(xmlnode, 'serviceLocation', self.service_location)
write_attr_value(xmlnode, 'byteRange', self.byte_range)
write_attr_value(xmlnode, 'availabilityTimeOffset', self.availability_time_offset)
write_attr_value(xmlnode, 'availabilityTimeComplete', self.availability_time_complete)
class XsStringElement(XMLNode):
def __init__(self):
self.text = None
def parse(self, xmlnode):
self.text = parse_node_value(xmlnode, str)
def write(self, xmlnode):
write_node_value(xmlnode, self.text)
class ProgramInformation(XMLNode):
def __init__(self):
self.lang = None # xs:language
self.more_information_url = None # xs:anyURI
self.titles = None # xs:string*
self.sources = None # xs:string*
self.copyrights = None # xs:string*
def parse(self, xmlnode):
self.lang = parse_attr_value(xmlnode, 'lang', str)
self.more_information_url = parse_attr_value(xmlnode, 'moreInformationURL', str)
self.titles = parse_child_nodes(xmlnode, 'Title', XsStringElement)
self.sources = parse_child_nodes(xmlnode, 'Source', XsStringElement)
self.copyrights = parse_child_nodes(xmlnode, 'Copyright', XsStringElement)
def write(self, xmlnode):
write_attr_value(xmlnode, 'lang', self.lang)
write_attr_value(xmlnode, 'moreInformationURL', self.more_information_url)
write_child_node(xmlnode, 'Title', self.titles)
write_child_node(xmlnode, 'Source', self.sources)
write_child_node(xmlnode, 'Copyright', self.copyrights)
class Metrics(XMLNode):
def __init__(self):
self.metrics = '' # xs:string (required)
self.reportings = None # DescriptorType*
self.ranges = None # RangeType*
def parse(self, xmlnode):
self.metrics = parse_attr_value(xmlnode, 'metrics', str)
self.reportings = parse_child_nodes(xmlnode, 'Reporting', Descriptor)
self.ranges = parse_child_nodes(xmlnode, 'Range', Range)
def write(self, xmlnode):
write_attr_value(xmlnode, 'metrics', self.metrics)
write_child_node(xmlnode, 'Reporting', self.reportings)
write_child_node(xmlnode, 'Range', self.ranges)
class Range(XMLNode):
def __init__(self):
self.starttime = None # xs:duration
self.duration = None # xs:duration
def parse(self, xmlnode):
self.starttime = parse_attr_value(xmlnode, 'starttime', str)
self.duration = parse_attr_value(xmlnode, 'duration', str)
def write(self, xmlnode):
write_attr_value(xmlnode, 'starttime', self.starttime)
write_attr_value(xmlnode, 'duration', self.duration)
class SegmentURL(XMLNode):
def __init__(self):
self.media = None # xs:anyURI
self.media_range = None # xs:string
self.index = None # xs:anyURI
self.index_range = None # xs:string
def parse(self, xmlnode):
self.media = parse_attr_value(xmlnode, 'media', str)
self.media_range = parse_attr_value(xmlnode, 'mediaRange', str)
self.index = parse_attr_value(xmlnode, 'index', str)
self.index_range = parse_attr_value(xmlnode, 'indexRange', str)
def write(self, xmlnode):
write_attr_value(xmlnode, 'media', self.media)
write_attr_value(xmlnode, 'mediaRange', self.media_range)
write_attr_value(xmlnode, 'index', self.index)
write_attr_value(xmlnode, 'indexRange', self.index_range)
class S(XMLNode):
def __init__(self):
self.t = None # xs:unsignedLong
self.d = 0 # xs:unsignedLong (required)
self.r = None # xml:integer
def parse(self, xmlnode):
self.t = parse_attr_value(xmlnode, 't', int)
self.d = parse_attr_value(xmlnode, 'd', int)
self.r = parse_attr_value(xmlnode, 'r', int)
def write(self, xmlnode):
write_attr_value(xmlnode, 't', self.t)
write_attr_value(xmlnode, 'd', self.d)
write_attr_value(xmlnode, 'r', self.r)
class SegmentTimeline(XMLNode):
def __init__(self):
self.Ss = None # xs:complexType+
def parse(self, xmlnode):
self.Ss = parse_child_nodes(xmlnode, 'S', S)
def write(self, xmlnode):
write_child_node(xmlnode, 'S', self.Ss)
class SegmentBase(XMLNode):
def __init__(self):
self.timescale = None # xs:unsignedInt
self.index_range = None # xs:string
self.index_range_exact = None # xs:boolean
self.presentation_time_offset = None # xs:unsignedLong
self.availability_time_offset = None # xs:double
self.availability_time_complete = None # xs:boolean
self.initializations = None # URLType*
self.representation_indexes = None # URLType*
def parse(self, xmlnode):
self.timescale = parse_attr_value(xmlnode, 'timescale', int)
self.index_range = parse_attr_value(xmlnode, 'indexRange', str)
self.index_range_exact = parse_attr_value(xmlnode, 'indexRangeExact', bool)
self.presentation_time_offset = parse_attr_value(xmlnode, 'presentationTimeOffset', int)
self.availability_time_offset = parse_attr_value(xmlnode, 'availabilityTimeOffset', float)
self.availability_time_complete = parse_attr_value(xmlnode, 'availabilityTimeComplete', bool)
self.initializations = parse_child_nodes(xmlnode, 'Initialization', URL)
self.representation_indexes = parse_child_nodes(xmlnode, 'RepresentationIndex', URL)
def write(self, xmlnode):
write_attr_value(xmlnode, 'timescale', self.timescale)
write_attr_value(xmlnode, 'indexRange', self.index_range)
write_attr_value(xmlnode, 'indexRangeExact', self.index_range_exact)
write_attr_value(xmlnode, 'presentationTimeOffset', self.presentation_time_offset)
write_attr_value(xmlnode, 'availabilityTimeOffset', self.availability_time_offset)
write_attr_value(xmlnode, 'availabilityTimeComplete', self.availability_time_complete)
write_child_node(xmlnode, 'Initialization', self.initializations)
write_child_node(xmlnode, 'RepresentationIndex', self.representation_indexes)
class MultipleSegmentBase(SegmentBase):
def __init__(self):
SegmentBase.__init__(self)
self.duration = None # xs:unsignedInt
self.start_number = None # xs:unsignedInt
self.segment_timelines = None # SegmentTimelineType*
self.bitstream_switchings = None # URLType*
def parse(self, xmlnode):
SegmentBase.parse(self, xmlnode)
self.duration = parse_attr_value(xmlnode, 'duration', int)
self.start_number = parse_attr_value(xmlnode, 'startNumber', int)
self.segment_timelines = parse_child_nodes(xmlnode, 'SegmentTimeline', SegmentTimeline)
self.bitstream_switchings = parse_child_nodes(xmlnode, 'BitstreamSwitching', URL)
def write(self, xmlnode):
SegmentBase.write(self, xmlnode)
write_attr_value(xmlnode, 'duration', self.duration)
write_attr_value(xmlnode, 'startNumber', self.start_number)
write_child_node(xmlnode, 'SegmentTimeline', self.segment_timelines)
write_child_node(xmlnode, 'BitstreamSwitching', self.bitstream_switchings)
class SegmentTemplate(MultipleSegmentBase):
def __init__(self):
MultipleSegmentBase.__init__(self)
self.media = None # xs:string
self.index = None # xs:string
self.initialization = None # xs:string
self.bitstream_switching = None # xs:string
def parse(self, xmlnode):
MultipleSegmentBase.parse(self, xmlnode)
self.media = parse_attr_value(xmlnode, 'media', str)
self.index = parse_attr_value(xmlnode, 'index', str)
self.initialization = parse_attr_value(xmlnode, 'initialization', str)
self.bitstream_switching = parse_attr_value(xmlnode, 'bitstreamSwitching', str)
def write(self, xmlnode):
MultipleSegmentBase.write(self, xmlnode)
write_attr_value(xmlnode, 'media', self.media)
write_attr_value(xmlnode, 'index', self.index)
write_attr_value(xmlnode, 'initialization', self.initialization)
write_attr_value(xmlnode, 'bitstreamSwitching', self.bitstream_switching)
class SegmentList(MultipleSegmentBase):
def __init__(self):
MultipleSegmentBase.__init__(self)
self.segment_urls = None # SegmentURLType
def parse(self, xmlnode):
MultipleSegmentBase.parse(self, xmlnode)
self.segment_urls = parse_child_nodes(xmlnode, 'SegmentURL', SegmentURL)
def write(self, xmlnode):
MultipleSegmentBase.write(self, xmlnode)
write_child_node(xmlnode, 'SegmentURL', self.segment_urls)
class Event(XMLNode):
def __init__(self):
self.event_value = None # xs:string
self.message_data = None # xs:string
self.presentation_time = None # xs:unsignedLong
self.duration = None # xs:unsignedLong
self.id = None # xs:unsignedInt
def parse(self, xmlnode):
self.event_value = parse_node_value(xmlnode, str)
self.message_data = parse_attr_value(xmlnode, 'messageData', str)
self.presentation_time = parse_attr_value(xmlnode, 'presentationTime', int)
self.duration = parse_attr_value(xmlnode, 'duration', int)
self.id = parse_attr_value(xmlnode, 'id', int)
def write(self, xmlnode):
write_node_value(xmlnode, self.event_value)
write_attr_value(xmlnode, 'messageData', self.message_data)
write_attr_value(xmlnode, 'presentationTime', self.presentation_time)
write_attr_value(xmlnode, 'duration', self.duration)
write_attr_value(xmlnode, 'id', self.id)
class Descriptor(XMLNode):
def __init__(self):
self.scheme_id_uri = '' # xs:anyURI (required)
self.value = None # xs:string
self.id = None # xs:string
def parse(self, xmlnode):
self.scheme_id_uri = parse_attr_value(xmlnode, 'schemeIdUri', str)
self.value = parse_attr_value(xmlnode, 'value', str)
self.id = parse_attr_value(xmlnode, 'id', str)
def write(self, xmlnode):
write_attr_value(xmlnode, 'schemeIdUri', self.scheme_id_uri)
write_attr_value(xmlnode, 'value', self.value)
write_attr_value(xmlnode, 'id', self.id)
class PSSH(XMLNode):
def __init__(self):
self.pssh = None
def parse(self, xmlnode):
self.pssh = parse_node_value(xmlnode, str)
def write(self, xmlnode):
write_node_value(xmlnode, self.pssh)
class ContentProtection(XMLNode):
def __init__(self):
self.scheme_id_uri = "" # xs:anyURI (required)
self.value = None # xs:string
self.id = None # xs:string
self.pssh = None # PSSH
self.default_key_id = None # xs:string
self.ns2_key_id = None # xs:string
self.cenc_default_kid = None # xs:string
def parse(self, xmlnode):
self.scheme_id_uri = parse_attr_value(xmlnode, "schemeIdUri", str)
self.value = parse_attr_value(xmlnode, "value", str)
self.id = parse_attr_value(xmlnode, "id", str)
self.default_key_id = parse_attr_value(xmlnode, "default_KID", str)
self.ns2_key_id = parse_attr_value(xmlnode, "ns2:default_KID", str)
self.cenc_default_kid = parse_attr_value(xmlnode, "cenc:default_KID", str)
self.pssh = parse_child_nodes(xmlnode, "cenc:pssh", PSSH)
def write(self, xmlnode):
write_attr_value(xmlnode, "schemeIdUri", self.scheme_id_uri)
write_attr_value(xmlnode, "value", self.value)
write_attr_value(xmlnode, "id", self.id)
write_attr_value(xmlnode, "default_KID", self.default_key_id)
write_attr_value(xmlnode, "ns2:default_KID", self.ns2_key_id)
write_attr_value(xmlnode, "cenc:default_KID", self.cenc_default_kid)
write_child_node(xmlnode, "cenc:pssh", self.pssh)
class ContentComponent(XMLNode):
def __init__(self):
self.id = None # xs:unsigendInt
self.lang = None # xs:language
self.content_type = None # xs:string
self.par = None # RatioType
self.accessibilities = None # DescriptorType*
self.roles = None # DescriptorType*
self.ratings = None # DescriptorType*
self.viewpoints = None # DescriptorType*
def parse(self, xmlnode):
self.id = parse_attr_value(xmlnode, 'id', int)
self.lang = parse_attr_value(xmlnode, 'lang', str)
self.content_type = parse_attr_value(xmlnode, 'contentType', str)
self.par = parse_attr_value(xmlnode, 'par', str)
self.accessibilities = parse_child_nodes(xmlnode, 'Accessibility', Descriptor)
self.roles = parse_child_nodes(xmlnode, 'Role', Descriptor)
self.ratings = parse_child_nodes(xmlnode, 'Rating', Descriptor)
self.viewpoints = parse_child_nodes(xmlnode, 'Viewpoint', Descriptor)
def write(self, xmlnode):
write_attr_value(xmlnode, 'id', self.id)
write_attr_value(xmlnode, 'lang', self.lang)
write_attr_value(xmlnode, 'contentType', self.content_type)
write_attr_value(xmlnode, 'par', self.par)
write_child_node(xmlnode, 'Accessibility', self.accessibilities)
write_child_node(xmlnode, 'Role', self.roles)
write_child_node(xmlnode, 'Rating', self.ratings)
write_child_node(xmlnode, 'Viewpoint', self.viewpoints)
class RepresentationBase(XMLNode):
def __init__(self):
self.profile = None # xs:string
self.profiles = None # xs:string
self.width = None # xs:unsigendInt
self.height = None # xs:unsigendInt
self.sar = None # RatioType
self.frame_rate = None # FrameRateType
self.audio_sampling_rate = None # xs:string
self.mime_type = None # xs:string
self.segment_profiles = None # xs:string
self.codecs = None # xs:string
self.maximum_sap_period = None # xs:double
self.start_with_sap = None # SAPType
self.max_playout_rate = None # xs:double
self.coding_dependency = None # xs:boolean
self.scan_type = None # VideoScanType
self.frame_packings = None # DescriptorType*
self.audio_channel_configurations = None # DescriptorType*
self.content_protections = None # ContentProtection*
self.essential_properties = None # DescriptorType*
self.supplemental_properties = None # DescriptorType*
self.inband_event_streams = None # DescriptorType*
def parse(self, xmlnode):
self.profile = parse_attr_value(xmlnode, 'profile', str)
self.profiles = parse_attr_value(xmlnode, 'profiles', str)
self.width = parse_attr_value(xmlnode, 'width', int)
self.height = parse_attr_value(xmlnode, 'height', int)
self.sar = parse_attr_value(xmlnode, 'sar', str)
self.frame_rate = parse_attr_value(xmlnode, 'frameRate', str)
self.audio_sampling_rate = parse_attr_value(xmlnode, 'audioSamplingRate', str)
self.mime_type = parse_attr_value(xmlnode, 'mimeType', str)
self.segment_profiles = parse_attr_value(xmlnode, 'segmentProfiles', str)
self.codecs = parse_attr_value(xmlnode, 'codecs', str)
self.maximum_sap_period = parse_attr_value(xmlnode, 'maximumSAPPeriod', float)
self.start_with_sap = parse_attr_value(xmlnode, 'startWithSAP', int)
self.max_playout_rate = parse_attr_value(xmlnode, 'maxPlayoutRate', float)
self.coding_dependency = parse_attr_value(xmlnode, 'codingDependency', bool)
self.scan_type = parse_attr_value(xmlnode, 'scanType', str)
self.frame_packings = parse_child_nodes(xmlnode, 'FramePacking', Descriptor)
self.audio_channel_configurations = parse_child_nodes(xmlnode, 'AudioChannelConfiguration', Descriptor)
self.content_protections = parse_child_nodes(xmlnode, 'ContentProtection', ContentProtection)
self.essential_properties = parse_child_nodes(xmlnode, 'EssentialProperty', Descriptor)
self.supplemental_properties = parse_child_nodes(xmlnode, 'SupplementalProperty', Descriptor)
self.inband_event_streams = parse_child_nodes(xmlnode, 'InbandEventStream', Descriptor)
def write(self, xmlnode):
write_attr_value(xmlnode, 'profile', self.profile)
write_attr_value(xmlnode, 'profiles', self.profiles)
write_attr_value(xmlnode, 'width', self.width)
write_attr_value(xmlnode, 'height', self.height)
write_attr_value(xmlnode, 'sar', self.sar)
write_attr_value(xmlnode, 'frameRate', self.frame_rate)
write_attr_value(xmlnode, 'audioSamplingRate', self.audio_sampling_rate)
write_attr_value(xmlnode, 'mimeType', self.mime_type)
write_attr_value(xmlnode, 'segmentProfiles', self.segment_profiles)
write_attr_value(xmlnode, 'codecs', self.codecs)
write_attr_value(xmlnode, 'maximumSAPPeriod', self.maximum_sap_period)
write_attr_value(xmlnode, 'startWithSAP', self.start_with_sap)
write_attr_value(xmlnode, 'maxPlayoutRate', self.max_playout_rate)
write_attr_value(xmlnode, 'codingDependency', self.coding_dependency)
write_attr_value(xmlnode, 'scanType', self.scan_type)
write_child_node(xmlnode, 'FramePacking', self.frame_packings)
write_child_node(xmlnode, 'AudioChannelConfiguration', self.audio_channel_configurations)
write_child_node(xmlnode, 'ContentProtection', self.content_protections)
write_child_node(xmlnode, 'EssentialProperty', self.essential_properties)
write_child_node(xmlnode, 'SupplementalProperty', self.supplemental_properties)
write_child_node(xmlnode, 'InbandEventStream', self.inband_event_streams)
class Representation(RepresentationBase):
def __init__(self):
RepresentationBase.__init__(self)
self.id = '' # StringNoWhitespaceType (Required)
self.bandwidth = 0 # xs:unsignedInt (required)
self.quality_ranking = None # xs:unsignedInt
self.dependency_id = None # StringVectorType
self.num_channels = None # xs:unsignedInt
self.sample_rate = None # xs:unsignedLong
self.base_urls = None # BaseURLType*
self.segment_bases = None # SegmentBaseType*
self.segment_lists = None # SegmentListType*
self.segment_templates = None # SegmentTemplateType*
self.sub_representations = None # SubRepresentationType*
def parse(self, xmlnode):
RepresentationBase.parse(self, xmlnode)
self.id = parse_attr_value(xmlnode, 'id', str)
self.bandwidth = parse_attr_value(xmlnode, 'bandwidth', int)
self.quality_ranking = parse_attr_value(xmlnode, 'qualityRanking', int)
self.dependency_id = parse_attr_value(xmlnode, 'dependencyId', [str])
self.num_channels = parse_attr_value(xmlnode, 'numChannels', int)
self.sample_rate = parse_attr_value(xmlnode, 'sampleRate', int)
self.base_urls = parse_child_nodes(xmlnode, 'BaseURL', BaseURL)
self.segment_bases = parse_child_nodes(xmlnode, 'SegmentBase', SegmentBase)
self.segment_lists = parse_child_nodes(xmlnode, 'SegmentList', SegmentList)
self.segment_templates = parse_child_nodes(xmlnode, 'SegmentTemplate', SegmentTemplate)
self.sub_representations = parse_child_nodes(xmlnode, 'SubRepresentation', SubRepresentation)
def write(self, xmlnode):
RepresentationBase.write(self, xmlnode)
write_attr_value(xmlnode, 'id', self.id)
write_attr_value(xmlnode, 'width', self.width)
write_attr_value(xmlnode, 'height', self.height)
write_attr_value(xmlnode, 'bandwidth', self.bandwidth)
write_attr_value(xmlnode, 'mimeType', self.mime_type)
write_attr_value(xmlnode, 'codecs', self.codecs)
write_child_node(xmlnode, 'BaseURL', self.base_urls)
write_child_node(xmlnode, 'SegmentBase', self.segment_bases)
write_child_node(xmlnode, 'SegmentList', self.segment_lists)
write_child_node(xmlnode, 'SegmentTemplate', self.segment_templates)
write_child_node(xmlnode, 'SubRepresentation', self.sub_representations)
class SubRepresentation(RepresentationBase):
def __init__(self):
RepresentationBase.__init__(self)
self.level = None # xs:unsigendInt
self.bandwidth = None # xs:unsignedInt
self.dependency_level = None # UIntVectorType
self.content_component = None # StringVectorType
def parse(self, xmlnode):
RepresentationBase.parse(self, xmlnode)
self.level = parse_attr_value(xmlnode, 'level', int)
self.bandwidth = parse_attr_value(xmlnode, 'bandwidth', int)
self.dependency_level = parse_attr_value(xmlnode, 'dependencyLevel', [int])
self.content_component = parse_attr_value(xmlnode, 'contentComponent', [str])
def write(self, xmlnode):
RepresentationBase.write(self, xmlnode)
write_attr_value(xmlnode, 'level', self.level)
write_attr_value(xmlnode, 'bandwidth', self.bandwidth)
write_attr_value(xmlnode, 'dependencyLevel', self.dependency_level)
write_attr_value(xmlnode, 'contentComponent', self.content_component)
class AdaptationSet(RepresentationBase):
def __init__(self):
RepresentationBase.__init__(self)
self.id = None # xs:unsignedInt
self.group = None # xs:unsignedInt
self.lang = None # xs:language
self.label = None # xs:string
self.content_type = None # xs:string
self.par = None # RatioType
self.min_bandwidth = None # xs:unsignedInt
self.max_bandwidth = None # xs:unsignedInt
self.min_width = None # xs:unsignedInt
self.max_width = None # xs:unsignedInt
self.min_height = None # xs:unsignedInt
self.max_height = None # xs:unsignedInt
self.min_frame_rate = None # FrameRateType
self.max_frame_rate = None # FrameRateType
self.segment_alignment = None # ConditionalUintType
self.selection_priority = None # xs:unsignedInt
self.subsegment_alignment = None # ConditionalUintType
self.subsegment_starts_with_sap = None # SAPType
self.bitstream_switching = None # xs:boolean
self.accessibilities = None # DescriptorType*
self.roles = None # DescriptorType*
self.ratings = None # DescriptorType*
self.viewpoints = None # DescriptorType*
self.content_components = None # DescriptorType*
self.base_urls = None # BaseURLType*
self.segment_bases = None # SegmentBase*
self.segment_lists = None # SegmentListType*
self.segment_templates = None # SegmentTemplateType*
self.representations = None # RepresentationType*
def parse(self, xmlnode):
RepresentationBase.parse(self, xmlnode)
self.id = parse_attr_value(xmlnode, 'id', int)
self.group = parse_attr_value(xmlnode, 'group', int)
self.lang = parse_attr_value(xmlnode, 'lang', str)
self.label = parse_attr_value(xmlnode, 'label', str)
self.content_type = parse_attr_value(xmlnode, 'contentType', str)
self.par = parse_attr_value(xmlnode, 'par', str)
self.min_bandwidth = parse_attr_value(xmlnode, 'minBandwidth', int)
self.max_bandwidth = parse_attr_value(xmlnode, 'maxBandwidth', int)
self.min_width = parse_attr_value(xmlnode, 'minWidth', int)
self.max_width = parse_attr_value(xmlnode, 'maxWidth', int)
self.min_height = parse_attr_value(xmlnode, 'minHeight', int)
self.max_height = parse_attr_value(xmlnode, 'maxHeight', int)
self.min_frame_rate = parse_attr_value(xmlnode, 'minFrameRate', str)
self.max_frame_rate = parse_attr_value(xmlnode, 'maxFrameRate', str)
self.segment_alignment = parse_attr_value(xmlnode, 'segmentAlignment', bool)
self.selection_priority = parse_attr_value(xmlnode, 'selectionPriority', int)
self.subsegment_alignment = parse_attr_value(xmlnode, 'subsegmentAlignment', bool)
self.subsegment_starts_with_sap = parse_attr_value(xmlnode, 'subsegmentStartsWithSAP', int)
self.bitstream_switching = parse_attr_value(xmlnode, 'bitstreamSwitching', bool)
self.accessibilities = parse_child_nodes(xmlnode, 'Accessibility', Descriptor)
self.roles = parse_child_nodes(xmlnode, 'Role', Descriptor)
self.ratings = parse_child_nodes(xmlnode, 'Rating', Descriptor)
self.viewpoints = parse_child_nodes(xmlnode, 'Viewpoint', Descriptor)
self.content_components = parse_child_nodes(xmlnode, 'ContentComponent', ContentComponent)
self.base_urls = parse_child_nodes(xmlnode, 'BaseURL', BaseURL)
self.segment_bases = parse_child_nodes(xmlnode, 'SegmentBase', SegmentBase)
self.segment_lists = parse_child_nodes(xmlnode, 'SegmentList', SegmentList)
self.segment_templates = parse_child_nodes(xmlnode, 'SegmentTemplate', SegmentTemplate)
self.representations = parse_child_nodes(xmlnode, 'Representation', Representation)
def write(self, xmlnode):
RepresentationBase.write(self, xmlnode)
write_attr_value(xmlnode, 'id', self.id)
write_attr_value(xmlnode, 'group', self.group)
write_attr_value(xmlnode, 'lang', self.lang)
write_attr_value(xmlnode, 'label', self.label)
write_attr_value(xmlnode, 'contentType', self.content_type)
write_attr_value(xmlnode, 'par', self.par)
write_attr_value(xmlnode, 'minBandwidth', self.min_bandwidth)
write_attr_value(xmlnode, 'maxBandwidth', self.max_bandwidth)
write_attr_value(xmlnode, 'minWidth', self.min_width)
write_attr_value(xmlnode, 'maxWidth', self.max_width)
write_attr_value(xmlnode, 'minHeight', self.min_height)
write_attr_value(xmlnode, 'maxHeight', self.max_height)
write_attr_value(xmlnode, 'minFrameRate', self.min_frame_rate)
write_attr_value(xmlnode, 'maxFrameRate', self.max_frame_rate)
write_attr_value(xmlnode, 'segmentAlignment', self.segment_alignment)
write_attr_value(xmlnode, 'selectionPriority', self.selection_priority)
write_attr_value(xmlnode, 'subsegmentAlignment', self.subsegment_alignment)
write_attr_value(xmlnode, 'subsegmentStartsWithSAP', self.subsegment_starts_with_sap)
write_attr_value(xmlnode, 'bitstreamSwitching', self.bitstream_switching)
write_child_node(xmlnode, 'Accessibility', self.accessibilities)
write_child_node(xmlnode, 'Role', self.roles)
write_child_node(xmlnode, 'Rating', self.ratings)
write_child_node(xmlnode, 'Viewpoint', self.viewpoints)
write_child_node(xmlnode, 'ContentComponent', self.content_components)
write_child_node(xmlnode, 'BaseURL', self.base_urls)
write_child_node(xmlnode, 'SegmentBase', self.segment_bases)
write_child_node(xmlnode, 'SegmentList', self.segment_lists)
write_child_node(xmlnode, 'SegmentTemplate', self.segment_templates)
write_child_node(xmlnode, 'Representation', self.representations)
class EventStream(XMLNode):
def __init__(self):
self.scheme_id_uri = None # xs:anyURI (required)
self.value = None # xs:string
self.timescale = None # xs:unsignedInt
self.events = None # EventType*
def parse(self, xmlnode):
self.scheme_id_uri = parse_attr_value(xmlnode, 'schemeIdUri', str)
self.value = parse_attr_value(xmlnode, 'value', str)
self.timescale = parse_attr_value(xmlnode, 'timescale', int)
self.events = parse_child_nodes(xmlnode, 'Event', Event)
def write(self, xmlnode):
write_attr_value(xmlnode, 'schemeIdUri', self.scheme_id_uri)
write_attr_value(xmlnode, 'value', self.value)
write_attr_value(xmlnode, 'timescale', self.timescale)
write_child_node(xmlnode, 'Event', self.events)
class Period(XMLNode):
def __init__(self):
self.id = None # xs:string
self.start = None # xs:duration
self.duration = None # xs:duration
self.bitstream_switching = None # xs:boolean
self.base_urls = None # BaseURLType*
self.segment_bases = None # SegmentBaseType*
self.segment_lists = None # SegmentListType*
self.segment_templates = None # SegmentTemplateType*
self.asset_identifiers = None # DescriptorType*
self.event_streams = None # EventStreamType*
self.adaptation_sets = None # AdaptationSetType*
self.subsets = None # SubsetType*
def parse(self, xmlnode):
self.id = parse_attr_value(xmlnode, 'id', str)
self.start = parse_attr_value(xmlnode, 'start', str)
self.duration = parse_attr_value(xmlnode, 'duration', str)
self.bitstream_switching = parse_attr_value(xmlnode, 'bitstreamSwitching', bool)
self.base_urls = parse_child_nodes(xmlnode, 'BaseURL', BaseURL)
self.segment_bases = parse_child_nodes(xmlnode, 'SegmentBase', SegmentBase)
self.segment_lists = parse_child_nodes(xmlnode, 'SegmentList', SegmentList)
self.segment_templates = parse_child_nodes(xmlnode, 'SegmentTemplate', SegmentTemplate)
self.asset_identifiers = parse_child_nodes(xmlnode, 'AssetIdentifier', Descriptor)
self.event_streams = parse_child_nodes(xmlnode, 'EventStream', EventStream)
self.adaptation_sets = parse_child_nodes(xmlnode, 'AdaptationSet', AdaptationSet)
self.subsets = parse_child_nodes(xmlnode, 'Subset', Subset)
def write(self, xmlnode):
write_attr_value(xmlnode, 'id', self.id)
write_attr_value(xmlnode, 'start', self.start)
write_attr_value(xmlnode, 'duration', self.duration)
write_attr_value(xmlnode, 'bitstreamSwitching', self.bitstream_switching)
write_child_node(xmlnode, 'BaseURL', self.base_urls)
write_child_node(xmlnode, 'SegmentBase', self.segment_bases)
write_child_node(xmlnode, 'SegmentList', self.segment_lists)
write_child_node(xmlnode, 'SegmentTemplate', self.segment_templates)
write_child_node(xmlnode, 'AssetIdentifier', self.asset_identifiers)
write_child_node(xmlnode, 'EventStream', self.event_streams)
write_child_node(xmlnode, 'AdaptationSet', self.adaptation_sets)
write_child_node(xmlnode, 'Subset', self.subsets)
class MPEGDASH(XMLNode):
def __init__(self):
self.xmlns = None # xmlns
self.id = None # xs:string
self.type = None # PresentationType
self.profiles = '' # xs:string (required)
self.cenc = None # xs:string
self.availability_start_time = None # xs:dateTime
self.availability_end_time = None # xs:dateTime
self.publish_time = None # xs:dateTime
self.media_presentation_duration = None # xs:duration
self.minimum_update_period = None # xs:duration
self.min_buffer_time = None # xs:duration
self.time_shift_buffer_depth = None # xs:duration
self.suggested_presentation_delay = None # xs:duration
self.max_segment_duration = None # xs:duration
self.max_subsegment_duration = None # xs:duration
self.program_informations = None # ProgramInformationType*
self.base_urls = None # BaseURLType*
self.locations = None # xs:anyURI*
self.periods = None # PeriodType+
self.metrics = None # MetricsType*
self.utc_timings = None # DescriptorType*
def parse(self, xmlnode):
self.xmlns = parse_attr_value(xmlnode, 'xmlns', str)
self.id = parse_attr_value(xmlnode, 'id', str)
self.type = parse_attr_value(xmlnode, 'type', str)
self.profiles = parse_attr_value(xmlnode, 'profiles', str)
self.cenc = parse_attr_value(xmlnode, "xmlns:cenc", str)
self.availability_start_time = parse_attr_value(xmlnode, 'availabilityStartTime', str)
self.availability_end_time = parse_attr_value(xmlnode, 'availabilityEndTime', str)
self.publish_time = parse_attr_value(xmlnode, 'publishTime', str)
self.media_presentation_duration = parse_attr_value(xmlnode, 'mediaPresentationDuration', str)
self.minimum_update_period = parse_attr_value(xmlnode, 'minimumUpdatePeriod', str)
self.min_buffer_time = parse_attr_value(xmlnode, 'minBufferTime', str)
self.time_shift_buffer_depth = parse_attr_value(xmlnode, 'timeShiftBufferDepth', str)
self.suggested_presentation_delay = parse_attr_value(xmlnode, 'suggestedPresentationDelay', str)
self.max_segment_duration = parse_attr_value(xmlnode, 'maxSegmentDuration', str)
self.max_subsegment_duration = parse_attr_value(xmlnode, 'maxSubsegmentDuration', str)
self.program_informations = parse_child_nodes(xmlnode, 'ProgramInformation', ProgramInformation)
self.base_urls = parse_child_nodes(xmlnode, 'BaseURL', BaseURL)
self.locations = parse_child_nodes(xmlnode, 'Location', XsStringElement)
self.periods = parse_child_nodes(xmlnode, 'Period', Period)
self.metrics = parse_child_nodes(xmlnode, 'Metrics', Metrics)
self.utc_timings = parse_child_nodes(xmlnode, 'UTCTiming', Descriptor)
def write(self, xmlnode):
write_attr_value(xmlnode, 'xmlns', self.xmlns)
write_attr_value(xmlnode, 'id', self.id)
write_attr_value(xmlnode, 'type', self.type)
write_attr_value(xmlnode, 'profiles', self.profiles)
write_attr_value(xmlnode, "xmlns:cenc", self.cenc)
write_attr_value(xmlnode, 'availabilityStartTime', self.availability_start_time)
write_attr_value(xmlnode, 'availabilityEndTime', self.availability_end_time)
write_attr_value(xmlnode, 'publishTime', self.publish_time)
write_attr_value(xmlnode, 'mediaPresentationDuration', self.media_presentation_duration)
write_attr_value(xmlnode, 'minimumUpdatePeriod', self.minimum_update_period)
write_attr_value(xmlnode, 'minBufferTime', self.min_buffer_time)
write_attr_value(xmlnode, 'timeShiftBufferDepth', self.time_shift_buffer_depth)
write_attr_value(xmlnode, 'suggestedPresentationDelay', self.suggested_presentation_delay)
write_attr_value(xmlnode, 'maxSegmentDuration', self.max_segment_duration)
write_attr_value(xmlnode, 'maxSubsegmentDuration', self.max_subsegment_duration)
write_child_node(xmlnode, 'ProgramInformation', self.program_informations)
write_child_node(xmlnode, 'BaseURL', self.base_urls)
write_child_node(xmlnode, 'Location', self.locations)
write_child_node(xmlnode, 'Period', self.periods)
write_child_node(xmlnode, 'Metrics', self.metrics)
write_child_node(xmlnode, 'UTCTiming', self.utc_timings)
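# --- Usage sketch (illustrative, not part of the original module) ---
# Assumption: these node classes operate on xml.dom.minidom elements, which is
# what helpers such as parse_attr_value/parse_child_nodes typically expect; the
# library may also expose a higher-level parser entry point not shown here.
if __name__ == '__main__':
    from xml.dom import minidom

    sample_mpd = (
        '<MPD xmlns="urn:mpeg:dash:schema:mpd:2011" type="static" '
        'profiles="urn:mpeg:dash:profile:isoff-on-demand:2011">'
        '<Period id="p0"><AdaptationSet contentType="video"/></Period>'
        '</MPD>'
    )
    mpd = MPEGDASH()
    mpd.parse(minidom.parseString(sample_mpd).documentElement)
    for period in (mpd.periods or []):
        # Each Period carries its own AdaptationSet/EventStream child lists.
        print('%d adaptation set(s) in period %s'
              % (len(period.adaptation_sets or []), period.id))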
|
py | b402bca8bae6ed578b44b90082ac39d8dbfd0f9e | # Copyright 2010 OpenStack Foundation
# Copyright 2011 Piston Cloud Computing, Inc
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import base64
import re
from oslo.config import cfg
from oslo import messaging
import six
import stevedore
import webob
from webob import exc
from nova.api.openstack import common
from nova.api.openstack.compute.schemas.v3 import servers as schema_servers
from nova.api.openstack.compute.views import servers as views_servers
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api import validation
from nova import compute
from nova.compute import flavors
from nova import exception
from nova.i18n import _
from nova.i18n import _LW
from nova.image import glance
from nova import objects
from nova.openstack.common import log as logging
from nova.openstack.common import strutils
from nova.openstack.common import timeutils
from nova.openstack.common import uuidutils
from nova import policy
from nova import utils
CONF = cfg.CONF
CONF.import_opt('enable_instance_password',
'nova.api.openstack.compute.servers')
CONF.import_opt('network_api_class', 'nova.network')
CONF.import_opt('reclaim_instance_interval', 'nova.compute.manager')
CONF.import_opt('extensions_blacklist', 'nova.api.openstack', group='osapi_v3')
CONF.import_opt('extensions_whitelist', 'nova.api.openstack', group='osapi_v3')
LOG = logging.getLogger(__name__)
authorizer = extensions.core_authorizer('compute:v3', 'servers')
class ServersController(wsgi.Controller):
"""The Server API base controller class for the OpenStack API."""
EXTENSION_CREATE_NAMESPACE = 'nova.api.v3.extensions.server.create'
EXTENSION_DESERIALIZE_EXTRACT_SERVER_NAMESPACE = (
'nova.api.v3.extensions.server.create.deserialize')
EXTENSION_REBUILD_NAMESPACE = 'nova.api.v3.extensions.server.rebuild'
EXTENSION_DESERIALIZE_EXTRACT_REBUILD_NAMESPACE = (
'nova.api.v3.extensions.server.rebuild.deserialize')
EXTENSION_UPDATE_NAMESPACE = 'nova.api.v3.extensions.server.update'
_view_builder_class = views_servers.ViewBuilderV3
schema_server_create = schema_servers.base_create
schema_server_update = schema_servers.base_update
schema_server_rebuild = schema_servers.base_rebuild
@staticmethod
def _add_location(robj):
# Just in case...
if 'server' not in robj.obj:
return robj
link = filter(lambda l: l['rel'] == 'self',
robj.obj['server']['links'])
if link:
robj['Location'] = utils.utf8(link[0]['href'])
# Convenience return
return robj
def __init__(self, **kwargs):
def _check_load_extension(required_function):
def check_whiteblack_lists(ext):
# Check that the whitelist is either empty or, if it is not,
# that the extension is in the whitelist
if (not CONF.osapi_v3.extensions_whitelist or
ext.obj.alias in CONF.osapi_v3.extensions_whitelist):
# Check the extension is not in the blacklist
if ext.obj.alias not in CONF.osapi_v3.extensions_blacklist:
return True
else:
LOG.warn(_LW("Not loading %s because it is "
"in the blacklist"), ext.obj.alias)
return False
else:
LOG.warn(
_LW("Not loading %s because it is not in the "
"whitelist"), ext.obj.alias)
return False
def check_load_extension(ext):
if isinstance(ext.obj, extensions.V3APIExtensionBase):
# Filter out for the existence of the required
# function here rather than on every request. We
# don't have a new abstract base class to reduce
# duplication in the extensions as they may want
# to implement multiple server (and other) entry
# points.
# if hasattr(ext.obj, 'server_create'):
if hasattr(ext.obj, required_function):
LOG.debug('extension %(ext_alias)s detected by '
'servers extension for function %(func)s',
{'ext_alias': ext.obj.alias,
'func': required_function})
return check_whiteblack_lists(ext)
else:
LOG.debug(
'extension %(ext_alias)s is missing %(func)s',
{'ext_alias': ext.obj.alias,
'func': required_function})
return False
else:
return False
return check_load_extension
self.extension_info = kwargs.pop('extension_info')
super(ServersController, self).__init__(**kwargs)
self.compute_api = compute.API()
# Look for implementation of extension point of server creation
self.create_extension_manager = \
stevedore.enabled.EnabledExtensionManager(
namespace=self.EXTENSION_CREATE_NAMESPACE,
check_func=_check_load_extension('server_create'),
invoke_on_load=True,
invoke_kwds={"extension_info": self.extension_info},
propagate_map_exceptions=True)
if not list(self.create_extension_manager):
LOG.debug("Did not find any server create extensions")
# Look for implementation of extension point of server rebuild
self.rebuild_extension_manager = \
stevedore.enabled.EnabledExtensionManager(
namespace=self.EXTENSION_REBUILD_NAMESPACE,
check_func=_check_load_extension('server_rebuild'),
invoke_on_load=True,
invoke_kwds={"extension_info": self.extension_info},
propagate_map_exceptions=True)
if not list(self.rebuild_extension_manager):
LOG.debug("Did not find any server rebuild extensions")
# Look for implementation of extension point of server update
self.update_extension_manager = \
stevedore.enabled.EnabledExtensionManager(
namespace=self.EXTENSION_UPDATE_NAMESPACE,
check_func=_check_load_extension('server_update'),
invoke_on_load=True,
invoke_kwds={"extension_info": self.extension_info},
propagate_map_exceptions=True)
if not list(self.update_extension_manager):
LOG.debug("Did not find any server update extensions")
# Look for API schema of server create extension
self.create_schema_manager = \
stevedore.enabled.EnabledExtensionManager(
namespace=self.EXTENSION_CREATE_NAMESPACE,
check_func=_check_load_extension('get_server_create_schema'),
invoke_on_load=True,
invoke_kwds={"extension_info": self.extension_info},
propagate_map_exceptions=True)
if list(self.create_schema_manager):
self.create_schema_manager.map(self._create_extension_schema,
self.schema_server_create)
else:
LOG.debug("Did not find any server create schemas")
# Look for API schema of server update extension
self.update_schema_manager = \
stevedore.enabled.EnabledExtensionManager(
namespace=self.EXTENSION_UPDATE_NAMESPACE,
check_func=_check_load_extension('get_server_update_schema'),
invoke_on_load=True,
invoke_kwds={"extension_info": self.extension_info},
propagate_map_exceptions=True)
if list(self.update_schema_manager):
self.update_schema_manager.map(self._update_extension_schema,
self.schema_server_update)
else:
LOG.debug("Did not find any server update schemas")
# Look for API schema of server rebuild extension
self.rebuild_schema_manager = \
stevedore.enabled.EnabledExtensionManager(
namespace=self.EXTENSION_REBUILD_NAMESPACE,
check_func=_check_load_extension('get_server_rebuild_schema'),
invoke_on_load=True,
invoke_kwds={"extension_info": self.extension_info},
propagate_map_exceptions=True)
if list(self.rebuild_schema_manager):
self.rebuild_schema_manager.map(self._rebuild_extension_schema,
self.schema_server_rebuild)
else:
LOG.debug("Did not find any server rebuild schemas")
@extensions.expected_errors((400, 403))
def index(self, req):
"""Returns a list of server names and ids for a given user."""
try:
servers = self._get_servers(req, is_detail=False)
except exception.Invalid as err:
raise exc.HTTPBadRequest(explanation=err.format_message())
return servers
@extensions.expected_errors((400, 403))
def detail(self, req):
"""Returns a list of server details for a given user."""
try:
servers = self._get_servers(req, is_detail=True)
except exception.Invalid as err:
raise exc.HTTPBadRequest(explanation=err.format_message())
return servers
def _get_servers(self, req, is_detail):
"""Returns a list of servers, based on any search options specified."""
search_opts = {}
search_opts.update(req.GET)
context = req.environ['nova.context']
remove_invalid_options(context, search_opts,
self._get_server_search_options())
# Verify search by 'status' contains a valid status.
# Convert it to filter by vm_state or task_state for compute_api.
search_opts.pop('status', None)
if 'status' in req.GET.keys():
statuses = req.GET.getall('status')
states = common.task_and_vm_state_from_status(statuses)
vm_state, task_state = states
if not vm_state and not task_state:
return {'servers': []}
search_opts['vm_state'] = vm_state
# When we search by vm state, task state will return 'default'.
# So we don't need task_state search_opt.
if 'default' not in task_state:
search_opts['task_state'] = task_state
if 'changes-since' in search_opts:
try:
parsed = timeutils.parse_isotime(search_opts['changes-since'])
except ValueError:
msg = _('Invalid changes-since value')
raise exc.HTTPBadRequest(explanation=msg)
search_opts['changes-since'] = parsed
# By default, compute's get_all() will return deleted instances.
# If an admin hasn't specified a 'deleted' search option, we need
# to filter out deleted instances by setting the filter ourselves.
# ... Unless 'changes-since' is specified, because 'changes-since'
# should return recently deleted instances according to the API spec.
if 'deleted' not in search_opts:
if 'changes-since' not in search_opts:
# No 'changes-since', so we only want non-deleted servers
search_opts['deleted'] = False
else:
# Convert deleted filter value to a valid boolean.
# Return non-deleted servers if an invalid value
# is passed with deleted filter.
search_opts['deleted'] = strutils.bool_from_string(
search_opts['deleted'], default=False)
if search_opts.get("vm_state") == ['deleted']:
if context.is_admin:
search_opts['deleted'] = True
else:
msg = _("Only administrators may list deleted instances")
raise exc.HTTPForbidden(explanation=msg)
# If tenant_id is passed as a search parameter this should
# imply that all_tenants is also enabled unless explicitly
# disabled. Note that the tenant_id parameter is filtered out
# by remove_invalid_options above unless the requestor is an
# admin.
if 'tenant_id' in search_opts and 'all_tenants' not in search_opts:
# We do not need to add the all_tenants flag if the tenant
# id associated with the token is the tenant id
# specified. This is done so a request that does not need
# the all_tenants flag does not fail because of lack of
# policy permission for compute:get_all_tenants when it
# doesn't actually need it.
if context.project_id != search_opts.get('tenant_id'):
search_opts['all_tenants'] = 1
# If all_tenants is passed with 0 or false as the value
# then remove it from the search options. Passing all_tenants
# with no value at all is treated as enabling the feature.
all_tenants = search_opts.get('all_tenants')
if all_tenants:
try:
if not strutils.bool_from_string(all_tenants, True):
del search_opts['all_tenants']
except ValueError as err:
raise exception.InvalidInput(six.text_type(err))
if 'all_tenants' in search_opts:
policy.enforce(context, 'compute:get_all_tenants',
{'project_id': context.project_id,
'user_id': context.user_id})
del search_opts['all_tenants']
else:
if context.project_id:
search_opts['project_id'] = context.project_id
else:
search_opts['user_id'] = context.user_id
limit, marker = common.get_limit_and_marker(req)
try:
instance_list = self.compute_api.get_all(context,
search_opts=search_opts, limit=limit, marker=marker,
want_objects=True, expected_attrs=['pci_devices'])
except exception.MarkerNotFound:
msg = _('marker [%s] not found') % marker
raise exc.HTTPBadRequest(explanation=msg)
except exception.FlavorNotFound:
log_msg = _("Flavor '%s' could not be found ")
LOG.debug(log_msg, search_opts['flavor'])
# TODO(mriedem): Move to ObjectListBase.__init__ for empty lists.
instance_list = objects.InstanceList(objects=[])
if is_detail:
instance_list.fill_faults()
response = self._view_builder.detail(req, instance_list)
else:
response = self._view_builder.index(req, instance_list)
req.cache_db_instances(instance_list)
return response
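# Illustrative sketch (assumed request, not from the original source): for a
# non-admin call such as GET /servers/detail?status=ACTIVE the code above
# translates the 'status' value into vm_state/task_state filters (the
# task_state filter is dropped when the mapping includes 'default'), forces
# search_opts['deleted'] = False because no 'changes-since' was given, and
# scopes the query with search_opts['project_id'] = context.project_id
# before calling compute_api.get_all().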
def _get_server(self, context, req, instance_uuid):
"""Utility function for looking up an instance by uuid."""
instance = common.get_instance(self.compute_api, context,
instance_uuid, want_objects=True,
expected_attrs=['pci_devices'])
req.cache_db_instance(instance)
return instance
def _check_string_length(self, value, name, max_length=None):
try:
if isinstance(value, six.string_types):
value = value.strip()
utils.check_string_length(value, name, min_length=1,
max_length=max_length)
except exception.InvalidInput as e:
raise exc.HTTPBadRequest(explanation=e.format_message())
def _get_requested_networks(self, requested_networks):
"""Create a list of requested networks from the networks attribute."""
networks = []
network_uuids = []
for network in requested_networks:
request = objects.NetworkRequest()
try:
# fixed IP address is optional
# if the fixed IP address is not provided then
# it will use one of the available IP addresses from the network
request.address = network.get('fixed_ip', None)
request.port_id = network.get('port', None)
if request.port_id:
request.network_id = None
if not utils.is_neutron():
# port parameter is only for neutron v2.0
msg = _("Unknown argument: port")
raise exc.HTTPBadRequest(explanation=msg)
if request.address is not None:
msg = _("Specified Fixed IP '%(addr)s' cannot be used "
"with port '%(port)s': port already has "
"a Fixed IP allocated.") % {
"addr": request.address,
"port": request.port_id}
raise exc.HTTPBadRequest(explanation=msg)
else:
request.network_id = network['uuid']
if (not request.port_id and
not uuidutils.is_uuid_like(request.network_id)):
br_uuid = request.network_id.split('-', 1)[-1]
if not uuidutils.is_uuid_like(br_uuid):
msg = _("Bad networks format: network uuid is "
"not in proper format "
"(%s)") % request.network_id
raise exc.HTTPBadRequest(explanation=msg)
if (request.network_id and
request.network_id in network_uuids):
expl = (_("Duplicate networks"
" (%s) are not allowed") %
request.network_id)
raise exc.HTTPBadRequest(explanation=expl)
network_uuids.append(request.network_id)
networks.append(request)
except KeyError as key:
expl = _('Bad network format: missing %s') % key
raise exc.HTTPBadRequest(explanation=expl)
except TypeError:
expl = _('Bad networks format')
raise exc.HTTPBadRequest(explanation=expl)
return objects.NetworkRequestList(objects=networks)
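# Example of the accepted 'networks' attribute (illustrative values only):
#     [{'uuid': '5f39cefa-0000-0000-0000-000000000001', 'fixed_ip': '10.0.0.5'},
#      {'port': 'a2b3c4d5-0000-0000-0000-000000000002'}]
# Each entry becomes an objects.NetworkRequest; supplying both a port and a
# fixed_ip, or listing the same network uuid twice, is rejected above with
# an HTTPBadRequest.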
# NOTE(vish): Without this regex, b64decode will happily
# ignore illegal bytes in the base64 encoded
# data.
B64_REGEX = re.compile('^(?:[A-Za-z0-9+\/]{4})*'
'(?:[A-Za-z0-9+\/]{2}=='
'|[A-Za-z0-9+\/]{3}=)?$')
def _decode_base64(self, data):
data = re.sub(r'\s', '', data)
if not self.B64_REGEX.match(data):
return None
try:
return base64.b64decode(data)
except TypeError:
return None
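# Quick illustration of why the regex guard matters: base64.b64decode() will
# silently skip characters outside the base64 alphabet, so
#     self._decode_base64('aGVsbG8=')    # -> 'hello'
#     self._decode_base64('aGVsbG8=!!')  # -> None (rejected by B64_REGEX)
# keeps malformed input from being silently half-decoded.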
@extensions.expected_errors(404)
def show(self, req, id):
"""Returns server details by server id."""
context = req.environ['nova.context']
instance = common.get_instance(self.compute_api, context, id,
want_objects=True,
expected_attrs=['pci_devices'])
req.cache_db_instance(instance)
return self._view_builder.show(req, instance)
@extensions.expected_errors((400, 403, 409, 413))
@wsgi.response(202)
@validation.schema(schema_server_create)
def create(self, req, body):
"""Creates a new server for a given user."""
context = req.environ['nova.context']
server_dict = body['server']
password = self._get_server_admin_password(server_dict)
name = server_dict['name']
# Arguments to be passed to instance create function
create_kwargs = {}
# Query extensions which want to manipulate the keyword
# arguments.
# NOTE(cyeoh): This is the hook that extensions use
# to replace the extension specific code below.
# When the extensions are ported this will also result
# in some convenience function from this class being
# moved to the extension
if list(self.create_extension_manager):
self.create_extension_manager.map(self._create_extension_point,
server_dict, create_kwargs, body)
image_uuid = self._image_from_req_data(server_dict, create_kwargs)
# NOTE(cyeoh): Although an extension can set
# return_reservation_id in order to request that a reservation
# id be returned to the client instead of the newly created
# instance information we do not want to pass this parameter
# to the compute create call which always returns both. We use
# this flag after the instance create call to determine what
# to return to the client
return_reservation_id = create_kwargs.pop('return_reservation_id',
False)
requested_networks = None
# TODO(cyeoh): bp v3-api-core-as-extensions
# Replace with an extension point when the os-networks
# extension is ported. Currently reworked
# to take into account is_neutron
# if (self.ext_mgr.is_loaded('os-networks')
# or utils.is_neutron()):
# requested_networks = server_dict.get('networks')
if utils.is_neutron():
requested_networks = server_dict.get('networks')
if requested_networks is not None:
requested_networks = self._get_requested_networks(
requested_networks)
try:
flavor_id = self._flavor_id_from_req_data(body)
except ValueError as error:
msg = _("Invalid flavorRef provided.")
raise exc.HTTPBadRequest(explanation=msg)
try:
inst_type = flavors.get_flavor_by_flavor_id(
flavor_id, ctxt=context, read_deleted="no")
(instances, resv_id) = self.compute_api.create(context,
inst_type,
image_uuid,
display_name=name,
display_description=name,
metadata=server_dict.get('metadata', {}),
admin_password=password,
requested_networks=requested_networks,
check_server_group_quota=True,
**create_kwargs)
except (exception.QuotaError,
exception.PortLimitExceeded) as error:
raise exc.HTTPForbidden(
explanation=error.format_message(),
headers={'Retry-After': 0})
except exception.ImageNotFound as error:
msg = _("Can not find requested image")
raise exc.HTTPBadRequest(explanation=msg)
except exception.FlavorNotFound as error:
msg = _("Invalid flavorRef provided.")
raise exc.HTTPBadRequest(explanation=msg)
except exception.KeypairNotFound as error:
msg = _("Invalid key_name provided.")
raise exc.HTTPBadRequest(explanation=msg)
except exception.ConfigDriveInvalidValue:
msg = _("Invalid config_drive provided.")
raise exc.HTTPBadRequest(explanation=msg)
except exception.ExternalNetworkAttachForbidden as error:
raise exc.HTTPForbidden(explanation=error.format_message())
except messaging.RemoteError as err:
msg = "%(err_type)s: %(err_msg)s" % {'err_type': err.exc_type,
'err_msg': err.value}
raise exc.HTTPBadRequest(explanation=msg)
except UnicodeDecodeError as error:
msg = "UnicodeError: %s" % unicode(error)
raise exc.HTTPBadRequest(explanation=msg)
except (exception.ImageNotActive,
exception.FlavorDiskTooSmall,
exception.FlavorMemoryTooSmall,
exception.InvalidMetadata,
exception.InvalidRequest,
exception.InvalidVolume,
exception.MultiplePortsNotApplicable,
exception.InvalidFixedIpAndMaxCountRequest,
exception.InstanceUserDataMalformed,
exception.InstanceUserDataTooLarge,
exception.PortNotFound,
exception.FixedIpAlreadyInUse,
exception.SecurityGroupNotFound,
exception.PortRequiresFixedIP,
exception.NetworkRequiresSubnet,
exception.NetworkNotFound,
exception.InvalidBDMSnapshot,
exception.InvalidBDMVolume,
exception.InvalidBDMImage,
exception.InvalidBDMBootSequence,
exception.InvalidBDMLocalsLimit,
exception.InvalidBDMVolumeNotBootable) as error:
raise exc.HTTPBadRequest(explanation=error.format_message())
except (exception.PortInUse,
exception.NetworkAmbiguous,
exception.NoUniqueMatch) as error:
raise exc.HTTPConflict(explanation=error.format_message())
# If the caller wanted a reservation_id, return it
if return_reservation_id:
# NOTE(cyeoh): In v3 reservation_id was wrapped in
# servers_reservation but this is reverted for V2 API
# compatibility. In the long term with the tasks API we
# will probably just drop the concept of reservation_id
return wsgi.ResponseObject({'reservation_id': resv_id})
req.cache_db_instances(instances)
server = self._view_builder.create(req, instances[0])
if CONF.enable_instance_password:
server['server']['adminPass'] = password
robj = wsgi.ResponseObject(server)
return self._add_location(robj)
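# Minimal request body accepted by create() (illustrative values; extensions
# loaded through the create extension managers may add further keys):
#     {"server": {"name": "test-vm",
#                 "imageRef": "70a599e0-31e7-49b7-b260-868f441e862b",
#                 "flavorRef": "1",
#                 "metadata": {"owner": "qa"},
#                 "networks": [{"uuid": "5f39cefa-0000-0000-0000-000000000001"}]}}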
# NOTE(gmann): Parameter 'req_body' is placed to handle the scheduler_hint
# extension for V2.1. No other extension is supposed to use this, as
# it will be removed soon.
def _create_extension_point(self, ext, server_dict,
create_kwargs, req_body):
handler = ext.obj
LOG.debug("Running _create_extension_point for %s", ext.obj)
handler.server_create(server_dict, create_kwargs, req_body)
def _rebuild_extension_point(self, ext, rebuild_dict, rebuild_kwargs):
handler = ext.obj
LOG.debug("Running _rebuild_extension_point for %s", ext.obj)
handler.server_rebuild(rebuild_dict, rebuild_kwargs)
def _resize_extension_point(self, ext, resize_dict, resize_kwargs):
handler = ext.obj
LOG.debug("Running _resize_extension_point for %s", ext.obj)
handler.server_resize(resize_dict, resize_kwargs)
def _update_extension_point(self, ext, update_dict, update_kwargs):
handler = ext.obj
LOG.debug("Running _update_extension_point for %s", ext.obj)
handler.server_update(update_dict, update_kwargs)
def _create_extension_schema(self, ext, create_schema):
handler = ext.obj
LOG.debug("Running _create_extension_schema for %s", ext.obj)
schema = handler.get_server_create_schema()
create_schema['properties']['server']['properties'].update(schema)
def _update_extension_schema(self, ext, update_schema):
handler = ext.obj
LOG.debug("Running _update_extension_schema for %s", ext.obj)
schema = handler.get_server_update_schema()
update_schema['properties']['server']['properties'].update(schema)
def _rebuild_extension_schema(self, ext, rebuild_schema):
handler = ext.obj
LOG.debug("Running _rebuild_extension_schema for %s", ext.obj)
schema = handler.get_server_rebuild_schema()
rebuild_schema['properties']['rebuild']['properties'].update(schema)
def _delete(self, context, req, instance_uuid):
instance = self._get_server(context, req, instance_uuid)
if CONF.reclaim_instance_interval:
try:
self.compute_api.soft_delete(context, instance)
except exception.InstanceInvalidState:
# Note(yufang521247): instance which has never been active
# is not allowed to be soft_deleted. Thus we have to call
# delete() to clean up the instance.
self.compute_api.delete(context, instance)
else:
self.compute_api.delete(context, instance)
@extensions.expected_errors((400, 404))
@validation.schema(schema_server_update)
def update(self, req, id, body):
"""Update server then pass on to version-specific controller."""
ctxt = req.environ['nova.context']
update_dict = {}
if 'name' in body['server']:
update_dict['display_name'] = body['server']['name']
# TODO(oomichi): The following host_id validation code can be removed
# when setting "'additionalProperties': False" in base_update schema.
if 'host_id' in body['server']:
msg = _("host_id cannot be updated.")
raise exc.HTTPBadRequest(explanation=msg)
if list(self.update_extension_manager):
self.update_extension_manager.map(self._update_extension_point,
body['server'], update_dict)
instance = common.get_instance(self.compute_api, ctxt, id,
want_objects=True,
expected_attrs=['pci_devices'])
try:
# NOTE(mikal): this try block needs to stay because save() still
# might throw an exception.
req.cache_db_instance(instance)
policy.enforce(ctxt, 'compute:update', instance)
instance.update(update_dict)
instance.save()
return self._view_builder.show(req, instance)
except exception.NotFound:
msg = _("Instance could not be found")
raise exc.HTTPNotFound(explanation=msg)
# NOTE(gmann): Returns 204 for backwards compatibility but should be 202
# for representing an async API, as this API just accepts the request and
# asks the hypervisor driver to complete it asynchronously.
@extensions.expected_errors((400, 404, 409))
@wsgi.response(204)
@wsgi.action('confirmResize')
def _action_confirm_resize(self, req, id, body):
context = req.environ['nova.context']
instance = self._get_server(context, req, id)
try:
self.compute_api.confirm_resize(context, instance)
except exception.MigrationNotFound:
msg = _("Instance has not been resized.")
raise exc.HTTPBadRequest(explanation=msg)
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'confirmResize')
@extensions.expected_errors((400, 404, 409))
@wsgi.response(202)
@wsgi.action('revertResize')
def _action_revert_resize(self, req, id, body):
context = req.environ['nova.context']
instance = self._get_server(context, req, id)
try:
self.compute_api.revert_resize(context, instance)
except exception.MigrationNotFound:
msg = _("Instance has not been resized.")
raise exc.HTTPBadRequest(explanation=msg)
except exception.FlavorNotFound:
msg = _("Flavor used by the instance could not be found.")
raise exc.HTTPBadRequest(explanation=msg)
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'revertResize')
return webob.Response(status_int=202)
@extensions.expected_errors((400, 404, 409))
@wsgi.response(202)
@wsgi.action('reboot')
def _action_reboot(self, req, id, body):
if 'reboot' in body and 'type' in body['reboot']:
if not isinstance(body['reboot']['type'], six.string_types):
msg = _("Argument 'type' for reboot must be a string")
LOG.error(msg)
raise exc.HTTPBadRequest(explanation=msg)
valid_reboot_types = ['HARD', 'SOFT']
reboot_type = body['reboot']['type'].upper()
if not valid_reboot_types.count(reboot_type):
msg = _("Argument 'type' for reboot is not HARD or SOFT")
LOG.error(msg)
raise exc.HTTPBadRequest(explanation=msg)
else:
msg = _("Missing argument 'type' for reboot")
LOG.error(msg)
raise exc.HTTPBadRequest(explanation=msg)
context = req.environ['nova.context']
instance = self._get_server(context, req, id)
try:
self.compute_api.reboot(context, instance, reboot_type)
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'reboot')
return webob.Response(status_int=202)
def _resize(self, req, instance_id, flavor_id, **kwargs):
"""Begin the resize process with given instance/flavor."""
context = req.environ["nova.context"]
instance = self._get_server(context, req, instance_id)
try:
self.compute_api.resize(context, instance, flavor_id, **kwargs)
except exception.QuotaError as error:
raise exc.HTTPForbidden(
explanation=error.format_message(),
headers={'Retry-After': 0})
except exception.FlavorNotFound:
msg = _("Unable to locate requested flavor.")
raise exc.HTTPBadRequest(explanation=msg)
except exception.CannotResizeToSameFlavor:
msg = _("Resize requires a flavor change.")
raise exc.HTTPBadRequest(explanation=msg)
except exception.CannotResizeDisk as e:
raise exc.HTTPBadRequest(explanation=e.format_message())
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'resize')
except exception.ImageNotAuthorized:
msg = _("You are not authorized to access the image "
"the instance was started with.")
raise exc.HTTPUnauthorized(explanation=msg)
except exception.ImageNotFound:
msg = _("Image that the instance was started "
"with could not be found.")
raise exc.HTTPBadRequest(explanation=msg)
except exception.Invalid:
msg = _("Invalid instance image.")
raise exc.HTTPBadRequest(explanation=msg)
return webob.Response(status_int=202)
@extensions.expected_errors((404, 409))
@wsgi.response(204)
def delete(self, req, id):
"""Destroys a server."""
try:
self._delete(req.environ['nova.context'], req, id)
except exception.NotFound:
msg = _("Instance could not be found")
raise exc.HTTPNotFound(explanation=msg)
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'delete')
def _image_uuid_from_href(self, image_href):
# If the image href was generated by nova api, strip image_href
# down to an id and use the default glance connection params
image_uuid = image_href.split('/').pop()
if not uuidutils.is_uuid_like(image_uuid):
msg = _("Invalid imageRef provided.")
raise exc.HTTPBadRequest(explanation=msg)
return image_uuid
def _image_from_req_data(self, server_dict, create_kwargs):
"""Get image data from the request or raise appropriate
exceptions.
The field imageRef is mandatory when no block devices have been
defined and must be a proper uuid when present.
"""
image_href = server_dict.get('imageRef')
if not image_href and create_kwargs.get('block_device_mapping'):
return ''
elif image_href:
return self._image_uuid_from_href(unicode(image_href))
else:
msg = _("Missing imageRef attribute")
raise exc.HTTPBadRequest(explanation=msg)
def _flavor_id_from_req_data(self, data):
flavor_ref = data['server']['flavorRef']
return common.get_id_from_href(flavor_ref)
@extensions.expected_errors((400, 401, 403, 404, 409))
@wsgi.response(202)
@wsgi.action('resize')
def _action_resize(self, req, id, body):
"""Resizes a given instance to the flavor size requested."""
resize_dict = body['resize']
try:
flavor_ref = str(resize_dict["flavorRef"])
if not flavor_ref:
msg = _("Resize request has invalid 'flavorRef' attribute.")
raise exc.HTTPBadRequest(explanation=msg)
except (KeyError, TypeError):
msg = _("Resize requests require 'flavorRef' attribute.")
raise exc.HTTPBadRequest(explanation=msg)
resize_kwargs = {}
return self._resize(req, id, flavor_ref, **resize_kwargs)
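# Illustrative resize action body (values assumed):
#     POST /servers/{id}/action
#     {"resize": {"flavorRef": "2"}}
# Only 'flavorRef' is consumed here; resize_kwargs stays empty in the code
# shown above.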
@extensions.expected_errors((400, 403, 404, 409, 413))
@wsgi.response(202)
@wsgi.action('rebuild')
@validation.schema(schema_server_rebuild)
def _action_rebuild(self, req, id, body):
"""Rebuild an instance with the given attributes."""
rebuild_dict = body['rebuild']
image_href = rebuild_dict["imageRef"]
image_href = self._image_uuid_from_href(image_href)
password = self._get_server_admin_password(rebuild_dict)
context = req.environ['nova.context']
instance = self._get_server(context, req, id)
attr_map = {
'name': 'display_name',
'metadata': 'metadata',
}
rebuild_kwargs = {}
if 'preserve_ephemeral' in rebuild_dict:
rebuild_kwargs['preserve_ephemeral'] = strutils.bool_from_string(
rebuild_dict['preserve_ephemeral'], strict=True)
if list(self.rebuild_extension_manager):
self.rebuild_extension_manager.map(self._rebuild_extension_point,
rebuild_dict, rebuild_kwargs)
for request_attribute, instance_attribute in attr_map.items():
try:
rebuild_kwargs[instance_attribute] = rebuild_dict[
request_attribute]
except (KeyError, TypeError):
pass
try:
self.compute_api.rebuild(context,
instance,
image_href,
password,
**rebuild_kwargs)
except exception.InstanceIsLocked as e:
raise exc.HTTPConflict(explanation=e.format_message())
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'rebuild')
except exception.InstanceNotFound:
msg = _("Instance could not be found")
raise exc.HTTPNotFound(explanation=msg)
except exception.ImageNotFound:
msg = _("Cannot find image for rebuild")
raise exc.HTTPBadRequest(explanation=msg)
except exception.QuotaError as error:
raise exc.HTTPForbidden(explanation=error.format_message())
except (exception.ImageNotActive,
exception.FlavorDiskTooSmall,
exception.FlavorMemoryTooSmall,
exception.InvalidMetadata) as error:
raise exc.HTTPBadRequest(explanation=error.format_message())
instance = self._get_server(context, req, id)
view = self._view_builder.show(req, instance)
# Add on the admin_password attribute since the view doesn't do it
# unless instance passwords are disabled
if CONF.enable_instance_password:
view['server']['adminPass'] = password
robj = wsgi.ResponseObject(view)
return self._add_location(robj)
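# Illustrative rebuild action body (values assumed):
#     {"rebuild": {"imageRef": "70a599e0-31e7-49b7-b260-868f441e862b",
#                  "name": "rebuilt-vm",
#                  "adminPass": "newpass",
#                  "metadata": {"rebuilt": "true"},
#                  "preserve_ephemeral": false}}
# 'name' and 'metadata' are remapped through attr_map before being passed to
# compute_api.rebuild().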
@extensions.expected_errors((400, 403, 404, 409))
@wsgi.response(202)
@wsgi.action('createImage')
@common.check_snapshots_enabled
def _action_create_image(self, req, id, body):
"""Snapshot a server instance."""
context = req.environ['nova.context']
entity = body.get("createImage", {})
image_name = entity.get("name")
if not image_name:
msg = _("createImage entity requires name attribute")
raise exc.HTTPBadRequest(explanation=msg)
props = {}
metadata = entity.get('metadata', {})
common.check_img_metadata_properties_quota(context, metadata)
try:
props.update(metadata)
except ValueError:
msg = _("Invalid metadata")
raise exc.HTTPBadRequest(explanation=msg)
instance = self._get_server(context, req, id)
bdms = objects.BlockDeviceMappingList.get_by_instance_uuid(
context, instance.uuid)
try:
if self.compute_api.is_volume_backed_instance(context, instance,
bdms):
img = instance['image_ref']
if not img:
properties = bdms.root_metadata(
context, self.compute_api.image_api,
self.compute_api.volume_api)
image_meta = {'properties': properties}
else:
image_meta = self.compute_api.image_api.get(context, img)
image = self.compute_api.snapshot_volume_backed(
context,
instance,
image_meta,
image_name,
extra_properties=props)
else:
image = self.compute_api.snapshot(context,
instance,
image_name,
extra_properties=props)
except exception.InstanceInvalidState as state_error:
common.raise_http_conflict_for_instance_invalid_state(state_error,
'createImage')
except exception.Invalid as err:
raise exc.HTTPBadRequest(explanation=err.format_message())
# build location of newly-created image entity
image_id = str(image['id'])
image_ref = glance.generate_image_url(image_id)
resp = webob.Response(status_int=202)
resp.headers['Location'] = image_ref
return resp
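# Illustrative createImage action body (values assumed):
#     {"createImage": {"name": "my-snapshot", "metadata": {"purpose": "backup"}}}
# The 202 response carries no body; the new image is referenced through the
# Location header built from glance.generate_image_url().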
def _get_server_admin_password(self, server):
"""Determine the admin password for a server on creation."""
try:
password = server['adminPass']
except KeyError:
password = utils.generate_password()
return password
def _get_server_search_options(self):
"""Return server search options allowed by non-admin."""
return ('reservation_id', 'name', 'status', 'image', 'flavor',
'ip', 'changes-since', 'all_tenants')
def _get_instance(self, context, instance_uuid):
try:
attrs = ['system_metadata', 'metadata']
return objects.Instance.get_by_uuid(context, instance_uuid,
expected_attrs=attrs)
except exception.InstanceNotFound as e:
raise webob.exc.HTTPNotFound(explanation=e.format_message())
@extensions.expected_errors((404, 409))
@wsgi.action('os-start')
def _start_server(self, req, id, body):
"""Start an instance."""
context = req.environ['nova.context']
instance = self._get_instance(context, id)
authorizer(context, instance, 'start')
LOG.debug('start instance', instance=instance)
try:
self.compute_api.start(context, instance)
except (exception.InstanceNotReady, exception.InstanceIsLocked,
exception.InstanceInvalidState) as e:
raise webob.exc.HTTPConflict(explanation=e.format_message())
return webob.Response(status_int=202)
@extensions.expected_errors((404, 409))
@wsgi.action('os-stop')
def _stop_server(self, req, id, body):
"""Stop an instance."""
context = req.environ['nova.context']
instance = self._get_instance(context, id)
authorizer(context, instance, 'stop')
LOG.debug('stop instance', instance=instance)
try:
self.compute_api.stop(context, instance)
except (exception.InstanceNotReady, exception.InstanceIsLocked,
exception.InstanceInvalidState) as e:
raise webob.exc.HTTPConflict(explanation=e.format_message())
return webob.Response(status_int=202)
def remove_invalid_options(context, search_options, allowed_search_options):
"""Remove search options that are not valid for non-admin API/context."""
if context.is_admin:
# Allow all options
return
# Otherwise, strip out all unknown options
unknown_options = [opt for opt in search_options
if opt not in allowed_search_options]
LOG.debug("Removing options '%s' from query",
", ".join(unknown_options))
for opt in unknown_options:
search_options.pop(opt, None)
class Servers(extensions.V3APIExtensionBase):
"""Servers."""
name = "Servers"
alias = "servers"
version = 1
def get_resources(self):
member_actions = {'action': 'POST'}
collection_actions = {'detail': 'GET'}
resources = [
extensions.ResourceExtension(
'servers',
ServersController(extension_info=self.extension_info),
member_name='server', collection_actions=collection_actions,
member_actions=member_actions)]
return resources
def get_controller_extensions(self):
return []
|
py | b402bcb841d1645d29d0b0035d0069ad6a25720d | # coding: utf-8
# (C) Copyright IBM Corp. 2020.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
CIS Zones
"""
from typing import Dict, List
import json
from ibm_cloud_sdk_core import BaseService, DetailedResponse
from ibm_cloud_sdk_core.authenticators.authenticator import Authenticator
from ibm_cloud_sdk_core.get_authenticator import get_authenticator_from_environment
from .common import get_sdk_headers
##############################################################################
# Service
##############################################################################
class ZonesV1(BaseService):
"""The Zones V1 service."""
DEFAULT_SERVICE_URL = 'https://api.cis.cloud.ibm.com'
DEFAULT_SERVICE_NAME = 'zones'
@classmethod
def new_instance(cls,
crn: str,
service_name: str = DEFAULT_SERVICE_NAME,
) -> 'ZonesV1':
"""
Return a new client for the Zones service using the specified parameters
and external configuration.
:param str crn: Full url-encoded CRN of the service instance.
"""
if crn is None:
raise ValueError('crn must be provided')
authenticator = get_authenticator_from_environment(service_name)
service = cls(
crn,
authenticator
)
service.configure_service(service_name)
return service
def __init__(self,
crn: str,
authenticator: Authenticator = None,
) -> None:
"""
Construct a new client for the Zones service.
:param str crn: Full url-encoded CRN of the service instance.
:param Authenticator authenticator: The authenticator specifies the authentication mechanism.
Get up to date information from https://github.com/IBM/python-sdk-core/blob/master/README.md
about initializing the authenticator of your choice.
"""
if crn is None:
raise ValueError('crn must be provided')
BaseService.__init__(self,
service_url=self.DEFAULT_SERVICE_URL,
authenticator=authenticator)
self.crn = crn
#########################
# CIS Zones
#########################
def list_zones(self,
**kwargs
) -> DetailedResponse:
"""
List all zones.
List all zones for a service instance.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ListZonesResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='list_zones')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
url = '/v1/{0}/zones'.format(
*self.encode_path_vars(self.crn))
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def create_zone(self,
*,
name: str = None,
**kwargs
) -> DetailedResponse:
"""
Create zone.
Add a new zone for a given service instance.
:param str name: (optional) name.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ZoneResp` object
"""
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='create_zone')
headers.update(sdk_headers)
data = {
'name': name
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
url = '/v1/{0}/zones'.format(
*self.encode_path_vars(self.crn))
request = self.prepare_request(method='POST',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def delete_zone(self,
zone_identifier: str,
**kwargs
) -> DetailedResponse:
"""
Delete zone.
Delete a zone given its id.
:param str zone_identifier: Identifier of zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `DeleteZoneResp` object
"""
if zone_identifier is None:
raise ValueError('zone_identifier must be provided')
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='delete_zone')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
url = '/v1/{0}/zones/{1}'.format(
*self.encode_path_vars(self.crn, zone_identifier))
request = self.prepare_request(method='DELETE',
url=url,
headers=headers)
response = self.send(request)
return response
def get_zone(self,
zone_identifier: str,
**kwargs
) -> DetailedResponse:
"""
Get zone.
Get the details of a zone for a given service instance and given zone id.
:param str zone_identifier: Zone identifier.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ZoneResp` object
"""
if zone_identifier is None:
raise ValueError('zone_identifier must be provided')
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='get_zone')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
url = '/v1/{0}/zones/{1}'.format(
*self.encode_path_vars(self.crn, zone_identifier))
request = self.prepare_request(method='GET',
url=url,
headers=headers)
response = self.send(request)
return response
def update_zone(self,
zone_identifier: str,
*,
paused: bool = None,
**kwargs
) -> DetailedResponse:
"""
Update zone.
Update the paused field of the zone.
:param str zone_identifier: Zone identifier.
:param bool paused: (optional) paused.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ZoneResp` object
"""
if zone_identifier is None:
raise ValueError('zone_identifier must be provided')
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='update_zone')
headers.update(sdk_headers)
data = {
'paused': paused
}
data = {k: v for (k, v) in data.items() if v is not None}
data = json.dumps(data)
headers['content-type'] = 'application/json'
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
url = '/v1/{0}/zones/{1}'.format(
*self.encode_path_vars(self.crn, zone_identifier))
request = self.prepare_request(method='PATCH',
url=url,
headers=headers,
data=data)
response = self.send(request)
return response
def zone_activation_check(self,
zone_identifier: str,
**kwargs
) -> DetailedResponse:
"""
Check zone.
Perform activation check on zone for status.
:param str zone_identifier: Identifier of zone.
:param dict headers: A `dict` containing the request headers
:return: A `DetailedResponse` containing the result, headers and HTTP status code.
:rtype: DetailedResponse with `dict` result representing a `ZoneActivationcheckResp` object
"""
if zone_identifier is None:
raise ValueError('zone_identifier must be provided')
headers = {}
sdk_headers = get_sdk_headers(service_name=self.DEFAULT_SERVICE_NAME,
service_version='V1',
operation_id='zone_activation_check')
headers.update(sdk_headers)
if 'headers' in kwargs:
headers.update(kwargs.get('headers'))
url = '/v1/{0}/zones/{1}/activation_check'.format(
*self.encode_path_vars(self.crn, zone_identifier))
request = self.prepare_request(method='PUT',
url=url,
headers=headers)
response = self.send(request)
return response
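# Usage sketch (illustrative; assumes the standard ibm_cloud_sdk_core
# IAMAuthenticator, which is not imported by this module):
#
#     from ibm_cloud_sdk_core.authenticators import IAMAuthenticator
#
#     zones = ZonesV1(crn='<url-encoded service instance CRN>',
#                     authenticator=IAMAuthenticator('<apikey>'))
#     result = zones.list_zones().get_result()
#     for zone in result['result']:
#         print(zone['id'], zone['name'])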
##############################################################################
# Models
##############################################################################
class DeleteZoneRespResult():
"""
result.
:attr str id: id.
"""
def __init__(self,
id: str) -> None:
"""
Initialize a DeleteZoneRespResult object.
:param str id: id.
"""
self.id = id
@classmethod
def from_dict(cls, _dict: Dict) -> 'DeleteZoneRespResult':
"""Initialize a DeleteZoneRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in DeleteZoneRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a DeleteZoneRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this DeleteZoneRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'DeleteZoneRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'DeleteZoneRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ZoneActivationcheckRespResult():
"""
result.
:attr str id: id.
"""
def __init__(self,
id: str) -> None:
"""
Initialize a ZoneActivationcheckRespResult object.
:param str id: id.
"""
self.id = id
@classmethod
def from_dict(cls, _dict: Dict) -> 'ZoneActivationcheckRespResult':
"""Initialize a ZoneActivationcheckRespResult object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
else:
raise ValueError('Required property \'id\' not present in ZoneActivationcheckRespResult JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ZoneActivationcheckRespResult object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ZoneActivationcheckRespResult object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ZoneActivationcheckRespResult') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ZoneActivationcheckRespResult') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class DeleteZoneResp():
"""
delete zone response.
:attr bool success: success.
:attr List[List[str]] errors: errors.
:attr List[List[str]] messages: messages.
:attr DeleteZoneRespResult result: result.
"""
def __init__(self,
success: bool,
errors: List[List[str]],
messages: List[List[str]],
result: 'DeleteZoneRespResult') -> None:
"""
Initialize a DeleteZoneResp object.
:param bool success: success.
:param List[List[str]] errors: errors.
:param List[List[str]] messages: messages.
:param DeleteZoneRespResult result: result.
"""
self.success = success
self.errors = errors
self.messages = messages
self.result = result
@classmethod
def from_dict(cls, _dict: Dict) -> 'DeleteZoneResp':
"""Initialize a DeleteZoneResp object from a json dictionary."""
args = {}
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in DeleteZoneResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in DeleteZoneResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in DeleteZoneResp JSON')
if 'result' in _dict:
args['result'] = DeleteZoneRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in DeleteZoneResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a DeleteZoneResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this DeleteZoneResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'DeleteZoneResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'DeleteZoneResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
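# Round-trip sketch for the model classes above (payload values assumed):
#     payload = {'success': True, 'errors': [], 'messages': [],
#                'result': {'id': '023e105f4ecef8ad9ca31a8372d0c353'}}
#     resp = DeleteZoneResp.from_dict(payload)
#     assert resp.result.id == payload['result']['id']
#     assert resp.to_dict() == payload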
class ListZonesResp():
"""
list zones response.
:attr bool success: success.
:attr List[List[str]] errors: errors.
:attr List[List[str]] messages: messages.
:attr List[ZoneDetails] result: zone list.
:attr ResultInfo result_info: result information.
"""
def __init__(self,
success: bool,
errors: List[List[str]],
messages: List[List[str]],
result: List['ZoneDetails'],
result_info: 'ResultInfo') -> None:
"""
Initialize a ListZonesResp object.
:param bool success: success.
:param List[List[str]] errors: errors.
:param List[List[str]] messages: messages.
:param List[ZoneDetails] result: zone list.
:param ResultInfo result_info: result information.
"""
self.success = success
self.errors = errors
self.messages = messages
self.result = result
self.result_info = result_info
@classmethod
def from_dict(cls, _dict: Dict) -> 'ListZonesResp':
"""Initialize a ListZonesResp object from a json dictionary."""
args = {}
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in ListZonesResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in ListZonesResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in ListZonesResp JSON')
if 'result' in _dict:
args['result'] = [ZoneDetails.from_dict(x) for x in _dict.get('result')]
else:
raise ValueError('Required property \'result\' not present in ListZonesResp JSON')
if 'result_info' in _dict:
args['result_info'] = ResultInfo.from_dict(_dict.get('result_info'))
else:
raise ValueError('Required property \'result_info\' not present in ListZonesResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ListZonesResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = [x.to_dict() for x in self.result]
if hasattr(self, 'result_info') and self.result_info is not None:
_dict['result_info'] = self.result_info.to_dict()
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ListZonesResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ListZonesResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ListZonesResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ResultInfo():
"""
result information.
:attr int page: page.
:attr int per_page: per page.
:attr int count: count.
:attr int total_count: total count.
"""
def __init__(self,
page: int,
per_page: int,
count: int,
total_count: int) -> None:
"""
Initialize a ResultInfo object.
:param int page: page.
:param int per_page: per page.
:param int count: count.
:param int total_count: total count.
"""
self.page = page
self.per_page = per_page
self.count = count
self.total_count = total_count
@classmethod
def from_dict(cls, _dict: Dict) -> 'ResultInfo':
"""Initialize a ResultInfo object from a json dictionary."""
args = {}
if 'page' in _dict:
args['page'] = _dict.get('page')
else:
raise ValueError('Required property \'page\' not present in ResultInfo JSON')
if 'per_page' in _dict:
args['per_page'] = _dict.get('per_page')
else:
raise ValueError('Required property \'per_page\' not present in ResultInfo JSON')
if 'count' in _dict:
args['count'] = _dict.get('count')
else:
raise ValueError('Required property \'count\' not present in ResultInfo JSON')
if 'total_count' in _dict:
args['total_count'] = _dict.get('total_count')
else:
raise ValueError('Required property \'total_count\' not present in ResultInfo JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ResultInfo object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'page') and self.page is not None:
_dict['page'] = self.page
if hasattr(self, 'per_page') and self.per_page is not None:
_dict['per_page'] = self.per_page
if hasattr(self, 'count') and self.count is not None:
_dict['count'] = self.count
if hasattr(self, 'total_count') and self.total_count is not None:
_dict['total_count'] = self.total_count
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ResultInfo object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ResultInfo') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ResultInfo') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ZoneActivationcheckResp():
"""
zone activation check response.
:attr bool success: success.
:attr List[List[str]] errors: errors.
:attr List[List[str]] messages: messages.
:attr ZoneActivationcheckRespResult result: result.
"""
def __init__(self,
success: bool,
errors: List[List[str]],
messages: List[List[str]],
result: 'ZoneActivationcheckRespResult') -> None:
"""
Initialize a ZoneActivationcheckResp object.
:param bool success: success.
:param List[List[str]] errors: errors.
:param List[List[str]] messages: messages.
:param ZoneActivationcheckRespResult result: result.
"""
self.success = success
self.errors = errors
self.messages = messages
self.result = result
@classmethod
def from_dict(cls, _dict: Dict) -> 'ZoneActivationcheckResp':
"""Initialize a ZoneActivationcheckResp object from a json dictionary."""
args = {}
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in ZoneActivationcheckResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in ZoneActivationcheckResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in ZoneActivationcheckResp JSON')
if 'result' in _dict:
args['result'] = ZoneActivationcheckRespResult.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in ZoneActivationcheckResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ZoneActivationcheckResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ZoneActivationcheckResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ZoneActivationcheckResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ZoneActivationcheckResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ZoneDetails():
"""
zone details.
:attr str id: (optional) id.
:attr str created_on: (optional) created date.
:attr str modified_on: (optional) modified date.
:attr str name: (optional) name.
:attr str original_registrar: (optional) original registrar.
    :attr str original_dnshost: (optional) original dns host.
:attr str status: (optional) status.
:attr bool paused: (optional) paused.
    :attr List[str] original_name_servers: (optional) original name servers.
:attr List[str] name_servers: (optional) name servers.
"""
def __init__(self,
*,
id: str = None,
created_on: str = None,
modified_on: str = None,
name: str = None,
original_registrar: str = None,
original_dnshost: str = None,
status: str = None,
paused: bool = None,
original_name_servers: List[str] = None,
name_servers: List[str] = None) -> None:
"""
Initialize a ZoneDetails object.
:param str id: (optional) id.
:param str created_on: (optional) created date.
:param str modified_on: (optional) modified date.
:param str name: (optional) name.
:param str original_registrar: (optional) original registrar.
        :param str original_dnshost: (optional) original dns host.
:param str status: (optional) status.
:param bool paused: (optional) paused.
        :param List[str] original_name_servers: (optional) original name servers.
:param List[str] name_servers: (optional) name servers.
"""
self.id = id
self.created_on = created_on
self.modified_on = modified_on
self.name = name
self.original_registrar = original_registrar
self.original_dnshost = original_dnshost
self.status = status
self.paused = paused
self.original_name_servers = original_name_servers
self.name_servers = name_servers
@classmethod
def from_dict(cls, _dict: Dict) -> 'ZoneDetails':
"""Initialize a ZoneDetails object from a json dictionary."""
args = {}
if 'id' in _dict:
args['id'] = _dict.get('id')
if 'created_on' in _dict:
args['created_on'] = _dict.get('created_on')
if 'modified_on' in _dict:
args['modified_on'] = _dict.get('modified_on')
if 'name' in _dict:
args['name'] = _dict.get('name')
if 'original_registrar' in _dict:
args['original_registrar'] = _dict.get('original_registrar')
if 'original_dnshost' in _dict:
args['original_dnshost'] = _dict.get('original_dnshost')
if 'status' in _dict:
args['status'] = _dict.get('status')
if 'paused' in _dict:
args['paused'] = _dict.get('paused')
if 'original_name_servers' in _dict:
args['original_name_servers'] = _dict.get('original_name_servers')
if 'name_servers' in _dict:
args['name_servers'] = _dict.get('name_servers')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ZoneDetails object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'id') and self.id is not None:
_dict['id'] = self.id
if hasattr(self, 'created_on') and self.created_on is not None:
_dict['created_on'] = self.created_on
if hasattr(self, 'modified_on') and self.modified_on is not None:
_dict['modified_on'] = self.modified_on
if hasattr(self, 'name') and self.name is not None:
_dict['name'] = self.name
if hasattr(self, 'original_registrar') and self.original_registrar is not None:
_dict['original_registrar'] = self.original_registrar
if hasattr(self, 'original_dnshost') and self.original_dnshost is not None:
_dict['original_dnshost'] = self.original_dnshost
if hasattr(self, 'status') and self.status is not None:
_dict['status'] = self.status
if hasattr(self, 'paused') and self.paused is not None:
_dict['paused'] = self.paused
if hasattr(self, 'original_name_servers') and self.original_name_servers is not None:
_dict['original_name_servers'] = self.original_name_servers
if hasattr(self, 'name_servers') and self.name_servers is not None:
_dict['name_servers'] = self.name_servers
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ZoneDetails object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ZoneDetails') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ZoneDetails') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
class ZoneResp():
"""
zone response.
:attr bool success: success.
:attr List[List[str]] errors: errors.
:attr List[List[str]] messages: messages.
:attr ZoneDetails result: zone details.
"""
def __init__(self,
success: bool,
errors: List[List[str]],
messages: List[List[str]],
result: 'ZoneDetails') -> None:
"""
Initialize a ZoneResp object.
:param bool success: success.
:param List[List[str]] errors: errors.
:param List[List[str]] messages: messages.
:param ZoneDetails result: zone details.
"""
self.success = success
self.errors = errors
self.messages = messages
self.result = result
@classmethod
def from_dict(cls, _dict: Dict) -> 'ZoneResp':
"""Initialize a ZoneResp object from a json dictionary."""
args = {}
if 'success' in _dict:
args['success'] = _dict.get('success')
else:
raise ValueError('Required property \'success\' not present in ZoneResp JSON')
if 'errors' in _dict:
args['errors'] = _dict.get('errors')
else:
raise ValueError('Required property \'errors\' not present in ZoneResp JSON')
if 'messages' in _dict:
args['messages'] = _dict.get('messages')
else:
raise ValueError('Required property \'messages\' not present in ZoneResp JSON')
if 'result' in _dict:
args['result'] = ZoneDetails.from_dict(_dict.get('result'))
else:
raise ValueError('Required property \'result\' not present in ZoneResp JSON')
return cls(**args)
@classmethod
def _from_dict(cls, _dict):
"""Initialize a ZoneResp object from a json dictionary."""
return cls.from_dict(_dict)
def to_dict(self) -> Dict:
"""Return a json dictionary representing this model."""
_dict = {}
if hasattr(self, 'success') and self.success is not None:
_dict['success'] = self.success
if hasattr(self, 'errors') and self.errors is not None:
_dict['errors'] = self.errors
if hasattr(self, 'messages') and self.messages is not None:
_dict['messages'] = self.messages
if hasattr(self, 'result') and self.result is not None:
_dict['result'] = self.result.to_dict()
return _dict
def _to_dict(self):
"""Return a json dictionary representing this model."""
return self.to_dict()
def __str__(self) -> str:
"""Return a `str` version of this ZoneResp object."""
return json.dumps(self.to_dict(), indent=2)
def __eq__(self, other: 'ZoneResp') -> bool:
"""Return `true` when self and other are equal, false otherwise."""
if not isinstance(other, self.__class__):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other: 'ZoneResp') -> bool:
"""Return `true` when self and other are not equal, false otherwise."""
return not self == other
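# Illustrative sketch (comment only, not part of the generated SDK): every model
# above follows the same from_dict()/to_dict() round trip, so a hypothetical
# payload can be loaded and serialized like this (field values are made up):
#
#   zone = ZoneDetails.from_dict({'id': 'abc123', 'name': 'example.com',
#                                 'status': 'active', 'paused': False})
#   assert zone.to_dict()['name'] == 'example.com'
#
#   # Response wrappers insist on 'success', 'errors', 'messages' and 'result'
#   # and raise ValueError if any of them is missing:
#   resp = ZoneResp.from_dict({'success': True, 'errors': [], 'messages': [],
#                              'result': {'id': 'abc123', 'name': 'example.com'}})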
|
py | b402bcc88389d4e45e548a07c971e0ffe5fb9ab5 | from django.contrib.contenttypes.fields import GenericForeignKey
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.utils.translation import ugettext_lazy as _
from model_utils.fields import AutoCreatedField, AutoLastModifiedField
class CreatedAt(models.Model):
created_at = AutoCreatedField(_("created_at"))
class Meta:
abstract = True
class TimestampableModel(CreatedAt):
updated_at = AutoLastModifiedField(_("updated_at"))
class Meta:
abstract = True
def prefetch_generic_relations(qs): # noqa
"""
    Prefetches the objects referenced by every generic foreign key in a queryset.
From https://djangosnippets.org/snippets/2492/ with some tweaks.
"""
def get_content_type(content_type_id, cache={}): # noqa
if content_type_id in cache:
return cache[content_type_id]
content_type = ContentType.objects.get_for_id(content_type_id)
cache[content_type_id] = content_type
return content_type
gfks = {}
for name, gfk in qs.model.__dict__.items():
if not isinstance(gfk, GenericForeignKey):
continue
gfks[name] = gfk
data = {}
for weak_model in qs:
for gfk_name, gfk_field in gfks.items():
fields = gfk_field.model._meta.get_fields()
field = None
for f in fields:
if f.name == gfk_field.ct_field:
field = f
if field is None:
continue
related_content_type_id = getattr(weak_model, field.get_attname())
if not related_content_type_id:
continue
related_content_type = get_content_type(related_content_type_id)
related_object_id = getattr(weak_model, gfk_field.fk_field)
if related_content_type not in data.keys():
data[related_content_type] = []
data[related_content_type].append(related_object_id)
for content_type, object_ids in data.items():
model_class = content_type.model_class()
models = prefetch_generic_relations(model_class.objects.filter(pk__in=object_ids))
for model in models:
for weak_model in qs:
for gfk_name, gfk_field in gfks.items():
fields = gfk_field.model._meta.get_fields()
field = None
for f in fields:
if f.name == gfk_field.ct_field:
field = f
if field is None:
continue
related_content_type_id = getattr(weak_model, field.get_attname())
if not related_content_type_id:
continue
related_content_type = get_content_type(related_content_type_id)
related_object_id = getattr(weak_model, gfk_field.fk_field)
if str(related_object_id) != str(model.pk):
continue
if related_content_type != content_type:
continue
setattr(weak_model, gfk_name, model)
return qs
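# Hedged usage sketch (comment only, not from the original snippet): assuming a
# hypothetical `Comment` model whose `content_type`/`object_id` pair backs a
# GenericForeignKey called `target`, the helper batches the lookups that
# `comment.target` would otherwise issue one row at a time:
#
#   comments = prefetch_generic_relations(Comment.objects.all())
#   for comment in comments:
#       print(comment.target)   # already attached, no extra query per row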
|
py | b402bd21fca0cc19e3a493783c99425dfb5e9ec9 | # Generated by Django 3.0.2 on 2020-02-03 07:11
from django.db import migrations
class Migration(migrations.Migration):
initial = True
dependencies = [
('salesmanorders', '__first__'),
]
operations = [
migrations.CreateModel(
name='Order',
fields=[
],
options={
'verbose_name': 'Order',
'verbose_name_plural': 'Orders',
'proxy': True,
'indexes': [],
'constraints': [],
},
bases=('salesmanorders.order',),
),
migrations.CreateModel(
name='OrderItem',
fields=[
],
options={
'verbose_name': 'Item',
'verbose_name_plural': 'Items',
'proxy': True,
'indexes': [],
'constraints': [],
},
bases=('salesmanorders.orderitem',),
),
migrations.CreateModel(
name='OrderNote',
fields=[
],
options={
'verbose_name': 'Note',
'verbose_name_plural': 'Notes',
'proxy': True,
'indexes': [],
'constraints': [],
},
bases=('salesmanorders.ordernote',),
),
migrations.CreateModel(
name='OrderPayment',
fields=[
],
options={
'verbose_name': 'Payment',
'verbose_name_plural': 'Payments',
'proxy': True,
'indexes': [],
'constraints': [],
},
bases=('salesmanorders.orderpayment',),
),
]
|
py | b402bdec4242a8d8f08f3117d7288967adc69b23 | # -*- coding: utf-8 -*-
"""
authlib.jose.rfc7518
~~~~~~~~~~~~~~~~~~~~
"alg" (Algorithm) Header Parameter Values for JWS per `Section 3`_.
.. _`Section 3`: https://tools.ietf.org/html/rfc7518#section-3
"""
from cryptography.hazmat.primitives import hashes
from cryptography.hazmat.primitives.asymmetric.utils import (
decode_dss_signature, encode_dss_signature
)
from cryptography.hazmat.primitives.asymmetric.ec import ECDSA
from cryptography.hazmat.primitives.asymmetric import padding
from cryptography.exceptions import InvalidSignature
from authlib.jose.rfc7515 import JWSAlgorithm
from ._key_cryptography import RSAKey, ECKey
from ..util import encode_int, decode_int
class RSAAlgorithm(RSAKey, JWSAlgorithm):
"""RSA using SHA algorithms for JWS. Available algorithms:
- RS256: RSASSA-PKCS1-v1_5 using SHA-256
- RS384: RSASSA-PKCS1-v1_5 using SHA-384
- RS512: RSASSA-PKCS1-v1_5 using SHA-512
"""
SHA256 = hashes.SHA256
SHA384 = hashes.SHA384
SHA512 = hashes.SHA512
def __init__(self, sha_type):
self.name = 'RS{}'.format(sha_type)
self.hash_alg = getattr(self, 'SHA{}'.format(sha_type))
self.padding = padding.PKCS1v15()
def sign(self, msg, key):
return key.sign(msg, self.padding, self.hash_alg())
def verify(self, msg, key, sig):
try:
key.verify(sig, msg, self.padding, self.hash_alg())
return True
except InvalidSignature:
return False
class ECAlgorithm(ECKey, JWSAlgorithm):
"""ECDSA using SHA algorithms for JWS. Available algorithms:
- ES256: ECDSA using P-256 and SHA-256
- ES384: ECDSA using P-384 and SHA-384
- ES512: ECDSA using P-521 and SHA-512
"""
SHA256 = hashes.SHA256
SHA384 = hashes.SHA384
SHA512 = hashes.SHA512
def __init__(self, sha_type):
self.name = 'ES{}'.format(sha_type)
self.hash_alg = getattr(self, 'SHA{}'.format(sha_type))
def sign(self, msg, key):
der_sig = key.sign(msg, ECDSA(self.hash_alg()))
r, s = decode_dss_signature(der_sig)
size = key.curve.key_size
return encode_int(r, size) + encode_int(s, size)
def verify(self, msg, key, sig):
key_size = key.curve.key_size
length = (key_size + 7) // 8
if len(sig) != 2 * length:
return False
r = decode_int(sig[:length])
s = decode_int(sig[length:])
der_sig = encode_dss_signature(r, s)
try:
key.verify(der_sig, msg, ECDSA(self.hash_alg()))
return True
except InvalidSignature:
return False
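# Illustrative sketch (comment only, not part of authlib): the ES* algorithms
# above transmit signatures as the fixed-width concatenation r || s instead of
# DER. With a locally generated P-256 key (`demo_key` is an assumption of this
# sketch, not an authlib object):
#
#   from cryptography.hazmat.primitives.asymmetric import ec
#
#   demo_key = ec.generate_private_key(ec.SECP256R1())
#   der_sig = demo_key.sign(b'payload', ECDSA(hashes.SHA256()))
#   r, s = decode_dss_signature(der_sig)
#   raw_sig = encode_int(r, 256) + encode_int(s, 256)  # 32 bytes each for ES256
#   assert len(raw_sig) == 64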
class RSAPSSAlgorithm(RSAKey, JWSAlgorithm):
"""RSASSA-PSS using SHA algorithms for JWS. Available algorithms:
- PS256: RSASSA-PSS using SHA-256 and MGF1 with SHA-256
- PS384: RSASSA-PSS using SHA-384 and MGF1 with SHA-384
- PS512: RSASSA-PSS using SHA-512 and MGF1 with SHA-512
"""
SHA256 = hashes.SHA256
SHA384 = hashes.SHA384
SHA512 = hashes.SHA512
def __init__(self, sha_type):
self.name = 'PS{}'.format(sha_type)
self.hash_alg = getattr(self, 'SHA{}'.format(sha_type))
def sign(self, msg, key):
return key.sign(
msg,
padding.PSS(
mgf=padding.MGF1(self.hash_alg()),
salt_length=self.hash_alg.digest_size
),
self.hash_alg()
)
def verify(self, msg, key, sig):
try:
key.verify(
sig,
msg,
padding.PSS(
mgf=padding.MGF1(self.hash_alg()),
salt_length=self.hash_alg.digest_size
),
self.hash_alg()
)
return True
except InvalidSignature:
return False
JWS_ALGORITHMS = [
RSAAlgorithm(256),
RSAAlgorithm(384),
RSAAlgorithm(512),
ECAlgorithm(256),
ECAlgorithm(384),
ECAlgorithm(512),
RSAPSSAlgorithm(256),
RSAPSSAlgorithm(384),
RSAPSSAlgorithm(512),
]
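# Note (comment added for clarity, not in the original module): each entry above
# derives its JWS "alg" name from the constructor argument, so this registry
# exposes RS256/RS384/RS512, ES256/ES384/ES512 and PS256/PS384/PS512.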
|
py | b402c03593b82edcff52c4e5e89fd4f6d23e2e4c | """
Module: 'ds18x20' on micropython-v1.17-rp2
"""
# MCU: {'family': 'micropython', 'sysname': 'rp2', 'version': '1.17.0', 'build': '', 'mpy': 5637, 'port': 'rp2', 'platform': 'rp2', 'name': 'micropython', 'arch': 'armv7m', 'machine': 'Raspberry Pi Pico with RP2040', 'nodename': 'rp2', 'ver': 'v1.17', 'release': '1.17.0'}
# Stubber: 1.5.2
from typing import Any
def const(*args) -> Any:
...
class DS18X20:
""""""
def __init__(self, *args) -> None:
...
def scan(self, *args) -> Any:
...
def convert_temp(self, *args) -> Any:
...
def read_scratch(self, *args) -> Any:
...
def write_scratch(self, *args) -> Any:
...
def read_temp(self, *args) -> Any:
...
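# Hedged usage sketch (comment only, not part of the generated stub): on real
# hardware the class is driven over a onewire bus; the GPIO pin and timings
# below are illustrative defaults, not values taken from this stub:
#
#   import time, machine, onewire, ds18x20
#
#   ow = onewire.OneWire(machine.Pin(22))
#   ds = ds18x20.DS18X20(ow)
#   roms = ds.scan()            # ROM codes of detected sensors
#   ds.convert_temp()
#   time.sleep_ms(750)          # allow the conversion to finish
#   for rom in roms:
#       print(ds.read_temp(rom))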
|
py | b402c0985524d563edebfb6c155e54317ccfa5b7 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Time : 2017/12/29 16:06
# @Author : WIX
# @File : QueueWithTwoStacks.py
"""
Implement a queue using two stacks. The queue is declared as follows; implement
its two functions appendTail and deleteHead, which insert a node at the tail of
the queue and delete a node at the head of the queue, respectively.
"""
class Solution:
def __init__(self):
self.stack1 = []
self.stack2 = []
def push(self, node):
self.stack1.append(node)
def pop(self):
if not self.stack2 and not self.stack1:
return None
elif not self.stack2:
while self.stack1:
self.stack2.append(self.stack1.pop())
return self.stack2.pop()
else:
return self.stack2.pop()
s = Solution()
s.push(1)
s.push(2)
s.push(3)
print(s.pop())
s.push(4)
print(s.pop())
print(s.pop())
s.push(5)
print(s.pop())
print(s.pop())
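# Expected output of the demo above (FIFO order despite the two LIFO stacks):
# 1, 2, 3, 4, 5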
|
py | b402c0de0360d3056617fe3792b26bf5c32b3e4b | import sys
from sqlalchemy import Column, Integer, String, Float, ForeignKey
from sqlalchemy.orm import relationship
from setting import Base
from setting import ENGINE
class Tanuki(Base):
__tablename__="Tanuki"
id=Column(Integer, primary_key=True)
name=Column(String(255))
age=Column(Integer)
type=Column(String(255))
def main(args):
Base.metadata.create_all(bind=ENGINE)
if __name__ == "__main__":
main(sys.argv)
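# Hedged usage sketch (comment only, not in the original module): once the table
# exists, rows can be written and read through a session bound to the same
# ENGINE; the sessionmaker import and the sample values are assumptions here:
#
#   from sqlalchemy.orm import sessionmaker
#
#   Session = sessionmaker(bind=ENGINE)
#   session = Session()
#   session.add(Tanuki(name="Pom", age=3, type="ezo"))
#   session.commit()
#   print(session.query(Tanuki).filter_by(name="Pom").all())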
|
py | b402c0f3afbef4b8fb8a6156216aedc94ff23f90 | import unittest
import sys
import os
import numpy as np
import pandas as pd
import datetime
import transparentai.utils as utils
class TestUtils(unittest.TestCase):
def test_is_array_like(self):
self.assertFalse(utils.is_array_like({'test':0}))
self.assertFalse(utils.is_array_like(0))
self.assertFalse(utils.is_array_like('sqdfd'))
self.assertTrue(utils.is_array_like([1,2,3]))
self.assertTrue(utils.is_array_like(np.array([1,2,3])))
self.assertTrue(utils.is_array_like(pd.Series([1,2,3])))
self.assertTrue(utils.is_array_like(pd.DataFrame([1,2,3])))
self.assertFalse(utils.is_array_like([[1,2],[2,3],[3,4]]))
self.assertFalse(utils.is_array_like(np.array([[1,2],[2,3],[3,4]])))
self.assertFalse(utils.is_array_like(pd.Series([[1,2],[2,3],[3,4]])))
self.assertFalse(utils.is_array_like(pd.DataFrame([[1,2],[2,3],[3,4]])))
def test_find_dtype(self):
self.assertRaises(TypeError, utils.find_dtype)
self.assertEqual(utils.find_dtype([1,2]),'number')
self.assertEqual(utils.find_dtype(['1','2']),'object')
self.assertEqual(utils.find_dtype([datetime.datetime(1958,5,12),datetime.datetime(1980,12,12)]),'datetime')
self.assertEqual(utils.find_dtype(['blabla','2']),'object')
self.assertEqual(utils.find_dtype(pd.DataFrame([1,2])),'number')
self.assertEqual(utils.find_dtype(pd.Series(['1','2'])),'object')
self.assertEqual(utils.find_dtype(pd.Series([datetime.datetime(1958,5,12),datetime.datetime(1980,12,12)])),'datetime')
self.assertEqual(utils.find_dtype(pd.DataFrame(['blabla','2'])),'object')
if __name__ == '__main__':
unittest.main()
|
py | b402c1770b5a1e0e141fffe2a1e7cc2861aa2c08 | # -*- coding:utf-8 -*-
import postParser
class pusher(postParser.parser):
def __init__(self,baseUrl,seeLZ):
postParser.parser.__init__(self,baseUrl,seeLZ)
self.children=None
self.postId=None
self.title=""
self.mainContent=""
self.questionMode=False
self.answering=None
def getPostTitle(self,t):
self.title=t
############################
#sendpost begin
    # Create the root post; returns its id
def createRoot(self,title,content):
pass
#创建分支,返回分支id
def createFork(self,id,title,content,jump):
pass
#sendpost end
############################
    # Called when a new post has been fetched
def getNewPost(self):
self.answering=None
self.questionMode=False
if self.postId==None:
if len(self.mainContent)!=0 :
rid=self.createRoot(self.title,self.mainContent)
self.children=None
self.postId=rid
self.mainContent=""
    # args: content, line number
def contentAppend(self,raw,num):
if self.answering==None:
self.mainContent+=raw
else:
if self.children!=None :
if (type(self.children[self.answering]).__name__ == 'dict') :
self.children[self.answering]["content"]+="\n"+raw
else:
self.mainContent+=raw
else:
self.mainContent+=raw
def finished(self):
        # A submission has already been made within this post
if self.questionMode :
return
if self.children :
rid=0
tmpid=0
for item in self.children:
if item["resuming"] :
if rid==0 :
rid=self.createFork(self.postId , item["title"] , item["content"]+"\n"+self.mainContent , None)
else:
self.createFork(self.postId , item["title"] , item["content"] , rid)
else:
tmpid=self.createFork(self.postId , item["title"] , item["content"] , None)
self.mainContent=""
if rid!=0 :
self.postId=rid
elif tmpid!=0:
self.postId=tmpid
        # If both are 0, something went wrong
        # Clear children
self.children={}
    # args: content, line number
def questionAppend(self,raw,num):
self.finished()
self.questionMode=True
self.children[num]={"title":raw , "content":raw , "resuming":False}
    # args: content, line number, answered line number, whether the story resumes
def answerAppend(self,raw,num,ansnum,resuming):
        # Update the state machine
self.answering=ansnum
if self.children!=None :
if (type(self.children[ansnum]).__name__ == 'dict') :
self.children[ansnum]["content"]+="\n"+raw
self.children[ansnum]["resuming"]=resuming
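# Flow summary (comment added for clarity): questionAppend() opens one fork
# candidate per question line, answerAppend()/contentAppend() route later text
# into the candidate currently being answered and record whether the story
# resumes there, and finished() commits the accumulated candidates through
# createFork(), leaving self.postId on the branch the narrative continues from.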
|
py | b402c23e29964df75ae5ffebd42f616aa115a16e | """The tests for the Legacy Mqtt vacuum platform."""
from copy import deepcopy
import json
import pytest
from homeassistant.components import vacuum
from homeassistant.components.mqtt import CONF_COMMAND_TOPIC
from homeassistant.components.mqtt.vacuum import schema_legacy as mqttvacuum
from homeassistant.components.mqtt.vacuum.schema import services_to_strings
from homeassistant.components.mqtt.vacuum.schema_legacy import (
ALL_SERVICES,
SERVICE_TO_STRING,
)
from homeassistant.components.vacuum import (
ATTR_BATTERY_ICON,
ATTR_BATTERY_LEVEL,
ATTR_FAN_SPEED,
ATTR_FAN_SPEED_LIST,
ATTR_STATUS,
)
from homeassistant.const import CONF_NAME, CONF_PLATFORM, STATE_OFF, STATE_ON
from homeassistant.setup import async_setup_component
from .test_common import (
help_test_availability_without_topic,
help_test_custom_availability_payload,
help_test_default_availability_payload,
help_test_discovery_broken,
help_test_discovery_removal,
help_test_discovery_update,
help_test_discovery_update_attr,
help_test_entity_debug_info_message,
help_test_entity_device_info_remove,
help_test_entity_device_info_update,
help_test_entity_device_info_with_connection,
help_test_entity_device_info_with_identifier,
help_test_entity_id_update_discovery_update,
help_test_entity_id_update_subscriptions,
help_test_setting_attribute_via_mqtt_json_message,
help_test_setting_attribute_with_template,
help_test_unique_id,
help_test_update_with_json_attrs_bad_JSON,
help_test_update_with_json_attrs_not_dict,
)
from tests.common import async_fire_mqtt_message
from tests.components.vacuum import common
DEFAULT_CONFIG = {
CONF_PLATFORM: "mqtt",
CONF_NAME: "mqtttest",
CONF_COMMAND_TOPIC: "vacuum/command",
mqttvacuum.CONF_SEND_COMMAND_TOPIC: "vacuum/send_command",
mqttvacuum.CONF_BATTERY_LEVEL_TOPIC: "vacuum/state",
mqttvacuum.CONF_BATTERY_LEVEL_TEMPLATE: "{{ value_json.battery_level }}",
mqttvacuum.CONF_CHARGING_TOPIC: "vacuum/state",
mqttvacuum.CONF_CHARGING_TEMPLATE: "{{ value_json.charging }}",
mqttvacuum.CONF_CLEANING_TOPIC: "vacuum/state",
mqttvacuum.CONF_CLEANING_TEMPLATE: "{{ value_json.cleaning }}",
mqttvacuum.CONF_DOCKED_TOPIC: "vacuum/state",
mqttvacuum.CONF_DOCKED_TEMPLATE: "{{ value_json.docked }}",
mqttvacuum.CONF_ERROR_TOPIC: "vacuum/state",
mqttvacuum.CONF_ERROR_TEMPLATE: "{{ value_json.error }}",
mqttvacuum.CONF_FAN_SPEED_TOPIC: "vacuum/state",
mqttvacuum.CONF_FAN_SPEED_TEMPLATE: "{{ value_json.fan_speed }}",
mqttvacuum.CONF_SET_FAN_SPEED_TOPIC: "vacuum/set_fan_speed",
mqttvacuum.CONF_FAN_SPEED_LIST: ["min", "medium", "high", "max"],
}
DEFAULT_CONFIG_2 = {vacuum.DOMAIN: {"platform": "mqtt", "name": "test"}}
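# Comment added for orientation: DEFAULT_CONFIG exercises the full legacy schema
# (one state topic plus per-attribute templates), while DEFAULT_CONFIG_2 is the
# minimal platform/name config reused by the shared help_test_* helpers below.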
async def test_default_supported_features(hass, mqtt_mock):
"""Test that the correct supported features."""
assert await async_setup_component(
hass, vacuum.DOMAIN, {vacuum.DOMAIN: DEFAULT_CONFIG}
)
entity = hass.states.get("vacuum.mqtttest")
entity_features = entity.attributes.get(mqttvacuum.CONF_SUPPORTED_FEATURES, 0)
assert sorted(services_to_strings(entity_features, SERVICE_TO_STRING)) == sorted(
[
"turn_on",
"turn_off",
"stop",
"return_home",
"battery",
"status",
"clean_spot",
]
)
async def test_all_commands(hass, mqtt_mock):
"""Test simple commands to the vacuum."""
config = deepcopy(DEFAULT_CONFIG)
config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
ALL_SERVICES, SERVICE_TO_STRING
)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
await common.async_turn_on(hass, "vacuum.mqtttest")
mqtt_mock.async_publish.assert_called_once_with(
"vacuum/command", "turn_on", 0, False
)
mqtt_mock.async_publish.reset_mock()
await common.async_turn_off(hass, "vacuum.mqtttest")
mqtt_mock.async_publish.assert_called_once_with(
"vacuum/command", "turn_off", 0, False
)
mqtt_mock.async_publish.reset_mock()
await common.async_stop(hass, "vacuum.mqtttest")
mqtt_mock.async_publish.assert_called_once_with("vacuum/command", "stop", 0, False)
mqtt_mock.async_publish.reset_mock()
await common.async_clean_spot(hass, "vacuum.mqtttest")
mqtt_mock.async_publish.assert_called_once_with(
"vacuum/command", "clean_spot", 0, False
)
mqtt_mock.async_publish.reset_mock()
await common.async_locate(hass, "vacuum.mqtttest")
mqtt_mock.async_publish.assert_called_once_with(
"vacuum/command", "locate", 0, False
)
mqtt_mock.async_publish.reset_mock()
await common.async_start_pause(hass, "vacuum.mqtttest")
mqtt_mock.async_publish.assert_called_once_with(
"vacuum/command", "start_pause", 0, False
)
mqtt_mock.async_publish.reset_mock()
await common.async_return_to_base(hass, "vacuum.mqtttest")
mqtt_mock.async_publish.assert_called_once_with(
"vacuum/command", "return_to_base", 0, False
)
mqtt_mock.async_publish.reset_mock()
await common.async_set_fan_speed(hass, "high", "vacuum.mqtttest")
mqtt_mock.async_publish.assert_called_once_with(
"vacuum/set_fan_speed", "high", 0, False
)
mqtt_mock.async_publish.reset_mock()
await common.async_send_command(hass, "44 FE 93", entity_id="vacuum.mqtttest")
mqtt_mock.async_publish.assert_called_once_with(
"vacuum/send_command", "44 FE 93", 0, False
)
mqtt_mock.async_publish.reset_mock()
await common.async_send_command(
hass, "44 FE 93", {"key": "value"}, entity_id="vacuum.mqtttest"
)
assert json.loads(mqtt_mock.async_publish.mock_calls[-1][1][1]) == {
"command": "44 FE 93",
"key": "value",
}
await common.async_send_command(
hass, "44 FE 93", {"key": "value"}, entity_id="vacuum.mqtttest"
)
assert json.loads(mqtt_mock.async_publish.mock_calls[-1][1][1]) == {
"command": "44 FE 93",
"key": "value",
}
async def test_commands_without_supported_features(hass, mqtt_mock):
"""Test commands which are not supported by the vacuum."""
config = deepcopy(DEFAULT_CONFIG)
services = mqttvacuum.STRING_TO_SERVICE["status"]
config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
services, SERVICE_TO_STRING
)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
await common.async_turn_on(hass, "vacuum.mqtttest")
mqtt_mock.async_publish.assert_not_called()
mqtt_mock.async_publish.reset_mock()
await common.async_turn_off(hass, "vacuum.mqtttest")
mqtt_mock.async_publish.assert_not_called()
mqtt_mock.async_publish.reset_mock()
await common.async_stop(hass, "vacuum.mqtttest")
mqtt_mock.async_publish.assert_not_called()
mqtt_mock.async_publish.reset_mock()
await common.async_clean_spot(hass, "vacuum.mqtttest")
mqtt_mock.async_publish.assert_not_called()
mqtt_mock.async_publish.reset_mock()
await common.async_locate(hass, "vacuum.mqtttest")
mqtt_mock.async_publish.assert_not_called()
mqtt_mock.async_publish.reset_mock()
await common.async_start_pause(hass, "vacuum.mqtttest")
mqtt_mock.async_publish.assert_not_called()
mqtt_mock.async_publish.reset_mock()
await common.async_return_to_base(hass, "vacuum.mqtttest")
mqtt_mock.async_publish.assert_not_called()
mqtt_mock.async_publish.reset_mock()
await common.async_set_fan_speed(hass, "high", "vacuum.mqtttest")
mqtt_mock.async_publish.assert_not_called()
mqtt_mock.async_publish.reset_mock()
await common.async_send_command(hass, "44 FE 93", entity_id="vacuum.mqtttest")
mqtt_mock.async_publish.assert_not_called()
mqtt_mock.async_publish.reset_mock()
async def test_attributes_without_supported_features(hass, mqtt_mock):
"""Test attributes which are not supported by the vacuum."""
config = deepcopy(DEFAULT_CONFIG)
services = mqttvacuum.STRING_TO_SERVICE["turn_on"]
config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
services, SERVICE_TO_STRING
)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
message = """{
"battery_level": 54,
"cleaning": true,
"docked": false,
"charging": false,
"fan_speed": "max"
}"""
async_fire_mqtt_message(hass, "vacuum/state", message)
state = hass.states.get("vacuum.mqtttest")
assert state.state == STATE_ON
assert state.attributes.get(ATTR_BATTERY_LEVEL) is None
assert state.attributes.get(ATTR_BATTERY_ICON) is None
assert state.attributes.get(ATTR_FAN_SPEED) is None
assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None
async def test_status(hass, mqtt_mock):
"""Test status updates from the vacuum."""
config = deepcopy(DEFAULT_CONFIG)
config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
ALL_SERVICES, SERVICE_TO_STRING
)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
message = """{
"battery_level": 54,
"cleaning": true,
"docked": false,
"charging": false,
"fan_speed": "max"
}"""
async_fire_mqtt_message(hass, "vacuum/state", message)
state = hass.states.get("vacuum.mqtttest")
assert state.state == STATE_ON
assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-50"
assert state.attributes.get(ATTR_BATTERY_LEVEL) == 54
assert state.attributes.get(ATTR_FAN_SPEED) == "max"
message = """{
"battery_level": 61,
"docked": true,
"cleaning": false,
"charging": true,
"fan_speed": "min"
}"""
async_fire_mqtt_message(hass, "vacuum/state", message)
state = hass.states.get("vacuum.mqtttest")
assert state.state == STATE_OFF
assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-charging-60"
assert state.attributes.get(ATTR_BATTERY_LEVEL) == 61
assert state.attributes.get(ATTR_FAN_SPEED) == "min"
async def test_status_battery(hass, mqtt_mock):
"""Test status updates from the vacuum."""
config = deepcopy(DEFAULT_CONFIG)
config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
ALL_SERVICES, SERVICE_TO_STRING
)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
message = """{
"battery_level": 54
}"""
async_fire_mqtt_message(hass, "vacuum/state", message)
state = hass.states.get("vacuum.mqtttest")
assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-50"
async def test_status_cleaning(hass, mqtt_mock):
"""Test status updates from the vacuum."""
config = deepcopy(DEFAULT_CONFIG)
config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
ALL_SERVICES, SERVICE_TO_STRING
)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
message = """{
"cleaning": true
}"""
async_fire_mqtt_message(hass, "vacuum/state", message)
state = hass.states.get("vacuum.mqtttest")
assert state.state == STATE_ON
async def test_status_docked(hass, mqtt_mock):
"""Test status updates from the vacuum."""
config = deepcopy(DEFAULT_CONFIG)
config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
ALL_SERVICES, SERVICE_TO_STRING
)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
message = """{
"docked": true
}"""
async_fire_mqtt_message(hass, "vacuum/state", message)
state = hass.states.get("vacuum.mqtttest")
assert state.state == STATE_OFF
async def test_status_charging(hass, mqtt_mock):
"""Test status updates from the vacuum."""
config = deepcopy(DEFAULT_CONFIG)
config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
ALL_SERVICES, SERVICE_TO_STRING
)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
message = """{
"charging": true
}"""
async_fire_mqtt_message(hass, "vacuum/state", message)
state = hass.states.get("vacuum.mqtttest")
assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-outline"
async def test_status_fan_speed(hass, mqtt_mock):
"""Test status updates from the vacuum."""
config = deepcopy(DEFAULT_CONFIG)
config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
ALL_SERVICES, SERVICE_TO_STRING
)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
message = """{
"fan_speed": "max"
}"""
async_fire_mqtt_message(hass, "vacuum/state", message)
state = hass.states.get("vacuum.mqtttest")
assert state.attributes.get(ATTR_FAN_SPEED) == "max"
async def test_status_fan_speed_list(hass, mqtt_mock):
"""Test status updates from the vacuum."""
config = deepcopy(DEFAULT_CONFIG)
config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
ALL_SERVICES, SERVICE_TO_STRING
)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
state = hass.states.get("vacuum.mqtttest")
assert state.attributes.get(ATTR_FAN_SPEED_LIST) == ["min", "medium", "high", "max"]
async def test_status_no_fan_speed_list(hass, mqtt_mock):
"""Test status updates from the vacuum.
If the vacuum doesn't support fan speed, fan speed list should be None.
"""
config = deepcopy(DEFAULT_CONFIG)
services = ALL_SERVICES - mqttvacuum.SUPPORT_FAN_SPEED
config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
services, SERVICE_TO_STRING
)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
state = hass.states.get("vacuum.mqtttest")
assert state.attributes.get(ATTR_FAN_SPEED_LIST) is None
async def test_status_error(hass, mqtt_mock):
"""Test status updates from the vacuum."""
config = deepcopy(DEFAULT_CONFIG)
config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
ALL_SERVICES, SERVICE_TO_STRING
)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
message = """{
"error": "Error1"
}"""
async_fire_mqtt_message(hass, "vacuum/state", message)
state = hass.states.get("vacuum.mqtttest")
assert state.attributes.get(ATTR_STATUS) == "Error: Error1"
message = """{
"error": ""
}"""
async_fire_mqtt_message(hass, "vacuum/state", message)
state = hass.states.get("vacuum.mqtttest")
assert state.attributes.get(ATTR_STATUS) == "Stopped"
async def test_battery_template(hass, mqtt_mock):
"""Test that you can use non-default templates for battery_level."""
config = deepcopy(DEFAULT_CONFIG)
config.update(
{
mqttvacuum.CONF_SUPPORTED_FEATURES: services_to_strings(
ALL_SERVICES, SERVICE_TO_STRING
),
mqttvacuum.CONF_BATTERY_LEVEL_TOPIC: "retroroomba/battery_level",
mqttvacuum.CONF_BATTERY_LEVEL_TEMPLATE: "{{ value }}",
}
)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
async_fire_mqtt_message(hass, "retroroomba/battery_level", "54")
state = hass.states.get("vacuum.mqtttest")
assert state.attributes.get(ATTR_BATTERY_LEVEL) == 54
assert state.attributes.get(ATTR_BATTERY_ICON) == "mdi:battery-50"
async def test_status_invalid_json(hass, mqtt_mock):
"""Test to make sure nothing breaks if the vacuum sends bad JSON."""
config = deepcopy(DEFAULT_CONFIG)
config[mqttvacuum.CONF_SUPPORTED_FEATURES] = services_to_strings(
ALL_SERVICES, SERVICE_TO_STRING
)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
async_fire_mqtt_message(hass, "vacuum/state", '{"asdfasas false}')
state = hass.states.get("vacuum.mqtttest")
assert state.state == STATE_OFF
assert state.attributes.get(ATTR_STATUS) == "Stopped"
async def test_missing_battery_template(hass, mqtt_mock):
"""Test to make sure missing template is not allowed."""
config = deepcopy(DEFAULT_CONFIG)
config.pop(mqttvacuum.CONF_BATTERY_LEVEL_TEMPLATE)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
state = hass.states.get("vacuum.mqtttest")
assert state is None
async def test_missing_charging_template(hass, mqtt_mock):
"""Test to make sure missing template is not allowed."""
config = deepcopy(DEFAULT_CONFIG)
config.pop(mqttvacuum.CONF_CHARGING_TEMPLATE)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
state = hass.states.get("vacuum.mqtttest")
assert state is None
async def test_missing_cleaning_template(hass, mqtt_mock):
"""Test to make sure missing template is not allowed."""
config = deepcopy(DEFAULT_CONFIG)
config.pop(mqttvacuum.CONF_CLEANING_TEMPLATE)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
state = hass.states.get("vacuum.mqtttest")
assert state is None
async def test_missing_docked_template(hass, mqtt_mock):
"""Test to make sure missing template is not allowed."""
config = deepcopy(DEFAULT_CONFIG)
config.pop(mqttvacuum.CONF_DOCKED_TEMPLATE)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
state = hass.states.get("vacuum.mqtttest")
assert state is None
async def test_missing_error_template(hass, mqtt_mock):
"""Test to make sure missing template is not allowed."""
config = deepcopy(DEFAULT_CONFIG)
config.pop(mqttvacuum.CONF_ERROR_TEMPLATE)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
state = hass.states.get("vacuum.mqtttest")
assert state is None
async def test_missing_fan_speed_template(hass, mqtt_mock):
"""Test to make sure missing template is not allowed."""
config = deepcopy(DEFAULT_CONFIG)
config.pop(mqttvacuum.CONF_FAN_SPEED_TEMPLATE)
assert await async_setup_component(hass, vacuum.DOMAIN, {vacuum.DOMAIN: config})
state = hass.states.get("vacuum.mqtttest")
assert state is None
async def test_availability_without_topic(hass, mqtt_mock):
"""Test availability without defined availability topic."""
await help_test_availability_without_topic(
hass, mqtt_mock, vacuum.DOMAIN, DEFAULT_CONFIG_2
)
async def test_default_availability_payload(hass, mqtt_mock):
"""Test availability by default payload with defined topic."""
await help_test_default_availability_payload(
hass, mqtt_mock, vacuum.DOMAIN, DEFAULT_CONFIG_2
)
async def test_custom_availability_payload(hass, mqtt_mock):
"""Test availability by custom payload with defined topic."""
await help_test_custom_availability_payload(
hass, mqtt_mock, vacuum.DOMAIN, DEFAULT_CONFIG_2
)
async def test_setting_attribute_via_mqtt_json_message(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_via_mqtt_json_message(
hass, mqtt_mock, vacuum.DOMAIN, DEFAULT_CONFIG_2
)
async def test_setting_attribute_with_template(hass, mqtt_mock):
"""Test the setting of attribute via MQTT with JSON payload."""
await help_test_setting_attribute_with_template(
hass, mqtt_mock, vacuum.DOMAIN, DEFAULT_CONFIG_2
)
async def test_update_with_json_attrs_not_dict(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
await help_test_update_with_json_attrs_not_dict(
hass, mqtt_mock, caplog, vacuum.DOMAIN, DEFAULT_CONFIG_2
)
async def test_update_with_json_attrs_bad_JSON(hass, mqtt_mock, caplog):
"""Test attributes get extracted from a JSON result."""
await help_test_update_with_json_attrs_bad_JSON(
hass, mqtt_mock, caplog, vacuum.DOMAIN, DEFAULT_CONFIG_2
)
async def test_discovery_update_attr(hass, mqtt_mock, caplog):
"""Test update of discovered MQTTAttributes."""
await help_test_discovery_update_attr(
hass, mqtt_mock, caplog, vacuum.DOMAIN, DEFAULT_CONFIG_2
)
async def test_unique_id(hass, mqtt_mock):
"""Test unique id option only creates one vacuum per unique_id."""
config = {
vacuum.DOMAIN: [
{
"platform": "mqtt",
"name": "Test 1",
"command_topic": "test_topic",
"unique_id": "TOTALLY_UNIQUE",
},
{
"platform": "mqtt",
"name": "Test 2",
"command_topic": "test_topic",
"unique_id": "TOTALLY_UNIQUE",
},
]
}
await help_test_unique_id(hass, vacuum.DOMAIN, config)
async def test_discovery_removal_vacuum(hass, mqtt_mock, caplog):
"""Test removal of discovered vacuum."""
data = json.dumps(DEFAULT_CONFIG_2[vacuum.DOMAIN])
await help_test_discovery_removal(hass, mqtt_mock, caplog, vacuum.DOMAIN, data)
async def test_discovery_update_vacuum(hass, mqtt_mock, caplog):
"""Test update of discovered vacuum."""
data1 = '{ "name": "Beer",' ' "command_topic": "test_topic" }'
data2 = '{ "name": "Milk",' ' "command_topic": "test_topic" }'
await help_test_discovery_update(
hass, mqtt_mock, caplog, vacuum.DOMAIN, data1, data2
)
@pytest.mark.no_fail_on_log_exception
async def test_discovery_broken(hass, mqtt_mock, caplog):
"""Test handling of bad discovery message."""
data1 = '{ "name": "Beer",' ' "command_topic": "test_topic#" }'
data2 = '{ "name": "Milk",' ' "command_topic": "test_topic" }'
await help_test_discovery_broken(
hass, mqtt_mock, caplog, vacuum.DOMAIN, data1, data2
)
async def test_entity_device_info_with_connection(hass, mqtt_mock):
"""Test MQTT vacuum device registry integration."""
await help_test_entity_device_info_with_connection(
hass, mqtt_mock, vacuum.DOMAIN, DEFAULT_CONFIG_2
)
async def test_entity_device_info_with_identifier(hass, mqtt_mock):
"""Test MQTT vacuum device registry integration."""
await help_test_entity_device_info_with_identifier(
hass, mqtt_mock, vacuum.DOMAIN, DEFAULT_CONFIG_2
)
async def test_entity_device_info_update(hass, mqtt_mock):
"""Test device registry update."""
await help_test_entity_device_info_update(
hass, mqtt_mock, vacuum.DOMAIN, DEFAULT_CONFIG_2
)
async def test_entity_device_info_remove(hass, mqtt_mock):
"""Test device registry remove."""
await help_test_entity_device_info_remove(
hass, mqtt_mock, vacuum.DOMAIN, DEFAULT_CONFIG_2
)
async def test_entity_id_update_subscriptions(hass, mqtt_mock):
"""Test MQTT subscriptions are managed when entity_id is updated."""
config = {
vacuum.DOMAIN: {
"platform": "mqtt",
"name": "test",
"battery_level_topic": "test-topic",
"battery_level_template": "{{ value_json.battery_level }}",
"command_topic": "command-topic",
"availability_topic": "avty-topic",
}
}
await help_test_entity_id_update_subscriptions(
hass, mqtt_mock, vacuum.DOMAIN, config, ["test-topic", "avty-topic"]
)
async def test_entity_id_update_discovery_update(hass, mqtt_mock):
"""Test MQTT discovery update when entity_id is updated."""
await help_test_entity_id_update_discovery_update(
hass, mqtt_mock, vacuum.DOMAIN, DEFAULT_CONFIG_2
)
async def test_entity_debug_info_message(hass, mqtt_mock):
"""Test MQTT debug info."""
config = {
vacuum.DOMAIN: {
"platform": "mqtt",
"name": "test",
"battery_level_topic": "test-topic",
"battery_level_template": "{{ value_json.battery_level }}",
"command_topic": "command-topic",
"availability_topic": "avty-topic",
}
}
await help_test_entity_debug_info_message(
hass, mqtt_mock, vacuum.DOMAIN, config, "test-topic"
)
|
py | b402c3f394d2bf894f9ca6e3b0515094ecc7d84c | import _plotly_utils.basevalidators
class ExponentformatValidator(_plotly_utils.basevalidators.EnumeratedValidator):
def __init__(
self, plotly_name="exponentformat", parent_name="layout.ternary.baxis", **kwargs
):
super(ExponentformatValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
role=kwargs.pop("role", "style"),
values=kwargs.pop("values", ["none", "e", "E", "power", "SI", "B"]),
**kwargs
)
|
py | b402c3fb0c588392a99623f6eb8ec830a2b37c28 | ####################################################################################################
# #
# Research Question 1 implementation: evaluation of different undersampling rates, feature sets, #
# and different learners for the prediction of reviewer participation. #
# #
# Example: python3 -u is_reviewer.py #
# #
####################################################################################################
from imblearn.pipeline import Pipeline
from imblearn.under_sampling import RandomUnderSampler
from sklearn.metrics import auc, precision_recall_fscore_support
from sklearn.metrics import accuracy_score, precision_recall_curve, roc_curve
from sklearn.svm import LinearSVC
from sklearn.linear_model import LogisticRegression
from sklearn.ensemble import RandomForestClassifier
from sklearn.preprocessing import StandardScaler
from collections import Counter
import sys
sys.path.append('../lib')
from dataset import Dataset
####################################################################################################
# #
# Configuration for Research Questions 1: #
# - Path to file with all-time dataset in CSV format #
# - Name of training and test sets used #
# - Features used for each of the five executions #
# - Target Name: Target-IsReviewer #
# - Evaluated undersampling rates for the majority class (Target-IsReviewer=FALSE) #
# - File where results are written #
# #
# In this Research Question, we evaluate how the prediction for Target-IsReviewer is #
# affected by different sets of features, different undersampling rates and different learners. #
# #
####################################################################################################
DATASET_CSV_PATH = '../raw_dataset.csv'
OUTPUT_CSV_FILE_NAME = 'rq1_is_reviewer_results.csv'
TRAINING_SET_NAME = 'training-Y1'
TEST_SET_NAME = 'test-T1a'
TARGET_COLUMN_NAME = 'Target-IsReviewer'
UNDERSAMPLING_RATES = [0.05, 0.10, 0.15, 0.20, 0.25, 0.50]
EXECUTIONS_FEATURES = {
'LOC': ['F1-ChangedLOC'],
'Baseline': ['F9-FileReviewXp'],
'CO': ['F4-IsMaintainer', 'F5-ModuleReviewXp', 'F6-ModuleModifXp', 'F9-FileReviewXp', 'F10-FileModifXp'],
'WL': ['F11-OngoingAsAuthor', 'F12-OngoingAsReviewer'],
'TS': ['F2-SameTeam', 'F3-SameLocation', 'F7-TeamReviewXp', 'F8-TeamModifXp'],
'CO+TS+WL': ['F2-SameTeam', 'F3-SameLocation', 'F4-IsMaintainer', 'F5-ModuleReviewXp',
'F6-ModuleModifXp', 'F7-TeamReviewXp', 'F8-TeamModifXp', 'F9-FileReviewXp',
'F10-FileModifXp', 'F11-OngoingAsAuthor', 'F12-OngoingAsReviewer'],
'All': ['F1-ChangedLOC', 'F2-SameTeam', 'F3-SameLocation', 'F4-IsMaintainer',
'F5-ModuleReviewXp', 'F6-ModuleModifXp', 'F7-TeamReviewXp', 'F8-TeamModifXp',
'F9-FileReviewXp', 'F10-FileModifXp', 'F11-OngoingAsAuthor', 'F12-OngoingAsReviewer'],
}
####################################################################################################
# #
# Run Research Question 1 tests. #
# Results are written to both terminal and output CSV file. #
# #
####################################################################################################
# Load training and test sets
dataset = Dataset(DATASET_CSV_PATH)
training_df, test_df = dataset.get_training_and_test_by_name(TRAINING_SET_NAME, TEST_SET_NAME)
output = open(OUTPUT_CSV_FILE_NAME, 'w+')
csv_header = "Execution,Sampling,Model,AUPRC,AUROC,Accuracy,F1,Precision,Recall"
output.write(csv_header + "\n")
# For each combination of features...
for execution, features in EXECUTIONS_FEATURES.items():
print('\n\n' + '=' * 100)
print('{}: {}'.format(execution, ', '.join(features)))
    # training and test sets
exec_training_features = training_df.reindex(columns=features)
exec_training_target = training_df[TARGET_COLUMN_NAME]
exec_test_features = test_df.reindex(columns=features)
exec_test_target = test_df[TARGET_COLUMN_NAME]
base_learners = {
'LinearSVC': LinearSVC(random_state=0, loss='squared_hinge', penalty='l1', C=1e-2, max_iter=1000, fit_intercept=True, dual=False, tol=1e-4),
'Random Forest': RandomForestClassifier(random_state=0, n_estimators=300, min_samples_leaf=10, criterion='gini', max_features=None, max_depth=None),
'Logistic Regression': LogisticRegression(random_state=0, penalty='l1', C=1e-3, max_iter=100, solver='saga', fit_intercept=True, dual=False, tol=1e-4),
}
print("{:14s} {:>8s} {:20s} {:>5s} {:>5s} {:>5s} {:>5s} {:>5s} {:>5s}".format(
'Execution', 'Sampling', 'Model', 'AUPRC', 'AUROC', 'ACC', 'F1', 'Prec.', 'Recall'))
# For each undersampling rate...
for undersampling_rate in UNDERSAMPLING_RATES:
# For each base learner...
for learner_name, learner in base_learners.items():
# undersample majority class, which is "False"
counter = Counter(exec_training_target)
true_count = counter[True]
false_count = int(counter[False] * undersampling_rate)
undersampler = RandomUnderSampler(
sampling_strategy={True: true_count, False: false_count})
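            # For example (illustrative numbers, not from the paper's data): with
            # 1,000 True rows, 100,000 False rows and a 0.05 rate, the sampler
            # keeps all 1,000 True rows and 5,000 False rows, so the training set
            # becomes roughly 17% positive instead of about 1%.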
# build pipeline
steps = [('under', undersampler), ('scale', StandardScaler()), ('learner', learner)]
pipeline = Pipeline(steps=steps)
pipeline.fit(exec_training_features, exec_training_target)
# prediction
predicted = pipeline.predict(exec_test_features)
# evaluation
acc = accuracy_score(exec_test_target, predicted)
precision, recall, f1, _ = precision_recall_fscore_support(
exec_test_target, predicted, average='binary', zero_division=0)
if hasattr(pipeline, "predict_proba"):
false_positive_rate, true_positive_rate, _ = roc_curve(
exec_test_target, pipeline.predict_proba(exec_test_features)[:, 1])
else:
false_positive_rate, true_positive_rate, _ = roc_curve(
exec_test_target, pipeline['learner']._predict_proba_lr(exec_test_features)[:, 1])
auroc = auc(false_positive_rate, true_positive_rate)
# precision-recall AUC
if precision == 0.0 and recall == 0.0 and f1 == 0.0:
f1 = 'ND'
auprc = 'ND'
else:
precision_, recall_, _ = precision_recall_curve(exec_test_target, predicted)
f1 = '{:.3f}'.format(f1)
                    # area under the precision-recall curve (x = recall, y = precision)
                    auprc = '{:.3f}'.format(auc(recall_, precision_))
output.write("{},{},{},{},{:.3f},{:.3f},{},{:.3f},{:.3f}\n".format(
execution, str(undersampling_rate*100) + "%", learner_name, auprc, auroc, acc,
f1, precision, recall))
print("{:14s} {:>8s} {:20s} {:>5} {:>.3f} {:>.3f} {:>5} {:>.3f} {:>.3f}".format(
execution, str(undersampling_rate*100) + "%", learner_name, auprc, auroc, acc,
f1, precision, recall))
output.flush()
output.close()
print("\n\nOutput written to: {}".format(OUTPUT_CSV_FILE_NAME))
|
py | b402c5c01b3d07030ca9f703587a0f14b937ac23 | # -*- coding: utf-8 -*-
# Licensed under the Apache License: http://www.apache.org/licenses/LICENSE-2.0
# For details: https://github.com/nedbat/coveragepy/blob/master/NOTICE.txt
#
# coverage.py documentation build configuration file, created by
# sphinx-quickstart on Wed May 13 22:18:33 2009.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.append(os.path.abspath('.'))
# on_rtd is whether we are on readthedocs.org
on_rtd = os.environ.get('READTHEDOCS', None) == 'True'
# -- General configuration -----------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.todo',
'sphinx.ext.ifconfig',
'sphinxcontrib.spelling',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Coverage.py'
copyright = u'2009\N{EN DASH}2019, Ned Batchelder.' # CHANGEME
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '5.0' # CHANGEME
# The full version, including alpha/beta/rc tags.
release = '5.0a5' # CHANGEME
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of documents that shouldn't be included in the build.
#unused_docs = []
# List of directories, relative to source directory, that shouldn't be searched
# for source files.
exclude_trees = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. Major themes that come with
# Sphinx are currently 'default' and 'sphinxdoc'.
#html_theme = 'default'
if not on_rtd: # only import and set the theme if we're building docs locally
import sphinx_rtd_theme
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
# otherwise, readthedocs.org uses their theme by default, so no need to specify it
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
#html_style = "neds.css"
#html_add_permalinks = ""
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_templates']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
html_use_modindex = False
# If false, no index is generated.
html_use_index = False
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
html_show_sourcelink = False
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# If nonempty, this is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = '.htm'
# Output file base name for HTML help builder.
htmlhelp_basename = 'coveragepydoc'
# -- Spelling ---
spelling_word_list_filename = 'dict.txt'
spelling_show_suggestions = False
# When auto-doc'ing a class, write the class' docstring and the __init__ docstring
# into the class docs.
autoclass_content = "class"
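# max() of the release string returns its highest character; letters sort after digits
# and dots, so any alpha/beta/rc suffix (e.g. '5.0a5' -> 'a') makes prerelease True.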
prerelease = bool(max(release).isalpha())
def setup(app):
app.add_stylesheet('coverage.css')
app.add_config_value('prerelease', False, 'env')
print("** Prerelease = %r" % prerelease)
|
py | b402c6785e97fa3b3a841b0c8b14616b65801424 | """Modules in this directory provide a "functional" API for rule writing.
Wikipedia defines functional programming (https://en.wikipedia.org/wiki/Functional_programming)
as a declarative programming paradigm where code is built by applying and composing functions.
The modules in this API provide classes and predicates for working with segments
and slices. The API is loosely inspired by packages such as Pandas and Numpy.
These classes provide a simpler, higher-level API for writing rules, resulting
in shorter, simpler, easier-to-read code. Rules can use these classes, the
lower-level classes, or a mix, but it is suggested that each rule primarily
use one or the other for readability.
"""
__all__ = ("Segments", "rsp", "sp", "tsp")
from sqlfluff.core.rules.functional.segments import Segments
import sqlfluff.core.rules.functional.raw_file_slice_predicates as rsp
import sqlfluff.core.rules.functional.templated_file_slice_predicates as tsp
import sqlfluff.core.rules.functional.segment_predicates as sp
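# A minimal usage sketch (illustrative only; the exact objects a rule receives depend on
# the rule base class, and `context.segment` below is assumed, not part of this module):
#
#     children = Segments(*context.segment.segments)
#     commas = children.select(sp.is_type("comma"))
#
# i.e. wrap raw segments in a Segments collection, then filter it with a predicate.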
|
py | b402c6b8e2fb3525618e1485bbd3ad6d02de586a | import unittest
import random
from selfdrive.can.tests.packer_old import CANPacker as CANPackerOld
from selfdrive.can.packer import CANPacker
import selfdrive.car.hyundai.hyundaican as hyundaican
from selfdrive.car.hyundai.values import CHECKSUM as hyundai_checksum
class TestPackerMethods(unittest.TestCase):
def setUp(self):
self.hyundai_cp_old = CANPackerOld("hyundai_kia_generic")
self.hyundai_cp = CANPacker("hyundai_kia_generic")
def test_correctness(self):
# Test all commands, randomize the params.
for _ in xrange(1000):
# Hyundai
car_fingerprint = hyundai_checksum["crc8"][0]
apply_steer = (random.randint(0, 2) % 2 == 0)
steer_req = (random.randint(0, 2) % 2 == 0)
cnt = random.randint(0, 65536)
enabled = (random.randint(0, 2) % 2 == 0)
lkas11 = {
"CF_Lkas_LdwsSysState": random.randint(0,65536),
"CF_Lkas_SysWarning": random.randint(0,65536),
"CF_Lkas_LdwsLHWarning": random.randint(0,65536),
"CF_Lkas_LdwsRHWarning": random.randint(0,65536),
"CF_Lkas_HbaLamp": random.randint(0,65536),
"CF_Lkas_FcwBasReq": random.randint(0,65536),
"CF_Lkas_ToiFlt": random.randint(0,65536),
"CF_Lkas_HbaSysState": random.randint(0,65536),
"CF_Lkas_FcwOpt": random.randint(0,65536),
"CF_Lkas_HbaOpt": random.randint(0,65536),
"CF_Lkas_FcwSysState": random.randint(0,65536),
"CF_Lkas_FcwCollisionWarning": random.randint(0,65536),
"CF_Lkas_FusionState": random.randint(0,65536),
"CF_Lkas_FcwOpt_USM": random.randint(0,65536),
"CF_Lkas_LdwsOpt_USM": random.randint(0,65536)
}
hud_alert = random.randint(0, 65536)
keep_stock = (random.randint(0, 2) % 2 == 0)
m_old = hyundaican.create_lkas11(self.hyundai_cp_old, car_fingerprint, apply_steer, steer_req, cnt, enabled,
lkas11, hud_alert, keep_stock)
m = hyundaican.create_lkas11(self.hyundai_cp, car_fingerprint, apply_steer, steer_req, cnt, enabled,
lkas11, hud_alert, keep_stock)
self.assertEqual(m_old, m)
clu11 = {
"CF_Clu_CruiseSwState": random.randint(0,65536),
"CF_Clu_CruiseSwMain": random.randint(0,65536),
"CF_Clu_SldMainSW": random.randint(0,65536),
"CF_Clu_ParityBit1": random.randint(0,65536),
"CF_Clu_VanzDecimal": random.randint(0,65536),
"CF_Clu_Vanz": random.randint(0,65536),
"CF_Clu_SPEED_UNIT": random.randint(0,65536),
"CF_Clu_DetentOut": random.randint(0,65536),
"CF_Clu_RheostatLevel": random.randint(0,65536),
"CF_Clu_CluInfo": random.randint(0,65536),
"CF_Clu_AmpInfo": random.randint(0,65536),
"CF_Clu_AliveCnt1": random.randint(0,65536),
}
button = random.randint(0, 65536)
m_old = hyundaican.create_clu11(self.hyundai_cp_old, clu11, button, cnt)
m = hyundaican.create_clu11(self.hyundai_cp, clu11, button, cnt)
self.assertEqual(m_old, m)
if __name__ == "__main__":
unittest.main()
|
py | b402c7a102fd866a4d38f1871256887cbd40e3d2 | #TODO 1. Create a dictionary in this format:
#TODO 2. Create a list of the phonetic code words from a word that the user inputs.
import pandas as pd
# Importing data_frame
data_frame = pd.read_csv("nato_phonetic_alphabet.csv")
# Creating a dictionary of the letters and each corresponding code
nato_dict = {row.letter: row.code for (index, row) in data_frame.iterrows()}
# Creating a list of the phonetic code words from a word that the user inputs.
is_letter = True
while is_letter:
try:
user_answer = input("Enter a word: ").upper()
nato_list = [nato_dict[letter] for letter in user_answer]
is_letter = False
    except KeyError:
        print("Sorry! Only letters in the alphabet, please.")
print(nato_list) |
py | b402c8d248f72ba2539c2a44c84d7d4aa6484c12 | """
Copyright (c) 2019 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import numpy as np
from scipy.ndimage import interpolation
from ..config import ConfigError, BaseField, NumberField, ListField, StringField
from ..preprocessor import Preprocessor
from ..utils import get_or_parse_value
class Resize3D(Preprocessor):
__provider__ = 'resize3d'
@classmethod
def parameters(cls):
parameters = super().parameters()
parameters.update({
'size': BaseField(optional=True, description='Specifies resize'),
})
return parameters
def configure(self):
self.shape = self._check_size(
get_or_parse_value(self.config.get('size'), default=(128, 128, 128), casting_type=int))
def process(self, image, annotation_meta=None):
data = np.asarray(image.data)
shape = self.shape if len(data.shape) == 3 else (data.shape[0],) + self.shape
if len(data.shape) != len(shape):
raise RuntimeError('Shape of original data and resize shape are mismatched for {} preprocessor '
'(data shape - {}, resize shape - {})'.format(self.__provider__, data.shape, shape))
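        # Per-axis zoom factor is target size / source size; order=1 means (tri)linear interpolation.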
factor = [float(o) / i for i, o in zip(data.shape, shape)]
image.data = interpolation.zoom(data, zoom=factor, order=1)
return image
def _check_size(self, size):
if len(size) != 3:
raise ConfigError("Incorrect size dimenstion for {} - must be 3, but {} found"
.format(self.__provider__, len(size)))
if not all(np.array(size) > 0):
raise ConfigError("Size must be positive value for {}, but {} found".format(self.__provider__, size))
return size
class CropBraTS(Preprocessor):
__provider__ = 'crop_brats'
def process(self, image, annotation_meta=None):
def bbox3(img):
            # Find the indexes of non-zero voxels along axes 0, 1 and 2, respectively
nonzero_across_axis_0 = np.any(img, axis=(1, 2)).nonzero()
nonzero_across_axis_1 = np.any(img, axis=(0, 2)).nonzero()
nonzero_across_axis_2 = np.any(img, axis=(0, 1)).nonzero()
nonzero_across_axis_0 = nonzero_across_axis_0[0]
nonzero_across_axis_1 = nonzero_across_axis_1[0]
nonzero_across_axis_2 = nonzero_across_axis_2[0]
            # If any axis contains only zero voxels, then the image is blank
bbox = np.array([[-1, -1, -1], [0, 0, 0]])
if nonzero_across_axis_0.size == 0:
return bbox
bbox[:, 0] = nonzero_across_axis_0[[0, -1]]
bbox[:, 1] = nonzero_across_axis_1[[0, -1]]
bbox[:, 2] = nonzero_across_axis_2[[0, -1]]
return bbox
bboxes = np.zeros((image.data.shape[0],) + (2, 3))
for i in range(image.data.shape[0]):
bboxes[i] = bbox3(image.data[i, :, :, :])
bbox_min = np.min(bboxes[:, 0, :], axis=0).ravel().astype(int)
bbox_max = np.max(bboxes[:, 1, :], axis=0).ravel().astype(int)
bbox = np.zeros((2, 3), dtype=int)
bbox[0] = bbox_min
bbox[1] = bbox_max
image.data = image.data[:, bbox[0, 0]:bbox[1, 0], bbox[0, 1]:bbox[1, 1], bbox[0, 2]:bbox[1, 2]]
image.metadata['box'] = bbox
return image
class NormalizeBrats(Preprocessor):
__provider__ = "normalize_brats"
_MASK_OPTIONS = {
'none': 0,
'nullify': 1,
'ignore': 2,
'all': 3,
}
@classmethod
def parameters(cls):
parameters = super().parameters()
parameters.update({
'masked': StringField(optional=True, choices=NormalizeBrats._MASK_OPTIONS.keys(),
                                  default='none',
description='Does not apply normalization to zero values. '
'Applicable for brain tumor segmentation models'),
'cutoff': NumberField(optional=True, default=0, min_value=0,
                                  description='Specifies the range of values - [-cutoff, cutoff]'),
'shift_value': NumberField(optional=True, default=0, description='Specifies shift value'),
'normalize_value': NumberField(optional=True, default=1, description='Specifies normalize value')
})
return parameters
def configure(self):
self.masked = NormalizeBrats._MASK_OPTIONS[self.get_value_from_config('masked')]
self.cutoff = self.get_value_from_config('cutoff')
self.shift_value = self.get_value_from_config('shift_value')
self.normalize_value = self.get_value_from_config('normalize_value')
def process(self, image, annotation_meta=None):
image.data = self.normalize_img(image.data)
return image
def normalize_img(self, image):
for channel in range(image.shape[0]):
img = image[channel, :, :, :].copy()
if self.masked in (2, 3):
mask = img > 0
image_masked = np.ma.masked_array(img, ~mask)
mean, std = np.mean(image_masked), np.std(image_masked)
else:
mean, std = np.mean(img), np.std(img)
img -= mean
img /= std
if self.cutoff > 0:
img = np.clip(img, -self.cutoff, self.cutoff) # pylint: disable=E1130
img += self.shift_value
img /= self.normalize_value
if self.masked in (1, 3):
mask = image[channel, :, :, :] > 0
img[~mask] = 0
image[channel, :, :, :] = img
return image
class SwapModalitiesBrats(Preprocessor):
__provider__ = 'swap_modalities'
@classmethod
def parameters(cls):
parameters = super().parameters()
parameters.update({
'modality_order': ListField(value_type=NumberField(value_type=int, min_value=0, max_value=3),
validate_values=True,
description="Specifies order of modality according to model input")
})
return parameters
def configure(self):
self.modal_order = self.get_value_from_config('modality_order')
if len(self.modal_order) != 4:
raise ConfigError('{} supports only 4 modality, but found {}'
.format(self.__provider__, len(self.modal_order)))
if len(self.modal_order) != len(set(self.modal_order)):
raise ConfigError('Incorrect modality index found in {} for {}. Indexes must be unique'
.format(self.modal_order, self.__provider__))
def process(self, image, annotation_meta=None):
image.data = self.swap_modalities(image.data)
return image
def swap_modalities(self, image):
image = image[self.modal_order, :, :, :]
return image
|
py | b402c8e65961ff4dab35df860e0f0280f29beae7 | from threading import local
from attrdict import AttrDict
from uuid import uuid4
from contextlib import ContextDecorator
from django.conf import settings as django_settings
from security.config import settings
class SecurityLogger(ContextDecorator, local):
loggers = []
name = None
store = True
def __init__(self, id=None, related_objects=None, slug=None, data=None, extra_data=None):
self.id = id or (uuid4() if self.name else None)
self.parent = SecurityLogger.loggers[-1] if SecurityLogger.loggers else None
self.related_objects = set(related_objects) if related_objects else set()
self.slug = slug
if self.parent:
self.related_objects |= self.parent.related_objects
if not self.slug:
self.slug = self.parent.slug
self.data = {}
if data:
self.data.update(data)
self.parent_with_id = self._get_parent_with_id()
self.extra_data = extra_data
if self.extra_data is None:
self.extra_data = self.parent.extra_data if self.parent else {}
if self.store:
SecurityLogger.loggers.append(self)
if 'reversion' in django_settings.INSTALLED_APPS:
from reversion.signals import post_revision_commit
post_revision_commit.connect(self._post_revision_commit)
self.backend_logs = AttrDict()
def _get_parent_with_id(self):
parent = self.parent
while parent and not parent.id:
parent = parent.parent
return parent
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
if self.store:
self.close()
def set_slug(self, slug):
self.slug = slug
def add_related_objects(self, *related_objects):
self.related_objects |= set(related_objects)
def update_extra_data(self, data):
self.extra_data.update(data)
def close(self):
if not SecurityLogger.loggers or SecurityLogger.loggers[-1] != self:
raise RuntimeError('Log already finished')
else:
SecurityLogger.loggers.pop()
if 'reversion' in django_settings.INSTALLED_APPS:
from reversion.signals import post_revision_commit
post_revision_commit.disconnect(self._post_revision_commit)
def _post_revision_commit(self, **kwargs):
"""
Called as a post save of revision model of the reversion library.
If log context manager is active input logged request, command
log or celery task run log is joined with revision via related objects.
"""
revision = kwargs['revision']
self.related_objects.add(revision)
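# A minimal usage sketch (illustrative; `some_instance` is a hypothetical model object):
#
#     with SecurityLogger(slug='user-import', related_objects=[some_instance]) as logger:
#         logger.update_extra_data({'source': 'csv'})
#         ...  # revisions committed here get attached through related_objects
#
# Nested loggers inherit the parent's related objects and slug, and subclasses that set
# `name` receive an auto-generated uuid4 id.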
|
py | b402c8fc9d1ee66273c510e0679fe79642abe4ae | # coding: utf-8
"""
Account Management API
API for managing accounts, users, creating API keys, uploading trusted certificates
OpenAPI spec version: v3
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class TrustedCertificateRootReq(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'certificate': 'str',
'description': 'str',
'enrollment_mode': 'bool',
'name': 'str',
'service': 'str',
'signature': 'str',
'status': 'str'
}
attribute_map = {
'certificate': 'certificate',
'description': 'description',
'enrollment_mode': 'enrollment_mode',
'name': 'name',
'service': 'service',
'signature': 'signature',
'status': 'status'
}
def __init__(self, certificate=None, description=None, enrollment_mode=None, name=None, service=None, signature=None, status=None):
"""
TrustedCertificateRootReq - a model defined in Swagger
"""
self._certificate = certificate
self._description = description
self._enrollment_mode = enrollment_mode
self._name = name
self._service = service
self._signature = signature
self._status = status
self.discriminator = None
@property
def certificate(self):
"""
Gets the certificate of this TrustedCertificateRootReq.
A chain of X509.v3 trusted certificates in PEM format. The chain must contain all certificates from root to leaf. Otherwise, the signature parameter is required.
:return: The certificate of this TrustedCertificateRootReq.
:rtype: str
"""
return self._certificate
@certificate.setter
def certificate(self, certificate):
"""
Sets the certificate of this TrustedCertificateRootReq.
A chain of X509.v3 trusted certificates in PEM format. The chain must contain all certificates from root to leaf. Otherwise, the signature parameter is required.
:param certificate: The certificate of this TrustedCertificateRootReq.
:type: str
"""
if certificate is None:
raise ValueError("Invalid value for `certificate`, must not be `None`")
self._certificate = certificate
@property
def description(self):
"""
Gets the description of this TrustedCertificateRootReq.
Human readable description of this certificate, not longer than 500 characters.
:return: The description of this TrustedCertificateRootReq.
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""
Sets the description of this TrustedCertificateRootReq.
Human readable description of this certificate, not longer than 500 characters.
:param description: The description of this TrustedCertificateRootReq.
:type: str
"""
self._description = description
@property
def enrollment_mode(self):
"""
Gets the enrollment_mode of this TrustedCertificateRootReq.
Certificate is used in enrollment mode. Default value is false.
:return: The enrollment_mode of this TrustedCertificateRootReq.
:rtype: bool
"""
return self._enrollment_mode
@enrollment_mode.setter
def enrollment_mode(self, enrollment_mode):
"""
Sets the enrollment_mode of this TrustedCertificateRootReq.
Certificate is used in enrollment mode. Default value is false.
:param enrollment_mode: The enrollment_mode of this TrustedCertificateRootReq.
:type: bool
"""
self._enrollment_mode = enrollment_mode
@property
def name(self):
"""
Gets the name of this TrustedCertificateRootReq.
Certificate name, not longer than 100 characters.
:return: The name of this TrustedCertificateRootReq.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this TrustedCertificateRootReq.
Certificate name, not longer than 100 characters.
:param name: The name of this TrustedCertificateRootReq.
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`")
self._name = name
@property
def service(self):
"""
Gets the service of this TrustedCertificateRootReq.
Service name where the certificate must be used.
:return: The service of this TrustedCertificateRootReq.
:rtype: str
"""
return self._service
@service.setter
def service(self, service):
"""
Sets the service of this TrustedCertificateRootReq.
Service name where the certificate must be used.
:param service: The service of this TrustedCertificateRootReq.
:type: str
"""
if service is None:
raise ValueError("Invalid value for `service`, must not be `None`")
allowed_values = ["lwm2m", "bootstrap"]
if service not in allowed_values:
raise ValueError(
"Invalid value for `service` ({0}), must be one of {1}"
.format(service, allowed_values)
)
self._service = service
@property
def signature(self):
"""
Gets the signature of this TrustedCertificateRootReq.
DEPRECATED: Base64 encoded signature of the account ID signed by the certificate to be uploaded. The signature must be hashed with SHA256.
:return: The signature of this TrustedCertificateRootReq.
:rtype: str
"""
return self._signature
@signature.setter
def signature(self, signature):
"""
Sets the signature of this TrustedCertificateRootReq.
DEPRECATED: Base64 encoded signature of the account ID signed by the certificate to be uploaded. The signature must be hashed with SHA256.
:param signature: The signature of this TrustedCertificateRootReq.
:type: str
"""
self._signature = signature
@property
def status(self):
"""
Gets the status of this TrustedCertificateRootReq.
Status of the certificate.
:return: The status of this TrustedCertificateRootReq.
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""
Sets the status of this TrustedCertificateRootReq.
Status of the certificate.
:param status: The status of this TrustedCertificateRootReq.
:type: str
"""
allowed_values = ["ACTIVE", "INACTIVE"]
if status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}"
.format(status, allowed_values)
)
self._status = status
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, TrustedCertificateRootReq):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
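# A minimal usage sketch (illustrative values only; validation of `service`/`status`
# happens in the property setters):
#
#     cert = TrustedCertificateRootReq(
#         certificate="-----BEGIN CERTIFICATE-----...",
#         name="my-root-ca",
#         service="lwm2m",       # allowed: "lwm2m", "bootstrap"
#         status="ACTIVE",       # allowed: "ACTIVE", "INACTIVE"
#     )
#     payload = cert.to_dict()   # plain dict representation of the model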
|
py | b402c9a3c42c9608911d51cfd5d8c523d17f687c | import os
import numpy as np
import tensorflow as tf
from tic_tac_toe.agent import TicTacToeAgent
from tic_tac_toe.environment import EpisodeStateCode, TickTacToeEnvironment
class AgentData:
def __init__(self, id, model, n_wins=0, last_action=None, last_observation=None):
self.id = id
self.model = model
self.last_action = last_action
self.last_observation = last_observation
self.n_wins = n_wins
class TicTacToeAgentTrainer:
def __init__(self, sess, env, agent1_model, agent2_model, weights_folder_path,
win_reward=5,
lose_reward=-5,
draw_reward=-1,
step_reward=0,
epsilon_exploration=1.0,
epsilon_minimum=0.05,
epsilon_decay=0.000001):
self.sess = sess
self.env = env
self.agent1_data = AgentData(id=1, model=agent1_model)
self.agent2_data = AgentData(id=2, model=agent2_model)
self.unique_games = []
self.win_reward = win_reward
self.lose_reward = lose_reward
self.draw_reward = draw_reward
self.step_reward = step_reward
self._create_agent_sync_op(agent1_model, agent2_model)
self.weights_saver = tf.train.Saver(save_relative_paths=True)
self.weights_path = os.path.join(weights_folder_path, 'weights', 'tic_tac_toe.ckpt')
self.save_frequency = 20
self.epsilon_exploration = epsilon_exploration
self.epsilon_minimum = epsilon_minimum
self.epsilon_decay = epsilon_decay
def _create_agent_sync_op(self, agent1, agent2):
self.agent1_to_2_sync = [tf.assign(a2, a1) for a2, a1 in zip(agent2.get_params(), agent1.get_params())]
self.agent2_to_1_sync = [tf.assign(a1, a2) for a1, a2 in zip(agent1.get_params(), agent2.get_params())]
def _sync_agents(self, episode, agent1, agent2):
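        # Every 100 episodes copy the weights of whichever agent has more wins onto the
        # other one, so both networks keep training against the strongest policy so far.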
if episode % 100 == 0:
if agent1.n_wins > agent2.n_wins:
self.sess.run(self.agent1_to_2_sync)
else:
self.sess.run(self.agent2_to_1_sync)
def _render(self):
n_unique_games = len(self.unique_games)
if n_unique_games > 5000 and n_unique_games % 1000 == 0:
self.env.render()
def _print_statistics_and_save_models(self, episode, episode_state_code):
unique_game = True
for game in self.unique_games:
if np.array_equal(game, self.env.game_cells):
unique_game = False
break
if unique_game:
self.unique_games.append(self.env.game_cells)
print('Ep:', episode,
'\tAgent1 wins:', self.agent1_data.n_wins,
'\tAgent2 wins:', self.agent2_data.n_wins,
'\tOutcome:', EpisodeStateCode(episode_state_code).name,
'\tUnique games:', len(self.unique_games),
'\te:', self.epsilon_exploration)
if len(self.unique_games) % self.save_frequency == 0:
self.weights_saver.save(self.sess, self.weights_path)
print('Models saved')
def _store_win_memory(self, current_agent, competitor_agent, current_state, next_state, episode_state_code):
if episode_state_code == EpisodeStateCode.WIN:
current_agent.n_wins += 1
current_agent_reward, competitor_agent_reward = self.win_reward, self.lose_reward
else:
current_agent_reward, competitor_agent_reward = self.draw_reward, self.draw_reward
current_agent.model.memory.remember(current_state,
current_agent.last_action,
current_agent_reward, next_state)
competitor_agent.model.memory.remember(competitor_agent.last_observation,
competitor_agent.last_action,
competitor_agent_reward, next_state)
def train(self, n_episodes, sync_agents=False, render=False):
for episode in range(n_episodes):
self.agent1_data.last_action = None
self.agent2_data.last_action = None
self.agent1_data.last_observation, self.agent2_data.last_observation = None, None
agents = [self.agent1_data, self.agent2_data] if episode % 2 == 0 else [self.agent2_data,
self.agent1_data]
step_counter = 1
current_state = self.env.reset()
while True:
current_agent, competitor_agent = agents[0], agents[1]
if render:
self._render()
current_agent.last_action = current_agent.model.act(current_state, self.epsilon_exploration)
next_state, episode_state_code = self.env.step(current_agent.last_action, current_agent.id,
competitor_agent.id)
if episode_state_code != EpisodeStateCode.IN_PROGRESS:
if render:
self._render()
if sync_agents:
self._sync_agents(episode, current_agent, competitor_agent)
self._store_win_memory(current_agent, competitor_agent,
current_state, next_state, episode_state_code)
self._print_statistics_and_save_models(episode, episode_state_code)
if episode > 1:
current_agent.model.update()
competitor_agent.model.update()
if self.epsilon_exploration > self.epsilon_minimum:
self.epsilon_exploration -= self.epsilon_decay
if episode_state_code != EpisodeStateCode.IN_PROGRESS:
break
if step_counter > 2:
competitor_agent.model.memory.remember(competitor_agent.last_observation,
competitor_agent.last_action,
self.step_reward, next_state)
current_agent.last_observation = current_state
current_state = next_state
agents.reverse()
step_counter += 1
def dqn_agents_train():
env = TickTacToeEnvironment()
with tf.Session() as sess:
agent1 = TicTacToeAgent(env, sess, 'agent1')
agent2 = TicTacToeAgent(env, sess, 'agent2')
sess.run(tf.global_variables_initializer())
weights_folder_path = os.path.dirname(os.path.abspath(__file__))
agent_trainer = TicTacToeAgentTrainer(sess, env, agent1, agent2, weights_folder_path)
agent_trainer.train(1000000, render=True)
if __name__ == "__main__":
dqn_agents_train()
|
py | b402cb6cd66dd457e93aeda4f72e0357083621d2 | import sys
from unittest import TestCase, SkipTest, expectedFailure
from packaging.version import Version
import numpy as np
import pandas as pd
import holoviews as hv
class TestGeo(TestCase):
def setUp(self):
if sys.platform == "win32":
raise SkipTest("Skip geo tests on windows for now")
try:
import xarray as xr
import rasterio # noqa
import geoviews # noqa
import cartopy.crs as ccrs
except:
raise SkipTest('xarray, rasterio, geoviews, or cartopy not available')
import hvplot.xarray # noqa
import hvplot.pandas # noqa
self.da = (xr.open_rasterio(
'https://github.com/mapbox/rasterio/raw/master/tests/data/RGB.byte.tif')
.sel(band=1))
self.crs = ccrs.epsg(self.da.crs.split('epsg:')[1])
def assertCRS(self, plot, proj='utm'):
import cartopy
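        # cartopy 0.20 rebased CRS objects on pyproj, so newer versions are checked via
        # the pyproj to_dict() representation instead of the older proj4_params mapping.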
if Version(cartopy.__version__) < Version('0.20'):
assert plot.crs.proj4_params['proj'] == proj
else:
assert plot.crs.to_dict()['proj'] == proj
def assert_projection(self, plot, proj):
opts = hv.Store.lookup_options('bokeh', plot, 'plot')
assert opts.kwargs['projection'].proj4_params['proj'] == proj
class TestCRSInference(TestGeo):
def setUp(self):
if sys.platform == "win32":
raise SkipTest("Skip CRS inference on Windows")
super().setUp()
def test_plot_with_crs_as_proj_string(self):
plot = self.da.hvplot.image('x', 'y', crs=self.da.crs)
self.assertCRS(plot)
def test_plot_with_geo_as_true_crs_undefined(self):
plot = self.da.hvplot.image('x', 'y', geo=True)
self.assertCRS(plot)
class TestProjections(TestGeo):
def test_plot_with_crs_as_object(self):
plot = self.da.hvplot.image('x', 'y', crs=self.crs)
self.assertCRS(plot)
def test_plot_with_crs_as_attr_str(self):
da = self.da.copy()
da.attrs = {'bar': self.crs}
plot = da.hvplot.image('x', 'y', crs='bar')
self.assertCRS(plot)
def test_plot_with_crs_as_nonexistent_attr_str(self):
# Used to test crs='foo' but this is parsed under-the-hood
# by PROJ (projinfo) which matches a geographic projection named
# 'Amersfoort'
with self.assertRaisesRegex(ValueError, "'name_of_some_invalid_projection' must be"):
self.da.hvplot.image('x', 'y', crs='name_of_some_invalid_projection')
def test_plot_with_geo_as_true_crs_no_crs_on_data_returns_default(self):
da = self.da.copy()
da.attrs = {'bar': self.crs}
plot = da.hvplot.image('x', 'y', geo=True)
self.assertCRS(plot, 'eqc')
def test_plot_with_projection_as_string(self):
da = self.da.copy()
plot = da.hvplot.image('x', 'y', crs=self.crs, projection='Robinson')
self.assert_projection(plot, 'robin')
def test_plot_with_projection_as_string_google_mercator(self):
da = self.da.copy()
plot = da.hvplot.image('x', 'y', crs=self.crs, projection='GOOGLE_MERCATOR')
self.assert_projection(plot, 'merc')
def test_plot_with_projection_as_invalid_string(self):
with self.assertRaisesRegex(ValueError, "Projection must be defined"):
self.da.hvplot.image('x', 'y', projection='foo')
class TestGeoAnnotation(TestCase):
def setUp(self):
try:
import geoviews # noqa
import cartopy.crs as ccrs # noqa
except:
raise SkipTest('geoviews or cartopy not available')
import hvplot.pandas # noqa
self.crs = ccrs.PlateCarree()
self.df = pd.DataFrame(np.random.rand(10, 2), columns=['x', 'y'])
def test_plot_with_coastline(self):
import geoviews as gv
plot = self.df.hvplot.points('x', 'y', geo=True, coastline=True)
self.assertEqual(len(plot), 2)
coastline = plot.get(1)
self.assertIsInstance(coastline, gv.Feature)
def test_plot_with_coastline_sets_geo_by_default(self):
import geoviews as gv
plot = self.df.hvplot.points('x', 'y', coastline=True)
self.assertEqual(len(plot), 2)
coastline = plot.get(1)
self.assertIsInstance(coastline, gv.Feature)
def test_plot_with_coastline_scale(self):
plot = self.df.hvplot.points('x', 'y', geo=True, coastline='10m')
opts = plot.get(1).opts.get('plot')
self.assertEqual(opts.kwargs, {'scale': '10m'})
def test_plot_with_tiles(self):
plot = self.df.hvplot.points('x', 'y', geo=True, tiles=True)
self.assertEqual(len(plot), 2)
self.assertIsInstance(plot.get(0), hv.Tiles)
self.assertIn('openstreetmap', plot.get(0).data)
def test_plot_with_specific_tiles(self):
plot = self.df.hvplot.points('x', 'y', geo=True, tiles='ESRI')
self.assertEqual(len(plot), 2)
self.assertIsInstance(plot.get(0), hv.Tiles)
self.assertIn('ArcGIS', plot.get(0).data)
def test_plot_with_specific_tile_class(self):
plot = self.df.hvplot.points('x', 'y', geo=True, tiles=hv.element.tiles.EsriImagery)
self.assertEqual(len(plot), 2)
self.assertIsInstance(plot.get(0), hv.Tiles)
self.assertIn('ArcGIS', plot.get(0).data)
def test_plot_with_specific_tile_obj(self):
plot = self.df.hvplot.points('x', 'y', geo=True, tiles=hv.element.tiles.EsriImagery())
self.assertEqual(len(plot), 2)
self.assertIsInstance(plot.get(0), hv.Tiles)
self.assertIn('ArcGIS', plot.get(0).data)
def test_plot_with_specific_gv_tile_obj(self):
import geoviews as gv
plot = self.df.hvplot.points('x', 'y', geo=True, tiles=gv.tile_sources.CartoDark)
self.assertEqual(len(plot), 2)
self.assertIsInstance(plot.get(0), gv.element.WMTS)
class TestGeoElements(TestCase):
def setUp(self):
try:
import geoviews # noqa
import cartopy.crs as ccrs # noqa
except:
raise SkipTest('geoviews or cartopy not available')
import hvplot.pandas # noqa
self.crs = ccrs.PlateCarree()
self.df = pd.DataFrame(np.random.rand(10, 2), columns=['x', 'y'])
def test_geo_hexbin(self):
hextiles = self.df.hvplot.hexbin('x', 'y', geo=True)
self.assertEqual(hextiles.crs, self.crs)
def test_geo_points(self):
points = self.df.hvplot.points('x', 'y', geo=True)
self.assertEqual(points.crs, self.crs)
def test_geo_points_color_internally_set_to_dim(self):
altered_df = self.df.copy().assign(red=np.random.choice(['a', 'b'], len(self.df)))
plot = altered_df.hvplot.points('x', 'y', c='red', geo=True)
opts = hv.Store.lookup_options('bokeh', plot, 'style')
self.assertIsInstance(opts.kwargs['color'], hv.dim)
self.assertEqual(opts.kwargs['color'].dimension.name, 'red')
def test_geo_opts(self):
points = self.df.hvplot.points('x', 'y', geo=True)
opts = hv.Store.lookup_options('bokeh', points, 'plot').kwargs
self.assertEqual(opts.get('data_aspect'), 1)
self.assertEqual(opts.get('width'), None)
def test_geo_opts_with_width(self):
points = self.df.hvplot.points('x', 'y', geo=True, width=200)
opts = hv.Store.lookup_options('bokeh', points, 'plot').kwargs
self.assertEqual(opts.get('data_aspect'), 1)
self.assertEqual(opts.get('width'), 200)
self.assertEqual(opts.get('height'), None)
class TestGeoPandas(TestCase):
def setUp(self):
try:
import geopandas as gpd # noqa
import geoviews # noqa
import cartopy.crs as ccrs # noqa
except:
raise SkipTest('geopandas, geoviews, or cartopy not available')
import hvplot.pandas # noqa
geometry = gpd.points_from_xy(
x=[12.45339, 12.44177, 9.51667, 6.13000, 158.14997],
y=[41.90328, 43.93610, 47.13372, 49.61166, 6.91664],
crs='EPSG:4326'
)
names = ['Vatican City', 'San Marino', 'Vaduz', 'Luxembourg', 'Palikir']
self.cities = gpd.GeoDataFrame(dict(name=names), geometry=geometry)
def test_points_hover_cols_is_empty_by_default(self):
points = self.cities.hvplot()
assert points.kdims == ['x', 'y']
assert points.vdims == []
def test_points_hover_cols_does_not_include_geometry_when_all(self):
points = self.cities.hvplot(x='x', y='y', hover_cols='all')
assert points.kdims == ['x', 'y']
assert points.vdims == ['index', 'name']
def test_points_hover_cols_when_all_and_use_columns_is_false(self):
points = self.cities.hvplot(x='x', hover_cols='all', use_index=False)
assert points.kdims == ['x', 'y']
assert points.vdims == ['name']
def test_points_hover_cols_index_in_list(self):
points = self.cities.hvplot(y='y', hover_cols=['index'])
assert points.kdims == ['x', 'y']
assert points.vdims == ['index']
def test_points_hover_cols_with_c_set_to_name(self):
points = self.cities.hvplot(c='name')
assert points.kdims == ['x', 'y']
assert points.vdims == ['name']
opts = hv.Store.lookup_options('bokeh', points, 'style').kwargs
assert opts['color'] == 'name'
@expectedFailure
def test_points_hover_cols_with_by_set_to_name(self):
points = self.cities.hvplot(by='name')
assert points.kdims == ['x', 'y']
assert points.vdims == ['name']
|
py | b402cc6017ad03c27987511bcccaac78af7a2467 | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
import hashlib
from ccxt.base.errors import ExchangeError
from ccxt.base.errors import AuthenticationError
from ccxt.base.errors import PermissionDenied
from ccxt.base.errors import AccountSuspended
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import BadSymbol
from ccxt.base.errors import InsufficientFunds
from ccxt.base.errors import InvalidAddress
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import CancelPending
from ccxt.base.errors import NotSupported
from ccxt.base.errors import DDoSProtection
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import OnMaintenance
from ccxt.base.errors import InvalidNonce
from ccxt.base.errors import RequestTimeout
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class bitget(Exchange):
def describe(self):
return self.deep_extend(super(bitget, self).describe(), {
'id': 'bitget',
'name': 'Bitget',
'countries': ['SG'],
'version': 'v3',
'rateLimit': 1000, # up to 3000 requests per 5 minutes ≈ 600 requests per minute ≈ 10 requests per second ≈ 100 ms
'has': {
'CORS': None,
'spot': True,
'margin': False,
'swap': None, # has but unimplemented
'future': None, # has but unimplemented
'option': False,
'cancelOrder': True,
'cancelOrders': True,
'createOrder': True,
'fetchAccounts': True,
'fetchBalance': True,
'fetchBorrowRate': False,
'fetchBorrowRateHistories': False,
'fetchBorrowRateHistory': False,
'fetchBorrowRates': False,
'fetchBorrowRatesPerSymbol': False,
'fetchClosedOrders': True,
'fetchCurrencies': True,
'fetchDeposits': True,
'fetchMarkets': True,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrderTrades': True,
'fetchPosition': True,
'fetchPositions': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': True,
'fetchTrades': True,
'fetchWithdrawals': True,
},
'timeframes': {
'1m': '1m',
'5m': '5m',
'15m': '15m',
'30m': '30m',
'1h': '1h',
'2h': '2h',
'4h': '4h',
'6h': '6h',
'12h': '12h',
'1d': '1d',
'1w': '1w',
},
'hostname': 'bitget.com',
'urls': {
'logo': 'https://user-images.githubusercontent.com/51840849/88317935-a8a21c80-cd22-11ea-8e2b-4b9fac5975eb.jpg',
'api': {
'data': 'https://api.{hostname}',
'api': 'https://api.{hostname}',
'capi': 'https://capi.{hostname}',
'swap': 'https://capi.{hostname}',
},
'www': 'https://www.bitget.com',
'doc': [
'https://bitgetlimited.github.io/apidoc/en/swap',
'https://bitgetlimited.github.io/apidoc/en/spot',
],
'fees': 'https://www.bitget.cc/zh-CN/rate?tab=1',
'test': {
'rest': 'https://testnet.bitget.com',
},
'referral': 'https://www.bitget.com/expressly?languageType=0&channelCode=ccxt&vipCode=tg9j',
},
'api': {
'data': {
'get': [
'market/history/kline', # Kline data
'market/detail/merged', # Get aggregated ticker
'market/tickers', # Get all trading tickers
'market/allticker', # Get all trading market method 2
'market/depth', # Get Market Depth Data
'market/trade', # Get Trade Detail Data
'market/history/trade', # Get record of trading
'market/detail', # Get Market Detail 24h Volume
'common/symbols', # Query all trading pairs and accuracy supported in the station
'common/currencys', # Query all currencies supported in the station
'common/timestamp', # Query system current time
],
},
'api': {
'get': [
'account/accounts', # Get all accounts of current user(即account_id)。
'accounts/{account_id}/balance', # Get the balance of the specified account
'order/orders', # Query order, deprecated
'order/orders/openOrders',
'order/orders/history',
'order/deposit_withdraw', # Query assets history
],
'post': [
'order/orders/place', # Place order
'order/orders/{order_id}/submitcancel', # Request to cancel an order request
'order/orders/batchcancel', # Bulk order cancellation
'order/orders/{order_id}', # Query an order details
'order/orders/{order_id}/matchresults', # Query the transaction details of an order
'order/matchresults', # Query current order, order history
],
},
'capi': {
'get': [
'market/time',
'market/contracts',
'market/depth',
'market/tickers',
'market/ticker',
'market/trades',
'market/candles',
'market/index',
'market/open_count',
'market/open_interest',
'market/price_limit',
'market/funding_time',
'market/mark_price',
'market/open_count',
'market/historyFundRate',
],
},
'swap': {
'get': [
'account/accounts',
'account/account',
'account/settings',
'position/allPosition',
'position/singlePosition',
'position/holds',
'order/detail',
'order/orders',
'order/fills',
'order/current',
'order/currentPlan', # conditional
'order/history',
'order/historyPlan', # conditional
'trace/closeTrack',
'trace/currentTrack',
'trace/historyTrack',
'trace/summary',
'trace/profitSettleTokenIdGroup',
'trace/profitDateGroupList',
'trace/profitDateList',
'trace/waitProfitDateList',
],
'post': [
'account/leverage',
'account/adjustMargin',
'account/modifyAutoAppendMargin',
'order/placeOrder',
'order/batchOrders',
'order/cancel_order',
'order/cancel_batch_orders',
'order/plan_order',
'order/cancel_plan',
'position/changeHoldModel',
'trace/closeTrackOrder',
],
},
},
'fees': {
'spot': {
'taker': self.parse_number('0.002'),
'maker': self.parse_number('0.002'),
},
'swap': {
'taker': self.parse_number('0.0006'),
'maker': self.parse_number('0.0004'),
},
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
'password': True,
},
'exceptions': {
# http error codes
# 400 Bad Request — Invalid request format
# 401 Unauthorized — Invalid API Key
# 403 Forbidden — You do not have access to the requested resource
# 404 Not Found
# 500 Internal Server Error — We had a problem with our server
'exact': {
'1': ExchangeError, # {"code": 1, "message": "System error"}
# undocumented
'failure to get a peer from the ring-balancer': ExchangeNotAvailable, # {"message": "failure to get a peer from the ring-balancer"}
'4010': PermissionDenied, # {"code": 4010, "message": "For the security of your funds, withdrawals are not permitted within 24 hours after changing fund password / mobile number / Google Authenticator settings "}
# common
# '0': ExchangeError, # 200 successful,when the order placement / cancellation / operation is successful
'4001': ExchangeError, # no data received in 30s
'4002': ExchangeError, # Buffer full. cannot write data
# --------------------------------------------------------
'30001': AuthenticationError, # {"code": 30001, "message": 'request header "OK_ACCESS_KEY" cannot be blank'}
'30002': AuthenticationError, # {"code": 30002, "message": 'request header "OK_ACCESS_SIGN" cannot be blank'}
'30003': AuthenticationError, # {"code": 30003, "message": 'request header "OK_ACCESS_TIMESTAMP" cannot be blank'}
'30004': AuthenticationError, # {"code": 30004, "message": 'request header "OK_ACCESS_PASSPHRASE" cannot be blank'}
'30005': InvalidNonce, # {"code": 30005, "message": "invalid OK_ACCESS_TIMESTAMP"}
'30006': AuthenticationError, # {"code": 30006, "message": "invalid OK_ACCESS_KEY"}
'30007': BadRequest, # {"code": 30007, "message": 'invalid Content_Type, please use "application/json" format'}
'30008': RequestTimeout, # {"code": 30008, "message": "timestamp request expired"}
'30009': ExchangeError, # {"code": 30009, "message": "system error"}
'30010': AuthenticationError, # {"code": 30010, "message": "API validation failed"}
'30011': PermissionDenied, # {"code": 30011, "message": "invalid IP"}
'30012': AuthenticationError, # {"code": 30012, "message": "invalid authorization"}
'30013': AuthenticationError, # {"code": 30013, "message": "invalid sign"}
'30014': DDoSProtection, # {"code": 30014, "message": "request too frequent"}
'30015': AuthenticationError, # {"code": 30015, "message": 'request header "OK_ACCESS_PASSPHRASE" incorrect'}
'30016': ExchangeError, # {"code": 30015, "message": "you are using v1 apiKey, please use v1 endpoint. If you would like to use v3 endpoint, please subscribe to v3 apiKey"}
'30017': ExchangeError, # {"code": 30017, "message": "apikey's broker id does not match"}
'30018': ExchangeError, # {"code": 30018, "message": "apikey's domain does not match"}
'30019': ExchangeNotAvailable, # {"code": 30019, "message": "Api is offline or unavailable"}
'30020': BadRequest, # {"code": 30020, "message": "body cannot be blank"}
'30021': BadRequest, # {"code": 30021, "message": "Json data format error"}, {"code": 30021, "message": "json data format error"}
'30022': PermissionDenied, # {"code": 30022, "message": "Api has been frozen"}
'30023': BadRequest, # {"code": 30023, "message": "{0} parameter cannot be blank"}
'30024': BadSymbol, # {"code":30024,"message":"\"instrument_id\" is an invalid parameter"}
'30025': BadRequest, # {"code": 30025, "message": "{0} parameter category error"}
'30026': DDoSProtection, # {"code": 30026, "message": "requested too frequent"}
'30027': AuthenticationError, # {"code": 30027, "message": "login failure"}
'30028': PermissionDenied, # {"code": 30028, "message": "unauthorized execution"}
'30029': AccountSuspended, # {"code": 30029, "message": "account suspended"}
'30030': ExchangeError, # {"code": 30030, "message": "endpoint request failed. Please try again"}
'30031': BadRequest, # {"code": 30031, "message": "token does not exist"}
'30032': BadSymbol, # {"code": 30032, "message": "pair does not exist"}
'30033': BadRequest, # {"code": 30033, "message": "exchange domain does not exist"}
'30034': ExchangeError, # {"code": 30034, "message": "exchange ID does not exist"}
'30035': ExchangeError, # {"code": 30035, "message": "trading is not supported in self website"}
'30036': ExchangeError, # {"code": 30036, "message": "no relevant data"}
'30037': ExchangeNotAvailable, # {"code": 30037, "message": "endpoint is offline or unavailable"}
# '30038': AuthenticationError, # {"code": 30038, "message": "user does not exist"}
'30038': OnMaintenance, # {"client_oid":"","code":"30038","error_code":"30038","error_message":"Matching engine is being upgraded. Please try in about 1 minute.","message":"Matching engine is being upgraded. Please try in about 1 minute.","order_id":"-1","result":false}
# futures
'32001': AccountSuspended, # {"code": 32001, "message": "futures account suspended"}
'32002': PermissionDenied, # {"code": 32002, "message": "futures account does not exist"}
'32003': CancelPending, # {"code": 32003, "message": "canceling, please wait"}
'32004': ExchangeError, # {"code": 32004, "message": "you have no unfilled orders"}
'32005': InvalidOrder, # {"code": 32005, "message": "max order quantity"}
'32006': InvalidOrder, # {"code": 32006, "message": "the order price or trigger price exceeds USD 1 million"}
'32007': InvalidOrder, # {"code": 32007, "message": "leverage level must be the same for orders on the same side of the contract"}
'32008': InvalidOrder, # {"code": 32008, "message": "Max. positions to open(cross margin)"}
'32009': InvalidOrder, # {"code": 32009, "message": "Max. positions to open(fixed margin)"}
'32010': ExchangeError, # {"code": 32010, "message": "leverage cannot be changed with open positions"}
'32011': ExchangeError, # {"code": 32011, "message": "futures status error"}
'32012': ExchangeError, # {"code": 32012, "message": "futures order update error"}
'32013': ExchangeError, # {"code": 32013, "message": "token type is blank"}
'32014': ExchangeError, # {"code": 32014, "message": "your number of contracts closing is larger than the number of contracts available"}
'32015': ExchangeError, # {"code": 32015, "message": "margin ratio is lower than 100% before opening positions"}
'32016': ExchangeError, # {"code": 32016, "message": "margin ratio is lower than 100% after opening position"}
'32017': ExchangeError, # {"code": 32017, "message": "no BBO"}
'32018': ExchangeError, # {"code": 32018, "message": "the order quantity is less than 1, please try again"}
'32019': ExchangeError, # {"code": 32019, "message": "the order price deviates from the price of the previous minute by more than 3%"}
'32020': ExchangeError, # {"code": 32020, "message": "the price is not in the range of the price limit"}
'32021': ExchangeError, # {"code": 32021, "message": "leverage error"}
'32022': ExchangeError, # {"code": 32022, "message": "self function is not supported in your country or region according to the regulations"}
'32023': ExchangeError, # {"code": 32023, "message": "self account has outstanding loan"}
'32024': ExchangeError, # {"code": 32024, "message": "order cannot be placed during delivery"}
'32025': ExchangeError, # {"code": 32025, "message": "order cannot be placed during settlement"}
'32026': ExchangeError, # {"code": 32026, "message": "your account is restricted from opening positions"}
'32027': ExchangeError, # {"code": 32027, "message": "cancelled over 20 orders"}
'32028': AccountSuspended, # {"code": 32028, "message": "account is suspended and liquidated"}
'32029': ExchangeError, # {"code": 32029, "message": "order info does not exist"}
'32030': InvalidOrder, # The order cannot be cancelled
'32031': ArgumentsRequired, # client_oid or order_id is required.
'32038': AuthenticationError, # User does not exist
'32040': ExchangeError, # User have open contract orders or position
'32044': ExchangeError, # {"code": 32044, "message": "The margin ratio after submitting self order is lower than the minimum requirement({0}) for your tier."}
'32045': ExchangeError, # String of commission over 1 million
'32046': ExchangeError, # Each user can hold up to 10 trade plans at the same time
'32047': ExchangeError, # system error
'32048': InvalidOrder, # Order strategy track range error
'32049': ExchangeError, # Each user can hold up to 10 track plans at the same time
'32050': InvalidOrder, # Order strategy rang error
'32051': InvalidOrder, # Order strategy ice depth error
'32052': ExchangeError, # String of commission over 100 thousand
'32053': ExchangeError, # Each user can hold up to 6 ice plans at the same time
'32057': ExchangeError, # The order price is zero. Market-close-all function cannot be executed
'32054': ExchangeError, # Trade not allow
'32055': InvalidOrder, # cancel order error
'32056': ExchangeError, # iceberg per order average should between {0}-{1} contracts
'32058': ExchangeError, # Each user can hold up to 6 initiative plans at the same time
'32059': InvalidOrder, # Total amount should exceed per order amount
'32060': InvalidOrder, # Order strategy type error
'32061': InvalidOrder, # Order strategy initiative limit error
'32062': InvalidOrder, # Order strategy initiative range error
'32063': InvalidOrder, # Order strategy initiative rate error
'32064': ExchangeError, # Time Stringerval of orders should set between 5-120s
'32065': ExchangeError, # Close amount exceeds the limit of Market-close-all(999 for BTC, and 9999 for the rest tokens)
'32066': ExchangeError, # You have open orders. Please cancel all open orders before changing your leverage level.
'32067': ExchangeError, # Account equity < required margin in self setting. Please adjust your leverage level again.
'32068': ExchangeError, # The margin for self position will fall short of the required margin in self setting. Please adjust your leverage level or increase your margin to proceed.
'32069': ExchangeError, # Target leverage level too low. Your account balance is insufficient to cover the margin required. Please adjust the leverage level again.
'32070': ExchangeError, # Please check open position or unfilled order
'32071': ExchangeError, # Your current liquidation mode does not support self action.
'32072': ExchangeError, # The highest available margin for your order’s tier is {0}. Please edit your margin and place a new order.
'32073': ExchangeError, # The action does not apply to the token
'32074': ExchangeError, # The number of contracts of your position, open orders, and the current order has exceeded the maximum order limit of self asset.
'32075': ExchangeError, # Account risk rate breach
'32076': ExchangeError, # Liquidation of the holding position(s) at market price will require cancellation of all pending close orders of the contracts.
'32077': ExchangeError, # Your margin for self asset in futures account is insufficient and the position has been taken over for liquidation.(You will not be able to place orders, close positions, transfer funds, or add margin during self period of time. Your account will be restored after the liquidation is complete.)
'32078': ExchangeError, # Please cancel all open orders before switching the liquidation mode(Please cancel all open orders before switching the liquidation mode)
'32079': ExchangeError, # Your open positions are at high risk.(Please add margin or reduce positions before switching the mode)
'32080': ExchangeError, # Funds cannot be transferred out within 30 minutes after futures settlement
'32083': ExchangeError, # The number of contracts should be a positive multiple of %%. Please place your order again
# token and margin trading
'33001': PermissionDenied, # {"code": 33001, "message": "margin account for self pair is not enabled yet"}
'33002': AccountSuspended, # {"code": 33002, "message": "margin account for self pair is suspended"}
'33003': InsufficientFunds, # {"code": 33003, "message": "no loan balance"}
'33004': ExchangeError, # {"code": 33004, "message": "loan amount cannot be smaller than the minimum limit"}
'33005': ExchangeError, # {"code": 33005, "message": "repayment amount must exceed 0"}
'33006': ExchangeError, # {"code": 33006, "message": "loan order not found"}
'33007': ExchangeError, # {"code": 33007, "message": "status not found"}
'33008': InsufficientFunds, # {"code": 33008, "message": "loan amount cannot exceed the maximum limit"}
'33009': ExchangeError, # {"code": 33009, "message": "user ID is blank"}
'33010': ExchangeError, # {"code": 33010, "message": "you cannot cancel an order during session 2 of call auction"}
'33011': ExchangeError, # {"code": 33011, "message": "no new market data"}
'33012': ExchangeError, # {"code": 33012, "message": "order cancellation failed"}
'33013': InvalidOrder, # {"code": 33013, "message": "order placement failed"}
'33014': OrderNotFound, # {"code": 33014, "message": "order does not exist"}
'33015': InvalidOrder, # {"code": 33015, "message": "exceeded maximum limit"}
'33016': ExchangeError, # {"code": 33016, "message": "margin trading is not open for self token"}
'33017': InsufficientFunds, # {"code": 33017, "message": "insufficient balance"}
'33018': ExchangeError, # {"code": 33018, "message": "self parameter must be smaller than 1"}
'33020': ExchangeError, # {"code": 33020, "message": "request not supported"}
'33021': BadRequest, # {"code": 33021, "message": "token and the pair do not match"}
'33022': InvalidOrder, # {"code": 33022, "message": "pair and the order do not match"}
'33023': ExchangeError, # {"code": 33023, "message": "you can only place market orders during call auction"}
'33024': InvalidOrder, # {"code": 33024, "message": "trading amount too small"}
'33025': InvalidOrder, # {"code": 33025, "message": "base token amount is blank"}
'33026': ExchangeError, # {"code": 33026, "message": "transaction completed"}
'33027': InvalidOrder, # {"code": 33027, "message": "cancelled order or order cancelling"}
'33028': InvalidOrder, # {"code": 33028, "message": "the decimal places of the trading price exceeded the limit"}
'33029': InvalidOrder, # {"code": 33029, "message": "the decimal places of the trading size exceeded the limit"}
'33034': ExchangeError, # {"code": 33034, "message": "You can only place limit order after Call Auction has started"}
'33035': ExchangeError, # This type of order cannot be canceled(This type of order cannot be canceled)
'33036': ExchangeError, # Exceeding the limit of entrust order
'33037': ExchangeError, # The buy order price should be lower than 130% of the trigger price
'33038': ExchangeError, # The sell order price should be higher than 70% of the trigger price
'33039': ExchangeError, # The limit of callback rate is 0 < x <= 5%
'33040': ExchangeError, # The trigger price of a buy order should be lower than the latest transaction price
'33041': ExchangeError, # The trigger price of a sell order should be higher than the latest transaction price
'33042': ExchangeError, # The limit of price variance is 0 < x <= 1%
'33043': ExchangeError, # The total amount must be larger than 0
'33044': ExchangeError, # The average amount should be 1/1000 * total amount <= x <= total amount
'33045': ExchangeError, # The price should not be 0, including trigger price, order price, and price limit
'33046': ExchangeError, # Price variance should be 0 < x <= 1%
'33047': ExchangeError, # Sweep ratio should be 0 < x <= 100%
'33048': ExchangeError, # Per order limit: Total amount/1000 < x <= Total amount
'33049': ExchangeError, # Total amount should be X > 0
'33050': ExchangeError, # Time interval should be 5 <= x <= 120s
'33051': ExchangeError, # cancel order number not higher limit: plan and track entrust no more than 10, ice and time entrust no more than 6
'33059': BadRequest, # {"code": 33059, "message": "client_oid or order_id is required"}
'33060': BadRequest, # {"code": 33060, "message": "Only fill in either parameter client_oid or order_id"}
'33061': ExchangeError, # Value of a single market price order cannot exceed 100,000 USD
'33062': ExchangeError, # The leverage ratio is too high. The borrowed position has exceeded the maximum position of self leverage ratio. Please readjust the leverage ratio
'33063': ExchangeError, # Leverage multiple is too low, there is insufficient margin in the account, please readjust the leverage ratio
'33064': ExchangeError, # The setting of the leverage ratio cannot be less than 2, please readjust the leverage ratio
'33065': ExchangeError, # Leverage ratio exceeds maximum leverage ratio, please readjust leverage ratio
# account
'21009': ExchangeError, # Funds cannot be transferred out within 30 minutes after swap settlement(Funds cannot be transferred out within 30 minutes after swap settlement)
'34001': PermissionDenied, # {"code": 34001, "message": "withdrawal suspended"}
'34002': InvalidAddress, # {"code": 34002, "message": "please add a withdrawal address"}
'34003': ExchangeError, # {"code": 34003, "message": "sorry, self token cannot be withdrawn to xx at the moment"}
'34004': ExchangeError, # {"code": 34004, "message": "withdrawal fee is smaller than minimum limit"}
'34005': ExchangeError, # {"code": 34005, "message": "withdrawal fee exceeds the maximum limit"}
'34006': ExchangeError, # {"code": 34006, "message": "withdrawal amount is lower than the minimum limit"}
'34007': ExchangeError, # {"code": 34007, "message": "withdrawal amount exceeds the maximum limit"}
'34008': InsufficientFunds, # {"code": 34008, "message": "insufficient balance"}
'34009': ExchangeError, # {"code": 34009, "message": "your withdrawal amount exceeds the daily limit"}
'34010': ExchangeError, # {"code": 34010, "message": "transfer amount must be larger than 0"}
'34011': ExchangeError, # {"code": 34011, "message": "conditions not met"}
'34012': ExchangeError, # {"code": 34012, "message": "the minimum withdrawal amount for NEO is 1, and the amount must be an integer"}
'34013': ExchangeError, # {"code": 34013, "message": "please transfer"}
'34014': ExchangeError, # {"code": 34014, "message": "transfer limited"}
'34015': ExchangeError, # {"code": 34015, "message": "subaccount does not exist"}
'34016': PermissionDenied, # {"code": 34016, "message": "transfer suspended"}
'34017': AccountSuspended, # {"code": 34017, "message": "account suspended"}
'34018': AuthenticationError, # {"code": 34018, "message": "incorrect trades password"}
'34019': PermissionDenied, # {"code": 34019, "message": "please bind your email before withdrawal"}
'34020': PermissionDenied, # {"code": 34020, "message": "please bind your funds password before withdrawal"}
'34021': InvalidAddress, # {"code": 34021, "message": "Not verified address"}
'34022': ExchangeError, # {"code": 34022, "message": "Withdrawals are not available for sub accounts"}
'34023': PermissionDenied, # {"code": 34023, "message": "Please enable futures trading before transferring your funds"}
'34026': ExchangeError, # transfer too frequently(transfer too frequently)
'34036': ExchangeError, # Parameter is incorrect, please refer to API documentation
'34037': ExchangeError, # Get the sub-account balance interface, account type is not supported
'34038': ExchangeError, # Since your C2C transaction is unusual, you are restricted from fund transfer. Please contact our customer support to cancel the restriction
'34039': ExchangeError, # You are now restricted from transferring out your funds due to abnormal trades on C2C Market. Please transfer your fund on our website or app instead to verify your identity
# swap
'35001': ExchangeError, # {"code": 35001, "message": "Contract does not exist"}
'35002': ExchangeError, # {"code": 35002, "message": "Contract settling"}
'35003': ExchangeError, # {"code": 35003, "message": "Contract paused"}
'35004': ExchangeError, # {"code": 35004, "message": "Contract pending settlement"}
'35005': AuthenticationError, # {"code": 35005, "message": "User does not exist"}
'35008': InvalidOrder, # {"code": 35008, "message": "Risk ratio too high"}
'35010': InvalidOrder, # {"code": 35010, "message": "Position closing too large"}
'35012': InvalidOrder, # {"code": 35012, "message": "Incorrect order size"}
'35014': InvalidOrder, # {"code": 35014, "message": "Order price is not within limit"}
'35015': InvalidOrder, # {"code": 35015, "message": "Invalid leverage level"}
'35017': ExchangeError, # {"code": 35017, "message": "Open orders exist"}
'35019': InvalidOrder, # {"code": 35019, "message": "Order size too large"}
'35020': InvalidOrder, # {"code": 35020, "message": "Order price too high"}
'35021': InvalidOrder, # {"code": 35021, "message": "Order size exceeded current tier limit"}
'35022': ExchangeError, # {"code": 35022, "message": "Contract status error"}
'35024': ExchangeError, # {"code": 35024, "message": "Contract not initialized"}
'35025': InsufficientFunds, # {"code": 35025, "message": "No account balance"}
'35026': ExchangeError, # {"code": 35026, "message": "Contract settings not initialized"}
'35029': OrderNotFound, # {"code": 35029, "message": "Order does not exist"}
'35030': InvalidOrder, # {"code": 35030, "message": "Order size too large"}
'35031': InvalidOrder, # {"code": 35031, "message": "Cancel order size too large"}
'35032': ExchangeError, # {"code": 35032, "message": "Invalid user status"}
'35037': ExchangeError, # No last traded price in cache
'35039': ExchangeError, # {"code": 35039, "message": "Open order quantity exceeds limit"}
'35040': InvalidOrder, # {"error_message":"Invalid order type","result":"true","error_code":"35040","order_id":"-1"}
'35044': ExchangeError, # {"code": 35044, "message": "Invalid order status"}
'35046': InsufficientFunds, # {"code": 35046, "message": "Negative account balance"}
'35047': InsufficientFunds, # {"code": 35047, "message": "Insufficient account balance"}
'35048': ExchangeError, # {"code": 35048, "message": "User contract is frozen and liquidating"}
'35049': InvalidOrder, # {"code": 35049, "message": "Invalid order type"}
'35050': InvalidOrder, # {"code": 35050, "message": "Position settings are blank"}
'35052': InsufficientFunds, # {"code": 35052, "message": "Insufficient cross margin"}
'35053': ExchangeError, # {"code": 35053, "message": "Account risk too high"}
'35055': InsufficientFunds, # {"code": 35055, "message": "Insufficient account balance"}
'35057': ExchangeError, # {"code": 35057, "message": "No last traded price"}
'35058': ExchangeError, # {"code": 35058, "message": "No limit"}
'35059': BadRequest, # {"code": 35059, "message": "client_oid or order_id is required"}
'35060': BadRequest, # {"code": 35060, "message": "Only fill in either parameter client_oid or order_id"}
'35061': BadRequest, # {"code": 35061, "message": "Invalid instrument_id"}
'35062': InvalidOrder, # {"code": 35062, "message": "Invalid match_price"}
'35063': InvalidOrder, # {"code": 35063, "message": "Invalid order_size"}
'35064': InvalidOrder, # {"code": 35064, "message": "Invalid client_oid"}
'35066': InvalidOrder, # Order interval error
'35067': InvalidOrder, # Time-weighted order ratio error
'35068': InvalidOrder, # Time-weighted order range error
'35069': InvalidOrder, # Time-weighted single transaction limit error
'35070': InvalidOrder, # Algo order type error
'35071': InvalidOrder, # Order total must be larger than single order limit
'35072': InvalidOrder, # Maximum 6 unfulfilled time-weighted orders can be held at the same time
'35073': InvalidOrder, # Order price is 0. Market-close-all not available
'35074': InvalidOrder, # Iceberg order single transaction average error
'35075': InvalidOrder, # Failed to cancel order
'35076': InvalidOrder, # LTC 20x leverage. Not allowed to open position
'35077': InvalidOrder, # Maximum 6 unfulfilled iceberg orders can be held at the same time
'35078': InvalidOrder, # Order amount exceeded 100,000
'35079': InvalidOrder, # Iceberg order price variance error
'35080': InvalidOrder, # Callback rate error
'35081': InvalidOrder, # Maximum 10 unfulfilled trail orders can be held at the same time
'35082': InvalidOrder, # Trail order callback rate error
'35083': InvalidOrder, # Each user can only hold a maximum of 10 unfulfilled stop-limit orders at the same time
'35084': InvalidOrder, # Order amount exceeded 1 million
'35085': InvalidOrder, # Order amount is not in the correct range
'35086': InvalidOrder, # Price exceeds 100 thousand
'35087': InvalidOrder, # Price exceeds 100 thousand
'35088': InvalidOrder, # Average amount error
'35089': InvalidOrder, # Price exceeds 100 thousand
'35090': ExchangeError, # No stop-limit orders available for cancellation
'35091': ExchangeError, # No trail orders available for cancellation
'35092': ExchangeError, # No iceberg orders available for cancellation
'35093': ExchangeError, # No trail orders available for cancellation
'35094': ExchangeError, # Stop-limit order last traded price error
'35095': BadRequest, # Instrument_id error
'35096': ExchangeError, # Algo order status error
'35097': ExchangeError, # Order status and order ID cannot exist at the same time
'35098': ExchangeError, # An order status or order ID must exist
'35099': ExchangeError, # Algo order ID error
# option
'36001': BadRequest, # Invalid underlying index.
'36002': BadRequest, # Instrument does not exist.
'36005': ExchangeError, # Instrument status is invalid.
'36101': AuthenticationError, # Account does not exist.
'36102': PermissionDenied, # Account status is invalid.
'36103': AccountSuspended, # Account is suspended due to ongoing liquidation.
'36104': PermissionDenied, # Account is not enabled for options trading.
'36105': PermissionDenied, # Please enable the account for option contract.
'36106': AccountSuspended, # Funds cannot be transferred in or out, as account is suspended.
'36107': PermissionDenied, # Funds cannot be transferred out within 30 minutes after option exercising or settlement.
'36108': InsufficientFunds, # Funds cannot be transferred in or out, as equity of the account is less than zero.
'36109': PermissionDenied, # Funds cannot be transferred in or out during option exercising or settlement.
'36201': PermissionDenied, # New order function is blocked.
'36202': PermissionDenied, # Account does not have permission to short option.
'36203': InvalidOrder, # Invalid format for client_oid.
'36204': ExchangeError, # Invalid format for request_id.
'36205': BadRequest, # Instrument id does not match underlying index.
'36206': BadRequest, # Order_id and client_oid can not be used at the same time.
'36207': InvalidOrder, # Either order price or fartouch price must be present.
'36208': InvalidOrder, # Either order price or size must be present.
'36209': InvalidOrder, # Either order_id or client_oid must be present.
'36210': InvalidOrder, # Either order_ids or client_oids must be present.
'36211': InvalidOrder, # Exceeding max batch size for order submission.
'36212': InvalidOrder, # Exceeding max batch size for order cancellation.
'36213': InvalidOrder, # Exceeding max batch size for order amendment.
'36214': ExchangeError, # Instrument does not have valid bid/ask quote.
'36216': OrderNotFound, # Order does not exist.
'36217': InvalidOrder, # Order submission failed.
'36218': InvalidOrder, # Order cancellation failed.
'36219': InvalidOrder, # Order amendment failed.
'36220': InvalidOrder, # Order is pending cancel.
'36221': InvalidOrder, # Order qty is not valid multiple of lot size.
'36222': InvalidOrder, # Order price is breaching highest buy limit.
'36223': InvalidOrder, # Order price is breaching lowest sell limit.
'36224': InvalidOrder, # Exceeding max order size.
'36225': InvalidOrder, # Exceeding max open order count for instrument.
'36226': InvalidOrder, # Exceeding max open order count for underlying.
'36227': InvalidOrder, # Exceeding max open size across all orders for underlying
'36228': InvalidOrder, # Exceeding max available qty for instrument.
'36229': InvalidOrder, # Exceeding max available qty for underlying.
'36230': InvalidOrder, # Exceeding max position limit for underlying.
# --------------------------------------------------------
# swap
'400': BadRequest, # Bad Request
'401': AuthenticationError, # Unauthorized access
'403': PermissionDenied, # Access prohibited
'404': BadRequest, # Request address does not exist
'405': BadRequest, # The HTTP Method is not supported
'415': BadRequest, # The current media type is not supported
'429': DDoSProtection, # Too many requests
'500': ExchangeNotAvailable, # System busy
'1001': RateLimitExceeded, # The request is too frequent and has been throttled
'1002': ExchangeError, # {0} verifications within 24 hours
'1003': ExchangeError, # You failed more than {0} times today, the current operation is locked, please try again in 24 hours
# '00000': ExchangeError, # success
'40001': AuthenticationError, # ACCESS_KEY cannot be empty
'40002': AuthenticationError, # SECRET_KEY cannot be empty
'40003': AuthenticationError, # Signature cannot be empty
'40004': InvalidNonce, # Request timestamp expired
'40005': InvalidNonce, # Invalid ACCESS_TIMESTAMP
'40006': AuthenticationError, # Invalid ACCESS_KEY
'40007': BadRequest, # Invalid Content_Type
'40008': InvalidNonce, # Request timestamp expired
'40009': AuthenticationError, # sign signature error
'40010': AuthenticationError, # sign signature error
'40011': AuthenticationError, # ACCESS_PASSPHRASE cannot be empty
'40012': AuthenticationError, # apikey/password is incorrect
'40013': ExchangeError, # User status is abnormal
'40014': PermissionDenied, # Incorrect permissions
'40015': ExchangeError, # System is abnormal, please try again later
'40016': PermissionDenied, # The user must bind the phone or Google
'40017': ExchangeError, # Parameter verification failed
'40018': PermissionDenied, # Invalid IP
'40102': BadRequest, # Contract configuration does not exist, please check the parameters
'40103': BadRequest, # Request method cannot be empty
'40104': ExchangeError, # Lever adjustment failure
'40105': ExchangeError, # Abnormal access to current price limit data
'40106': ExchangeError, # Abnormal get next settlement time
'40107': ExchangeError, # Abnormal access to index price data
'40108': InvalidOrder, # Wrong order quantity
'40109': OrderNotFound, # The data of the order cannot be found, please confirm the order number
'40200': OnMaintenance, # Server upgrade, please try again later
'40201': InvalidOrder, # Order number cannot be empty
'40202': ExchangeError, # User information cannot be empty
'40203': BadRequest, # The amount of adjustment margin cannot be empty or negative
'40204': BadRequest, # Adjustment margin type cannot be empty
'40205': BadRequest, # Adjusted margin type data is wrong
'40206': BadRequest, # The direction of the adjustment margin cannot be empty
'40207': BadRequest, # The adjustment margin data is wrong
'40208': BadRequest, # The accuracy of the adjustment margin amount is incorrect
'40209': BadRequest, # The current page number is wrong, please confirm
'40300': ExchangeError, # User does not exist
'40301': PermissionDenied, # Permission has not been obtained yet. If you need to use it, please contact customer service
'40302': BadRequest, # Parameter abnormality
'40303': BadRequest, # Can only query up to 20,000 data
'40304': BadRequest, # Parameter type is abnormal
'40305': BadRequest, # Client_oid length is not greater than 50, and cannot be Martian characters
'40306': ExchangeError, # Batch processing orders can only process up to 20
'40308': OnMaintenance, # The contract is being temporarily maintained
'40309': BadSymbol, # The contract has been removed
'40400': ExchangeError, # Status check abnormal
'40401': ExchangeError, # The operation cannot be performed
'40402': BadRequest, # The opening direction cannot be empty
'40403': BadRequest, # Wrong opening direction format
'40404': BadRequest, # Whether to enable automatic margin call parameters cannot be empty
'40405': BadRequest, # Whether to enable the automatic margin call parameter type is wrong
'40406': BadRequest, # Whether to enable automatic margin call parameters is of unknown type
'40407': ExchangeError, # The query direction is not the direction entrusted by the plan
'40408': ExchangeError, # Wrong time range
'40409': ExchangeError, # Time format error
'40500': InvalidOrder, # Client_oid check error
'40501': ExchangeError, # Channel name error
'40502': ExchangeError, # If it is a copy user, you must pass the copy to whom
'40503': ExchangeError, # With the single type
'40504': ExchangeError, # Platform code must pass
'40505': ExchangeError, # Not the same as single type
'40506': AuthenticationError, # Platform signature error
'40507': AuthenticationError, # Api signature error
'40508': ExchangeError, # KOL is not authorized
'40509': ExchangeError, # Abnormal copy end
'40600': ExchangeError, # Copy function suspended
'40601': ExchangeError, # Followers cannot be KOL
'40602': ExchangeError, # The number of copies has reached the limit and cannot process the request
'40603': ExchangeError, # Abnormal copy end
'40604': ExchangeNotAvailable, # Server is busy, please try again later
'40605': ExchangeError, # Copy type, the copy number must be passed
'40606': ExchangeError, # The type of document number is wrong
'40607': ExchangeError, # Document number must be passed
'40608': ExchangeError, # No documented products currently supported
'40609': ExchangeError, # The contract product does not support copying
'40700': BadRequest, # Cursor parameters are incorrect
'40701': ExchangeError, # KOL is not authorized
'40702': ExchangeError, # Unauthorized copying user
'40703': ExchangeError, # Bill inquiry start and end time cannot be empty
'40704': ExchangeError, # Can only check the data of the last three months
'40705': BadRequest, # The start and end time cannot exceed 90 days
'40706': InvalidOrder, # Wrong order price
'40707': BadRequest, # Start time is greater than end time
'40708': BadRequest, # Parameter verification is abnormal
'40709': ExchangeError, # There is no position in self position, and no automatic margin call can be set
'40710': ExchangeError, # Abnormal account status
'40711': InsufficientFunds, # Insufficient contract account balance
'40712': InsufficientFunds, # Insufficient margin
'40713': ExchangeError, # Cannot exceed the maximum transferable margin amount
'40714': ExchangeError, # No direct margin call is allowed
# spot
'invalid sign': AuthenticationError,
'invalid currency': BadSymbol, # invalid trading pair
'invalid symbol': BadSymbol,
'invalid period': BadRequest, # invalid Kline type
'invalid user': ExchangeError,
'invalid amount': InvalidOrder,
'invalid type': InvalidOrder, # {"status":"error","ts":1595700344504,"err_code":"invalid-parameter","err_msg":"invalid type"}
'invalid orderId': InvalidOrder,
'invalid record': ExchangeError,
'invalid accountId': BadRequest,
'invalid address': BadRequest,
'accesskey not null': AuthenticationError, # {"status":"error","ts":1595704360508,"err_code":"invalid-parameter","err_msg":"accesskey not null"}
'illegal accesskey': AuthenticationError,
'sign not null': AuthenticationError,
'req_time is too much difference from server time': InvalidNonce,
'permissions not right': PermissionDenied, # {"status":"error","ts":1595704490084,"err_code":"invalid-parameter","err_msg":"permissions not right"}
'illegal sign invalid': AuthenticationError, # {"status":"error","ts":1595684716042,"err_code":"invalid-parameter","err_msg":"illegal sign invalid"}
'user locked': AccountSuspended,
'Request Frequency Is Too High': RateLimitExceeded,
'more than a daily rate of cash': BadRequest,
'more than the maximum daily withdrawal amount': BadRequest,
'need to bind email or mobile': ExchangeError,
'user forbid': PermissionDenied,
'User Prohibited Cash Withdrawal': PermissionDenied,
'Cash Withdrawal Is Less Than The Minimum Value': BadRequest,
'Cash Withdrawal Is More Than The Maximum Value': BadRequest,
'the account with in 24 hours ban coin': PermissionDenied,
'order cancel fail': BadRequest, # {"status":"error","ts":1595703343035,"err_code":"bad-request","err_msg":"order cancel fail"}
'base symbol error': BadSymbol,
'base date error': ExchangeError,
'api signature not valid': AuthenticationError,
'gateway internal error': ExchangeError,
'audit failed': ExchangeError,
'order queryorder invalid': BadRequest,
'market no need price': InvalidOrder,
'limit need price': InvalidOrder,
'userid not equal to account_id': ExchangeError,
'your balance is low': InsufficientFunds, # {"status":"error","ts":1595594160149,"err_code":"invalid-parameter","err_msg":"invalid size, valid range: [1,2000]"}
'address invalid cointype': ExchangeError,
'system exception': ExchangeError, # {"status":"error","ts":1595711862763,"err_code":"system exception","err_msg":"system exception"}
'50003': ExchangeError, # No record
'50004': BadSymbol, # The transaction pair is currently not supported or has been suspended
'50006': PermissionDenied, # The account is forbidden to withdraw. If you have any questions, please contact customer service.
'50007': PermissionDenied, # The account is forbidden to withdraw within 24 hours. If you have any questions, please contact customer service.
'50008': RequestTimeout, # network timeout
'50009': RateLimitExceeded, # The operation is too frequent, please try again later
'50010': ExchangeError, # The account is abnormally frozen. If you have any questions, please contact customer service.
'50014': InvalidOrder, # The transaction amount under minimum limits
'50015': InvalidOrder, # The transaction amount exceed maximum limits
'50016': InvalidOrder, # The price can't be higher than the current price
'50017': InvalidOrder, # Price under minimum limits
'50018': InvalidOrder, # The price exceed maximum limits
'50019': InvalidOrder, # The amount under minimum limits
'50020': InsufficientFunds, # Insufficient balance
'50021': InvalidOrder, # Price is under minimum limits
'50026': InvalidOrder, # Market price parameter error
'invalid order query time': ExchangeError, # start time is greater than end time; or the time interval between start time and end time is greater than 48 hours
'invalid start time': BadRequest, # start time is a date 30 days ago; or start time is a date in the future
'invalid end time': BadRequest, # end time is a date 30 days ago; or end time is a date in the future
'20003': ExchangeError, # operation failed, {"status":"error","ts":1595730308979,"err_code":"bad-request","err_msg":"20003"}
'01001': ExchangeError, # order failed, {"status":"fail","err_code":"01001","err_msg":"系统异常,请稍后重试"}(system exception, please try again later)
},
'broad': {
'invalid size, valid range': ExchangeError,
},
},
'precisionMode': TICK_SIZE,
'commonCurrencies': {
'JADE': 'Jade Protocol',
},
'options': {
'createMarketBuyOrderRequiresPrice': True,
'fetchMarkets': [
'spot',
'swap',
],
'parseOHLCV': {
'volume': {
'spot': 'amount',
'swap': 5,
},
},
'defaultType': 'spot', # 'spot', 'swap'
'accountId': None, # '1012838157',
'timeframes': {
'spot': {
'1m': '1min',
'5m': '5min',
'15m': '15min',
'30m': '30min',
'1h': '60min',
'2h': '120min',
'4h': '240min',
'6h': '360min',
'12h': '720min',
'1d': '1day',
'1w': '1week',
},
'swap': {
'1m': '60',
'5m': '300',
'15m': '900',
'30m': '1800',
'1h': '3600',
'2h': '7200',
'4h': '14400',
'6h': '21600',
'12h': '43200',
'1d': '86400',
'1w': '604800',
},
},
},
})
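# usage sketch(illustrative, not part of the exchange API): the unified methods below route between
# the spot and swap endpoints based on the resolved market type or on options['defaultType'], e.g.
#     exchange.options['defaultType'] = 'swap'  # hypothetical runtime override
#     await exchange.fetch_balance()  # would then hit the swap balance endpoint instead of the spot one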
async def fetch_time(self, params={}):
response = await self.dataGetCommonTimestamp(params)
#
# {
# "status":"ok",
# "data":"1595525139400"
# }
#
return self.safe_integer(response, 'data')
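# fetch_markets aggregates the markets of every type listed in options['fetchMarkets']
# ('spot' and 'swap' by default), falling back to options['defaultType'] when that list is empty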
async def fetch_markets(self, params={}):
types = self.safe_value(self.options, 'fetchMarkets')
if not len(types):
types = [
self.options['defaultType'],
]
result = []
for i in range(0, len(types)):
markets = await self.fetch_markets_by_type(types[i], params)
result = self.array_concat(result, markets)
return result
def parse_markets(self, markets):
result = []
for i in range(0, len(markets)):
result.append(self.parse_market(markets[i]))
return result
def parse_market(self, market):
#
# spot
#
# {
# "symbol": "BTC_USDT",
# "status": "online",
# "base_currency": "BTC",
# "quote_currency": "USDT",
# "tick_size": "2",
# "size_increment": "4",
# "base_asset_precision": "4"
# }
#
# swap
#
# {
# "symbol":"btcusd",
# "underlying_index":"BTC",
# "quote_currency":"USD",
# "coin":"BTC",
# "contract_val":"1",
# "listing":null,
# "delivery":["07:00:00","15:00:00","23:00:00"],
# "size_increment":"0",
# "tick_size":"1",
# "forwardContractFlag":false,
# "priceEndStep":5
# }
#
id = self.safe_string(market, 'symbol')
marketType = 'spot'
spot = True
swap = False
baseId = self.safe_string_2(market, 'base_currency', 'underlying_index')
quoteId = self.safe_string(market, 'quote_currency')
settleId = self.safe_string(market, 'coin')
contractVal = self.safe_number(market, 'contract_val')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
settle = self.safe_currency_code(settleId)
symbol = base + '/' + quote
if contractVal is not None:
marketType = 'swap'
spot = False
swap = True
symbol = symbol + ':' + settle
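# unified swap symbols follow the ccxt BASE/QUOTE:SETTLE convention, e.g. BTC/USD:BTC for an inverse contract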
status = self.safe_string(market, 'status')
active = None
if status is not None:
active = (status == '1') or (status == 'online')
fees = self.safe_value_2(self.fees, marketType, 'trading', {})
return self.extend(fees, {
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'settle': settle,
'baseId': baseId,
'quoteId': quoteId,
'settleId': settleId,
'type': marketType,
'spot': spot,
'margin': False,
'swap': swap,
'future': False,
'option': False,
'active': active,
'contract': swap,
'linear': (base == settle),
'inverse': (quote == settle),
'contractSize': contractVal,
'expiry': None,
'expiryDatetime': None,
'strike': None,
'optionType': None,
'precision': {
'amount': self.parse_number(self.parse_precision(self.safe_string(market, 'size_increment'))),
'price': self.parse_number(self.parse_precision(self.safe_string(market, 'tick_size'))),
'base': self.parse_number(self.parse_precision(self.safe_string(market, 'base_asset_precision'))),
},
'limits': {
'leverage': {
'min': None,
'max': None,
},
'amount': {
'min': self.safe_number_2(market, 'min_size', 'base_min_size'),
'max': None,
},
'price': {
'min': None,
'max': None,
},
'cost': {
'min': None,
'max': None,
},
},
'info': market,
})
async def fetch_markets_by_type(self, type, params={}):
if type == 'spot':
response = await self.dataGetCommonSymbols(params)
#
# {
# "status":"ok",
# "ts":1595526622408,
# "data":[
# {
# "base_currency":"btc",
# "quote_currency":"usdt",
# "symbol":"btc_usdt",
# "tick_size":"2",
# "size_increment":"4",
# "status":"1",
# "base_asset_precision":"8"
# },
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_markets(data)
elif type == 'swap':
response = await self.capiGetMarketContracts(params)
#
# {
# "data":{
# "contractApis":[
# {
# "instrument_id":"btcusd",
# "underlying_index":"BTC",
# "quote_currency":"USD",
# "coin":"BTC",
# "contract_val":"1",
# "delivery":["07:00:00","15:00:00","23:00:00"],
# "size_increment":"0",
# "tick_size":"1",
# "forwardContractFlag":false,
# "priceEndStep":"5"
# },
# ]
# },
# "status":"ok",
# "err_code":"00000"
# }
#
return self.parse_markets(response)
else:
raise NotSupported(self.id + ' fetchMarketsByType does not support market type ' + type)
async def fetch_currencies(self, params={}):
response = await self.dataGetCommonCurrencys(params)
#
# {
# "status":"ok",
# "ts":1595537740466,
# "data":[
# "btc",
# "bft",
# "usdt",
# "usdt-omni",
# "usdt-erc20"
# ]
# }
#
result = {}
data = self.safe_value(response, 'data', [])
for i in range(0, len(data)):
id = data[i]
code = self.safe_currency_code(id)
result[code] = {
'id': id,
'code': code,
'info': id,
'type': None,
'name': None,
'active': None,
'deposit': None,
'withdraw': None,
'fee': None,
'precision': None,
'limits': {
'amount': {'min': None, 'max': None},
'withdraw': {'min': None, 'max': None},
},
}
return result
async def fetch_order_book(self, symbol, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
method = None
if market['spot']:
method = 'dataGetMarketDepth'
request['type'] = 'step0' # step0, step1, step2, step3, step4, step5, do not merge depth if step0
elif market['swap']:
method = 'capiGetMarketDepth'
request['limit'] = 100 if (limit is None) else limit # max 100
response = await getattr(self, method)(self.extend(request, params))
#
# spot
#
# {
# "status":"ok",
# "ch":"market.btc_usdt.depth.step0",
# "ts":1595607628197,
# "data":{
# "id":"1595607628197",
# "ts":"1595607628197",
# "bids":[
# ["9534.99","15.36160000000000000000"],
# ["9534.85","0.14580000000000000000"],
# ["9534.73","0.02100000000000000000"],
# ],
# "asks":[
# ["9535.02","7.37160000000000000000"],
# ["9535.03","0.09040000000000000000"],
# ["9535.05","0.02180000000000000000"],
# ]
# }
# }
#
# swap
#
# {
# "asks":[
# ["9579.0","119865",1],
# ["9579.5","90069",1],
# ["9580.0","256673",1],
# ],
# "bids":[
# ["9578.5","2417",1],
# ["9577.5","3024",1],
# ["9577.0","21548",1],
# ],
# "timestamp":"1595664767349"
# }
#
data = self.safe_value(response, 'data', response)
timestamp = self.safe_integer_2(data, 'timestamp', 'ts')
nonce = self.safe_integer(data, 'id')
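# the spot depth snapshot carries an incremental 'id' which is exposed as the orderbook nonce,
# the swap depth response has no such field, so the nonce stays None for contracts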
orderbook = self.parse_order_book(data, symbol, timestamp)
orderbook['nonce'] = nonce
return orderbook
def parse_ticker(self, ticker, market=None):
#
# spot
#
# fetchTicker
#
# {
# "id":"1595538241113",
# "bid":["0.028474000000","1.139400000000"],
# "ask":["0.028482000000","0.353100000000"],
# "amount":"2850.6649",
# "count":"818",
# "open":"0.02821",
# "close":"0.028474",
# "low":"0.02821",
# "high":"0.029091",
# "vol":"79.4548693404"
# }
#
# fetchTickers
#
# {
# "amount":"30086.8095",
# "count":"22450",
# "open":"9525.11",
# "close":"9591.81",
# "low":"9510.68",
# "high":"9659.7",
# "vol":"286239092.250461",
# "symbol":"btc_usdt"
# }
#
# swap
#
# {
# "instrument_id":"btcusd",
# "last":"9574.5",
# "best_ask":"9575.0",
# "best_bid":"9574.0",
# "high_24h":"9672",
# "low_24h":"9512",
# "volume_24h":"567697050",
# "timestamp":"1595538450096"
# }
#
timestamp = self.safe_integer_2(ticker, 'timestamp', 'id')
symbol = None
marketId = self.safe_string_2(ticker, 'instrument_id', 'symbol')
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
elif marketId is not None:
parts = marketId.split('_')
numParts = len(parts)
if numParts == 2:
baseId, quoteId = parts
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
else:
symbol = marketId
if (symbol is None) and (market is not None):
symbol = market['symbol']
last = self.safe_string_2(ticker, 'last', 'close')
open = self.safe_string(ticker, 'open')
bidVolume = None
askVolume = None
bid = self.safe_value(ticker, 'bid')
if bid is None:
bid = self.safe_string(ticker, 'best_bid')
else:
bidVolume = self.safe_string(bid, 1)
bid = self.safe_string(bid, 0)
ask = self.safe_value(ticker, 'ask')
if ask is None:
ask = self.safe_string(ticker, 'best_ask')
else:
askVolume = self.safe_string(ask, 1)
ask = self.safe_string(ask, 0)
baseVolume = self.safe_string_2(ticker, 'amount', 'volume_24h')
quoteVolume = self.safe_string(ticker, 'vol')
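# spot tickers report base volume in 'amount' and quote volume in 'vol',
# swap tickers only provide 'volume_24h', which is mapped to the base volume here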
return self.safe_ticker({
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': self.safe_string_2(ticker, 'high', 'high_24h'),
'low': self.safe_string_2(ticker, 'low', 'low_24h'),
'bid': bid,
'bidVolume': bidVolume,
'ask': ask,
'askVolume': askVolume,
'vwap': None,
'open': open,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': None,
'average': None,
'baseVolume': baseVolume,
'quoteVolume': quoteVolume,
'info': ticker,
}, market, False)
async def fetch_ticker(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
method = None
if market['spot']:
method = 'dataGetMarketDetailMerged'
elif market['swap']:
method = 'capiGetMarketTicker'
request = {
'symbol': market['id'],
}
response = await getattr(self, method)(self.extend(request, params))
#
# spot
#
# {
# "status":"ok",
# "ch":"market.eth_btc.detail.merged",
# "ts":1595538241474,
# "data":{
# "id":"1595538241113",
# "bid":["0.028474000000","1.139400000000"],
# "ask":["0.028482000000","0.353100000000"],
# "amount":"2850.6649",
# "count":"818",
# "open":"0.02821",
# "close":"0.028474",
# "low":"0.02821",
# "high":"0.029091",
# "vol":"79.4548693404"
# }
# }
#
# swap
#
# {
# "symbol":"btcusd",
# "last":"9575.5",
# "best_ask":"9576.0",
# "best_bid":"9575.0",
# "high_24h":"9646",
# "low_24h":"9516",
# "volume_24h":"516656839",
# "timestamp":"1595664217405"
# }
#
data = self.safe_value(response, 'data', response)
return self.parse_ticker(data, market)
async def fetch_tickers_by_type(self, type, symbols=None, params={}):
await self.load_markets()
method = None
if type == 'spot':
method = 'dataGetMarketTickers'
elif type == 'swap':
method = 'capiGetMarketTickers'
response = await getattr(self, method)(params)
#
# spot
#
# {
# "status":"ok",
# "ts":1595542893250,
# "data":[
# {
# "amount":"30086.8095",
# "count":"22450",
# "open":"9525.11",
# "close":"9591.81",
# "low":"9510.68",
# "high":"9659.7",
# "vol":"286239092.250461",
# "symbol":"btc_usdt"
# }
# ]
# }
#
# swap
#
# [
# {
# "symbol":"btcusd",
# "last":"9572",
# "best_ask":"9571.5",
# "best_bid":"9570.5",
# "high_24h":"9646",
# "low_24h":"9516",
# "volume_24h":"515401635",
# "timestamp":"1595664479952"
# }
# ]
#
data = self.safe_value(response, 'data', response)
timestamp = None
if not isinstance(response, list):
timestamp = self.safe_integer(response, 'ts')
result = {}
for i in range(0, len(data)):
ticker = self.parse_ticker(self.extend({
'timestamp': timestamp,
}, data[i]))
symbol = ticker['symbol']
result[symbol] = ticker
return self.filter_by_array(result, 'symbol', symbols)
async def fetch_tickers(self, symbols=None, params={}):
defaultType = self.safe_string_2(self.options, 'fetchTickers', 'defaultType')
type = self.safe_string(params, 'type', defaultType)
return await self.fetch_tickers_by_type(type, symbols, self.omit(params, 'type'))
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# spot
#
# {
# "id":"1",
# "price":"9533.81",
# "amount":"0.7326",
# "direction":"sell",
# "ts":"1595604964000"
# }
#
# swap
#
# {
# "trade_id":"670581881367954915",
# "price":"9553.00",
# "size":"20",
# "side":"sell",
# "timestamp":"1595605100004",
# "symbol":"btcusd"
# }
#
# spot fetchMyTrades(private)
#
# {
# "id": 29555,
# "order_id": 59378,
# "match_id": 59335,
# "symbol": "eth_usdt",
# "type": "buy-limit",
# "source": "api",
# "price": "100.1000000000",
# "filled_amount": "0.9845000000",
# "filled_fees": "0.0019690000",
# "created_at": 1494901400487
# }
#
# fetchOrderTrades(private)
#
# spot
#
# {
# "id":"614164775",
# "created_at":"1596298860602",
# "filled_amount":"0.0417000000000000",
# "filled_fees":"0.0000834000000000",
# "match_id":"673491702661292033",
# "order_id":"673491720340279296",
# "price":"359.240000000000",
# "source":"接口",
# "symbol":"eth_usdt",
# "type":"buy-market"
# }
#
# swap
#
# {
# "trade_id":"6667390",
# "symbol":"cmt_btcusdt",
# "order_id":"525946425993854915",
# "price":"9839.00",
# "order_qty":"3466",
# "fee":"-0.0000528407360000",
# "timestamp":"1561121514442",
# "exec_type":"M",
# "side":"3"
# }
#
symbol = None
marketId = self.safe_string(trade, 'symbol')
base = None
quote = None
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
symbol = market['symbol']
base = market['base']
quote = market['quote']
elif marketId is not None:
parts = marketId.split('_')
numParts = len(parts)
if numParts == 2:
baseId, quoteId = parts
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
symbol = base + '/' + quote
else:
symbol = marketId.upper()
if (symbol is None) and (market is not None):
symbol = market['symbol']
base = market['base']
quote = market['quote']
timestamp = self.safe_integer(trade, 'created_at')
timestamp = self.safe_integer_2(trade, 'timestamp', 'ts', timestamp)
priceString = self.safe_string(trade, 'price')
amountString = self.safe_string_2(trade, 'filled_amount', 'order_qty')
amountString = self.safe_string_2(trade, 'size', 'amount', amountString)
takerOrMaker = self.safe_string_2(trade, 'exec_type', 'liquidity')
if takerOrMaker == 'M':
takerOrMaker = 'maker'
elif takerOrMaker == 'T':
takerOrMaker = 'taker'
orderType = self.safe_string(trade, 'type')
side = None
type = None
if orderType is not None:
side = self.safe_string(trade, 'type')
type = self.parse_order_type(side)
side = self.parse_order_side(side)
else:
side = self.safe_string_2(trade, 'side', 'direction')
type = self.parse_order_type(side)
side = self.parse_order_side(side)
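# note: private spot trades encode side and order type together in 'type'('buy-limit', 'sell-market', ...),
# public spot trades use 'direction', and swap trades use numeric 'side' codes, all normalized above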
feeCostString = self.safe_string(trade, 'fee')
if feeCostString is None:
feeCostString = self.safe_string(trade, 'filled_fees')
else:
feeCostString = Precise.string_neg(feeCostString)
fee = None
if feeCostString is not None:
feeCurrency = base if (side == 'buy') else quote
fee = {
# fee is either a positive number(invitation rebate)
# or a negative number(transaction fee deduction)
# therefore we need to invert the fee
# more about it https://github.com/ccxt/ccxt/issues/5909
'cost': feeCostString,
'currency': feeCurrency,
}
orderId = self.safe_string(trade, 'order_id')
id = self.safe_string_2(trade, 'trade_id', 'id')
return self.safe_trade({
'info': trade,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': symbol,
'id': id,
'order': orderId,
'type': type,
'takerOrMaker': takerOrMaker,
'side': side,
'price': priceString,
'amount': amountString,
'cost': None,
'fee': fee,
}, market)
async def fetch_trades(self, symbol, limit=None, since=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
method = None
if market['spot']:
method = 'dataGetMarketHistoryTrade'
elif market['swap']:
method = 'capiGetMarketTrades'
if market['spot']:
if limit is not None:
request['size'] = limit # default 1, max 2000
elif market['swap']:
if limit is None:
limit = 100 # default 20, max 100
request['limit'] = limit
response = await getattr(self, method)(self.extend(request, params))
#
# spot
#
# {
# "status":"ok",
# "ch":"market.btc_usdt.trade.detail",
# "ts":1595604968430,
# "data":{
# "ts":"1595604964000",
# "data":[
# {"id":"1","price":"9533.81","amount":"0.7326","direction":"sell","ts":"1595604964000"},
# {"id":"2","price":"9533.67","amount":"1.1591","direction":"buy","ts":"1595604961000"},
# {"id":"3","price":"9533.67","amount":"1.5022","direction":"sell","ts":"1595604959000"},
# ]
# }
# }
#
# swap
#
# [
# {"trade_id":"670833198971748613","price":"9578.50","size":"5412","side":"sell","timestamp":"1595665018790","symbol":"btcusd"},
# {"trade_id":"670833194240574915","price":"9579.00","size":"3972","side":"buy","timestamp":"1595665017662","symbol":"btcusd"},
# {"trade_id":"670833194240573915","price":"9579.00","size":"1227","side":"buy","timestamp":"1595665017662","symbol":"btcusd"},
# ]
#
trades = None
if isinstance(response, list):
trades = response
else:
data = self.safe_value(response, 'data', {})
trades = self.safe_value(data, 'data', [])
return self.parse_trades(trades, market, since, limit)
def parse_ohlcv(self, ohlcv, market=None, timeframe='1m'):
#
# spot
#
# {
# "id":"1594694700000",
# "amount":"283.6811",
# "count":"234",
# "open":"9230.00",
# "close":"9227.15",
# "low":"9206.66",
# "high":"9232.33",
# "vol":"2618015.032504000000"
# }
#
# swap
#
# [
# "1594693800000",
# "9240",
# "9241",
# "9222",
# "9228.5",
# "3913370",
# "424.003616350563"
# ]
#
options = self.safe_value(self.options, 'parseOHLCV', {})
volume = self.safe_value(options, 'volume', {})
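# spot candles are dicts keyed by field name('amount' holds the base volume),
# swap candles are plain arrays, so options['parseOHLCV']['volume'] supplies the column index to read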
if isinstance(ohlcv, list):
volumeIndex = self.safe_integer(volume, market['type'], 5) # the volume column index must be an integer to address the array
return [
self.safe_integer(ohlcv, 0), # timestamp
self.safe_number(ohlcv, 1), # Open
self.safe_number(ohlcv, 2), # High
self.safe_number(ohlcv, 3), # Low
self.safe_number(ohlcv, 4), # Close
# self.safe_number(ohlcv, 5), # Quote Volume
# self.safe_number(ohlcv, 6), # Base Volume
self.safe_number(ohlcv, volumeIndex), # Volume, bitget will return base volume in the 7th element for future markets
]
else:
volumeIndex = self.safe_value(volume, market['type'], 6)
return [
self.safe_integer(ohlcv, 'id'),
self.safe_number(ohlcv, 'open'), # Open
self.safe_number(ohlcv, 'high'), # High
self.safe_number(ohlcv, 'low'), # Low
self.safe_number(ohlcv, 'close'), # Close
self.safe_number(ohlcv, volumeIndex), # Base Volume
]
async def fetch_ohlcv(self, symbol, timeframe='1m', since=None, limit=None, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
method = None
type = market['type']
options = self.safe_value(self.options, 'timeframes', {})
intervals = self.safe_value(options, type, {})
interval = self.safe_value(intervals, self.timeframes[timeframe])
if market['spot']:
method = 'dataGetMarketHistoryKline'
request['period'] = interval
if limit is not None:
request['size'] = limit # default 150, max 1000
elif market['swap']:
duration = self.parse_timeframe(timeframe)
method = 'capiGetMarketCandles'
request['granularity'] = interval
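# the swap candles endpoint expects an ISO8601 start/end window, derived below from since/limit
# and the timeframe duration(defaulting to the most recent 1000 candles when since is omitted)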
now = self.milliseconds()
if since is None:
if limit is None:
limit = 1000
request['start'] = self.iso8601(now - limit * duration * 1000)
request['end'] = self.iso8601(now)
else:
request['start'] = self.iso8601(since)
if limit is None:
request['end'] = self.iso8601(now)
else:
request['end'] = self.iso8601(self.sum(since, limit * duration * 1000))
response = await getattr(self, method)(self.extend(request, params))
#
# spot
#
# {
# "status":"ok",
# "ch":"market.btc_usdt.kline.15min",
# "ts":1595594183874,
# "data":[
# {"id":"1594694700000","amount":"283.6811","count":"234","open":"9230.00","close":"9227.15","low":"9206.66","high":"9232.33","vol":"2618015.032504000000"},
# {"id":"1594695600000","amount":"457.2904","count":"238","open":"9227.15","close":"9229.46","low":"9223.80","high":"9235.14","vol":"4220734.684570000000"},
# {"id":"1594696500000","amount":"501.2353","count":"255","open":"9229.46","close":"9227.78","low":"9222.69","high":"9230.74","vol":"4625779.185006000000"},
# ]
# }
#
# swap
#
# [
# ["1594764900000","9255.5","9261","9251","9255.5","3958946","427.742307964305"],
# ["1594765800000","9255.5","9264","9252","9258","3609496","389.832756058107"],
# ["1594766700000","9258","9260","9244.5","9250.5","3738600","403.97870345085"],
# ]
#
candles = response
if not isinstance(response, list):
candles = self.safe_value(response, 'data', [])
return self.parse_ohlcvs(candles, market, timeframe, since, limit)
def parse_spot_balance(self, response):
#
# {
# "status":"ok",
# "ts":1595681450932,
# "data":{
# "list":[
# {"balance":"0.0000000000000000","currency":"BTC","type":"trade"},
# {"balance":"0.0000000000000000","currency":"BTC","type":"frozen"},
# {"balance":"0.0000000000000000","currency":"BTC","type":"lock"},
# ],
# "id":"7420922606",
# "type":"spot",
# "state":"working"
# }
# }
#
result = {'info': response}
data = self.safe_value(response, 'data')
balances = self.safe_value(data, 'list')
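# each currency is reported as separate 'trade', 'frozen' and 'lock' rows,
# the loop below maps 'trade' to the free balance and sums 'frozen' and 'lock' into the used balance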
for i in range(0, len(balances)):
balance = balances[i]
currencyId = self.safe_string(balance, 'currency')
code = self.safe_currency_code(currencyId)
if not (code in result):
account = self.account()
result[code] = account
type = self.safe_value(balance, 'type')
if type == 'trade':
result[code]['free'] = self.safe_string(balance, 'balance')
elif (type == 'frozen') or (type == 'lock'):
used = self.safe_string(result[code], 'used')
result[code]['used'] = Precise.string_add(used, self.safe_string(balance, 'balance'))
return self.safe_balance(result)
def parse_swap_balance(self, response):
#
# swap
#
# [
# {"equity":"0","fixed_balance":"0","total_avail_balance":"0","margin":"0","realized_pnl":"0","unrealized_pnl":"0","symbol":"bchusd","margin_frozen":"0","timestamp":"1595673431547","margin_mode":"fixed","forwardContractFlag":false},
# {"equity":"0","fixed_balance":"0","total_avail_balance":"0","margin":"0","realized_pnl":"0","unrealized_pnl":"0","symbol":"ethusd","margin_frozen":"0","timestamp":"1595673431573","margin_mode":"fixed","forwardContractFlag":false},
# {"equity":"0","fixed_balance":"0","total_avail_balance":"0","margin":"0","realized_pnl":"0","unrealized_pnl":"0","symbol":"cmt_btcsusdt","margin_frozen":"0","timestamp":"1595673431577","margin_mode":"fixed","forwardContractFlag":true},
# ]
#
#
result = {}
for i in range(0, len(response)):
balance = response[i]
marketId = self.safe_string(balance, 'symbol')
symbol = marketId
if marketId in self.markets_by_id:
symbol = self.markets_by_id[marketId]['symbol']
account = self.account()
# it may be incorrect to use total, free and used for swap accounts
account['total'] = self.safe_string(balance, 'equity')
account['free'] = self.safe_string(balance, 'total_avail_balance')
result[symbol] = account
return self.safe_balance(result)
async def fetch_accounts(self, params={}):
request = {
'method': 'accounts',
}
response = await self.apiGetAccountAccounts(self.extend(request, params))
#
# {
# "status":"ok",
# "ts":1595679591824,
# "data":[
# {"id":"7420922606","type":"spot","state":"working"}
# ]
# }
#
data = self.safe_value(response, 'data', [])
result = []
for i in range(0, len(data)):
account = data[i]
accountId = self.safe_string(account, 'id')
type = self.safe_string_lower(account, 'type')
result.append({
'id': accountId,
'type': type,
'currency': None,
'info': account,
})
return result
async def find_account_by_type(self, type):
await self.load_markets()
await self.load_accounts()
accountsByType = self.group_by(self.accounts, 'type')
accounts = self.safe_value(accountsByType, type)
if accounts is None:
raise ExchangeError(self.id + " findAccountByType() could not find an accountId with type '" + type + "', specify the 'accountId' parameter instead") # eslint-disable-line quotes
numAccounts = len(accounts)
if numAccounts > 1:
raise ExchangeError(self.id + " findAccountByType() found more than one accountId with type '" + type + "', specify the 'accountId' parameter instead") # eslint-disable-line quotes
return accounts[0]
async def get_account_id(self, params):
await self.load_markets()
await self.load_accounts()
defaultAccountId = self.safe_string(self.options, 'accountId')
accountId = self.safe_string(params, 'accountId', defaultAccountId)
if accountId is not None:
return accountId
defaultType = self.safe_string(self.options, 'defaultType', 'margin')
type = self.safe_string(params, 'type', defaultType)
params = self.omit(params, 'type')
if type is None:
raise ArgumentsRequired(self.id + " getAccountId() requires an 'accountId' parameter")
account = await self.find_account_by_type(type)
return account['id']
async def fetch_balance(self, params={}):
await self.load_markets()
await self.load_accounts()
marketType, query = self.handle_market_type_and_params('fetchBalance', None, params)
method = self.get_supported_mapping(marketType, {
'spot': 'apiGetAccountsAccountIdBalance',
'swap': 'swapGetAccountAccounts',
})
if marketType == 'spot':
accountId = await self.get_account_id(query)
query['account_id'] = accountId
query['method'] = 'balance'
response = await getattr(self, method)(query)
#
# spot
#
# {
# "status":"ok",
# "ts":1595681450932,
# "data":{
# "list":[
# {"balance":"0.0000000000000000","currency":"BTC","type":"trade"},
# {"balance":"0.0000000000000000","currency":"BTC","type":"frozen"},
# {"balance":"0.0000000000000000","currency":"BTC","type":"lock"},
# ],
# "id":"7420922606",
# "type":"spot",
# "state":"working"
# }
# }
#
# swap
#
# [
# {"equity":"0","fixed_balance":"0","total_avail_balance":"0","margin":"0","realized_pnl":"0","unrealized_pnl":"0","symbol":"bchusd","margin_frozen":"0","timestamp":"1595673431547","margin_mode":"fixed","forwardContractFlag":false},
# {"equity":"0","fixed_balance":"0","total_avail_balance":"0","margin":"0","realized_pnl":"0","unrealized_pnl":"0","symbol":"ethusd","margin_frozen":"0","timestamp":"1595673431573","margin_mode":"fixed","forwardContractFlag":false},
# {"equity":"0","fixed_balance":"0","total_avail_balance":"0","margin":"0","realized_pnl":"0","unrealized_pnl":"0","symbol":"cmt_btcsusdt","margin_frozen":"0","timestamp":"1595673431577","margin_mode":"fixed","forwardContractFlag":true},
# ]
#
return self.parse_balance_by_type(marketType, response)
def parse_balance_by_type(self, type, response):
if type == 'spot':
return self.parse_spot_balance(response)
elif type == 'swap':
return self.parse_swap_balance(response)
raise NotSupported(self.id + " fetchBalance does not support the '" + type + "' type(the type must be one of 'account', 'spot', or 'swap')")
def parse_order_status(self, status):
statuses = {
'submitted': 'open',
'partial-filled': 'open',
'partial-canceled': 'canceled',
'filled': 'closed',
'canceled': 'canceled',
'-2': 'failed',
'-1': 'canceled',
'0': 'open',
'1': 'open',
'2': 'closed',
'3': 'open',
'4': 'canceled',
}
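# spot orders report textual states('submitted', 'partial-filled', ...), swap orders report numeric codes('-2' through '4')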
return self.safe_string(statuses, status, status)
def parse_order_side(self, side):
sides = {
'buy-market': 'buy',
'sell-market': 'sell',
'buy-limit': 'buy',
'sell-limit': 'sell',
'1': 'long', # open long
'2': 'short', # open short
'3': 'long', # close long
'4': 'short', # close short
}
return self.safe_string(sides, side, side)
def parse_order_type(self, type):
types = {
'buy-market': 'market',
'sell-market': 'market',
'buy-limit': 'limit',
'sell-limit': 'limit',
'1': 'open', # open long
'2': 'open', # open short
'3': 'close', # close long
'4': 'close', # close short
}
return self.safe_string(types, type, type)
def parse_order(self, order, market=None):
#
# createOrder
#
# spot
#
# {
# "status":"ok",
# "ts":1595792596056,
# "data":671368296142774272
# }
#
# swap
#
# {
# "client_oid":"58775e54-0592-491c-97e8-e2369025f2d1",
# "order_id":"671757564085534713"
# }
#
# cancelOrder
#
# spot
#
# {
# "status": "ok",
# "ts": 1595818631279,
# "data": 671368296142774272
# }
#
# swap
#
# {
# "order_id":"671757564085534713",
# "client_oid":"58775e54-0592-491c-97e8-e2369025f2d1",
# "symbol":"cmt_ethusdt",
# "result":true,
# "err_code":null,
# "err_msg":null
# }
#
# fetchOpenOrders, fetchClosedOrders, fetchOrder
#
# spot
#
# {
# "account_id":"7420922606",
# "amount":"0.1000000000000000",
# "canceled_at":"1595872129618",
# "created_at":"1595872089525",
# "filled_amount":"0.000000000000",
# "filled_cash_amount":"0.000000000000",
# "filled_fees":"0.000000000000",
# "finished_at":"1595872129618",
# "id":"671701716584665088",
# "price":"150.000000000000",
# "source":"接口",
# "state":"canceled",
# "symbol":"eth_usdt",
# "type":"buy-limit"
# }
#
# swap
#
# {
# "symbol":"cmt_ethusdt",
# "size":"1",
# "timestamp":"1595885546770",
# "client_oid":"f3aa81d6-9a4c-4eab-bebe-ebc19da21cf2",
# "createTime":"1595885521200",
# "filled_qty":"0",
# "fee":"0.00000000",
# "order_id":"671758053112020913",
# "price":"150.00",
# "price_avg":"0.00",
# "status":"0",
# "type":"1",
# "order_type":"0",
# "totalProfits":null
# }
#
id = self.safe_string(order, 'order_id')
id = self.safe_string_2(order, 'id', 'data', id)
timestamp = self.safe_integer_2(order, 'created_at', 'createTime')
type = self.safe_string(order, 'type')
side = self.parse_order_side(type)
type = self.parse_order_type(type)
# if (side != 'buy') and (side != 'sell'):
# side = self.parse_order_side(type)
# }
# if (type != 'limit') and (type != 'market'):
# if 'pnl' in order:
# type = 'future'
# else:
# type = 'swap'
# }
# }
symbol = None
marketId = self.safe_string(order, 'symbol')
if marketId is not None:
if marketId in self.markets_by_id:
market = self.markets_by_id[marketId]
else:
symbol = marketId.upper()
if (symbol is None) and (market is not None):
symbol = market['symbol']
amount = self.safe_string_2(order, 'amount', 'size')
filled = self.safe_string_2(order, 'filled_amount', 'filled_qty')
cost = self.safe_string(order, 'filled_cash_amount')
price = self.safe_string(order, 'price')
average = self.safe_string(order, 'price_avg')
status = self.parse_order_status(self.safe_string_2(order, 'state', 'status'))
feeCost = self.safe_number_2(order, 'filled_fees', 'fee')
fee = None
if feeCost is not None:
feeCurrency = None
fee = {
'cost': feeCost,
'currency': feeCurrency,
}
clientOrderId = self.safe_string(order, 'client_oid')
return self.safe_order({
'info': order,
'id': id,
'clientOrderId': clientOrderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'lastTradeTimestamp': None,
'symbol': symbol,
'type': type,
'timeInForce': None,
'postOnly': None,
'side': side,
'price': price,
'stopPrice': None,
'average': average,
'cost': cost,
'amount': amount,
'filled': filled,
'remaining': None,
'status': status,
'fee': fee,
'trades': None,
}, market)
async def create_order(self, symbol, type, side, amount, price=None, params={}):
await self.load_markets()
await self.load_accounts()
market = self.market(symbol)
#
# spot
#
# account_id True string Account ID, obtained using the accounts method. Currency transactions use the account id of the 'spot' account; for loan asset transactions, please use the account id of the 'margin' account
# amount True string For a limit order this is the order quantity; for a market buy order it is the amount of quote currency to spend; for a market sell order it is the amount of base currency to sell
# price False string Order price, market order does not pass self parameter
# source False string Order source api
# symbol True string Trading pair btc_usdt, eth_btc ...
# type True string Order Type buy-market: buy at market price, sell-market: sell at market price, buy-limit: buy at limit price, sell-limit: sell at limit price
#
# swap
#
# symbol String Yes Contract ID
# client_oid String Yes customize order IDs to identify your orders(less than 50 characters, without special characters)
# size String Yes Quantity to buy or sell(value not equal to 0 or negative)
# type String Yes 1 Open long 2Open short 3 Close long 4 Close short
# order_type String Yes 0: Normal order(Unfilled and 0 imply normal limit order) 1: Post only 2: Fill or Kill 3: Immediate Or Cancel
# match_price String Yes 0 Limit price 1 market price
# price String No Price of each contract
#
request = {
'symbol': market['id'],
}
clientOrderId = self.safe_string_2(params, 'client_oid', 'clientOrderId', self.uuid())
params = self.omit(params, ['client_oid', 'clientOrderId'])
method = None
if market['spot']:
accountId = await self.get_account_id({
'type': market['type'],
})
method = 'apiPostOrderOrdersPlace'
request['account_id'] = accountId
request['method'] = 'place'
request['type'] = side + '-' + type
if type == 'limit':
request['amount'] = self.amount_to_precision(symbol, amount)
request['price'] = self.price_to_precision(symbol, price)
elif type == 'market':
# for market buy it requires the amount of quote currency to spend
if side == 'buy':
cost = self.safe_number(params, 'amount')
createMarketBuyOrderRequiresPrice = self.safe_value(self.options, 'createMarketBuyOrderRequiresPrice', True)
if createMarketBuyOrderRequiresPrice:
if price is not None:
if cost is None:
cost = amount * price
elif cost is None:
                            raise InvalidOrder(self.id + " createOrder() requires the price argument with market buy orders to calculate the total order cost (amount to spend), where cost = amount * price. Supply a price argument to the createOrder() call if you want the cost to be calculated for you from price and amount, or, alternatively, add .options['createMarketBuyOrderRequiresPrice'] = False and supply the total cost value in the 'amount' argument or in the 'amount' extra parameter (the exchange-specific behaviour)")
else:
cost = amount if (cost is None) else cost
request['amount'] = self.cost_to_precision(symbol, cost)
elif side == 'sell':
request['amount'] = self.amount_to_precision(symbol, amount)
# ...
elif market['swap']:
request['order_type'] = '0' # '0' = Normal order, None and 0 imply a normal limit order, '1' = Post only, '2' = Fill or Kill, '3' = Immediate Or Cancel
request['client_oid'] = clientOrderId
orderType = self.safe_string(params, 'type')
if orderType is None:
raise ArgumentsRequired(self.id + " createOrder() requires a type parameter, '1' = open long, '2' = open short, '3' = close long, '4' = close short for " + market['type'] + ' orders')
request['size'] = self.amount_to_precision(symbol, amount)
request['type'] = orderType
# if match_price is set to '1', the price parameter will be ignored for market orders
if type == 'limit':
request['match_price'] = '0'
request['price'] = self.price_to_precision(symbol, price)
elif type == 'market':
request['match_price'] = '1'
method = 'swapPostOrderPlaceOrder'
response = await getattr(self, method)(self.extend(request, params))
#
# spot
#
# {
# "status":"ok",
# "ts":1595792596056,
# "data":"671368296142774272"
# }
#
# swap
#
# {
# "client_oid":"58775e54-0592-491c-97e8-e2369025f2d1",
# "order_id":"671757564085534713"
# }
#
return self.parse_order(response, market)
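    # A minimal usage sketch (not part of the class; symbols and values are illustrative):
    # a spot limit order maps to the buy-limit/sell-limit types documented above, e.g.
    #     await exchange.create_order('ETH/USDT', 'limit', 'buy', 0.1, 150.0)
    # while a swap order additionally requires the numeric 'type' param in params
    # ('1' open long, '2' open short, '3' close long, '4' close short), e.g.
    #     await exchange.create_order(swapSymbol, 'limit', 'buy', 1, 150.0, {'type': '1'})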
async def cancel_order(self, id, symbol=None, params={}):
await self.load_markets()
market = None
type = None
if symbol is None:
defaultType = self.safe_string_2(self.options, 'cancelOrder', 'defaultType')
type = self.safe_string(params, 'type', defaultType)
if type == 'spot':
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrder() requires a symbol argument for spot orders')
else:
market = self.market(symbol)
type = market['type']
query = self.omit(params, 'type')
method = None
request = {}
if type == 'spot':
method = 'apiPostOrderOrdersOrderIdSubmitcancel'
request['order_id'] = id
request['method'] = 'submitcancel'
elif type == 'swap':
method = 'swapPostOrderCancelOrder'
request['orderId'] = id
request['symbol'] = market['id']
response = await getattr(self, method)(self.extend(request, query))
#
# spot
#
# {"status": "ok", "ts": 1595818631279, "data": 671368296142774272}
#
# swap
#
# {
# "order_id":"671757564085534713",
# "client_oid":"58775e54-0592-491c-97e8-e2369025f2d1",
# "symbol":"cmt_ethusdt",
# "result":true,
# "err_code":null,
# "err_msg":null
# }
#
return self.parse_order(response, market)
async def cancel_orders(self, ids, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' cancelOrders() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
type = self.safe_string(params, 'type', market['type'])
if type is None:
raise ArgumentsRequired(self.id + " cancelOrders() requires a type parameter(one of 'spot', 'swap').")
params = self.omit(params, 'type')
request = {}
method = None
if type == 'spot':
method = 'apiPostOrderOrdersBatchcancel'
request['method'] = 'batchcancel'
jsonIds = self.json(ids)
parts = jsonIds.split('"')
request['order_ids'] = ''.join(parts)
elif type == 'swap':
method = 'swapPostOrderCancelBatchOrders'
request['symbol'] = market['id']
request['ids'] = ids
response = await getattr(self, method)(self.extend(request, params))
#
# spot
#
# {
# "status": "ok",
# "data": {
# "success": [
# "673451224205135872",
# ],
# "failed": [
# {
# "err-msg": "invalid record",
# "order-id": "673451224205135873",
# "err-code": "base record invalid"
# }
# ]
# }
# }
#
# swap
#
# {
# "result":true,
# "symbol":"cmt_btcusdt",
# "order_ids":[
# "258414711",
# "478585558"
# ],
# "fail_infos":[
# {
# "order_id":"258414711",
# "err_code":"401",
# "err_msg":""
# }
# ]
# }
#
return response
async def fetch_order(self, id, symbol=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrder() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
type = self.safe_string(params, 'type', market['type'])
if type is None:
raise ArgumentsRequired(self.id + " fetchOrder() requires a type parameter(one of 'spot', 'swap').")
method = None
request = {}
if type == 'spot':
clientOid = self.safe_string(params, 'client_oid')
if clientOid is not None:
method = 'apiPostOrderOrdersClientOid'
request['client_oid'] = clientOid
else:
method = 'apiPostOrderOrdersOrderId'
request['order_id'] = id
request['method'] = 'getOrder'
elif type == 'swap':
method = 'swapGetOrderDetail'
request['symbol'] = market['id']
request['orderId'] = id
query = self.omit(params, 'type')
response = await getattr(self, method)(self.extend(request, query))
#
# spot
#
# {
# "status":"ok",
# "ts":1595897886717,
# "data":{
# "account_id":"7420922606",
# "amount":"0.1000000000000000",
# "canceled_at":"1595818631541",
# "created_at":"1595792595897",
# "filled_amount":"0.000000000000",
# "filled_cash_amount":"0.000000000000",
# "filled_fees":"0.000000000000",
# "finished_at":"1595818631541",
# "id":"671368296142774272",
# "price":"150.000000000000",
# "source":"接口",
# "state":"canceled",
# "symbol":"eth_usdt",
# "type":"buy-limit"
# }
# }
#
#
# swap
#
# {
# "symbol":"cmt_ethusdt",
# "size":"1",
# "timestamp":"1595896459890",
# "client_oid":"58775e54-0592-491c-97e8-e2369025f2d1",
# "createTime":"1595885404607",
# "filled_qty":"0",
# "fee":"0",
# "order_id":"671757564085534713",
# "price":"150",
# "price_avg":"0",
# "status":"-1",
# "type":"1",
# "order_type":"0",
# "totalProfits":"0"
# }
#
data = self.safe_value(response, 'data', response)
return self.parse_order(data, market)
async def fetch_open_orders(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOpenOrders() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
marketType, query = self.handle_market_type_and_params('fetchOpenOrders', market, params)
request = {
'symbol': market['id'],
}
if marketType == 'spot':
# request['from'] = self.safe_string(params, 'from') # order id
# request['direct'] = 'next' # or 'prev'
request['method'] = 'openOrders'
            if limit is not None:
                request['size'] = limit  # default 100, max 1000
elif marketType == 'swap':
request['status'] = '3' # 0 Failed, 1 Partially Filled, 2 Fully Filled 3 = Open + Partially Filled, 4 Canceling
request['from'] = '1'
request['to'] = '1'
            if limit is not None:
                request['limit'] = limit  # default 100, max 100
method = self.get_supported_mapping(marketType, {
'spot': 'apiGetOrderOrdersOpenOrders',
'swap': 'swapGetOrderOrders',
})
response = await getattr(self, method)(self.extend(request, query))
#
# spot
#
#
# {
# "status":"ok",
# "ts":1595875165865,
# "data":[
# {
# "account_id":"7420922606",
# "amount":"0.1000000000000000",
# "canceled_at":"1595872129618",
# "created_at":"1595872089525",
# "filled_amount":"0.000000000000",
# "filled_cash_amount":"0.000000000000",
# "filled_fees":"0.000000000000",
# "finished_at":"1595872129618",
# "id":"671701716584665088",
# "price":"150.000000000000",
# "source":"接口",
# "state":"canceled",
# "symbol":"eth_usdt",
# "type":"buy-limit"
# }
# ]
# }
#
# swap
#
# [
# {
# "symbol":"cmt_ethusdt",
# "size":"1",
# "timestamp":"1595885546770",
# "client_oid":"f3aa81d6-9a4c-4eab-bebe-ebc19da21cf2",
# "createTime":"1595885521200",
# "filled_qty":"0",
# "fee":"0.00000000",
# "order_id":"671758053112020913",
# "price":"150.00",
# "price_avg":"0.00",
# "status":"0",
# "type":"1",
# "order_type":"0",
# "totalProfits":null
# }
# ]
#
data = response
if not isinstance(response, list):
data = self.safe_value(response, 'data', [])
return self.parse_orders(data, market, None, limit)
async def fetch_closed_orders(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchClosedOrders() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
marketType, query = self.handle_market_type_and_params('fetchClosedOrders', market, params)
request = {
'symbol': market['id'],
}
if marketType == 'spot':
# Value range [((end_time) – 48h),(end_time)]
# the query window is 48 hours at most
# the window shift range is the last 30 days
if since is not None:
request['start_time'] = since
# request['end_time'] = self.safe_integer(params, 'end_time')
# request['from'] = self.safe_string(params, 'from') # order id
# request['direct'] = 'next' # or 'prev'
request['method'] = 'openOrders'
            if limit is not None:
                request['size'] = limit  # default 100, max 1000
elif marketType == 'swap':
request['status'] = '2' # 0 Failed, 1 Partially Filled, 2 Fully Filled 3 = Open + Partially Filled, 4 Canceling
request['from'] = '1'
request['to'] = '1'
            if limit is not None:
                request['limit'] = limit  # default 100, max 100
method = self.get_supported_mapping(marketType, {
'spot': 'apiGetOrderOrdersHistory',
'swap': 'swapGetOrderOrders',
})
response = await getattr(self, method)(self.extend(request, query))
#
# spot
#
#
# {
# "status":"ok",
# "ts":1595875165865,
# "data":[
# {
# "account_id":"7420922606",
# "amount":"0.1000000000000000",
# "canceled_at":"1595872129618",
# "created_at":"1595872089525",
# "filled_amount":"0.000000000000",
# "filled_cash_amount":"0.000000000000",
# "filled_fees":"0.000000000000",
# "finished_at":"1595872129618",
# "id":"671701716584665088",
# "price":"150.000000000000",
# "source":"接口",
# "state":"canceled",
# "symbol":"eth_usdt",
# "type":"buy-limit"
# }
# ]
# }
#
# swap
#
# [
# {
# "symbol":"cmt_ethusdt",
# "size":"1",
# "timestamp":"1595885546770",
# "client_oid":"f3aa81d6-9a4c-4eab-bebe-ebc19da21cf2",
# "createTime":"1595885521200",
# "filled_qty":"0",
# "fee":"0.00000000",
# "order_id":"671758053112020913",
# "price":"150.00",
# "price_avg":"0.00",
# "status":"0",
# "type":"1",
# "order_type":"0",
# "totalProfits":null
# }
# ]
#
data = response
if not isinstance(response, list):
data = self.safe_value(response, 'data', [])
return self.parse_orders(data, market, None, limit)
async def fetch_deposits(self, code=None, since=None, limit=None, params={}):
if code is None:
raise ArgumentsRequired(self.id + ' fetchDeposits() requires a currency code argument')
await self.load_markets()
currency = self.currency(code)
request = {
'currency': currency['id'],
'method': 'deposit_withdraw',
'type': 'deposit',
'size': 12,
}
response = await self.apiGetOrderDepositWithdraw(self.extend(request, params))
#
# {
# "status": "ok",
# "data": [
# {
# "id": 1171,
# "type": "deposit",
# "currency": "usdt",
# "tx_hash": "ed03094b84eafbe4bc16e7ef766ee959885ee5bcb265872baaa9c64e1cf86c2b",
# "amount": 7.457467,
# "address": "rae93V8d2mdoUQHwBDBdM4NHCMehRJAsbm",
# "address_tag": "100040",
# "fee": 0,
# "state": "safe",
# "created_at": 1510912472199,
# "updated_at": 1511145876575
# },
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_transactions(data, currency, since, limit, params)
async def fetch_withdrawals(self, code=None, since=None, limit=None, params={}):
if code is None:
raise ArgumentsRequired(self.id + ' fetchWithdrawals() requires a currency code argument')
await self.load_markets()
currency = self.currency(code)
request = {
'currency': currency['id'],
'method': 'deposit_withdraw',
'type': 'withdraw',
'size': 12,
}
response = await self.apiGetOrderDepositWithdraw(self.extend(request, params))
#
# {
# "status": "ok",
# "data": [
# {
# "id": 1171,
# "type": "withdraw",
# "currency": "usdt",
# "tx_hash": "ed03094b84eafbe4bc16e7ef766ee959885ee5bcb265872baaa9c64e1cf86c2b",
# "amount": 7.457467,
# "address": "rae93V8d2mdoUQHwBDBdM4NHCMehRJAsbm",
# "address_tag": "100040",
# "fee": 0,
# "state": "safe",
# "created_at": 1510912472199,
# "updated_at": 1511145876575
# },
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_transactions(data, currency, since, limit, params)
def parse_transaction_status(self, status):
statuses = {
# withdrawals
            'WaitForOperation': 'pending',  # awaiting withdrawal
            'OperationLock': 'pending',  # initial review locked successfully
            'OperationSuccess': 'ok',  # withdrawal succeeded
            'Cancel': 'canceled',  # canceled by the user
            'Sure': 'ok',  # second review locked successfully
            'Fail': 'failed',  # withdrawal error
            'WaitForChainSure': 'ok',  # awaiting on-chain confirmation
            # deposits
            'WAIT_0': 'pending',  # awaiting confirmation
            'WAIT_1': 'pending',  # awaiting confirmation
            'DATA_CHANGE': 'pending',  # confirmation pending
            'SUCCESS': 'ok',  # deposit succeeded
}
return self.safe_string(statuses, status, status)
def parse_transaction(self, transaction, currency=None):
#
# fetchDeposits, fetchWithdrawals
#
# {
# "id": 1171,
# "type": "withdraw",
# "currency": "usdt",
# "tx_hash": "ed03094b84eafbe4bc16e7ef766ee959885ee5bcb265872baaa9c64e1cf86c2b",
# "amount": 7.457467,
# "address": "rae93V8d2mdoUQHwBDBdM4NHCMehRJAsbm",
# "address_tag": "100040",
# "fee": 0,
# "state": "safe",
# "created_at": 1510912472199,
# "updated_at": 1511145876575
# }
#
id = self.safe_string(transaction, 'id')
address = self.safe_string(transaction, 'address')
tag = self.safe_string(transaction, 'address_tag')
tagFrom = None
tagTo = tag
addressFrom = None
addressTo = address
type = self.safe_string(transaction, 'type')
if type == 'withdraw':
type = 'withdrawal'
elif type == 'deposit':
type = 'deposit'
currencyId = self.safe_string(transaction, 'currency')
code = self.safe_currency_code(currencyId)
amount = self.safe_number(transaction, 'amount')
status = self.parse_transaction_status(self.safe_string(transaction, 'state'))
txid = self.safe_string(transaction, 'tx_hash')
timestamp = self.safe_integer(transaction, 'created_at')
updated = self.safe_integer(transaction, 'updated_at')
feeCost = self.safe_number(transaction, 'fee')
fee = None
if feeCost is not None:
fee = {
'currency': code,
'cost': feeCost,
}
return {
'info': transaction,
'id': id,
'currency': code,
'amount': amount,
'network': None,
'addressFrom': addressFrom,
'addressTo': addressTo,
'address': address,
'tagFrom': tagFrom,
'tagTo': tagTo,
'tag': tag,
'status': status,
'type': type,
'updated': updated,
'txid': txid,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'fee': fee,
}
async def fetch_my_trades(self, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchMyTrades() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
type = self.safe_string(params, 'type', market['type'])
query = self.omit(params, 'type')
if type == 'swap':
raise ArgumentsRequired(self.id + ' fetchMyTrades() is not supported for ' + type + ' type')
#
# spot
#
# POST /api/v1/order/matchresults Query current order, order history
# symbol True string trading pair btc_usdt, eth_btc ...
# types False string Query order type combination buy-market, sell-market, buy-limit, sell-limit
# start_date False string Query start date, date format yyyy-mm-dd -61 days [-61day, end-date]
# end_date False string Query end date, date format yyyy-mm-dd Now [start-date, now]
# from False string Query start ID order record id
# direct False string Query direction ‘next’ is default , the transaction record ID is sorted from large to small prev,next
# size False string Query record size 100 <=100
#
request = {
'symbol': market['id'],
'method': 'matchresults',
# 'types': 'buy-market,sell-market,buy-limit,sell-limit',
# 'start_date': self.yyyymmdd(since),
# 'end_date': self.yyyymmdd(self.milliseconds()),
# 'size': 100,
# 'direct': 'next',
}
if since is not None:
request['start_date'] = self.yyyymmdd(since)
end = self.sum(since, 2 * 24 * 60 * 60 * 1000)
request['end_date'] = self.yyyymmdd(end)
if limit is not None:
request['size'] = limit # default 100, max 100
response = await self.apiPostOrderMatchresults(self.extend(request, query))
#
# {
# "status": "ok",
# "data": [
# {
# "id": 29555,
# "order_id": 59378,
# "match_id": 59335,
# "symbol": "eth_usdt",
# "type": "buy-limit",
# "source": "api",
# "price": "100.1000000000",
# "filled_amount": "0.9845000000",
# "filled_fees": "0.0019690000",
# "created_at": 1494901400487
# }
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_trades(data, market, since, limit)
async def fetch_order_trades(self, id, symbol=None, since=None, limit=None, params={}):
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchOrderTrades() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
type = self.safe_string(params, 'type', market['type'])
params = self.omit(params, 'type')
method = None
request = {}
if type == 'spot':
request['order_id'] = id
request['method'] = 'matchresults'
method = 'apiPostOrderOrdersOrderIdMatchresults'
elif type == 'swap':
request['orderId'] = id
request['symbol'] = market['id']
method = 'swapGetOrderFills'
response = await getattr(self, method)(self.extend(request, params))
#
# spot
#
# {
# "status":"ok",
# "ts":1596298917277,
# "data":[
# {
# "id":"614164775",
# "created_at":"1596298860602",
# "filled_amount":"0.0417000000000000",
# "filled_fees":"0.0000834000000000",
# "match_id":"673491702661292033",
# "order_id":"673491720340279296",
# "price":"359.240000000000",
# "source":"接口",
# "symbol":"eth_usdt",
# "type":"buy-market"
# }
# ]
# }
#
# swap
#
#
# [
# {
# "trade_id":"6667390",
# "symbol":"cmt_btcusdt",
# "order_id":"525946425993854915",
# "price":"9839.00",
# "order_qty":"3466",
# "fee":"-0.0000528407360000",
# "timestamp":"1561121514442",
# "exec_type":"M",
# "side":"3"
# }
# ]
#
data = response
if not isinstance(data, list):
data = self.safe_value(response, 'data', [])
        return self.parse_trades(data, market, since, limit)
async def fetch_position(self, symbol, params={}):
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = await self.swapGetPositionSinglePosition(self.extend(request, params))
#
# {
# "margin_mode":"fixed", # Margin mode: crossed / fixed
# "holding":[
# {
# "symbol":"cmt_btcusdt", # Contract name
# "liquidation_price":"0.00", # Estimated liquidation price
# "position":"0", # Position Margin, the margin for holding current positions
# "avail_position":"0", # Available position
# "avg_cost":"0.00", # Transaction average price
# "leverage":"2", # Leverage
# "realized_pnl":"0.00000000", # Realized Profit and loss
# "keepMarginRate":"0.005", # Maintenance margin rate
# "side":"1", # Position Direction Long or short, Mark obsolete
# "holdSide":"1", # Position Direction Long or short
# "timestamp":"1557571623963", # System timestamp
# "margin":"0.0000000000000000", # Used margin
# "unrealized_pnl":"0.00000000", # Unrealized profit and loss
# }
# ]
# }
return response
async def fetch_positions(self, symbols=None, params={}):
await self.load_markets()
response = await self.swapGetPositionAllPosition(params)
#
# [
# {
# "margin_mode":"fixed",
# "holding":[
# {
# "liquidation_price":"0.00",
# "position":"0",
# "avail_position":"0",
# "avg_cost":"0.00",
# "symbol":"btcusd",
# "leverage":"20",
# "keepMarginRate":"0.005",
# "realized_pnl":"0.00000000",
# "unrealized_pnl":"0",
# "side":"long",
# "holdSide":"1",
# "timestamp":"1595698564915",
# "margin":"0.0000000000000000"
# },
# ]
# },
# ]
#
# todo unify parsePosition/parsePositions
return response
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
request = '/' + self.implode_params(path, params)
if (api == 'capi') or (api == 'swap'):
request = '/api/swap/' + self.version + request
else:
request = '/' + api + '/v1' + request
query = self.omit(params, self.extract_params(path))
url = self.implode_hostname(self.urls['api'][api]) + request
if (api == 'data') or (api == 'capi'):
if query:
url += '?' + self.urlencode(query)
elif api == 'swap':
self.check_required_credentials()
timestamp = str(self.milliseconds())
auth = timestamp + method + request
if method == 'POST':
body = self.json(params)
auth += body
else:
if params:
query = self.urlencode(self.keysort(params))
url += '?' + query
auth += '?' + query
signature = self.hmac(self.encode(auth), self.encode(self.secret), hashlib.sha256, 'base64')
headers = {
'ACCESS-KEY': self.apiKey,
'ACCESS-SIGN': signature,
'ACCESS-TIMESTAMP': timestamp,
'ACCESS-PASSPHRASE': self.password,
}
if method == 'POST':
headers['Content-Type'] = 'application/json'
elif api == 'api':
timestamp = str(self.milliseconds())
auth = ''
query = self.keysort(query)
auth = self.rawencode(query)
hash = self.hash(self.encode(self.secret), 'sha1')
signed = auth
signature = self.hmac(self.encode(auth), self.encode(hash), hashlib.md5)
if len(auth) > 0:
signed += '&'
signed += 'sign=' + signature + '&req_time=' + timestamp + '&accesskey=' + self.apiKey
if method == 'GET':
if query:
url += '?' + signed
elif method == 'POST':
url += '?sign=' + signature + '&req_time=' + timestamp + '&accesskey=' + self.apiKey
body = auth
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
}
return {'url': url, 'method': method, 'body': body, 'headers': headers}
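    # Illustrative note on the 'swap' signing above (values and path are examples):
    # the prehash string is timestamp + method + request path, plus the JSON body
    # for POST requests or '?' + the sorted query string for GET requests, e.g.
    #     '1595898862000' + 'GET' + '/api/swap/<version>/order/detail?orderId=1&symbol=cmt_ethusdt'
    # and its base64-encoded HMAC-SHA256 (keyed with the secret) is sent as
    # ACCESS-SIGN together with ACCESS-KEY, ACCESS-TIMESTAMP and ACCESS-PASSPHRASE.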
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if not response:
return # fallback to default error handler
#
# spot
#
# {"status":"fail","err_code":"01001","err_msg":"系统异常,请稍后重试"}
# {"status":"error","ts":1595594160149,"err_code":"invalid-parameter","err_msg":"invalid size, valid range: [1,2000]"}
# {"status":"error","ts":1595684716042,"err_code":"invalid-parameter","err_msg":"illegal sign invalid"}
# {"status":"error","ts":1595700216275,"err_code":"bad-request","err_msg":"your balance is low!"}
# {"status":"error","ts":1595700344504,"err_code":"invalid-parameter","err_msg":"invalid type"}
# {"status":"error","ts":1595703343035,"err_code":"bad-request","err_msg":"order cancel fail"}
# {"status":"error","ts":1595704360508,"err_code":"invalid-parameter","err_msg":"accesskey not null"}
# {"status":"error","ts":1595704490084,"err_code":"invalid-parameter","err_msg":"permissions not right"}
# {"status":"error","ts":1595711862763,"err_code":"system exception","err_msg":"system exception"}
# {"status":"error","ts":1595730308979,"err_code":"bad-request","err_msg":"20003"}
#
# swap
#
# {"code":"40015","msg":"","requestTime":1595698564931,"data":null}
# {"code":"40017","msg":"Order id must not be blank","requestTime":1595702477835,"data":null}
# {"code":"40017","msg":"Order Type must not be blank","requestTime":1595698516162,"data":null}
# {"code":"40301","msg":"","requestTime":1595667662503,"data":null}
# {"code":"40017","msg":"Contract code must not be blank","requestTime":1595703151651,"data":null}
# {"code":"40108","msg":"","requestTime":1595885064600,"data":null}
# {"order_id":"513468410013679613","client_oid":null,"symbol":"ethusd","result":false,"err_code":"order_no_exist_error","err_msg":"订单不存在!"}
#
message = self.safe_string(response, 'err_msg')
errorCode = self.safe_string_2(response, 'code', 'err_code')
feedback = self.id + ' ' + body
nonEmptyMessage = ((message is not None) and (message != ''))
if nonEmptyMessage:
self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], message, feedback)
nonZeroErrorCode = (errorCode is not None) and (errorCode != '00000')
if nonZeroErrorCode:
self.throw_exactly_matched_exception(self.exceptions['exact'], errorCode, feedback)
if nonZeroErrorCode or nonEmptyMessage:
raise ExchangeError(feedback) # unknown message
|
py | b402ccf23f6040ecdd9af879dd5efb31fdda9150 | # -*- coding: utf-8 -*-
from resources.lib.kodi import utils
def get_kodi_volume():
result = utils.kodi_json_request({"jsonrpc": "2.0",
"method": "Application.GetProperties",
"params": {"properties": ["volume"]},
"id": 7})
return result["volume"]
def set_kodi_volume(volume):
utils.kodi_json_request({"jsonrpc": "2.0",
"method": "Application.SetVolume",
"params": {"volume": volume}, "id": 8})
def get_youtube_plugin_path(videoid, seek=0): # type: (str, str) -> str
if utils.get_setting("playback-addon") == "Tubed":
return "plugin://plugin.video.tubed/?mode=play&video_id={}&start_offset={}".format(
videoid, float(seek))
else:
return "plugin://plugin.video.youtube/play/?video_id={}&seek={}".format(
videoid, float(seek))
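# For example (illustrative video id), get_youtube_plugin_path("abc123XYZ", 90)
# returns "plugin://plugin.video.tubed/?mode=play&video_id=abc123XYZ&start_offset=90.0"
# when the playback add-on setting is "Tubed", and
# "plugin://plugin.video.youtube/play/?video_id=abc123XYZ&seek=90.0" otherwise.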
def remote_connected(name):
utils.notification(message="{} {}!".format(name, utils.get_string(32006)))
def remote_disconnected(name):
utils.notification(message="{} {}!".format(name, utils.get_string(32007)))
|
py | b402cd190eeef177eb257de7e257c3a8f7cfed63 | #!/usr/bin/python
# Copyright 2017 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Helper functions for working with prow.
"""
import argparse
import json
import logging
import os
import re
import time
from google.cloud import storage # pylint: disable=no-name-in-module
from kubeflow.tf_operator import test_util, util
# Default repository organization and name.
# This should match the values used in Go imports.
GO_REPO_OWNER = "kubeflow"
GO_REPO_NAME = "training-operator"
GCS_REGEX = re.compile("gs://([^/]*)/(.*)")
def get_gcs_output():
"""Return the GCS directory where test outputs should be written to."""
job_name = os.getenv("JOB_NAME")
# GCS layout is defined here:
# https://github.com/kubernetes/test-infra/tree/master/gubernator#job-artifact-gcs-layout
pull_number = os.getenv("PULL_NUMBER")
if pull_number:
output = ("gs://kubernetes-jenkins/pr-logs/pull/{owner}_{repo}/"
"{pull_number}/{job}/{build}").format(
owner=GO_REPO_OWNER,
repo=GO_REPO_NAME,
pull_number=pull_number,
job=job_name,
build=os.getenv("BUILD_NUMBER"))
return output
elif os.getenv("REPO_OWNER"):
# It is a postsubmit job
output = ("gs://kubernetes-jenkins/logs/{owner}_{repo}/"
"{job}/{build}").format(
owner=GO_REPO_OWNER,
repo=GO_REPO_NAME,
job=job_name,
build=os.getenv("BUILD_NUMBER"))
return output
# Its a periodic job
output = ("gs://kubernetes-jenkins/logs/{job}/{build}").format(
job=job_name, build=os.getenv("BUILD_NUMBER"))
return output
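# For example (illustrative values), a presubmit run with PULL_NUMBER=123,
# JOB_NAME=my-job and BUILD_NUMBER=45 resolves to
#   gs://kubernetes-jenkins/pr-logs/pull/kubeflow_training-operator/123/my-job/45
# a postsubmit run to gs://kubernetes-jenkins/logs/kubeflow_training-operator/my-job/45
# and a periodic run to gs://kubernetes-jenkins/logs/my-job/45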
def get_symlink_output(pull_number, job_name, build_number):
"""Return the location where the symlink should be created."""
# GCS layout is defined here:
# https://github.com/kubernetes/test-infra/tree/master/gubernator#job-artifact-gcs-layout
if not pull_number:
# Symlinks are only created for pull requests.
return ""
output = ("gs://kubernetes-jenkins/pr-logs/directory/"
"{job}/{build}.txt").format(
job=job_name, build=build_number)
return output
def create_started(gcs_client, output_dir, sha):
"""Create the started output in GCS.
Args:
gcs_client: GCS client
output_dir: The GCS directory where the output should be written.
sha: Sha for the mlkube.io repo
Returns:
blob: The created blob.
"""
# See:
# https://github.com/kubernetes/test-infra/tree/master/gubernator#job-artifact-gcs-layout
# For a list of fields expected by gubernator
started = {
"timestamp": int(time.time()),
"repos": {
# List all repos used and their versions.
GO_REPO_OWNER + "/" + GO_REPO_NAME:
sha,
},
}
PULL_REFS = os.getenv("PULL_REFS", "")
if PULL_REFS:
started["pull"] = PULL_REFS
m = GCS_REGEX.match(output_dir)
bucket = m.group(1)
path = m.group(2)
bucket = gcs_client.get_bucket(bucket)
blob = bucket.blob(os.path.join(path, "started.json"))
blob.upload_from_string(json.dumps(started))
return blob
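# An illustrative started.json written by create_started (values are examples):
#   {"timestamp": 1510912472, "repos": {"kubeflow/training-operator": "<sha>"}, "pull": "<PULL_REFS>"}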
def create_finished(gcs_client, output_dir, success):
"""Create the finished output in GCS.
Args:
gcs_client: GCS client
output_dir: The GCS directory where the output should be written.
success: Boolean indicating whether the test was successful.
Returns:
blob: The blob object that we created.
"""
result = "FAILURE"
if success:
result = "SUCCESS"
finished = {
"timestamp": int(time.time()),
"result": result,
# Dictionary of extra key value pairs to display to the user.
# TODO(jlewi): Perhaps we should add the GCR path of the Docker image
# we are running in. We'd have to plumb this in from bootstrap.
"metadata": {},
}
m = GCS_REGEX.match(output_dir)
bucket = m.group(1)
path = m.group(2)
bucket = gcs_client.get_bucket(bucket)
blob = bucket.blob(os.path.join(path, "finished.json"))
blob.upload_from_string(json.dumps(finished))
return blob
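# An illustrative finished.json written by create_finished (timestamp is an example):
#   {"timestamp": 1510912500, "result": "SUCCESS", "metadata": {}}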
def create_symlink(gcs_client, symlink, output):
"""Create a 'symlink' to the output directory.
Args:
gcs_client: GCS client
symlink: GCS path of the object to server as the link
output: The location to point to.
"""
m = GCS_REGEX.match(symlink)
bucket = m.group(1)
path = m.group(2)
bucket = gcs_client.get_bucket(bucket)
blob = bucket.blob(path)
blob.upload_from_string(output)
return blob
def upload_outputs(gcs_client, output_dir, build_log):
bucket_name, path = util.split_gcs_uri(output_dir)
bucket = gcs_client.get_bucket(bucket_name)
if not os.path.exists(build_log):
logging.error("File %s doesn't exist.", build_log)
else:
logging.info("Uploading file %s.", build_log)
blob = bucket.blob(os.path.join(path, "build-log.txt"))
blob.upload_from_filename(build_log)
def get_commit_from_env():
"""Get the commit to test from prow environment variables."""
# If this is a presubmit PULL_PULL_SHA will be set see:
# https://github.com/kubernetes/test-infra/tree/master/prow#job-evironment-variables
sha = ""
pull_number = os.getenv("PULL_NUMBER", "")
if pull_number:
sha = os.getenv("PULL_PULL_SHA", "")
else:
sha = os.getenv("PULL_BASE_SHA", "")
return sha
def create_latest(gcs_client, job_name, sha):
"""Create a file in GCS with information about the latest passing postsubmit.
"""
bucket_name = "kubeflow-ci-results"
path = os.path.join(job_name, "latest_green.json")
bucket = gcs_client.get_bucket(bucket_name)
logging.info("Creating GCS output: bucket: %s, path: %s.", bucket_name, path)
data = {
"status": "passing",
"job": job_name,
"sha": sha,
}
blob = bucket.blob(path)
blob.upload_from_string(json.dumps(data))
def _get_actual_junit_files(bucket, prefix):
actual_junit = set()
for b in bucket.list_blobs(prefix=os.path.join(prefix, "junit")):
actual_junit.add(os.path.basename(b.name))
return actual_junit
def check_no_errors(gcs_client, artifacts_dir, junit_files):
"""Check that all the XML files exist and there were no errors.
Args:
gcs_client: The GCS client.
artifacts_dir: The directory where artifacts should be stored.
junit_files: List of the names of the junit files.
Returns:
True if there were no errors and false otherwise.
"""
bucket_name, prefix = util.split_gcs_uri(artifacts_dir)
bucket = gcs_client.get_bucket(bucket_name)
no_errors = True
# Get a list of actual junit files.
actual_junit = _get_actual_junit_files(bucket, prefix)
for f in junit_files:
full_path = os.path.join(artifacts_dir, f)
logging.info("Checking %s", full_path)
b = bucket.blob(os.path.join(prefix, f))
if not b.exists():
logging.error("Missing %s", full_path)
no_errors = False
continue
xml_contents = b.download_as_string()
if test_util.get_num_failures(xml_contents) > 0:
logging.info("Test failures in %s", full_path)
no_errors = False
# Check if there were any extra tests that ran and treat
# that as a failure.
extra = set(actual_junit) - set(junit_files)
if extra:
logging.error("Extra junit files found: %s", ",".join(extra))
no_errors = False
return no_errors
def finalize_prow_job(args):
"""Finalize a prow job.
Finalizing a PROW job consists of determining the status of the
prow job by looking at the junit files and then creating finished.json.
"""
junit_files = args.junit_files.split(",")
gcs_client = storage.Client()
output_dir = get_gcs_output()
artifacts_dir = os.path.join(output_dir, "artifacts")
no_errors = check_no_errors(gcs_client, artifacts_dir, junit_files)
create_finished(gcs_client, output_dir, no_errors)
def main(): # pylint: disable=too-many-locals
logging.getLogger().setLevel(logging.INFO) # pylint: disable=too-many-locals
logging.basicConfig(
level=logging.INFO,
format=('%(levelname)s|%(asctime)s'
'|%(pathname)s|%(lineno)d| %(message)s'),
datefmt='%Y-%m-%dT%H:%M:%S',
)
# create the top-level parser
parser = argparse.ArgumentParser(description="Steps related to prow.")
subparsers = parser.add_subparsers()
#############################################################################
# Finalize prow job.
parser_finished = subparsers.add_parser(
"finalize_job", help="Finalize the prow job.")
parser_finished.add_argument(
"--junit_files",
default="",
type=str,
help=("A comma separated list of the names of "
"the expected junit files."))
parser_finished.set_defaults(func=finalize_prow_job)
# parse the args and call whatever function was selected
args = parser.parse_args()
args.func(args)
if __name__ == "__main__":
main()
|
py | b402cda1fa8098e234bab0f9a82e69c008301d51 | # Weights dict created from rosetta_source/src/core/scoring/ScoreType.hh at revision 47732 using the parseScoreType.py function Shane wrote
# When they are set, the columns correspond respectively to the description from the Rosetta 3.3 manual, a related comment in ScoreType.hh or https://www.rosettacommons.org/docs/latest/rna-denovo.html, and a list of the .wts files in which the terms are present.
# Most of this information is not present at the moment.
# More information was taken from here: https://www.rosettacommons.org/docs/latest/score-types.html
rosetta_weights = {
"fa_atr" : ("Lennard-Jones attractive between atoms in different residues", "enumeration starts at 1 for indexing utility::vector1", ['standard_weights']),
"fa_rep" : ("Lennard-Jones repulsive between atoms in different residues", None, ['standard_weights']),
"fa_sol" : ("Lazaridis-Jarplus solvation energy", None, ['standard_weights']),
"fa_intra_atr" : (None, None, []),
"fa_intra_rep" : ("Lennard-Jones repulsive between atoms in the same residue", None, ['standard_weights']),
"fa_intra_sol" : (None, None, []),
"lk_hack" : (None, None, []),
"lk_ball" : (None, None, []),
"lk_ball_iso" : (None, None, []),
"coarse_fa_atr" : (None, None, []),
"coarse_fa_rep" : (None, None, []),
"coarse_fa_sol" : (None, None, []),
"coarse_beadlj" : (None, None, []),
"mm_lj_intra_rep" : (None, None, []),
"mm_lj_intra_atr" : (None, None, []),
"mm_lj_inter_rep" : (None, None, []),
"mm_lj_inter_atr" : (None, None, []),
"mm_twist" : (None, "could be lr 2benergy and not in energy graph", []),
"mm_bend" : ("Deviation of bond angles from the mean", "could be lr 2benergy and not in energy graph", []),
"mm_stretch" : (None, "could be lr 2benergy and not in energy graph", []),
"lk_costheta" : (None, None, []),
"lk_polar" : (None, None, []),
"lk_nonpolar" : (None, "Lazaridis-Karplus solvation energy, over nonpolar atoms", []),
"hack_elec" : (None, None, []),
"fa_elec" : ("Coulombic electrostatic potential with a distance-dependant dielectric", None, []),
"dslf_fa13" : ("Disulfide geometry potential", None, []),
"hack_elec_bb_bb" : (None, None, []),
"hack_elec_bb_sc" : (None, None, []),
"hack_elec_sc_sc" : (None, None, []),
"h2o_hbond" : (None, None, []),
"dna_dr" : (None, None, []),
"dna_bp" : (None, None, []),
"dna_bs" : (None, None, []),
"peptide_bond" : (None, None, []),
"pcs" : (None, "Pseudocontact Shift Energy", []),
"pcs2" : (None, "Pseudocontact Shift Energy version 2. Will replace pcs end of 2010", []),
"fastsaxs" : (None, "fastsaxs agreement using formulation of Stovgaard et al (BMC Bioinf. 2010)", []),
"saxs_score" : (None, "centroid saxs asessment", []),
"saxs_cen_score" : (None, None, []),
"saxs_fa_score" : (None, "full-atom SAXS score", []),
"pddf_score" : (None, "score based on pairwise distance distribution function", []),
"fa_mbenv" : (None, "depth dependent reference term", []),
"fa_mbsolv" : (None, "burial+depth dependent term", []),
"hack_elec_rna_phos_phos" : (None, "Simple electrostatic repulsion term between phosphates", []),
"hack_elec_rna_phos_sugr" : (None, None, []),
"hack_elec_rna_phos_base" : (None, None, []),
"hack_elec_rna_sugr_sugr" : (None, None, []),
"hack_elec_rna_sugr_base" : (None, None, []),
"hack_elec_rna_base_base" : (None, None, []),
"hack_elec_aro_aro" : (None, None, []),
"hack_elec_aro_all" : (None, None, []),
"hack_aro" : (None, None, []),
"rna_fa_atr_base" : (None, None, []),
"rna_fa_rep_base" : (None, None, []),
"rna_data_backbone" : (None, "Using chemical accessibility data for RNA.", []),
"ch_bond" : (None, "Carbon hydrogen bonds", []),
"ch_bond_bb_bb" : (None, None, []),
"ch_bond_sc_sc" : (None, None, []),
"ch_bond_bb_sc" : (None, None, []),
"pro_close" : ("Proline ring closure energy", None, ['standard_weights']),
"rama2b" : (None, None, []),
"vdw" : (None, "centroid", []),
"cenpack" : (None, "centroid", []),
"cenpack_smooth" : (None, "fpd smooth cenpack", []),
"cen_hb" : (None, "fpd centroid bb hbonding", []),
"hybrid_vdw" : (None, "hybrid centroid+fa", []),
"rna_vdw" : (None, "low res clash check for RNA", []),
"rna_base_backbone" : (None, "Bases to 2'-OH, phosphates, etc.", []),
"rna_backbone_backbone" : (None, "2'-OH to 2'-OH, phosphates, etc.", []),
"rna_repulsive" : (None, "mainly phosphate-phosphate repulsion", []),
"rna_base_pair_pairwise" : (None, "Base-base interactions (Watson-Crick and non-Watson-Crick)", []),
"rna_base_axis_pairwise" : (None, "Force base normals to be parallel", []),
"rna_base_stagger_pairwise" : (None, "Force base pairs to be in same plane.", []),
"rna_base_stack_pairwise" : (None, "Stacking interactions", []),
"rna_base_stack_axis_pairwise" : (None, "Stacking interactions should involve parallel bases.", []),
"rna_data_base" : (None, "Using chemical accessibility data for RNA.", []),
"rna_base_pair" : (None, "Base-base interactions (Watson-Crick and non-Watson-Crick)", []),
"rna_base_axis" : (None, "Force base normals to be parallel", []),
"rna_base_stagger" : (None, "Force base pairs to be in same plane.", []),
"rna_base_stack" : (None, "Stacking interactions", []),
"rna_base_stack_axis" : (None, "Stacking interactions should involve parallel bases.", []),
"rna_torsion" : (None, "RNA torsional potential.", []),
"rna_sugar_close" : (None, "constraints to keep RNA sugar closed, and with reasonably ideal geometry", []),
"fa_stack" : (None, "stacking interaction modeled as pairwise atom-atom interactions", []),
"fa_stack_aro" : (None, None, []),
"fa_intra_RNA_base_phos_atr" : (None, "RNA specific score term", []),
"fa_intra_RNA_base_phos_rep" : (None, "RNA specific score term", []),
"fa_intra_RNA_base_phos_sol" : (None, "RNA specific score term", []),
"lk_polar_intra_RNA" : (None, "RNA specific score term", []),
"lk_nonpolar_intra_RNA" : (None, "RNA specific score term", []),
"hbond_intra" : (None, "Currently effects only RNA", []),
"geom_sol_intra_RNA" : (None, "RNA specific score term", []),
"CI_geom_sol" : (None, "Context independent version. Currently tested only for RNA case.", []),
"CI_geom_sol_intra_RNA" : (None, "RNA specific score term", []),
"fa_cust_pair_dist" : (None, "custom short range 2b", []),
"custom_atom_pair" : (None, None, []),
"orbitals_hpol" : (None, None, []),
"orbitals_haro" : (None, None, []),
"orbitals_orbitals" : (None, None, []),
"orbitals_hpol_bb" : (None, None, []),
"PyRosettaTwoBodyContextIndepenedentEnergy_first" : (None, None, []),
"PyRosettaTwoBodyContextIndepenedentEnergy_last" : (None, None, []),
"python" : (None, "<-- Deprecated use PyRosettaEnergie* instead", []),
"n_ci_2b_score_types" : (None, "/ keep this guy at the end of the ci2b scores", []),
"fa_pair" : ("Statistics-based pair term, favors salt bridges (replaced by fa_elec in Talaris2013)", "/ == fa_pair_pol_pol", ['standard_weights']),
"fa_pair_aro_aro" : (None, None, []),
"fa_pair_aro_pol" : (None, None, []),
"fa_pair_pol_pol" : (None, None, []),
"fa_plane" : ("pi-pi interaction between aromatic groups, by default = 0", None, ['standard_weights']),
"hbond_sr_bb" : ("Backbone-backbone hbonds close in primary sequence", None, ['standard_weights']),
"hbond_lr_bb" : ("Backbone-backbone hbonds distant in primary sequence", None, ['standard_weights']),
"hbond_bb_sc" : ("Sidechain-backbone hydrogen bond energy", None, ['standard_weights']),
"hbond_sr_bb_sc" : (None, None, []),
"hbond_lr_bb_sc" : (None, None, []),
"hbond_sc" : ("Sidechain-sidechain hydrogen bond energy", None, ['standard_weights']),
"PyRosettaTwoBodyContextDependentEnergy_first" : (None, None, []),
"PyRosettaTwoBodyContextDependentEnergy_last" : (None, None, []),
"interface_dd_pair" : (None, None, []),
"geom_sol" : (None, "Geometric Solvation energy for polar atoms", []),
"occ_sol_fitted" : (None, None, []),
"occ_sol_fitted_onebody" : (None, None, []),
"occ_sol_exact" : (None, None, []),
"pair" : (None, "centroid", []),
"cen_pair_smooth" : (None, "fpd smooth centroid pair", []),
"Mpair" : (None, None, []),
"suck" : (None, None, []),
"rna_rg" : (None, "Radius of gyration for RNA", []),
"interchain_pair" : (None, None, []),
"interchain_vdw" : (None, None, []),
"n_shortranged_2b_score_types" : (None, "keep this guy at the end of the sr ci/cd 2b scores", []),
"gb_elec" : (None, None, []),
"dslf_ss_dst" : ("Distance score in current disulfide (replaced by dslf_fa13 in Talaris2013)", None, ['standard_weights']),
"dslf_cs_ang" : ("CSangles score in current disulfide (replaced by dslf_fa13 in Talaris2013)", None, ['standard_weights']),
"dslf_ss_dih" : ("Dihedral score in current disulfide (replaced by dslf_fa13 in Talaris2013)", None, ['standard_weights']),
"dslf_ca_dih" : ("Ca dihedral score in current disulfide (replaced by dslf_fa13 in Talaris2013)", None, ['standard_weights']),
"dslf_cbs_ds" : (None, None, []),
"dslfc_cen_dst" : (None, None, []),
"dslfc_cb_dst" : (None, None, []),
"dslfc_ang" : (None, None, []),
"dslfc_cb_dih" : (None, None, []),
"dslfc_bb_dih" : (None, None, []),
"dslfc_rot" : (None, None, []),
"dslfc_trans" : (None, None, []),
"dslfc_RT" : (None, None, []),
"atom_pair_constraint" : (None, "Harmonic constraints between atoms involved in Watson-Crick base pairs specified by the user in the params file", []),
"constant_constraint" : (None, None, []),
"coordinate_constraint" : (None, None, []),
"angle_constraint" : (None, None, []),
"dihedral_constraint" : (None, None, []),
"big_bin_constraint" : (None, None, []),
"dunbrack_constraint" : (None, None, []),
"site_constraint" : (None, None, []),
"rna_bond_geometry" : (None, "deviations from ideal geometry", []),
"rama" : ("Ramachandran preferences", None, ['score12_wts_patch']),
"omega" : ("Omega dihedral in the backbone", None, ['score12_wts_patch']),
"fa_dun" : ("Internal energy of sidechain rotamers as derived from Dunbrack's statistics", None, ['standard_weights']),
"p_aa_pp" : ("Probability of amino acid at phi/psi", None, ['standard_weights']),
"yhh_planarity" : (None, None, []),
"h2o_intra" : (None, None, []),
"ref" : ("Reference energy for each amino acid", None, ['standard_weights']),
"seqdep_ref" : (None, None, []),
"envsmooth" : (None, None, []),
"e_pH" : (None, None, []),
"rna_bulge" : (None, None, []),
"special_rot" : (None, None, []),
"PB_elec" : (None, None, []),
"cen_env_smooth" : (None, "fpd smooth centroid env", []),
"cbeta_smooth" : (None, "fpd smooth cbeta", []),
"env" : (None, None, []),
"cbeta" : (None, None, []),
"DFIRE" : (None, None, []),
"Menv" : (None, None, []),
"Mcbeta" : (None, None, []),
"Menv_non_helix" : (None, None, []),
"Menv_termini" : (None, None, []),
"Menv_tm_proj" : (None, None, []),
"Mlipo" : (None, None, []),
"rg" : (None, "radius of gyration", []),
"co" : (None, "contact order", []),
"hs_pair" : (None, None, []),
"ss_pair" : (None, None, []),
"rsigma" : (None, None, []),
"sheet" : (None, None, []),
"burial" : (None, "informatic burial prediction", []),
"abego" : (None, "informatic torsion-bin prediction", []),
"natbias_ss" : (None, None, []),
"natbias_hs" : (None, None, []),
"natbias_hh" : (None, None, []),
"natbias_stwist" : (None, None, []),
"aa_cmp" : (None, None, []),
"dock_ens_conf" : (None, "conformer reference energies for docking", []),
"rdc" : (None, "NMR residual dipolar coupling energy", []),
"rdc_segments" : (None, "fit alignment on multiple segments independently", []),
"rdc_rohl" : (None, None, []),
"holes" : (None, None, []),
"holes_decoy" : (None, None, []),
"holes_resl" : (None, None, []),
"holes_min" : (None, None, []),
"holes_min_mean" : (None, None, []),
"dab_sasa" : (None, "classic 1.4A probe solvant accessible surface area", []),
"dab_sev" : (None, "solvent excluded volume -- volume of atoms inflated by 1.4A", []),
"sa" : (None, "nonpolar contribution in GBSA", []),
"interchain_env" : (None, None, []),
"interchain_contact" : (None, None, []),
"chainbreak" : (None, None, []),
"linear_chainbreak" : (None, None, []),
"overlap_chainbreak" : (None, None, []),
"distance_chainbreak" : (None, None, []),
"dof_constraint" : (None, None, []),
"cart_bonded" : (None, "cartesian bonded potential", []),
"neigh_vect" : (None, None, []),
"neigh_count" : (None, None, []),
"neigh_vect_raw" : (None, None, []),
"symE_bonus" : (None, None, []),
"sym_lig" : (None, None, []),
"pack_stat" : (None, None, []),
"rms" : (None, "All-heavy-atom RMSD to the native structure", []),
"rms_stem" : (None, "All-heavy-atom RMSD to helical segments in the native structure, defined by 'STEM' entries in the parameters file", []),
"res_type_constraint" : (None, None, []),
"res_type_linking_constraint" : (None, None, []),
"pocket_constraint" : (None, None, []),
"backbone_stub_constraint" : (None, None, []),
"surface" : (None, None, []),
"p_aa" : (None, None, []),
"unfolded" : (None, None, []),
"elec_dens_fast" : (None, None, []),
"elec_dens_window" : (None, None, []),
"elec_dens_whole_structure_ca" : (None, None, []),
"elec_dens_whole_structure_allatom" : (None, None, []),
"elec_dens_atomwise" : (None, None, []),
"patterson_cc" : (None, None, []),
"hpatch" : (None, None, []),
"Menv_smooth" : (None, None, []),
"PyRosettaEnergy_first" : (None, None, []),
"PyRosettaEnergy_last" : (None, None, []),
"total_score" : (None, None, []),
"n_score_types" : (None, None, []),
"end_of_score_type_enumeration" : (None, None, []),
"N_WC" : (None, "Number of Watson-Crick base pairs", []),
"N_NWC" : (None, "Number of non-Watson-Crick base pairs", []),
"N_BS" : (None, "Number of base stacks", []),
"f_natWC" : (None, "fraction of native Watson-Crick base pairs recovered", []),
"f_natNWC" : (None, "fraction of native non-Watson-Crick base pairs recovered", []),
"f_natBP" : (None, "fraction of base pairs recovered", []),
}
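# Illustrative lookup: rosetta_weights['fa_atr'][0] holds the Rosetta 3.3 manual
# description, [1] the ScoreType.hh / documentation comment (or None), and
# [2] the list of .wts files known to include the term.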
|
py | b402cde8085b8b05cc14ed6696160ec8ed8a92a7 | #!/usr/bin/env python3
#
# SPDX-License-Identifier: BSD-3-Clause
# Copyright 2021, Intel Corporation
#
#
# Benchmark.py -- a single benchmark object (EXPERIMENTAL)
#
import json
import os
import subprocess
from .common import *
class Benchmark:
"""A single benchmark object"""
def __init__(self, oneseries):
oneseries['done'] = oneseries.get('done', False)
self.oneseries = oneseries
if 'requirements' in oneseries.keys():
self.req = oneseries['requirements']
# remove the remaining duplicate
oneseries.pop('requirements')
else:
self.req = {}
def __repr__(self):
"""A string representation of the object"""
return json.JSONEncoder(indent=4).encode(self.oneseries)
def __eq__(self, other):
"""A comparison function"""
# a complete list of all keys from both objects (without duplicates)
keys = list(set([*self.oneseries.keys(), *other.oneseries.keys()]))
for k in keys:
# ignore series-specific or instance-specific keys
if k in ['id', 'label']:
continue
sv = self.oneseries.get(k, None)
ov = other.oneseries.get(k, None)
if sv != ov:
return False
return True
def set_id(self, id):
"""Set an instance id"""
self.oneseries['id'] = id
def get_id(self):
"""Get the instance id"""
return self.oneseries.get('id', None)
@classmethod
def uniq(cls, figures):
"""Generate a set of unique benchmarks"""
output = [cls(oneseries)
for f in figures
for oneseries in f.series
]
return uniq(output)
def get_requirements(self):
return self.req
def cache(self):
"""Cache the current state of execution"""
return self.oneseries
def is_done(self):
return self.oneseries['done']
    @staticmethod
    def _get_env_value(value):
if type(value) is bool:
return '1' if value else '0'
else:
return str(value)
def _get_env(self, config, result_dir, include_environ=True):
"""Construct the benchmarking environment"""
# make sure all values are strings
env = {k: Benchmark._get_env_value(v) for k, v in config.items()}
output_file = os.path.join(result_dir,
'benchmark_' + str(self.get_id()) + '.json')
# include:
# - the parent process environment (optional)
# - the user-provided configuration
# - the output file path
environ = os.environ if include_environ else {}
output = {**environ, **env, **{'OUTPUT_FILE': output_file}}
output.pop('_comment', None)
output.pop('report', None)
return output
def _benchmark_args(self, env):
if 'tool' not in self.oneseries:
raise ValueError("'tool' is missing in the figure")
if 'mode' not in self.oneseries:
raise ValueError("'mode' is missing in the figure")
if 'server_ip' not in env:
raise ValueError("'server_ip' is missing in the configuration")
args = ['./' + self.oneseries['tool'], env['server_ip']]
if 'tool_mode' in self.oneseries.keys():
args.append(self.oneseries['tool_mode'])
if 'rw' in self.oneseries.keys():
args.append(self.oneseries['rw'])
args.append(self.oneseries['mode'])
return args
def run(self, config, result_dir):
"""Run the benchmark process and mark it as done.
Args:
config (dict): a user-provided system config
result_dir (str): the directory for the benchmark's results
Raises:
CalledProcessError: when the benchmark's process returns non-zero
code
Returns:
None
"""
args = self._benchmark_args(config)
env = self._get_env(config, result_dir)
if 'filetype' not in self.oneseries:
raise ValueError("'filetype' is missing in the figure")
if 'id' not in self.oneseries:
raise ValueError("'id' is missing in the figure")
if self.oneseries['filetype'] == 'malloc':
env['REMOTE_JOB_MEM_PATH'] = 'malloc'
elif self.oneseries['filetype'] == 'pmem':
if 'REMOTE_JOB_MEM_PATH' not in env or env['REMOTE_JOB_MEM_PATH'] == 'malloc':
raise ValueError("'REMOTE_JOB_MEM_PATH' is not set with a path")
if 'tool_mode' in self.oneseries.keys() and self.oneseries['tool_mode'] == 'gpspm':
if 'busy_wait_polling' not in self.oneseries:
raise ValueError("'busy_wait_polling' is missing in the figure")
if 'busy_wait_polling' in self.oneseries:
if self.oneseries['busy_wait_polling']:
env['BUSY_WAIT_POLLING'] = '1'
else:
env['BUSY_WAIT_POLLING'] = '0'
process = subprocess.run(args, env=env)
process.check_returncode()
self.oneseries['done'] = True
def dump(self, config, result_dir):
args = self._benchmark_args(config)
env = self._get_env(config, result_dir, include_environ=False)
id = self.get_id()
done = 'done' if self.is_done() else 'not done'
print("Benchmark[{}]: {}".format(id, done))
print('- Environment:')
print("\n".join(["{}=\"{}\"".format(k, v) for k, v in env.items()]))
print('- Command: ' + ' '.join(args))
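# A minimal usage sketch (names and values are hypothetical): given a config dict
# containing at least 'server_ip' and a result directory,
#     b = Benchmark({'tool': 'example_bench.sh', 'mode': 'lat', 'filetype': 'malloc', 'id': 0})
#     b.run(config, 'results/')
# builds the environment via _get_env() (including OUTPUT_FILE) and invokes
# './example_bench.sh <server_ip> lat', marking the benchmark as done on success.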
|
py | b402cf7471de44cbc1b9f2649e85cbdf48fc573b | # Corner plot script
# Meant to plot LISA WD binaries, magnitudes included in these plots
# This script should run for all 4 cluster types and observing scenarios.
# Upload corner plots to box (both contours and no contour versions)
# Pulls from LISA files (may not be enough lines in obs and rec files, if so pass)
# Corner plotting documentation here: https://corner.readthedocs.io/en/latest/
import corner
import numpy as np
import pandas as pd
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.cm as cm
import matplotlib.colors as colors
import os
# Putting these functions in their own script for runtime purposes
def corner_plot(DataFrame, popType, name, contours = False):
    '''
    Function to make a corner plot of our different parameters -- using the corner package
    DataFrame - dataframe with values (all binary params) (each column is one parameter)
    popType - binary subpopulation type (All, Obs, Rec)
    name - name of scenario (Open/Globular/Long; Baseline/Colossus; Crowding/NoCrowding)
    contours - set to True to overlay contours on the plots, default: False
    '''
# Plot with contours
if contours == True:
print('Making corner plots with contours...')
df = DataFrame
f = corner.corner(df, labels = df.columns, label_kwargs={"fontsize":16}, bins = 20,
                          plot_contours = True, title_kwargs={"fontsize":28},
range = [(0.,1.), (0.,1.),(0.,1.),(0.,1.),(0.,1.),(0.,0.99),(0.,1.),(0.,1.),])
f.suptitle(popType + '-' + name + ' White Dwarfs', fontsize=24)
f.show()
f.savefig(f'./plots/lisaPlots/contours/{popType}-{name}-cornerPlot_contour_WDBinary.pdf')
        plt.close(f)
print('Corner contour plots made!')
# No contours
elif contours == False:
print('Making corner plots...')
df = DataFrame
f = corner.corner(df, labels = df.columns, label_kwargs={"fontsize":16}, bins = 20,
plot_contours = False, title_kwargs={"fontsize":28},
range = [(0.,1.), (0.,1.),(0.,1.),(0.,1.),(0.,1.),(0.,0.99),(0.,1.),(0.,1.),])
f.suptitle(popType + '-' + name + ' White Dwarfs', fontsize = 24)
f.show()
f.savefig(f'./plots/lisaPlots/{popType}-{name}-cornerPlot_WDBinary.pdf')
f.close()
print('Corner plots made!')
print('On to the next!')
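# Example call (illustrative names): corner_plot(df, 'Rec', 'OC_base', contours = False)
# writes ./plots/lisaPlots/Rec-OC_base-cornerPlot_WDBinary.pdf, while contours = True
# writes the contour version into ./plots/lisaPlots/contours/ instead.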
# ########################################################################################################
# Looping through correct files in our trees and making plots
for root, dirs, files in os.walk('./clusters/', topdown = True):
for name in files:
print('ROOT, NAME: ', root, name)
if 'WD-histDataLISA.csv' in name:
dat = pd.read_csv(os.path.join(root,name), header = 0)
# dat = dat.drop('Unnamed: 0', axis =1)
# dat['p'] = np.log10(dat['p'])
if len(dat) > 1:
dat = dat.loc[np.where(dat['appMagMean_r'] != -999.0)] # Only want wds with good magnitudes
dat.columns = ['p(days)', 'm1 $(M_{\odot})$', 'm2 $(M_{\odot})$', 'r1 $(R_{\odot})$', 'r2 $(R_{\odot})$', 'e', 'i (deg)','appMagMean_r']
print('Data read in...')
print(dat)
# Making corner plots for every scenario -- add to other makeHists scripts (M67, OCs and GCs)
if len(dat) > len(dat.columns):
# Using only relevant slices of names for OCs and GCs
if ('GlobularClusters' in root) or ('OpenClusters' in root):
corner_plot(dat, name[0:3], name[4:7], False) # No contours -- Change name slice to [4:7] for OCs and GCs (less characters in names)
corner_plot(dat, name[0:3], name[4:7], True) # Making plots -- WITH contours
# Not including long clusters for WD binaries -- thesis
# If long clusters (name longer by 2 chars)
elif ('m10' in root) or ('m67' in root):
corner_plot(dat, name[0:3], name[4:9], False) # No contours -- Change name slice to [4:7] for OCs and GCs (less characters in names)
corner_plot(dat, name[0:3], name[4:9], True) # Making plots -- WITH contours
print('All done!')
|
py | b402d19ba06675c2b1bc62439e9abd623b288245 | import os, sys, time
def IsValidBuildDir(entry):
    return (not entry.name.startswith('.') and entry.is_dir()
            and entry.name != "x64" and entry.name != "CMakeFiles")
def main():
currDir = os.path.realpath(sys.argv[1])
    subDirEntries = []
    # keep rescanning the tests directory until at least one build directory appears
    while len(subDirEntries) == 0:
        with os.scandir(os.path.join(currDir, "tests")) as it:
            for entry in it:
                if IsValidBuildDir(entry):
                    subDirEntries.append(entry)
        if len(subDirEntries) == 0:
            print("[-] Tests are yet to be built. Waiting...")
            time.sleep(5)
execPaths = []
print("-==RUNNING TEST SCRIPTS!=--")
for dir in subDirEntries:
execDir = os.path.join(dir, "Debug")
with os.scandir(execDir) as it:
for entry in it:
if entry.name.endswith(".exe"):
execPaths.append(os.path.join(execDir,entry))
for exec in execPaths:
print("[-] Running test: ", os.path.split(exec)[1])
os.system(exec)
if __name__ == '__main__':
main()
|
py | b402d313e3fe34ebfc5ceb11ac0bc7cc9e96adb0 | import os
io_command = 'iozone -r 1 -r 2 -r 3 -r 4 -r 5 -r 6 -r 7 -r 8 -r 9 -r 10 -r 16 -r 32 -r 64 -r 128 -r 256 -r 512 -i 0 -i 1 -i 2 -s 1G -I'
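# For reference (hedged reading of the flags): iozone's -r sets the record size in KB, -s the
# file size, -i selects the tests (0 = write/rewrite, 1 = read/re-read, 2 = random read/write),
# and -I requests direct I/O so the page cache is bypassed. Only the qemu-over-ssh run below is
# active; the host and docker runs are left commented out.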
for i in range(1,3):
file_path = f"host_io_{i}.txt"
# os.system(f"{io_command} > {file_path}")
# file_path = f"docker_io_{i}.txt"
# qemu
file_path = f"qemu_io_{i}.txt"
ssh_command = "ssh [email protected]"
os.system(f"{ssh_command} \"{io_command}\" > {file_path}")
|
py | b402d3768bda427b4ba1bc9ad6270f283930bea5 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# yfinance - market data downloader
# https://github.com/ranaroussi/yfinance
#
# Copyright 2017-2019 Ran Aroussi
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import print_function
import time as _time
import datetime as _datetime
import requests as _requests
import pandas as _pd
import numpy as _np
import re as _re
try:
from urllib.parse import quote as urlencode
except ImportError:
from urllib import quote as urlencode
from . import utils
import json as _json
# import re as _re
# import sys as _sys
from . import shared
_BASE_URL_ = 'https://query2.finance.yahoo.com'
_SCRAPE_URL_ = 'https://finance.yahoo.com/quote'
def _mktime(timetuple):
"""
This is an implementation of time.mktime that works for Windows 10 even when dates before 1970 are used.
"""
return _datetime.datetime(*timetuple[:6], tzinfo=_datetime.timezone.utc).timestamp()
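# Illustrative check (hedged, not part of the published API): the timetuple is treated as UTC,
# so pre-epoch dates give negative timestamps on any platform, e.g.
#   _mktime(_time.strptime('1960-01-01', '%Y-%m-%d'))   # expected: -315619200.0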
class TickerBase():
def __init__(self, ticker, session=None):
self.ticker = ticker.upper()
self.session = session
self._history = None
self._base_url = _BASE_URL_
self._scrape_url = _SCRAPE_URL_
self._fundamentals = False
self._info = None
self._analysis = None
self._sustainability = None
self._recommendations = None
self._major_holders = None
self._institutional_holders = None
self._mutualfund_holders = None
self._isin = None
self._news = []
self._shares = None
self._calendar = None
self._expirations = {}
self._earnings = {
"yearly": utils.empty_df(),
"quarterly": utils.empty_df()}
self._financials = {
"yearly": utils.empty_df(),
"quarterly": utils.empty_df()}
self._balancesheet = {
"yearly": utils.empty_df(),
"quarterly": utils.empty_df()}
self._cashflow = {
"yearly": utils.empty_df(),
"quarterly": utils.empty_df()}
# accept isin as ticker
if utils.is_isin(self.ticker):
self.ticker = utils.get_ticker_by_isin(self.ticker, None, session)
def stats(self, proxy=None):
# setup proxy in requests format
if proxy is not None:
if isinstance(proxy, dict) and "https" in proxy:
proxy = proxy["https"]
proxy = {"https": proxy}
if self._fundamentals:
return
ticker_url = "{}/{}".format(self._scrape_url, self.ticker)
# get info and sustainability
data = utils.get_json(ticker_url, proxy, self.session)
return data
def history(self, period="1mo", interval="1d",
start=None, end=None, prepost=False, actions=True,
auto_adjust=True, back_adjust=False,
proxy=None, rounding=False, tz=None, timeout=None, **kwargs):
"""
:Parameters:
period : str
Valid periods: 1d,5d,1mo,3mo,6mo,1y,2y,5y,10y,ytd,max
Either Use period parameter or use start and end
interval : str
Valid intervals: 1m,2m,5m,15m,30m,60m,90m,1h,1d,5d,1wk,1mo,3mo
                Intraday data cannot extend past the last 60 days
start: str
Download start date string (YYYY-MM-DD) or _datetime.
Default is 1900-01-01
end: str
Download end date string (YYYY-MM-DD) or _datetime.
Default is now
prepost : bool
Include Pre and Post market data in results?
Default is False
auto_adjust: bool
Adjust all OHLC automatically? Default is True
back_adjust: bool
Back-adjusted data to mimic true historical prices
proxy: str
Optional. Proxy server URL scheme. Default is None
rounding: bool
Round values to 2 decimal places?
Optional. Default is False = precision suggested by Yahoo!
tz: str
Optional timezone locale for dates.
(default data is returned as non-localized dates)
timeout: None or float
If not None stops waiting for a response after given number of
seconds. (Can also be a fraction of a second e.g. 0.01)
Default is None.
**kwargs: dict
debug: bool
Optional. If passed as False, will suppress
error message printing to console.
"""
if start or period is None or period.lower() == "max":
if start is None:
start = -631159200
elif isinstance(start, _datetime.datetime):
start = int(_mktime(start.timetuple()))
else:
start = int(_mktime(
_time.strptime(str(start), '%Y-%m-%d')))
if end is None:
end = int(_time.time())
elif isinstance(end, _datetime.datetime):
end = int(_mktime(end.timetuple()))
else:
end = int(_mktime(_time.strptime(str(end), '%Y-%m-%d')))
params = {"period1": start, "period2": end}
else:
period = period.lower()
params = {"range": period}
params["interval"] = interval.lower()
params["includePrePost"] = prepost
params["events"] = "div,splits"
        # 1) fix weird bug with Yahoo! - returning 60m for 30m bars
if params["interval"] == "30m":
params["interval"] = "15m"
# setup proxy in requests format
if proxy is not None:
if isinstance(proxy, dict) and "https" in proxy:
proxy = proxy["https"]
proxy = {"https": proxy}
# Getting data from json
url = "{}/v8/finance/chart/{}".format(self._base_url, self.ticker)
session = self.session or _requests
data = None
try:
data = session.get(
url=url,
params=params,
proxies=proxy,
headers=utils.user_agent_headers,
timeout=timeout
)
if "Will be right back" in data.text or data is None:
raise RuntimeError("*** YAHOO! FINANCE IS CURRENTLY DOWN! ***\n"
"Our engineers are working quickly to resolve "
"the issue. Thank you for your patience.")
data = data.json()
except Exception:
pass
# Work with errors
debug_mode = True
if "debug" in kwargs and isinstance(kwargs["debug"], bool):
debug_mode = kwargs["debug"]
err_msg = "No data found for this date range, symbol may be delisted"
if data is None or not type(data) is dict or 'status_code' in data.keys():
shared._DFS[self.ticker] = utils.empty_df()
shared._ERRORS[self.ticker] = err_msg
if "many" not in kwargs and debug_mode:
print('- %s: %s' % (self.ticker, err_msg))
return utils.empty_df()
if "chart" in data and data["chart"]["error"]:
err_msg = data["chart"]["error"]["description"]
shared._DFS[self.ticker] = utils.empty_df()
shared._ERRORS[self.ticker] = err_msg
if "many" not in kwargs and debug_mode:
print('- %s: %s' % (self.ticker, err_msg))
return shared._DFS[self.ticker]
elif "chart" not in data or data["chart"]["result"] is None or \
not data["chart"]["result"]:
shared._DFS[self.ticker] = utils.empty_df()
shared._ERRORS[self.ticker] = err_msg
if "many" not in kwargs and debug_mode:
print('- %s: %s' % (self.ticker, err_msg))
return shared._DFS[self.ticker]
# parse quotes
try:
quotes = utils.parse_quotes(data["chart"]["result"][0], tz)
except Exception:
shared._DFS[self.ticker] = utils.empty_df()
shared._ERRORS[self.ticker] = err_msg
if "many" not in kwargs and debug_mode:
print('- %s: %s' % (self.ticker, err_msg))
return shared._DFS[self.ticker]
        # 2) fix weird bug with Yahoo! - returning 60m for 30m bars
if interval.lower() == "30m":
quotes2 = quotes.resample('30T')
quotes = _pd.DataFrame(index=quotes2.last().index, data={
'Open': quotes2['Open'].first(),
'High': quotes2['High'].max(),
'Low': quotes2['Low'].min(),
'Close': quotes2['Close'].last(),
'Adj Close': quotes2['Adj Close'].last(),
'Volume': quotes2['Volume'].sum()
})
try:
quotes['Dividends'] = quotes2['Dividends'].max()
except Exception:
pass
try:
                quotes['Stock Splits'] = quotes2['Stock Splits'].max()
except Exception:
pass
try:
if auto_adjust:
quotes = utils.auto_adjust(quotes)
elif back_adjust:
quotes = utils.back_adjust(quotes)
except Exception as e:
if auto_adjust:
err_msg = "auto_adjust failed with %s" % e
else:
err_msg = "back_adjust failed with %s" % e
shared._DFS[self.ticker] = utils.empty_df()
shared._ERRORS[self.ticker] = err_msg
if "many" not in kwargs and debug_mode:
print('- %s: %s' % (self.ticker, err_msg))
if rounding:
quotes = _np.round(quotes, data[
"chart"]["result"][0]["meta"]["priceHint"])
quotes['Volume'] = quotes['Volume'].fillna(0).astype(_np.int64)
quotes.dropna(inplace=True)
# actions
dividends, splits = utils.parse_actions(data["chart"]["result"][0], tz)
# combine
df = _pd.concat([quotes, dividends, splits], axis=1, sort=True)
df["Dividends"].fillna(0, inplace=True)
df["Stock Splits"].fillna(0, inplace=True)
# index eod/intraday
df.index = df.index.tz_localize("UTC").tz_convert(
data["chart"]["result"][0]["meta"]["exchangeTimezoneName"])
if params["interval"][-1] == "m":
df.index.name = "Datetime"
elif params["interval"] == "1h":
pass
else:
df.index = _pd.to_datetime(df.index.date)
if tz is not None:
df.index = df.index.tz_localize(tz)
df.index.name = "Date"
# duplicates and missing rows cleanup
df.dropna(how='all', inplace=True)
df = df[~df.index.duplicated(keep='first')]
self._history = df.copy()
if not actions:
df.drop(columns=["Dividends", "Stock Splits"], inplace=True)
return df
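    # Usage sketch (hedged; the public entry point lives outside this file): this method is
    # normally reached through the Ticker subclass, e.g.
    #   import yfinance as yf
    #   df = yf.Ticker("MSFT").history(period="1mo", interval="1d")
    # which should return a DataFrame of OHLCV (plus, by default, Dividends/Stock Splits)
    # rows indexed by date.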
# ------------------------
def _get_fundamentals(self, kind=None, proxy=None):
def cleanup(data):
df = _pd.DataFrame(data).drop(columns=['maxAge'])
for col in df.columns:
df[col] = _np.where(
df[col].astype(str) == '-', _np.nan, df[col])
df.set_index('endDate', inplace=True)
try:
df.index = _pd.to_datetime(df.index, unit='s')
except ValueError:
df.index = _pd.to_datetime(df.index)
df = df.T
df.columns.name = ''
df.index.name = 'Breakdown'
df.index = utils.camel2title(df.index)
return df
# setup proxy in requests format
if proxy is not None:
if isinstance(proxy, dict) and "https" in proxy:
proxy = proxy["https"]
proxy = {"https": proxy}
if self._fundamentals:
return
ticker_url = "{}/{}".format(self._scrape_url, self.ticker)
# get info and sustainability
data = utils.get_json(ticker_url, proxy, self.session)
# holders
try:
resp = utils.get_html(ticker_url + '/holders', proxy, self.session)
holders = _pd.read_html(resp)
except Exception:
holders = []
if len(holders) >= 3:
self._major_holders = holders[0]
self._institutional_holders = holders[1]
self._mutualfund_holders = holders[2]
elif len(holders) >= 2:
self._major_holders = holders[0]
self._institutional_holders = holders[1]
elif len(holders) >= 1:
self._major_holders = holders[0]
# self._major_holders = holders[0]
# self._institutional_holders = holders[1]
if self._institutional_holders is not None:
if 'Date Reported' in self._institutional_holders:
self._institutional_holders['Date Reported'] = _pd.to_datetime(
self._institutional_holders['Date Reported'])
if '% Out' in self._institutional_holders:
self._institutional_holders['% Out'] = self._institutional_holders[
'% Out'].str.replace('%', '').astype(float) / 100
if self._mutualfund_holders is not None:
if 'Date Reported' in self._mutualfund_holders:
self._mutualfund_holders['Date Reported'] = _pd.to_datetime(
self._mutualfund_holders['Date Reported'])
if '% Out' in self._mutualfund_holders:
self._mutualfund_holders['% Out'] = self._mutualfund_holders[
'% Out'].str.replace('%', '').astype(float) / 100
# sustainability
d = {}
try:
if isinstance(data.get('esgScores'), dict):
for item in data['esgScores']:
if not isinstance(data['esgScores'][item], (dict, list)):
d[item] = data['esgScores'][item]
s = _pd.DataFrame(index=[0], data=d)[-1:].T
s.columns = ['Value']
s.index.name = '%.f-%.f' % (
s[s.index == 'ratingYear']['Value'].values[0],
s[s.index == 'ratingMonth']['Value'].values[0])
self._sustainability = s[~s.index.isin(
['maxAge', 'ratingYear', 'ratingMonth'])]
except Exception:
pass
# info (be nice to python 2)
self._info = {}
try:
items = ['summaryProfile', 'financialData', 'quoteType',
'defaultKeyStatistics', 'assetProfile', 'summaryDetail']
for item in items:
if isinstance(data.get(item), dict):
self._info.update(data[item])
except Exception:
pass
# For ETFs, provide this valuable data: the top holdings of the ETF
try:
if 'topHoldings' in data:
self._info.update(data['topHoldings'])
except Exception:
pass
try:
if not isinstance(data.get('summaryDetail'), dict):
# For some reason summaryDetail did not give any results. The price dict usually has most of the same info
self._info.update(data.get('price', {}))
except Exception:
pass
try:
# self._info['regularMarketPrice'] = self._info['regularMarketOpen']
self._info['regularMarketPrice'] = data.get('price', {}).get(
'regularMarketPrice', self._info.get('regularMarketOpen', None))
except Exception:
pass
try:
self._info['preMarketPrice'] = data.get('price', {}).get(
'preMarketPrice', self._info.get('preMarketPrice', None))
except Exception:
pass
self._info['logo_url'] = ""
try:
domain = self._info['website'].split(
'://')[1].split('/')[0].replace('www.', '')
self._info['logo_url'] = 'https://logo.clearbit.com/%s' % domain
except Exception:
pass
# events
try:
cal = _pd.DataFrame(
data['calendarEvents']['earnings'])
cal['earningsDate'] = _pd.to_datetime(
cal['earningsDate'], unit='s')
self._calendar = cal.T
self._calendar.index = utils.camel2title(self._calendar.index)
self._calendar.columns = ['Value']
except Exception:
pass
# analyst recommendations
try:
rec = _pd.DataFrame(
data['upgradeDowngradeHistory']['history'])
rec['earningsDate'] = _pd.to_datetime(
rec['epochGradeDate'], unit='s')
rec.set_index('earningsDate', inplace=True)
rec.index.name = 'Date'
rec.columns = utils.camel2title(rec.columns)
self._recommendations = rec[[
'Firm', 'To Grade', 'From Grade', 'Action']].sort_index()
except Exception:
pass
# get fundamentals
data = utils.get_json(ticker_url + '/financials', proxy, self.session)
# generic patterns
for key in (
(self._cashflow, 'cashflowStatement', 'cashflowStatements'),
(self._balancesheet, 'balanceSheet', 'balanceSheetStatements'),
(self._financials, 'incomeStatement', 'incomeStatementHistory')
):
item = key[1] + 'History'
if isinstance(data.get(item), dict):
try:
key[0]['yearly'] = cleanup(data[item][key[2]])
except Exception:
pass
item = key[1] + 'HistoryQuarterly'
if isinstance(data.get(item), dict):
try:
key[0]['quarterly'] = cleanup(data[item][key[2]])
except Exception:
pass
# earnings
if isinstance(data.get('earnings'), dict):
try:
earnings = data['earnings']['financialsChart']
earnings['financialCurrency'] = 'USD' if 'financialCurrency' not in data['earnings'] else data['earnings']['financialCurrency']
self._earnings['financialCurrency'] = earnings['financialCurrency']
df = _pd.DataFrame(earnings['yearly']).set_index('date')
df.columns = utils.camel2title(df.columns)
df.index.name = 'Year'
self._earnings['yearly'] = df
df = _pd.DataFrame(earnings['quarterly']).set_index('date')
df.columns = utils.camel2title(df.columns)
df.index.name = 'Quarter'
self._earnings['quarterly'] = df
except Exception:
pass
# shares outstanding
try:
shares = _pd.DataFrame(data['annualBasicAverageShares'])
shares['Year'] = shares['asOfDate'].agg(lambda x: int(x[:4]))
shares.set_index('Year', inplace=True)
shares.drop(columns=['dataId', 'asOfDate', 'periodType', 'currencyCode'], inplace=True)
shares.rename(columns={'reportedValue': "BasicShares"}, inplace=True)
self._shares = shares
except Exception:
pass
# Analysis
data = utils.get_json(ticker_url + '/analysis', proxy, self.session)
if isinstance(data.get('earningsTrend'), dict):
try:
analysis = _pd.DataFrame(data['earningsTrend']['trend'])
analysis['endDate'] = _pd.to_datetime(analysis['endDate'])
analysis.set_index('period', inplace=True)
analysis.index = analysis.index.str.upper()
analysis.index.name = 'Period'
analysis.columns = utils.camel2title(analysis.columns)
dict_cols = []
for idx, row in analysis.iterrows():
for colname, colval in row.items():
if isinstance(colval, dict):
dict_cols.append(colname)
for k, v in colval.items():
new_colname = colname + ' ' + utils.camel2title([k])[0]
analysis.loc[idx, new_colname] = v
self._analysis = analysis[[c for c in analysis.columns if c not in dict_cols]]
except Exception:
pass
# Complementary key-statistics (currently fetching the important trailingPegRatio which is the value shown in the website)
res = {}
try:
my_headers = {'user-agent': 'curl/7.55.1', 'accept': 'application/json', 'content-type': 'application/json', 'referer': 'https://finance.yahoo.com/', 'cache-control': 'no-cache', 'connection': 'close'}
p = _re.compile(r'root\.App\.main = (.*);')
r = _requests.session().get('https://finance.yahoo.com/quote/{}/key-statistics?p={}'.format(self.ticker, self.ticker), headers=my_headers)
q_results = {}
my_qs_keys = ['pegRatio'] # QuoteSummaryStore
my_ts_keys = ['trailingPegRatio'] # , 'quarterlyPegRatio'] # QuoteTimeSeriesStore
# Complementary key-statistics
data = _json.loads(p.findall(r.text)[0])
key_stats = data['context']['dispatcher']['stores']['QuoteTimeSeriesStore']
q_results.setdefault(self.ticker, [])
for i in my_ts_keys:
# j=0
try:
# res = {i: key_stats['timeSeries'][i][1]['reportedValue']['raw']}
# We need to loop over multiple items, if they exist: 0,1,2,..
zzz = key_stats['timeSeries'][i]
for j in range(len(zzz)):
if key_stats['timeSeries'][i][j]:
res = {i: key_stats['timeSeries'][i][j]['reportedValue']['raw']}
q_results[self.ticker].append(res)
# print(res)
# q_results[ticker].append(res)
                except Exception:
                    q_results[self.ticker].append({i: _np.nan})
            res = {'Company': self.ticker}
            q_results[self.ticker].append(res)
except Exception:
pass
if 'trailingPegRatio' in res:
self._info['trailingPegRatio'] = res['trailingPegRatio']
self._fundamentals = True
def get_recommendations(self, proxy=None, as_dict=False, *args, **kwargs):
self._get_fundamentals(proxy=proxy)
data = self._recommendations
if as_dict:
return data.to_dict()
return data
def get_calendar(self, proxy=None, as_dict=False, *args, **kwargs):
self._get_fundamentals(proxy=proxy)
data = self._calendar
if as_dict:
return data.to_dict()
return data
def get_major_holders(self, proxy=None, as_dict=False, *args, **kwargs):
self._get_fundamentals(proxy=proxy)
data = self._major_holders
if as_dict:
return data.to_dict()
return data
def get_institutional_holders(self, proxy=None, as_dict=False, *args, **kwargs):
self._get_fundamentals(proxy=proxy)
data = self._institutional_holders
if data is not None:
if as_dict:
return data.to_dict()
return data
def get_mutualfund_holders(self, proxy=None, as_dict=False, *args, **kwargs):
self._get_fundamentals(proxy=proxy)
data = self._mutualfund_holders
if data is not None:
if as_dict:
return data.to_dict()
return data
def get_info(self, proxy=None, as_dict=False, *args, **kwargs):
self._get_fundamentals(proxy=proxy)
data = self._info
if as_dict:
return data.to_dict()
return data
def get_sustainability(self, proxy=None, as_dict=False, *args, **kwargs):
self._get_fundamentals(proxy=proxy)
data = self._sustainability
if as_dict:
return data.to_dict()
return data
def get_earnings(self, proxy=None, as_dict=False, freq="yearly"):
self._get_fundamentals(proxy=proxy)
data = self._earnings[freq]
if as_dict:
dict_data = data.to_dict()
dict_data['financialCurrency'] = 'USD' if 'financialCurrency' not in self._earnings else self._earnings['financialCurrency']
return dict_data
return data
def get_analysis(self, proxy=None, as_dict=False, *args, **kwargs):
self._get_fundamentals(proxy=proxy)
data = self._analysis
if as_dict:
return data.to_dict()
return data
def get_financials(self, proxy=None, as_dict=False, freq="yearly"):
self._get_fundamentals(proxy=proxy)
data = self._financials[freq]
if as_dict:
return data.to_dict()
return data
def get_balancesheet(self, proxy=None, as_dict=False, freq="yearly"):
self._get_fundamentals(proxy=proxy)
data = self._balancesheet[freq]
if as_dict:
return data.to_dict()
return data
def get_balance_sheet(self, proxy=None, as_dict=False, freq="yearly"):
return self.get_balancesheet(proxy, as_dict, freq)
def get_cashflow(self, proxy=None, as_dict=False, freq="yearly"):
self._get_fundamentals(proxy=proxy)
data = self._cashflow[freq]
if as_dict:
return data.to_dict()
return data
def get_dividends(self, proxy=None):
if self._history is None:
self.history(period="max", proxy=proxy)
if self._history is not None and "Dividends" in self._history:
dividends = self._history["Dividends"]
return dividends[dividends != 0]
return []
def get_splits(self, proxy=None):
if self._history is None:
self.history(period="max", proxy=proxy)
if self._history is not None and "Stock Splits" in self._history:
splits = self._history["Stock Splits"]
return splits[splits != 0]
return []
def get_actions(self, proxy=None):
if self._history is None:
self.history(period="max", proxy=proxy)
if self._history is not None and "Dividends" in self._history and "Stock Splits" in self._history:
actions = self._history[["Dividends", "Stock Splits"]]
return actions[actions != 0].dropna(how='all').fillna(0)
return []
def get_shares(self, proxy=None, as_dict=False, *args, **kwargs):
self._get_fundamentals(proxy=proxy)
data = self._shares
if as_dict:
return data.to_dict()
return data
def get_isin(self, proxy=None):
# *** experimental ***
if self._isin is not None:
return self._isin
ticker = self.ticker.upper()
if "-" in ticker or "^" in ticker:
self._isin = '-'
return self._isin
# setup proxy in requests format
if proxy is not None:
if isinstance(proxy, dict) and "https" in proxy:
proxy = proxy["https"]
proxy = {"https": proxy}
q = ticker
self.get_info(proxy=proxy)
if "shortName" in self._info:
q = self._info['shortName']
url = 'https://markets.businessinsider.com/ajax/' \
'SearchController_Suggest?max_results=25&query=%s' \
% urlencode(q)
session = self.session or _requests
data = session.get(
url=url,
proxies=proxy,
headers=utils.user_agent_headers
).text
search_str = '"{}|'.format(ticker)
if search_str not in data:
if q.lower() in data.lower():
search_str = '"|'
if search_str not in data:
self._isin = '-'
return self._isin
else:
self._isin = '-'
return self._isin
self._isin = data.split(search_str)[1].split('"')[0].split('|')[0]
return self._isin
def get_news(self, proxy=None):
if self._news:
return self._news
# setup proxy in requests format
if proxy is not None:
if isinstance(proxy, dict) and "https" in proxy:
proxy = proxy["https"]
proxy = {"https": proxy}
# Getting data from json
url = "{}/v1/finance/search?q={}".format(self._base_url, self.ticker)
session = self.session or _requests
data = session.get(
url=url,
proxies=proxy,
headers=utils.user_agent_headers
)
if "Will be right back" in data.text:
raise RuntimeError("*** YAHOO! FINANCE IS CURRENTLY DOWN! ***\n"
"Our engineers are working quickly to resolve "
"the issue. Thank you for your patience.")
data = data.json()
# parse news
self._news = data.get("news", [])
return self._news
|
py | b402d39e9668704725719f393d22c3647d9ef05f | # coding: utf-8
from os import path
from setuptools import setup, find_packages
NAME = "huaweicloudsdkservicestage"
VERSION = "3.0.47"
AUTHOR = "HuaweiCloud SDK"
AUTHOR_EMAIL = "[email protected]"
URL = "https://github.com/huaweicloud/huaweicloud-sdk-python-v3"
DESCRIPTION = "ServiceStage"
this_directory = path.abspath(path.dirname(__file__))
with open(path.join(this_directory, 'README_PYPI.md'), encoding='utf-8') as f:
LONG_DESCRIPTION = f.read()
REQUIRES = ["huaweicloudsdkcore"]
OPTIONS = {
'bdist_wheel': {
'universal': True
}
}
setup(
name=NAME,
version=VERSION,
options=OPTIONS,
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
long_description_content_type='text/markdown',
author=AUTHOR,
author_email=AUTHOR_EMAIL,
license="Apache LICENSE 2.0",
url=URL,
keywords=["huaweicloud", "sdk", "ServiceStage"],
packages=find_packages(exclude=["tests*"]),
install_requires=REQUIRES,
python_requires=">=2.7",
include_package_data=True,
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Programming Language :: Python :: 3.6',
'Programming Language :: Python :: 3.7',
'Programming Language :: Python :: 3.8',
'Programming Language :: Python :: 3.9',
'Topic :: Software Development'
]
)
|
py | b402d56264ce2b0703568f5d07613facc58a4075 | # -*- coding: utf-8 -*-
#import sys, getopt
#import csv
import cgitb, cgi, json, sys, re
import json
#import subprocess
import os
import time
import datetime
from datetime import datetime
from datetime import timedelta
#from pytz import timezone
#import pytz
#import math
from operator import itemgetter, attrgetter
#import re
import psycopg2
cgitb.enable()
class reg(object):
def __init__(self, cursor, row):
for (attr, val) in zip((d[0] for d in cursor.description), row) :
setattr(self, attr, val)
def ParseInputCodes(year, params):
#year = "1980"
#params = "1-01 nhwhtXX,1-14 a60blkXX,3-10 hincXX"
result = {'bitstring': None, 'codes': []}
bitmap = [0, 0, 0, 0]
values = params.split(',')
for value in values:
groupid = value[0:1]
seqnumber = int(value[2:4])
code = value[5:]
result['codes'].append(groupid+' '+code)
idx = int(groupid) - 1
bitmap[idx] = bitmap[idx] | 2 ** (int(seqnumber) - 1)
group1 = format(bitmap[0], 'x')
group2 = format(bitmap[1], 'x')
group3 = format(bitmap[2], 'x')
group4 = format(bitmap[3], 'x')
if group1 == '0': group1 = ''
if group2 == '0': group2 = ''
if group3 == '0': group3 = ''
if group4 == '0': group4 = ''
result['bitstring'] = 'h' + group1 + 'h' + group2 + 'h' + group3 + 'h' + group4
#print result
return result
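# Worked example (derived from the bit arithmetic above; inputs are illustrative):
#   ParseInputCodes("1980", "1-01 nhwhtXX,1-14 a60blkXX,3-10 hincXX")
# sets bits 1 and 14 of group 1 (hex 2001) and bit 10 of group 3 (hex 200), returning
#   {'bitstring': 'h2001hh200h', 'codes': ['1 nhwhtXX', '1 a60blkXX', '3 hincXX']}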
def LongitudinalNeighborhoodAnalysis(year, stateid, metroid, countyid, control):
codes = [] # from control[[code, numerator, denominator, formula, shortname], ...]
dict = {} # key: trtid10, value: [[numerator, denominator], [numerator, denominator], ...]
conn = psycopg2.connect("host='localhost' dbname='LTDB' user='neighborhood' password='Tomas159'")
curs = conn.cursor()
curs.execute("SELECT * From codebook LIMIT 0")
colnames = [desc[0] for desc in curs.description]
#print colnames
# set condition string of query using metroid, countyid
condition = ''
if (countyid != "ALL"):
condition = "substring(trtid10,1,5) = '" + countyid + "'"
else:
if (metroid == "ALL"):
condition = "substring(trtid10,1,2) = '" + stateid + "'"
else:
# get geoid10_county from metro_county table by stateid || metroid
curs.execute("SELECT geoid10_county AS countyid FROM metro_county WHERE states_msa_code = '" + stateid + "' AND geoid_msa = '" + metroid + "'")
results = curs.fetchall()
for row in results:
geoid10_county = row[0]
condition += "substring(trtid10,1,5) = '" + geoid10_county + "' OR "
if (len(condition) > 4): condition = condition[0:len(condition)-4]
#print condition
for val in control['codes']:
# separate group and code (e.g. '1 nhwhtXX' 1 is group number and nhwhtXX is code )
group = int(val.split()[0]); # 1
code = val.split()[1]; # nhwhtXX
        # Check if the code is unique in the codebook table.
curs.execute("SELECT * From codebook WHERE code = '" + code + "'")
results = curs.fetchall()
if len(results) != 1:
#print "Ignore '{:s}' because codebook record count={:d}".format(val, len(results))
continue
        # Even if the same code exists in the codebook table, skip if the group is different.
row = results[0]
r = reg(curs, row)
if r.groupid != group:
#print "Ignore '{:s}' because codebook group={:d}".format(val, r.groupid)
continue
        # Utilize year to find the numerator and denominator and make the equation
numerators = {'1970': r.year1970, '1980': r.year1980, '1990': r.year1990, '2000': r.year2000, '2010': r.year2010, '2012': r.year2012}
denominators = {'1970': r.denominator1970, '1980': r.denominator1980, '1990': r.denominator1990, '2000': r.denominator2000, '2010': r.denominator2010, '2012': r.denominator2012}
numerator = numerators[year].strip() if numerators[year] else ""
denominator = denominators[year].strip() if denominators[year] else ""
formula = '('+numerator+'/'+denominator+')'
if denominator == "": formula = '('+numerator+')'
shortname = r.description_short_name
#print "{:8s}{:15s}{:30s}{:s} ".format(year, val, formula, shortname)
        # Skip if both numerator and denominator do not exist
if (numerator == "" and denominator == ""):
#print "Ignore '{:s}' because both of numerator and denominator are not found.".format(val)
#print 'All columns of codebook =', row
continue
        # Check if the selected column exists in the table where the numerator exists.
if (numerator != ""):
table1 = 'std_' + year + '_fullcount' # std_1980_fullcount
column1 = numerator # NHWHT80
if (numerator.endswith('_s') or numerator.endswith('_S')):
table1 = 'std_' + year + '_sample' # std_1980_sample
if (year == "2012"): table1 = 'std_2010_sample'
                column1 = numerator[0:len(numerator)-2] # take off suffix _s if it exists
column1 = column1.lower()
curs.execute("SELECT * FROM " + table1 + " LIMIT 0")
colnames = [desc[0] for desc in curs.description]
if column1 not in colnames:
#print "Ignore '{:s}' because numerator '{:s}' is not found in {:s}".format(val, column1, table1)
#print 'All columns of ' + table1 + ' =', colnames
continue
        # Check if the selected column exists in the table where the denominator exists.
if (denominator != ""):
table2 = 'std_' + year + '_fullcount' # std_1980_fullcount
column2 = denominator # pop80
if (denominator.endswith('_s') or denominator.endswith('_S')):
table2 = 'std_' + year + '_sample' # std_1980_sample
if (year == "2012"): table2 = 'std_2010_sample'
                column2 = denominator[0:len(denominator)-2] # take off suffix _s if it exists
column2 = column2.lower()
curs.execute("SELECT * FROM " + table2 + " LIMIT 0")
colnames = [desc[0] for desc in curs.description]
if column2 not in colnames:
#print "Ignore '{:s}' because denominator '{:s}' is not found in {:s}".format(val, column2, table2)
#print 'All columns of ' + table2 + ' =', colnames
continue
# Ready to register in the codes array
codes.append([code, numerator, denominator, formula, shortname])
#p = len(codes) - 1 # array position to be saved in the value of dictionary
# read a numerator part of the table and save in the dictionary
if (numerator != ""):
#curs.execute("SELECT trtid10, " + column1 + " FROM " + table1 + " ORDER BY trtid10")
#curs.execute("SELECT trtid10, " + column1 + " FROM " + table1 + " WHERE trtid10 BETWEEN '" + stateid + "' AND '" + stateid + "999999999" + "' ORDER BY trtid10")
curs.execute("SELECT trtid10, " + column1 + " FROM " + table1 + " WHERE " + condition + " ORDER BY trtid10")
results = curs.fetchall()
testCount = 0
for row in results:
testCount += 1
#if (testCount > 1270): continue
tractid = row[0]
value1 = row[1] if row[1] else -9999 # Assign -9999 when the columns of numerators are none.
#dict = {} # key: trtid10, value: [[numerator, denominator], [numerator, denominator], ...]
if tractid in dict:
v = dict[tractid] # [[numerator, denominator], [numerator, denominator], ...]
for i in range(len(v), len(codes)-1): v.append([-9999, -9999])
if len(v) == len(codes)-1:
v.append([value1, -9999])
else:
print("Abort '{:s}' because inter error at numerator '{:s}' in {:s}".format(val, column1, table1))
print("All columns of row =", row)
print("codes =", codes)
print("dict['" + tractid + "'] =", v)
sys.exit("internal logic error!")
dict[tractid] = v
else:
v = []
for i in range(len(v), len(codes)-1): v.append([-9999, -9999])
v.append([value1, -9999])
dict[tractid] = v
# read a denominator part of the table and save in the dictionary
if (denominator != ""):
#curs.execute("SELECT trtid10, " + column2 + " FROM " + table2 + " ORDER BY trtid10")
#curs.execute("SELECT trtid10, " + column2 + " FROM " + table2 + " WHERE trtid10 BETWEEN '" + stateid + "' AND '" + stateid + "999999999" + "' ORDER BY trtid10")
curs.execute("SELECT trtid10, " + column2 + " FROM " + table2 + " WHERE " + condition + " ORDER BY trtid10")
results = curs.fetchall()
testCount = 0
for row in results:
testCount += 1
#if (testCount > 1270): continue
tractid = row[0]
                value2 = row[1] if row[1] else -9999 # Assign -9999 when the columns of denominator are none.
#dict = {} # key: trtid10, value: [[numerator, denominator], [numerator, denominator], ...]
if tractid in dict:
v = dict[tractid] # [[numerator, denominator], [numerator, denominator], ...]
for i in range(len(v), len(codes)): v.append([-9999, -9999])
if len(v) == len(codes):
#v[len(codes)-1] = [v[len(codes)-1][0], value2]
v[len(codes)-1][1] = value2
else:
print("Abort '{:s}' because inter error at numerator '{:s}' in {:s}".format(val, column2, table2))
print("All columns of row =", row)
print("codes =", codes)
print("dict['" + tractid + "'] =", v)
sys.exit("internal logic error!")
dict[tractid] = v
else:
v = []
for i in range(len(v), len(codes)-1): v.append([-9999, -9999])
v.append([-9999, value2])
dict[tractid] = v
output = []
list = dict.items()
#list.sort(key=itemgetter(0)) # Python 2
list = sorted(list, key=lambda x: x[0]) # Python 3
#print(list)
#outputfile = year + '_' + control['bitstring'] + '.csv'
#print outputfile + ' file write started ...'
#csvfile = open(outputfile, 'wb')
#csvwriter = csv.writer(csvfile)
header1 = ['tractid', 'state', 'county', 'tract']
header2 = ['', '', '', '']
for v in codes:
#code = v[0]
code = v[0][0:len(v[0])-2] + year[2:]
numerator = v[1]
denominator = v[2]
formula = v[3]
shortname = v[4]
#header1.extend(['', '', code + ' ' + formula])
#header2.extend(['numerator', 'denominator', shortname])
header1.extend([code + ' ' + formula])
header2.extend([shortname])
#csvwriter.writerow(header1)
#csvwriter.writerow(header2)
output.append(header1)
output.append(header2)
oCount = 0
#print codes
for tuple in list:
#print tuple
tractid = tuple[0]
values = tuple[1]
#print tractid, values
state = ""
county = ""
tract = ""
# Read tract table
curs.execute("SELECT * From tract WHERE tractid = '" + tractid + "'")
results = curs.fetchall()
if len(results) != 0:
row = results[0]
r = reg(curs, row)
state = r.state
county = r.county
tract = r.tract
record = [tractid, state, county, tract]
for idx, v in enumerate(values):
numerator = v[0]
denominator = v[1]
result = numerator * 100.0 / denominator if denominator != 0 else -9999
if numerator == -9999 or denominator == -9999: result = -9999
if codes[idx][1] == "": result = -9999 # numerator in codes
if codes[idx][2] == "": result = numerator # denominator in codes
#record.extend([numerator, denominator, result])
record.extend([result])
oCount += 1
#csvwriter.writerow(record)
output.append(record)
#csvfile.close()
conn.commit()
curs.close()
conn.close()
return output
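# Shape of the returned 'output' list (illustrative values; real codes and labels come from
# the codebook and tract tables): two header rows, then one row per tract, e.g.
#   ['tractid', 'state', 'county', 'tract', 'nhwht80 (NHWHT80/POP80)', ...]
#   ['', '', '', '', 'short description', ...]
#   ['06059001101', 'CA', 'Orange County', '11.01', 85.3, ...]
# Cells are numerator * 100.0 / denominator, with -9999 marking missing values.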
def getParameter(argv):
year = '1970,1980,1990,2000,2010,2012'
possibleYear = year.split(',')
inputfile = ''
try:
opts, args = getopt.getopt(argv, "hy:i:", ["year=", "inputfile="])
except getopt.GetoptError:
print("LongitudinalNeighborhoodAnalysis.py -y <year> -i <inputfile>")
sys.exit(2)
for opt, arg in opts:
if opt == "-h":
print("LongitudinalNeighborhoodAnalysis.py -y <year> -i <inputfile>")
sys.exit()
elif opt in ("-y", "--year"):
year = arg
elif opt in ("-i", "--inputfile"):
inputfile = arg
print("year is : ", year)
print("Input file is : ", inputfile)
years = year.split(',')
for var in years:
if var not in possibleYear:
print("Impossible year found in --year parameter.")
sys.exit("year parameter error!")
return {'year': years, 'inputfile': inputfile}
if __name__ == '__main__':
# LongitudinalNeighborhoodAnalysis.py -y 1980 -i "C:\Users\Administrator\Documents\2018-01-30 LTDB setup\LongitudinalNeighborhoodAnalysis_SelectedVariableList.txt"
#started_datetime = datetime.now()
#dateYYMMDD = started_datetime.strftime('%Y%m%d')
#timeHHMMSS = started_datetime.strftime('%H%M%S')
#print 'LongitudinalNeighborhoodAnalysis start at %s %s' % (started_datetime.strftime('%Y-%m-%d'), started_datetime.strftime('%H:%M:%S'))
# Get parameter from console
#parameter = getParameter(sys.argv[1:])
#years = parameter['year']
#inputfile = parameter['inputfile']
# Get parameter from client
fields = cgi.FieldStorage()
year = "1980"
state = "06 CA"
metro = "ALL"
#metro = "31080"
county = "ALL"
codes = "1-01 nhwhtXX,1-14 a60blkXX,3-10 hincXX"
statename = ""
metroname = ""
countyame = ""
if "year" in fields: year = fields['year'].value
if "state" in fields: state = fields['state'].value
if "metro" in fields: metro = fields['metro'].value
if "county" in fields: county = fields['county'].value
if "codes" in fields: codes = fields['codes'].value
if "statename" in fields: statename = fields['statename'].value
if "metroname" in fields: metroname = fields['metroname'].value
if "countyame" in fields: countyame = fields['countyame'].value
# Read input file and create bitstring for outputfileName and codes list
#control = ReadInputFile(inputfile)
#print control['bitstring']
#print control['codes']
# Parse input codes and create bitstring for outputfileName and codes list
control = ParseInputCodes(year, codes)
# Read codebook table and select
#for year in years:
# LongitudinalNeighborhoodAnalysis(year, control)
result = LongitudinalNeighborhoodAnalysis(year, state[0:2], metro, county, control)
filename = year + '_' + state[3:5]
if (metro != "ALL"): filename += '_' + metroname.split('-')[0].replace(' ', '-')
if (county != "ALL"): filename += '_' + countyame.replace(' ', '-')
filename += '_' + control['bitstring'] + '.csv'
out_data = {'filename': filename, 'result': result}
#out_data = {'filename': filename, 'result': ''}
time.sleep(0)
print("Content-Type: text/html\n")
print(json.dumps(out_data))
#ended_datetime = datetime.now()
#elapsed = ended_datetime - started_datetime
#total_seconds = int(elapsed.total_seconds())
#hours, remainder = divmod(total_seconds,60*60)
#minutes, seconds = divmod(remainder,60)
#print 'LongitudinalNeighborhoodAnalysis ended at %s %s Elapsed %02d:%02d:%02d' % (ended_datetime.strftime('%Y-%m-%d'), ended_datetime.strftime('%H:%M:%S'), hours, minutes, seconds)
|
py | b402d572e682a410fe173cffb51af1ff5f197c34 | #!/usr/bin/env python3
# Copyright (c) 2014-2017 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the zapwallettxes functionality.
- start two bpqd nodes
- create two transactions on node 0 - one is confirmed and one is unconfirmed.
- restart node 0 and verify that both the confirmed and the unconfirmed
transactions are still available.
- restart node 0 with zapwallettxes and persistmempool, and verify that both
the confirmed and the unconfirmed transactions are still available.
- restart node 0 with just zapwallettxes and verify that the confirmed
transactions are still available, but that the unconfirmed transaction has
been zapped.
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
wait_until,
)
class ZapWalletTXesTest (BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def run_test(self):
self.log.info("Mining blocks...")
self.nodes[0].generate(1)
self.sync_all()
self.nodes[1].generate(100)
self.sync_all()
# This transaction will be confirmed
txid1 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 10)
self.nodes[0].generate(1)
self.sync_all()
# This transaction will not be confirmed
txid2 = self.nodes[0].sendtoaddress(self.nodes[1].getnewaddress(), 20)
# Confirmed and unconfirmed transactions are now in the wallet.
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
# Stop-start node0. Both confirmed and unconfirmed transactions remain in the wallet.
self.stop_node(0)
self.start_node(0)
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
# Stop node0 and restart with zapwallettxes and persistmempool. The unconfirmed
# transaction is zapped from the wallet, but is re-added when the mempool is reloaded.
self.stop_node(0)
self.start_node(0, ["-persistmempool=1", "-zapwallettxes=2"])
wait_until(lambda: self.nodes[0].getmempoolinfo()['size'] == 1, timeout=3)
self.nodes[0].syncwithvalidationinterfacequeue() # Flush mempool to wallet
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
assert_equal(self.nodes[0].gettransaction(txid2)['txid'], txid2)
# Stop node0 and restart with zapwallettxes, but not persistmempool.
# The unconfirmed transaction is zapped and is no longer in the wallet.
self.stop_node(0)
self.start_node(0, ["-zapwallettxes=2"])
        # tx1 is still available because it was confirmed
assert_equal(self.nodes[0].gettransaction(txid1)['txid'], txid1)
# This will raise an exception because the unconfirmed transaction has been zapped
assert_raises_rpc_error(-5, 'Invalid or non-wallet transaction id', self.nodes[0].gettransaction, txid2)
if __name__ == '__main__':
ZapWalletTXesTest().main()
|
py | b402d760391ac3c3e92196b09f47fbc45022134f | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import math
from dataclasses import dataclass, field
import torch
import torch.nn.functional as F
from fairseq import metrics
from fairseq.criterions import FairseqCriterion, register_criterion
from fairseq.dataclass import FairseqDataclass
@dataclass
class SentencePredictionConfig(FairseqDataclass):
classification_head_name: str = field(
default="sentence_classification_head",
metadata={"help": "name of the classification head to use"},
)
regression_target: bool = field(
default=False,
)
@register_criterion("sentence_prediction", dataclass=SentencePredictionConfig)
class SentencePredictionCriterion(FairseqCriterion):
def __init__(self, cfg: SentencePredictionConfig, task):
super().__init__(task)
self.classification_head_name = cfg.classification_head_name
self.regression_target = cfg.regression_target
def forward(self, model, sample, reduce=True):
"""Compute the loss for the given sample.
Returns a tuple with three elements:
1) the loss
2) the sample size, which is used as the denominator for the gradient
3) logging outputs to display while training
"""
assert (
hasattr(model, "classification_heads")
and self.classification_head_name in model.classification_heads
), "model must provide sentence classification head for --criterion=sentence_prediction"
logits, _ = model(
**sample["net_input"],
features_only=True,
classification_head_name=self.classification_head_name,
)
targets = model.get_targets(sample, [logits]).view(-1)
sample_size = targets.numel()
if not self.regression_target:
lprobs = F.log_softmax(logits, dim=-1, dtype=torch.float32)
task_loss = F.nll_loss(lprobs, targets, reduction="sum")
else:
logits = logits.view(-1).float()
targets = targets.float()
task_loss = F.mse_loss(logits, targets, reduction="sum")
logging_output = {}
loss = task_loss
# mha & ffn regularization update
if (
hasattr(model.args, "mha_reg_scale_factor")
and model.args.mha_reg_scale_factor != 0.0
):
mha_reg_loss = model._get_adaptive_head_loss()
loss += mha_reg_loss
logging_output.update({"mha_reg_loss": mha_reg_loss})
if (
hasattr(model.args, "ffn_reg_scale_factor")
and model.args.ffn_reg_scale_factor != 0.0
):
ffn_reg_loss = model._get_adaptive_ffn_loss()
loss += ffn_reg_loss
logging_output.update({"ffn_reg_loss": ffn_reg_loss})
logging_output.update(
{
"loss": loss.data,
"ntokens": sample["ntokens"],
"nsentences": sample_size,
"sample_size": sample_size,
}
)
if not self.regression_target:
preds = logits.argmax(dim=1)
logging_output["ncorrect"] = (preds == targets).sum()
return loss, sample_size, logging_output
@staticmethod
def reduce_metrics(logging_outputs) -> None:
"""Aggregate logging outputs from data parallel training."""
loss_sum = sum(log.get("loss", 0) for log in logging_outputs)
ntokens = sum(log.get("ntokens", 0) for log in logging_outputs)
nsentences = sum(log.get("nsentences", 0) for log in logging_outputs)
sample_size = sum(log.get("sample_size", 0) for log in logging_outputs)
mha_reg_loss_sum = sum(log.get("mha_reg_loss", 0) for log in logging_outputs)
ffn_reg_loss_sum = sum(log.get("ffn_reg_loss", 0) for log in logging_outputs)
metrics.log_scalar(
"loss", loss_sum / sample_size / math.log(2), sample_size, round=3
)
if mha_reg_loss_sum:
metrics.log_scalar(
"mha_reg_loss",
mha_reg_loss_sum / sample_size / math.log(2),
sample_size,
round=3,
)
if ffn_reg_loss_sum:
metrics.log_scalar(
"ffn_reg_loss",
ffn_reg_loss_sum / sample_size / math.log(2),
sample_size,
round=3,
)
if sample_size != ntokens:
metrics.log_scalar(
"nll_loss", loss_sum / ntokens / math.log(2), ntokens, round=3
)
if len(logging_outputs) > 0 and "ncorrect" in logging_outputs[0]:
ncorrect = sum(log.get("ncorrect", 0) for log in logging_outputs)
metrics.log_scalar(
"accuracy", 100.0 * ncorrect / nsentences, nsentences, round=1
)
@staticmethod
def logging_outputs_can_be_summed() -> bool:
"""
Whether the logging outputs returned by `forward` can be summed
across workers prior to calling `reduce_metrics`. Setting this
        to True will improve distributed training speed.
"""
return True
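    # Typical selection from the command line (hedged; the flags mirror the dataclass fields
    # above), e.g. for GLUE-style fine-tuning:
    #   fairseq-train ... --criterion sentence_prediction \
    #       --classification-head-name sentence_classification_head
    # adding --regression-target for regression tasks such as STS-B.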
|
py | b402d82f1fc2254720584ae2bc2b36ad2e197e5a | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# isort:skip_file
import unittest
import uuid
from datetime import date, datetime, time, timedelta
from decimal import Decimal
import json
import os
import re
from typing import Any, Tuple, List, Optional
from unittest.mock import Mock, patch
from tests.integration_tests.fixtures.birth_names_dashboard import (
load_birth_names_dashboard_with_slices,
load_birth_names_data,
)
import numpy as np
import pandas as pd
import pytest
from flask import Flask, g
import marshmallow
from sqlalchemy.exc import ArgumentError
import tests.integration_tests.test_app
from superset import app, db, security_manager
from superset.exceptions import CertificateException, SupersetException
from superset.models.core import Database, Log
from superset.models.dashboard import Dashboard
from superset.models.slice import Slice
from superset.utils.core import (
base_json_conv,
cast_to_num,
convert_legacy_filters_into_adhoc,
create_ssl_cert_file,
DTTM_ALIAS,
extract_dataframe_dtypes,
format_timedelta,
GenericDataType,
get_form_data_token,
get_iterable,
get_email_address_list,
get_stacktrace,
json_int_dttm_ser,
json_iso_dttm_ser,
JSONEncodedDict,
merge_extra_filters,
merge_extra_form_data,
merge_request_params,
NO_TIME_RANGE,
normalize_dttm_col,
parse_ssl_cert,
parse_js_uri_path_item,
split,
validate_json,
zlib_compress,
zlib_decompress,
)
from superset.utils.database import get_or_create_db
from superset.utils import schema
from superset.utils.hashing import md5_sha_from_str
from superset.views.utils import build_extra_filters, get_form_data
from tests.integration_tests.base_tests import SupersetTestCase
from tests.integration_tests.fixtures.world_bank_dashboard import (
load_world_bank_dashboard_with_slices,
load_world_bank_data,
)
from .fixtures.certificates import ssl_certificate
class TestUtils(SupersetTestCase):
def test_json_int_dttm_ser(self):
dttm = datetime(2020, 1, 1)
ts = 1577836800000.0
assert json_int_dttm_ser(dttm) == ts
assert json_int_dttm_ser(date(2020, 1, 1)) == ts
assert json_int_dttm_ser(datetime(1970, 1, 1)) == 0
assert json_int_dttm_ser(date(1970, 1, 1)) == 0
assert json_int_dttm_ser(dttm + timedelta(milliseconds=1)) == (ts + 1)
with self.assertRaises(TypeError):
json_int_dttm_ser("this is not a date")
def test_json_iso_dttm_ser(self):
dttm = datetime(2020, 1, 1)
dt = date(2020, 1, 1)
t = time()
assert json_iso_dttm_ser(dttm) == dttm.isoformat()
assert json_iso_dttm_ser(dt) == dt.isoformat()
assert json_iso_dttm_ser(t) == t.isoformat()
with self.assertRaises(TypeError):
json_iso_dttm_ser("this is not a date")
def test_base_json_conv(self):
assert isinstance(base_json_conv(np.bool_(1)), bool) is True
assert isinstance(base_json_conv(np.int64(1)), int) is True
assert isinstance(base_json_conv(np.array([1, 2, 3])), list) is True
assert isinstance(base_json_conv(set([1])), list) is True
assert isinstance(base_json_conv(Decimal("1.0")), float) is True
assert isinstance(base_json_conv(uuid.uuid4()), str) is True
assert isinstance(base_json_conv(time()), str) is True
assert isinstance(base_json_conv(timedelta(0)), str) is True
def test_zlib_compression(self):
json_str = '{"test": 1}'
blob = zlib_compress(json_str)
got_str = zlib_decompress(blob)
self.assertEqual(json_str, got_str)
def test_merge_extra_filters(self):
# does nothing if no extra filters
form_data = {"A": 1, "B": 2, "c": "test"}
expected = {**form_data, "adhoc_filters": [], "applied_time_extras": {}}
merge_extra_filters(form_data)
self.assertEqual(form_data, expected)
# empty extra_filters
form_data = {"A": 1, "B": 2, "c": "test", "extra_filters": []}
expected = {
"A": 1,
"B": 2,
"c": "test",
"adhoc_filters": [],
"applied_time_extras": {},
}
merge_extra_filters(form_data)
self.assertEqual(form_data, expected)
# copy over extra filters into empty filters
form_data = {
"extra_filters": [
{"col": "a", "op": "in", "val": "someval"},
{"col": "B", "op": "==", "val": ["c1", "c2"]},
]
}
expected = {
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": "someval",
"expressionType": "SIMPLE",
"filterOptionName": "90cfb3c34852eb3bc741b0cc20053b46",
"isExtra": True,
"operator": "in",
"subject": "a",
},
{
"clause": "WHERE",
"comparator": ["c1", "c2"],
"expressionType": "SIMPLE",
"filterOptionName": "6c178d069965f1c02640661280415d96",
"isExtra": True,
"operator": "==",
"subject": "B",
},
],
"applied_time_extras": {},
}
merge_extra_filters(form_data)
self.assertEqual(form_data, expected)
# adds extra filters to existing filters
form_data = {
"extra_filters": [
{"col": "a", "op": "in", "val": "someval"},
{"col": "B", "op": "==", "val": ["c1", "c2"]},
],
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": ["G1", "g2"],
"expressionType": "SIMPLE",
"operator": "!=",
"subject": "D",
}
],
}
expected = {
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": ["G1", "g2"],
"expressionType": "SIMPLE",
"operator": "!=",
"subject": "D",
},
{
"clause": "WHERE",
"comparator": "someval",
"expressionType": "SIMPLE",
"filterOptionName": "90cfb3c34852eb3bc741b0cc20053b46",
"isExtra": True,
"operator": "in",
"subject": "a",
},
{
"clause": "WHERE",
"comparator": ["c1", "c2"],
"expressionType": "SIMPLE",
"filterOptionName": "6c178d069965f1c02640661280415d96",
"isExtra": True,
"operator": "==",
"subject": "B",
},
],
"applied_time_extras": {},
}
merge_extra_filters(form_data)
self.assertEqual(form_data, expected)
# adds extra filters to existing filters and sets time options
form_data = {
"extra_filters": [
{"col": "__time_range", "op": "in", "val": "1 year ago :"},
{"col": "__time_col", "op": "in", "val": "birth_year"},
{"col": "__time_grain", "op": "in", "val": "years"},
{"col": "A", "op": "like", "val": "hello"},
{"col": "__time_origin", "op": "in", "val": "now"},
{"col": "__granularity", "op": "in", "val": "90 seconds"},
]
}
expected = {
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": "hello",
"expressionType": "SIMPLE",
"filterOptionName": "e3cbdd92a2ae23ca92c6d7fca42e36a6",
"isExtra": True,
"operator": "like",
"subject": "A",
}
],
"time_range": "1 year ago :",
"granularity_sqla": "birth_year",
"time_grain_sqla": "years",
"granularity": "90 seconds",
"druid_time_origin": "now",
"applied_time_extras": {
"__time_range": "1 year ago :",
"__time_col": "birth_year",
"__time_grain": "years",
"__time_origin": "now",
"__granularity": "90 seconds",
},
}
merge_extra_filters(form_data)
self.assertEqual(form_data, expected)
def test_merge_extra_filters_ignores_empty_filters(self):
form_data = {
"extra_filters": [
{"col": "a", "op": "in", "val": ""},
{"col": "B", "op": "==", "val": []},
]
}
expected = {"adhoc_filters": [], "applied_time_extras": {}}
merge_extra_filters(form_data)
self.assertEqual(form_data, expected)
def test_merge_extra_filters_ignores_nones(self):
form_data = {
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": "",
"expressionType": "SIMPLE",
"operator": "in",
"subject": None,
}
],
"extra_filters": [{"col": "B", "op": "==", "val": []}],
}
expected = {
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": "",
"expressionType": "SIMPLE",
"operator": "in",
"subject": None,
}
],
"applied_time_extras": {},
}
merge_extra_filters(form_data)
self.assertEqual(form_data, expected)
def test_merge_extra_filters_ignores_equal_filters(self):
form_data = {
"extra_filters": [
{"col": "a", "op": "in", "val": "someval"},
{"col": "B", "op": "==", "val": ["c1", "c2"]},
{"col": "c", "op": "in", "val": ["c1", 1, None]},
],
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": "someval",
"expressionType": "SIMPLE",
"operator": "in",
"subject": "a",
},
{
"clause": "WHERE",
"comparator": ["c1", "c2"],
"expressionType": "SIMPLE",
"operator": "==",
"subject": "B",
},
{
"clause": "WHERE",
"comparator": ["c1", 1, None],
"expressionType": "SIMPLE",
"operator": "in",
"subject": "c",
},
],
}
expected = {
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": "someval",
"expressionType": "SIMPLE",
"operator": "in",
"subject": "a",
},
{
"clause": "WHERE",
"comparator": ["c1", "c2"],
"expressionType": "SIMPLE",
"operator": "==",
"subject": "B",
},
{
"clause": "WHERE",
"comparator": ["c1", 1, None],
"expressionType": "SIMPLE",
"operator": "in",
"subject": "c",
},
],
"applied_time_extras": {},
}
merge_extra_filters(form_data)
self.assertEqual(form_data, expected)
def test_merge_extra_filters_merges_different_val_types(self):
form_data = {
"extra_filters": [
{"col": "a", "op": "in", "val": ["g1", "g2"]},
{"col": "B", "op": "==", "val": ["c1", "c2"]},
],
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": "someval",
"expressionType": "SIMPLE",
"operator": "in",
"subject": "a",
},
{
"clause": "WHERE",
"comparator": ["c1", "c2"],
"expressionType": "SIMPLE",
"operator": "==",
"subject": "B",
},
],
}
expected = {
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": "someval",
"expressionType": "SIMPLE",
"operator": "in",
"subject": "a",
},
{
"clause": "WHERE",
"comparator": ["c1", "c2"],
"expressionType": "SIMPLE",
"operator": "==",
"subject": "B",
},
{
"clause": "WHERE",
"comparator": ["g1", "g2"],
"expressionType": "SIMPLE",
"filterOptionName": "c11969c994b40a83a4ae7d48ff1ea28e",
"isExtra": True,
"operator": "in",
"subject": "a",
},
],
"applied_time_extras": {},
}
merge_extra_filters(form_data)
self.assertEqual(form_data, expected)
form_data = {
"extra_filters": [
{"col": "a", "op": "in", "val": "someval"},
{"col": "B", "op": "==", "val": ["c1", "c2"]},
],
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": ["g1", "g2"],
"expressionType": "SIMPLE",
"operator": "in",
"subject": "a",
},
{
"clause": "WHERE",
"comparator": ["c1", "c2"],
"expressionType": "SIMPLE",
"operator": "==",
"subject": "B",
},
],
}
expected = {
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": ["g1", "g2"],
"expressionType": "SIMPLE",
"operator": "in",
"subject": "a",
},
{
"clause": "WHERE",
"comparator": ["c1", "c2"],
"expressionType": "SIMPLE",
"operator": "==",
"subject": "B",
},
{
"clause": "WHERE",
"comparator": "someval",
"expressionType": "SIMPLE",
"filterOptionName": "90cfb3c34852eb3bc741b0cc20053b46",
"isExtra": True,
"operator": "in",
"subject": "a",
},
],
"applied_time_extras": {},
}
merge_extra_filters(form_data)
self.assertEqual(form_data, expected)
def test_merge_extra_filters_adds_unequal_lists(self):
form_data = {
"extra_filters": [
{"col": "a", "op": "in", "val": ["g1", "g2", "g3"]},
{"col": "B", "op": "==", "val": ["c1", "c2", "c3"]},
],
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": ["g1", "g2"],
"expressionType": "SIMPLE",
"operator": "in",
"subject": "a",
},
{
"clause": "WHERE",
"comparator": ["c1", "c2"],
"expressionType": "SIMPLE",
"operator": "==",
"subject": "B",
},
],
}
expected = {
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": ["g1", "g2"],
"expressionType": "SIMPLE",
"operator": "in",
"subject": "a",
},
{
"clause": "WHERE",
"comparator": ["c1", "c2"],
"expressionType": "SIMPLE",
"operator": "==",
"subject": "B",
},
{
"clause": "WHERE",
"comparator": ["g1", "g2", "g3"],
"expressionType": "SIMPLE",
"filterOptionName": "21cbb68af7b17e62b3b2f75e2190bfd7",
"isExtra": True,
"operator": "in",
"subject": "a",
},
{
"clause": "WHERE",
"comparator": ["c1", "c2", "c3"],
"expressionType": "SIMPLE",
"filterOptionName": "0a8dcb928f1f4bba97643c6e68d672f1",
"isExtra": True,
"operator": "==",
"subject": "B",
},
],
"applied_time_extras": {},
}
merge_extra_filters(form_data)
self.assertEqual(form_data, expected)
def test_merge_request_params_when_url_params_undefined(self):
form_data = {"since": "2000", "until": "now"}
url_params = {"form_data": form_data, "dashboard_ids": "(1,2,3,4,5)"}
merge_request_params(form_data, url_params)
self.assertIn("url_params", form_data.keys())
self.assertIn("dashboard_ids", form_data["url_params"])
self.assertNotIn("form_data", form_data.keys())
def test_merge_request_params_when_url_params_predefined(self):
form_data = {
"since": "2000",
"until": "now",
"url_params": {"abc": "123", "dashboard_ids": "(1,2,3)"},
}
url_params = {"form_data": form_data, "dashboard_ids": "(1,2,3,4,5)"}
merge_request_params(form_data, url_params)
self.assertIn("url_params", form_data.keys())
self.assertIn("abc", form_data["url_params"])
self.assertEqual(
url_params["dashboard_ids"], form_data["url_params"]["dashboard_ids"]
)
def test_format_timedelta(self):
self.assertEqual(format_timedelta(timedelta(0)), "0:00:00")
self.assertEqual(format_timedelta(timedelta(days=1)), "1 day, 0:00:00")
self.assertEqual(format_timedelta(timedelta(minutes=-6)), "-0:06:00")
self.assertEqual(
format_timedelta(timedelta(0) - timedelta(days=1, hours=5, minutes=6)),
"-1 day, 5:06:00",
)
self.assertEqual(
format_timedelta(timedelta(0) - timedelta(days=16, hours=4, minutes=3)),
"-16 days, 4:03:00",
)
def test_json_encoded_obj(self):
obj = {"a": 5, "b": ["a", "g", 5]}
val = '{"a": 5, "b": ["a", "g", 5]}'
jsonObj = JSONEncodedDict()
resp = jsonObj.process_bind_param(obj, "dialect")
self.assertIn('"a": 5', resp)
self.assertIn('"b": ["a", "g", 5]', resp)
self.assertEqual(jsonObj.process_result_value(val, "dialect"), obj)
def test_validate_json(self):
valid = '{"a": 5, "b": [1, 5, ["g", "h"]]}'
self.assertIsNone(validate_json(valid))
invalid = '{"a": 5, "b": [1, 5, ["g", "h]]}'
with self.assertRaises(SupersetException):
validate_json(invalid)
def test_convert_legacy_filters_into_adhoc_where(self):
form_data = {"where": "a = 1"}
expected = {
"adhoc_filters": [
{
"clause": "WHERE",
"expressionType": "SQL",
"filterOptionName": "46fb6d7891e23596e42ae38da94a57e0",
"sqlExpression": "a = 1",
}
]
}
convert_legacy_filters_into_adhoc(form_data)
self.assertEqual(form_data, expected)
def test_convert_legacy_filters_into_adhoc_filters(self):
form_data = {"filters": [{"col": "a", "op": "in", "val": "someval"}]}
expected = {
"adhoc_filters": [
{
"clause": "WHERE",
"comparator": "someval",
"expressionType": "SIMPLE",
"filterOptionName": "135c7ee246666b840a3d7a9c3a30cf38",
"operator": "in",
"subject": "a",
}
]
}
convert_legacy_filters_into_adhoc(form_data)
self.assertEqual(form_data, expected)
def test_convert_legacy_filters_into_adhoc_having(self):
form_data = {"having": "COUNT(1) = 1"}
expected = {
"adhoc_filters": [
{
"clause": "HAVING",
"expressionType": "SQL",
"filterOptionName": "683f1c26466ab912f75a00842e0f2f7b",
"sqlExpression": "COUNT(1) = 1",
}
]
}
convert_legacy_filters_into_adhoc(form_data)
self.assertEqual(form_data, expected)
def test_convert_legacy_filters_into_adhoc_having_filters(self):
form_data = {"having_filters": [{"col": "COUNT(1)", "op": "==", "val": 1}]}
expected = {
"adhoc_filters": [
{
"clause": "HAVING",
"comparator": 1,
"expressionType": "SIMPLE",
"filterOptionName": "967d0fb409f6d9c7a6c03a46cf933c9c",
"operator": "==",
"subject": "COUNT(1)",
}
]
}
convert_legacy_filters_into_adhoc(form_data)
self.assertEqual(form_data, expected)
def test_convert_legacy_filters_into_adhoc_present_and_empty(self):
form_data = {"adhoc_filters": [], "where": "a = 1"}
expected = {
"adhoc_filters": [
{
"clause": "WHERE",
"expressionType": "SQL",
"filterOptionName": "46fb6d7891e23596e42ae38da94a57e0",
"sqlExpression": "a = 1",
}
]
}
convert_legacy_filters_into_adhoc(form_data)
self.assertEqual(form_data, expected)
def test_convert_legacy_filters_into_adhoc_present_and_nonempty(self):
form_data = {
"adhoc_filters": [
{"clause": "WHERE", "expressionType": "SQL", "sqlExpression": "a = 1"}
],
"filters": [{"col": "a", "op": "in", "val": "someval"}],
"having": "COUNT(1) = 1",
"having_filters": [{"col": "COUNT(1)", "op": "==", "val": 1}],
}
expected = {
"adhoc_filters": [
{"clause": "WHERE", "expressionType": "SQL", "sqlExpression": "a = 1"}
]
}
convert_legacy_filters_into_adhoc(form_data)
self.assertEqual(form_data, expected)
def test_parse_js_uri_path_items_eval_undefined(self):
self.assertIsNone(parse_js_uri_path_item("undefined", eval_undefined=True))
self.assertIsNone(parse_js_uri_path_item("null", eval_undefined=True))
self.assertEqual("undefined", parse_js_uri_path_item("undefined"))
self.assertEqual("null", parse_js_uri_path_item("null"))
def test_parse_js_uri_path_items_unquote(self):
self.assertEqual("slashed/name", parse_js_uri_path_item("slashed%2fname"))
self.assertEqual(
"slashed%2fname", parse_js_uri_path_item("slashed%2fname", unquote=False)
)
def test_parse_js_uri_path_items_item_optional(self):
self.assertIsNone(parse_js_uri_path_item(None))
self.assertIsNotNone(parse_js_uri_path_item("item"))
def test_get_stacktrace(self):
with app.app_context():
app.config["SHOW_STACKTRACE"] = True
try:
raise Exception("NONONO!")
except Exception:
stacktrace = get_stacktrace()
self.assertIn("NONONO", stacktrace)
app.config["SHOW_STACKTRACE"] = False
try:
raise Exception("NONONO!")
except Exception:
stacktrace = get_stacktrace()
assert stacktrace is None
def test_split(self):
self.assertEqual(list(split("a b")), ["a", "b"])
self.assertEqual(list(split("a,b", delimiter=",")), ["a", "b"])
self.assertEqual(list(split("a,(b,a)", delimiter=",")), ["a", "(b,a)"])
self.assertEqual(
list(split('a,(b,a),"foo , bar"', delimiter=",")),
["a", "(b,a)", '"foo , bar"'],
)
self.assertEqual(
list(split("a,'b,c'", delimiter=",", quote="'")), ["a", "'b,c'"]
)
self.assertEqual(list(split('a "b c"')), ["a", '"b c"'])
self.assertEqual(list(split(r'a "b \" c"')), ["a", r'"b \" c"'])
def test_get_or_create_db(self):
get_or_create_db("test_db", "sqlite:///superset.db")
database = db.session.query(Database).filter_by(database_name="test_db").one()
self.assertIsNotNone(database)
self.assertEqual(database.sqlalchemy_uri, "sqlite:///superset.db")
self.assertIsNotNone(
security_manager.find_permission_view_menu("database_access", database.perm)
)
# Test change URI
get_or_create_db("test_db", "sqlite:///changed.db")
database = db.session.query(Database).filter_by(database_name="test_db").one()
self.assertEqual(database.sqlalchemy_uri, "sqlite:///changed.db")
db.session.delete(database)
db.session.commit()
def test_get_or_create_db_invalid_uri(self):
with self.assertRaises(ArgumentError):
get_or_create_db("test_db", "yoursql:superset.db/()")
def test_get_iterable(self):
self.assertListEqual(get_iterable(123), [123])
self.assertListEqual(get_iterable([123]), [123])
self.assertListEqual(get_iterable("foo"), ["foo"])
@pytest.mark.usefixtures("load_world_bank_dashboard_with_slices")
def test_build_extra_filters(self):
world_health = db.session.query(Dashboard).filter_by(slug="world_health").one()
layout = json.loads(world_health.position_json)
filter_ = db.session.query(Slice).filter_by(slice_name="Region Filter").one()
world = db.session.query(Slice).filter_by(slice_name="World's Population").one()
box_plot = db.session.query(Slice).filter_by(slice_name="Box plot").one()
treemap = db.session.query(Slice).filter_by(slice_name="Treemap").one()
filter_scopes = {
str(filter_.id): {
"region": {"scope": ["ROOT_ID"], "immune": [treemap.id]},
"country_name": {
"scope": ["ROOT_ID"],
"immune": [treemap.id, box_plot.id],
},
}
}
default_filters = {
str(filter_.id): {
"region": ["North America"],
"country_name": ["United States"],
}
}
# immune to all filters
assert (
build_extra_filters(layout, filter_scopes, default_filters, treemap.id)
== []
)
# in scope
assert build_extra_filters(
layout, filter_scopes, default_filters, world.id
) == [
{"col": "region", "op": "==", "val": "North America"},
{"col": "country_name", "op": "in", "val": ["United States"]},
]
assert build_extra_filters(
layout, filter_scopes, default_filters, box_plot.id
) == [{"col": "region", "op": "==", "val": "North America"}]
def test_merge_extra_filters_with_no_extras(self):
form_data = {
"time_range": "Last 10 days",
}
merge_extra_form_data(form_data)
self.assertEqual(
form_data, {"time_range": "Last 10 days", "adhoc_filters": [],},
)
def test_merge_extra_filters_with_unset_legacy_time_range(self):
"""
Make sure native filter is applied if filter box time range is unset.
"""
form_data = {
"time_range": "Last 10 days",
"extra_filters": [
{"col": "__time_range", "op": "==", "val": NO_TIME_RANGE},
],
"extra_form_data": {"time_range": "Last year"},
}
merge_extra_filters(form_data)
self.assertEqual(
form_data,
{
"time_range": "Last year",
"applied_time_extras": {},
"adhoc_filters": [],
},
)
def test_merge_extra_filters_with_conflicting_time_ranges(self):
"""
Make sure filter box takes precedence if both native filter and filter box
time ranges are set.
"""
form_data = {
"time_range": "Last 10 days",
"extra_filters": [{"col": "__time_range", "op": "==", "val": "Last week"}],
"extra_form_data": {"time_range": "Last year",},
}
merge_extra_filters(form_data)
self.assertEqual(
form_data,
{
"time_range": "Last week",
"applied_time_extras": {"__time_range": "Last week"},
"adhoc_filters": [],
},
)
def test_merge_extra_filters_with_extras(self):
form_data = {
"time_range": "Last 10 days",
"extra_form_data": {
"filters": [{"col": "foo", "op": "IN", "val": ["bar"]}],
"adhoc_filters": [
{
"expressionType": "SQL",
"clause": "WHERE",
"sqlExpression": "1 = 0",
}
],
"time_range": "Last 100 years",
"time_grain_sqla": "PT1M",
"relative_start": "now",
},
}
merge_extra_form_data(form_data)
adhoc_filters = form_data["adhoc_filters"]
assert adhoc_filters[0] == {
"clause": "WHERE",
"expressionType": "SQL",
"isExtra": True,
"sqlExpression": "1 = 0",
}
converted_filter = adhoc_filters[1]
del converted_filter["filterOptionName"]
assert converted_filter == {
"clause": "WHERE",
"comparator": ["bar"],
"expressionType": "SIMPLE",
"isExtra": True,
"operator": "IN",
"subject": "foo",
}
assert form_data["time_range"] == "Last 100 years"
assert form_data["time_grain_sqla"] == "PT1M"
assert form_data["extras"]["relative_start"] == "now"
def test_ssl_certificate_parse(self):
parsed_certificate = parse_ssl_cert(ssl_certificate)
self.assertEqual(parsed_certificate.serial_number, 12355228710836649848)
self.assertRaises(CertificateException, parse_ssl_cert, "abc" + ssl_certificate)
def test_ssl_certificate_file_creation(self):
path = create_ssl_cert_file(ssl_certificate)
expected_filename = md5_sha_from_str(ssl_certificate)
self.assertIn(expected_filename, path)
self.assertTrue(os.path.exists(path))
def test_get_email_address_list(self):
self.assertEqual(get_email_address_list("a@a"), ["a@a"])
self.assertEqual(get_email_address_list(" a@a "), ["a@a"])
self.assertEqual(get_email_address_list("a@a\n"), ["a@a"])
self.assertEqual(get_email_address_list(",a@a;"), ["a@a"])
self.assertEqual(
get_email_address_list(",a@a; b@b c@c a-c@c; d@d, f@f"),
["a@a", "b@b", "c@c", "a-c@c", "d@d", "f@f"],
)
def test_get_form_data_default(self) -> None:
with app.test_request_context():
form_data, slc = get_form_data()
self.assertEqual(slc, None)
def test_get_form_data_request_args(self) -> None:
with app.test_request_context(
query_string={"form_data": json.dumps({"foo": "bar"})}
):
form_data, slc = get_form_data()
self.assertEqual(form_data, {"foo": "bar"})
self.assertEqual(slc, None)
def test_get_form_data_request_form(self) -> None:
with app.test_request_context(data={"form_data": json.dumps({"foo": "bar"})}):
form_data, slc = get_form_data()
self.assertEqual(form_data, {"foo": "bar"})
self.assertEqual(slc, None)
def test_get_form_data_request_form_with_queries(self) -> None:
        # the CSV export uses form data requests, even when sending requests to
# /api/v1/chart/data
with app.test_request_context(
data={
"form_data": json.dumps({"queries": [{"url_params": {"foo": "bar"}}]})
}
):
form_data, slc = get_form_data()
self.assertEqual(form_data, {"url_params": {"foo": "bar"}})
self.assertEqual(slc, None)
def test_get_form_data_request_args_and_form(self) -> None:
with app.test_request_context(
data={"form_data": json.dumps({"foo": "bar"})},
query_string={"form_data": json.dumps({"baz": "bar"})},
):
form_data, slc = get_form_data()
self.assertEqual(form_data, {"baz": "bar", "foo": "bar"})
self.assertEqual(slc, None)
def test_get_form_data_globals(self) -> None:
with app.test_request_context():
g.form_data = {"foo": "bar"}
form_data, slc = get_form_data()
delattr(g, "form_data")
self.assertEqual(form_data, {"foo": "bar"})
self.assertEqual(slc, None)
def test_get_form_data_corrupted_json(self) -> None:
with app.test_request_context(
data={"form_data": "{x: '2324'}"},
query_string={"form_data": '{"baz": "bar"'},
):
form_data, slc = get_form_data()
self.assertEqual(form_data, {})
self.assertEqual(slc, None)
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_log_this(self) -> None:
# TODO: Add additional scenarios.
self.login(username="admin")
slc = self.get_slice("Girls", db.session)
dashboard_id = 1
resp = self.get_json_resp(
f"/superset/explore_json/{slc.datasource_type}/{slc.datasource_id}/"
+ f'?form_data={{"slice_id": {slc.id}}}&dashboard_id={dashboard_id}',
{"form_data": json.dumps(slc.viz.form_data)},
)
record = (
db.session.query(Log)
.filter_by(action="explore_json", slice_id=slc.id)
.order_by(Log.dttm.desc())
.first()
)
self.assertEqual(record.dashboard_id, dashboard_id)
self.assertEqual(json.loads(record.json)["dashboard_id"], str(dashboard_id))
self.assertEqual(json.loads(record.json)["form_data"]["slice_id"], slc.id)
self.assertEqual(
json.loads(record.json)["form_data"]["viz_type"],
slc.viz.form_data["viz_type"],
)
def test_schema_validate_json(self):
valid = '{"a": 5, "b": [1, 5, ["g", "h"]]}'
self.assertIsNone(schema.validate_json(valid))
invalid = '{"a": 5, "b": [1, 5, ["g", "h]]}'
self.assertRaises(marshmallow.ValidationError, schema.validate_json, invalid)
def test_schema_one_of_case_insensitive(self):
validator = schema.OneOfCaseInsensitive(choices=[1, 2, 3, "FoO", "BAR", "baz"])
self.assertEqual(1, validator(1))
self.assertEqual(2, validator(2))
self.assertEqual("FoO", validator("FoO"))
self.assertEqual("FOO", validator("FOO"))
self.assertEqual("bar", validator("bar"))
self.assertEqual("BaZ", validator("BaZ"))
self.assertRaises(marshmallow.ValidationError, validator, "qwerty")
self.assertRaises(marshmallow.ValidationError, validator, 4)
def test_cast_to_num(self) -> None:
assert cast_to_num("5") == 5
assert cast_to_num("5.2") == 5.2
assert cast_to_num(10) == 10
assert cast_to_num(10.1) == 10.1
assert cast_to_num(None) is None
assert cast_to_num("this is not a string") is None
def test_get_form_data_token(self):
assert get_form_data_token({"token": "token_abcdefg1"}) == "token_abcdefg1"
generated_token = get_form_data_token({})
assert re.match(r"^token_[a-z0-9]{8}$", generated_token) is not None
@pytest.mark.usefixtures("load_birth_names_dashboard_with_slices")
def test_extract_dataframe_dtypes(self):
slc = self.get_slice("Girls", db.session)
cols: Tuple[Tuple[str, GenericDataType, List[Any]], ...] = (
("dt", GenericDataType.TEMPORAL, [date(2021, 2, 4), date(2021, 2, 4)]),
(
"dttm",
GenericDataType.TEMPORAL,
[datetime(2021, 2, 4, 1, 1, 1), datetime(2021, 2, 4, 1, 1, 1)],
),
("str", GenericDataType.STRING, ["foo", "foo"]),
("int", GenericDataType.NUMERIC, [1, 1]),
("float", GenericDataType.NUMERIC, [0.5, 0.5]),
("mixed-int-float", GenericDataType.NUMERIC, [0.5, 1.0]),
("bool", GenericDataType.BOOLEAN, [True, False]),
("mixed-str-int", GenericDataType.STRING, ["abc", 1.0]),
("obj", GenericDataType.STRING, [{"a": 1}, {"a": 1}]),
("dt_null", GenericDataType.TEMPORAL, [None, date(2021, 2, 4)]),
(
"dttm_null",
GenericDataType.TEMPORAL,
[None, datetime(2021, 2, 4, 1, 1, 1)],
),
("str_null", GenericDataType.STRING, [None, "foo"]),
("int_null", GenericDataType.NUMERIC, [None, 1]),
("float_null", GenericDataType.NUMERIC, [None, 0.5]),
("bool_null", GenericDataType.BOOLEAN, [None, False]),
("obj_null", GenericDataType.STRING, [None, {"a": 1}]),
# Non-timestamp columns should be identified as temporal if
# `is_dttm` is set to `True` in the underlying datasource
("ds", GenericDataType.TEMPORAL, [None, {"ds": "2017-01-01"}]),
)
df = pd.DataFrame(data={col[0]: col[2] for col in cols})
assert extract_dataframe_dtypes(df, slc.datasource) == [col[1] for col in cols]
def test_normalize_dttm_col(self):
def normalize_col(
df: pd.DataFrame,
timestamp_format: Optional[str],
offset: int,
time_shift: Optional[timedelta],
) -> pd.DataFrame:
df = df.copy()
normalize_dttm_col(df, timestamp_format, offset, time_shift)
return df
ts = pd.Timestamp(2021, 2, 15, 19, 0, 0, 0)
df = pd.DataFrame([{"__timestamp": ts, "a": 1}])
# test regular (non-numeric) format
assert normalize_col(df, None, 0, None)[DTTM_ALIAS][0] == ts
assert normalize_col(df, "epoch_ms", 0, None)[DTTM_ALIAS][0] == ts
assert normalize_col(df, "epoch_s", 0, None)[DTTM_ALIAS][0] == ts
# test offset
assert normalize_col(df, None, 1, None)[DTTM_ALIAS][0] == pd.Timestamp(
2021, 2, 15, 20, 0, 0, 0
)
# test offset and timedelta
assert normalize_col(df, None, 1, timedelta(minutes=30))[DTTM_ALIAS][
0
] == pd.Timestamp(2021, 2, 15, 20, 30, 0, 0)
# test numeric epoch_s format
df = pd.DataFrame([{"__timestamp": ts.timestamp(), "a": 1}])
assert normalize_col(df, "epoch_s", 0, None)[DTTM_ALIAS][0] == ts
# test numeric epoch_ms format
df = pd.DataFrame([{"__timestamp": ts.timestamp() * 1000, "a": 1}])
assert normalize_col(df, "epoch_ms", 0, None)[DTTM_ALIAS][0] == ts
|
py | b402d87b87040e446bcd06c3379147fc2f324493 | """
Functions for creating and restoring url-safe signed JSON objects.
The format used looks like this:
>>> signing.dumps("hello")
'ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk'
There are two components here, separated by a ':'. The first component is a
URLsafe base64 encoded JSON of the object passed to dumps(). The second
component is a base64 encoded hmac/SHA-256 hash of "$first_component:$secret"
signing.loads(s) checks the signature and returns the deserialized object.
If the signature fails, a BadSignature exception is raised.
>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv422nZA4sgmk")
'hello'
>>> signing.loads("ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv42-modified")
...
BadSignature: Signature "ImhlbGxvIg:1QaUZC:YIye-ze3TTx7gtSv42-modified" does not match
You can optionally compress the JSON prior to base64 encoding it to save
space, using the compress=True argument. This checks if compression actually
helps and only applies compression if the result is a shorter string:
>>> signing.dumps(list(range(1, 20)), compress=True)
'.eJwFwcERACAIwLCF-rCiILN47r-GyZVJsNgkxaFxoDgxcOHGxMKD_T7vhAml:1QaUaL:BA0thEZrp4FQVXIXuOvYJtLJSrQ'
The fact that the string is compressed is signalled by the prefixed '.' at the
start of the base64 JSON.
There are 65 url-safe characters: the 64 used by url-safe base64 and the ':'.
These functions make use of all of them.
"""
import base64
import datetime
import json
import time
import zlib
from django.conf import settings
from django.utils.crypto import constant_time_compare, salted_hmac
from django.utils.encoding import force_bytes
from django.utils.module_loading import import_string
from django.utils.regex_helper import _lazy_re_compile
_SEP_UNSAFE = _lazy_re_compile(r"^[A-z0-9-_=]*$")
BASE62_ALPHABET = "0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz"
class BadSignature(Exception):
"""Signature does not match."""
pass
class SignatureExpired(BadSignature):
"""Signature timestamp is older than required max_age."""
pass
def b62_encode(s):
if s == 0:
return "0"
sign = "-" if s < 0 else ""
s = abs(s)
encoded = ""
while s > 0:
s, remainder = divmod(s, 62)
encoded = BASE62_ALPHABET[remainder] + encoded
return sign + encoded
def b62_decode(s):
if s == "0":
return 0
sign = 1
if s[0] == "-":
s = s[1:]
sign = -1
decoded = 0
for digit in s:
decoded = decoded * 62 + BASE62_ALPHABET.index(digit)
return sign * decoded
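# Illustrative sketch (added for clarity, not part of the original module):
# base62 keeps the signed timestamp short and URL-safe; encode/decode are
# exact inverses for any integer, including zero and negatives.
def _b62_roundtrip_demo():
    for value in (0, 42, -42, 1_000_000_000):
        assert b62_decode(b62_encode(value)) == value
    return b62_encode(1_000_000_000)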
def b64_encode(s):
return base64.urlsafe_b64encode(s).strip(b"=")
def b64_decode(s):
pad = b"=" * (-len(s) % 4)
return base64.urlsafe_b64decode(s + pad)
def base64_hmac(salt, value, key, algorithm="sha1"):
return b64_encode(
salted_hmac(salt, value, key, algorithm=algorithm).digest()
).decode()
def _cookie_signer_key(key):
# SECRET_KEYS items may be str or bytes.
return b"django.http.cookies" + force_bytes(key)
def get_cookie_signer(salt="django.core.signing.get_cookie_signer"):
Signer = import_string(settings.SIGNING_BACKEND)
return Signer(
key=_cookie_signer_key(settings.SECRET_KEY),
fallback_keys=map(_cookie_signer_key, settings.SECRET_KEY_FALLBACKS),
salt=salt,
)
class JSONSerializer:
"""
Simple wrapper around json to be used in signing.dumps and
signing.loads.
"""
def dumps(self, obj):
return json.dumps(obj, separators=(",", ":")).encode("latin-1")
def loads(self, data):
return json.loads(data.decode("latin-1"))
def dumps(
obj, key=None, salt="django.core.signing", serializer=JSONSerializer, compress=False
):
"""
Return URL-safe, hmac signed base64 compressed JSON string. If key is
None, use settings.SECRET_KEY instead. The hmac algorithm is the default
Signer algorithm.
If compress is True (not the default), check if compressing using zlib can
save some space. Prepend a '.' to signify compression. This is included
in the signature, to protect against zip bombs.
Salt can be used to namespace the hash, so that a signed string is
only valid for a given namespace. Leaving this at the default
value or re-using a salt value across different parts of your
application without good cause is a security risk.
The serializer is expected to return a bytestring.
"""
return TimestampSigner(key, salt=salt).sign_object(
obj, serializer=serializer, compress=compress
)
def loads(
s,
key=None,
salt="django.core.signing",
serializer=JSONSerializer,
max_age=None,
fallback_keys=None,
):
"""
Reverse of dumps(), raise BadSignature if signature fails.
The serializer is expected to accept a bytestring.
"""
return TimestampSigner(key, salt=salt, fallback_keys=fallback_keys).unsign_object(
s,
serializer=serializer,
max_age=max_age,
)
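# Illustrative usage sketch (not part of the original module). It assumes a
# standalone script with no Django project configured and a Django version
# whose global settings include SECRET_KEY_FALLBACKS; "demo-secret-key" and
# "demo.salt" are throwaway values.
def _dumps_loads_demo():
    if not settings.configured:
        settings.configure(SECRET_KEY="demo-secret-key")
    token = dumps({"user": 42}, salt="demo.salt", compress=True)
    assert loads(token, salt="demo.salt", max_age=3600) == {"user": 42}
    return token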
class Signer:
def __init__(
self,
key=None,
sep=":",
salt=None,
algorithm=None,
fallback_keys=None,
):
self.key = key or settings.SECRET_KEY
self.fallback_keys = (
fallback_keys
if fallback_keys is not None
else settings.SECRET_KEY_FALLBACKS
)
self.sep = sep
if _SEP_UNSAFE.match(self.sep):
raise ValueError(
"Unsafe Signer separator: %r (cannot be empty or consist of "
"only A-z0-9-_=)" % sep,
)
self.salt = salt or "%s.%s" % (
self.__class__.__module__,
self.__class__.__name__,
)
self.algorithm = algorithm or "sha256"
def signature(self, value, key=None):
key = key or self.key
return base64_hmac(self.salt + "signer", value, key, algorithm=self.algorithm)
def sign(self, value):
return "%s%s%s" % (value, self.sep, self.signature(value))
def unsign(self, signed_value):
if self.sep not in signed_value:
raise BadSignature('No "%s" found in value' % self.sep)
value, sig = signed_value.rsplit(self.sep, 1)
for key in [self.key, *self.fallback_keys]:
if constant_time_compare(sig, self.signature(value, key)):
return value
raise BadSignature('Signature "%s" does not match' % sig)
def sign_object(self, obj, serializer=JSONSerializer, compress=False):
"""
Return URL-safe, hmac signed base64 compressed JSON string.
If compress is True (not the default), check if compressing using zlib
can save some space. Prepend a '.' to signify compression. This is
included in the signature, to protect against zip bombs.
The serializer is expected to return a bytestring.
"""
data = serializer().dumps(obj)
# Flag for if it's been compressed or not.
is_compressed = False
if compress:
# Avoid zlib dependency unless compress is being used.
compressed = zlib.compress(data)
if len(compressed) < (len(data) - 1):
data = compressed
is_compressed = True
base64d = b64_encode(data).decode()
if is_compressed:
base64d = "." + base64d
return self.sign(base64d)
def unsign_object(self, signed_obj, serializer=JSONSerializer, **kwargs):
# Signer.unsign() returns str but base64 and zlib compression operate
# on bytes.
base64d = self.unsign(signed_obj, **kwargs).encode()
decompress = base64d[:1] == b"."
if decompress:
# It's compressed; uncompress it first.
base64d = base64d[1:]
data = b64_decode(base64d)
if decompress:
data = zlib.decompress(data)
return serializer().loads(data)
class TimestampSigner(Signer):
def timestamp(self):
return b62_encode(int(time.time()))
def sign(self, value):
value = "%s%s%s" % (value, self.sep, self.timestamp())
return super().sign(value)
def unsign(self, value, max_age=None):
"""
Retrieve original value and check it wasn't signed more
than max_age seconds ago.
"""
result = super().unsign(value)
value, timestamp = result.rsplit(self.sep, 1)
timestamp = b62_decode(timestamp)
if max_age is not None:
if isinstance(max_age, datetime.timedelta):
max_age = max_age.total_seconds()
# Check timestamp is not older than max_age
age = time.time() - timestamp
if age > max_age:
raise SignatureExpired("Signature age %s > %s seconds" % (age, max_age))
return value
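# Illustrative sketch (not part of the original module): a TimestampSigner
# round trip with an explicit key and empty fallback_keys, so no Django
# settings access is needed; "demo-key" and "demo.salt" are throwaway values.
def _timestamp_signer_demo():
    signer = TimestampSigner(key="demo-key", salt="demo.salt", fallback_keys=[])
    signed = signer.sign("hello")  # "hello:<base62 timestamp>:<signature>"
    assert signer.unsign(signed, max_age=60) == "hello"
    return signed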
|
py | b402d89d696ad29f21fbff3ea332cfef615016ef | from __future__ import unicode_literals
import logging
from django.db.models.signals import post_save
from django.db.models.signals import pre_delete
from algoliasearch.search_client import SearchClient
from algoliasearch.user_agent import UserAgent
from .models import AlgoliaIndex
from .settings import SETTINGS
from .version import VERSION
from django import get_version as django_version
logger = logging.getLogger(__name__)
UserAgent.add("Algolia for Django", VERSION)
UserAgent.add("Django", django_version())
class AlgoliaEngineError(Exception):
"""Something went wrong with Algolia Engine."""
class RegistrationError(AlgoliaEngineError):
"""Something went wrong when registering a model."""
class AlgoliaEngine(object):
def __init__(self, settings=SETTINGS):
"""Initializes the Algolia engine."""
try:
app_id = settings['APPLICATION_ID']
api_key = settings['API_KEY']
except KeyError:
raise AlgoliaEngineError(
'APPLICATION_ID and API_KEY must be defined.')
self.__auto_indexing = settings.get('AUTO_INDEXING', True)
self.__settings = settings
self.__registered_models = {}
self.client = SearchClient.create(app_id, api_key)
def is_registered(self, model):
"""Checks whether the given models is registered with Algolia engine"""
return model in self.__registered_models
def register(self, model, index_cls=AlgoliaIndex, auto_indexing=None):
"""
Registers the given model with Algolia engine.
If the given model is already registered with Algolia engine, a
RegistrationError will be raised.
"""
# Check for existing registration.
if self.is_registered(model):
raise RegistrationError(
'{} is already registered with Algolia engine'.format(model))
# Perform the registration.
if not issubclass(index_cls, AlgoliaIndex):
raise RegistrationError(
'{} should be a subclass of AlgoliaIndex'.format(index_cls))
index_obj = index_cls(model, self.client, self.__settings)
self.__registered_models[model] = index_obj
if (isinstance(auto_indexing, bool) and
auto_indexing) or self.__auto_indexing:
# Connect to the signalling framework.
post_save.connect(self.__post_save_receiver, model)
pre_delete.connect(self.__pre_delete_receiver, model)
logger.info('REGISTER %s', model)
def unregister(self, model):
"""
Unregisters the given model with Algolia engine.
If the given model is not registered with Algolia engine, a
RegistrationError will be raised.
"""
if not self.is_registered(model):
raise RegistrationError(
'{} is not registered with Algolia engine'.format(model))
# Perform the unregistration.
del self.__registered_models[model]
# Disconnect from the signalling framework.
post_save.disconnect(self.__post_save_receiver, model)
pre_delete.disconnect(self.__pre_delete_receiver, model)
logger.info('UNREGISTER %s', model)
def get_registered_models(self):
"""
Returns a list of models that have been registered with Algolia
engine.
"""
return list(self.__registered_models.keys())
def get_adapter(self, model):
"""Returns the adapter associated with the given model."""
if not self.is_registered(model):
raise RegistrationError(
'{} is not registered with Algolia engine'.format(model))
return self.__registered_models[model]
def get_adapter_from_instance(self, instance):
"""Returns the adapter associated with the given instance."""
model = instance.__class__
return self.get_adapter(model)
# Proxies methods.
def save_record(self, instance, **kwargs):
"""Saves the record.
If `update_fields` is set, this method will use partial_update_object()
and will update only the given fields (never `_geoloc` and `_tags`).
For more information about partial_update_object:
https://github.com/algolia/algoliasearch-client-python#update-an-existing-object-in-the-index
"""
adapter = self.get_adapter_from_instance(instance)
adapter.save_record(instance, **kwargs)
def save_records(self, model, qs, batch_size=1000, force_index=False):
"""
Saves multiple records.
This method is optimized for speed. It takes a model class and QuerySet. Optionally, you
can specify the size of the batch send to Algolia with batch_size (default to 1000).
"""
adapter = self.get_adapter(model)
adapter.save_records(qs, batch_size=batch_size, force_index=force_index)
def delete_record(self, instance):
"""Deletes the record."""
adapter = self.get_adapter_from_instance(instance)
adapter.delete_record(instance)
def delete_records(self, model, qs, batch_size=1000):
"""
Deletes multiple records.
This method is optimized for speed. It takes a model class and QuerySet. Optionally, you
can specify the size of the batch send to Algolia with batch_size (default to 1000).
"""
adapter = self.get_adapter(model)
adapter.delete_records(qs, batch_size=batch_size)
def update_records(self, model, qs, batch_size=1000, **kwargs):
"""
Updates multiple records.
This method is optimized for speed. It takes a QuerySet and the same
arguments as QuerySet.update(). Optionally, you can specify the size
of the batch send to Algolia with batch_size (default to 1000).
>>> from algoliasearch_django import update_records
>>> qs = MyModel.objects.filter(myField=False)
>>> update_records(MyModel, qs, myField=True)
>>> qs.update(myField=True)
"""
adapter = self.get_adapter(model)
adapter.update_records(qs, batch_size=batch_size, **kwargs)
def raw_search(self, model, query='', params=None):
"""Performs a search query and returns the parsed JSON."""
if params is None:
params = {}
adapter = self.get_adapter(model)
return adapter.raw_search(query, params)
def clear_objects(self, model):
"""Clears the index."""
adapter = self.get_adapter(model)
adapter.clear_objects()
def clear_index(self, model):
        # TODO: add deprecated warning
self.clear_objects(model)
def reindex_all(self, model, batch_size=1000):
"""
Reindex all the records.
By default, this method use Model.objects.all() but you can implement
a method `get_queryset` in your subclass. This can be used to optimize
the performance (for example with select_related or prefetch_related).
"""
adapter = self.get_adapter(model)
return adapter.reindex_all(batch_size)
def reset(self, settings=None):
"""Reinitializes the Algolia engine and its client.
:param settings: settings to use instead of the default django.conf.settings.algolia
"""
self.__init__(settings=settings if settings is not None else SETTINGS)
# Signalling hooks.
def __post_save_receiver(self, instance, **kwargs):
"""Signal handler for when a registered model has been saved."""
logger.debug('RECEIVE post_save FOR %s', instance.__class__)
self.save_record(instance, **kwargs)
def __pre_delete_receiver(self, instance, **kwargs):
"""Signal handler for when a registered model has been deleted."""
logger.debug('RECEIVE pre_delete FOR %s', instance.__class__)
self.delete_record(instance)
# Algolia engine
algolia_engine = AlgoliaEngine()
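# Illustrative sketch (not part of the original module): registering a model
# with the engine, typically from an AppConfig.ready() hook. The model and
# index class passed in are whatever your project defines; nothing here is
# specific to a real project.
def _register_demo(model, index_cls=AlgoliaIndex):
    if not algolia_engine.is_registered(model):
        algolia_engine.register(model, index_cls)
    # After registration, post_save/pre_delete signals keep Algolia in sync
    # (when AUTO_INDEXING is enabled) and the adapter exposes index operations.
    return algolia_engine.get_adapter(model)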
|
py | b402d920358a633c7d7822cceb61e669c53ab641 | # -*- coding: utf-8 -*-
"""
mundiapi
This file was automatically generated by APIMATIC v2.0 ( https://apimatic.io ).
"""
import mundiapi.models.create_checkout_card_installment_option_request
import mundiapi.models.create_payment_authentication_request
class CreateCheckoutCreditCardPaymentRequest(object):
"""Implementation of the 'CreateCheckoutCreditCardPaymentRequest' model.
Checkout card payment request
Attributes:
statement_descriptor (string): Card invoice text descriptor
installments (list of CreateCheckoutCardInstallmentOptionRequest):
Payment installment options
authentication (CreatePaymentAuthenticationRequest): Creates payment
authentication
capture (bool): Authorize and capture?
"""
# Create a mapping from Model property names to API property names
_names = {
"statement_descriptor":'statement_descriptor',
"installments":'installments',
"authentication":'authentication',
"capture":'capture'
}
def __init__(self,
statement_descriptor=None,
installments=None,
authentication=None,
capture=None):
"""Constructor for the CreateCheckoutCreditCardPaymentRequest class"""
# Initialize members of the class
self.statement_descriptor = statement_descriptor
self.installments = installments
self.authentication = authentication
self.capture = capture
@classmethod
def from_dictionary(cls,
dictionary):
"""Creates an instance of this model from a dictionary
Args:
dictionary (dictionary): A dictionary representation of the object as
obtained from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if dictionary is None:
return None
# Extract variables from the dictionary
statement_descriptor = dictionary.get('statement_descriptor')
installments = None
        if dictionary.get('installments') is not None:
installments = list()
for structure in dictionary.get('installments'):
installments.append(mundiapi.models.create_checkout_card_installment_option_request.CreateCheckoutCardInstallmentOptionRequest.from_dictionary(structure))
authentication = mundiapi.models.create_payment_authentication_request.CreatePaymentAuthenticationRequest.from_dictionary(dictionary.get('authentication')) if dictionary.get('authentication') else None
capture = dictionary.get('capture')
# Return an object of this model
return cls(statement_descriptor,
installments,
authentication,
capture)
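# Illustrative sketch (not part of the generated model): building an instance
# from an API-style payload. The payload keys follow the _names mapping above;
# the installment fields shown are hypothetical example values.
def _from_dictionary_demo():
    payload = {
        'statement_descriptor': 'MYSTORE',
        'installments': [{'number': 1, 'total': 1000}],
        'authentication': None,
        'capture': True,
    }
    request = CreateCheckoutCreditCardPaymentRequest.from_dictionary(payload)
    assert request.statement_descriptor == 'MYSTORE'
    assert request.capture is True
    return request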
|
py | b402d98285463aba52322afa1de1b43624534734 | import pytest
import mock
from airflow.hooks.base_hook import BaseHook
from rb_quality_plugin.operators.base_data_quality_operator import (
BaseDataQualityOperator,
)
@mock.patch.object(BaseHook, "get_connection")
@mock.patch.object(BaseHook, "get_hook")
def test_get_sql_value_one_result(mock_get_hook, mock_get_connection):
mock_get_connection.conn_type = "id"
mock_hook = mock.Mock()
mock_hook.get_records.return_value = [(10,)]
mock_get_hook.return_value = mock_hook
task = BaseDataQualityOperator(
task_id="one_result_task", conn_id="test_id", sql="SELECT COUNT(1) FROM test;"
)
result = task.get_sql_value(conn_id=task.conn_id, sql=task.sql)
assert result == 10
@mock.patch.object(BaseHook, "get_connection")
@mock.patch.object(BaseHook, "get_hook")
def test_get_sql_value_not_one_result(mock_get_hook, mock_get_connection):
mock_get_connection.conn_type = "id"
mock_hook = mock.Mock()
mock_hook.get_records.return_value = [(10,), (100,)]
mock_get_hook.return_value = mock_hook
task = BaseDataQualityOperator(
task_id="one_result_task", conn_id="test_id", sql="SELECT COUNT(1) FROM test;"
)
with pytest.raises(ValueError):
task.get_sql_value(conn_id=task.conn_id, sql=task.sql)
@mock.patch.object(BaseHook, "get_connection")
@mock.patch.object(BaseHook, "get_hook")
def test_get_sql_value_no_result(mock_get_hook, mock_get_connection):
mock_get_connection.conn_type = "id"
mock_hook = mock.Mock()
mock_hook.get_records.return_value = []
mock_get_hook.return_value = mock_hook
task = BaseDataQualityOperator(
task_id="one_result_task", conn_id="test_id", sql="SELECT COUNT(1) FROM test;"
)
with pytest.raises(ValueError):
task.get_sql_value(conn_id=task.conn_id, sql=task.sql)
@mock.patch.object(BaseHook, "get_connection")
@mock.patch.object(BaseHook, "get_hook")
def test_get_sql_value_multiple_results(mock_get_hook, mock_get_connection):
mock_get_connection.conn_type = "id"
mock_hook = mock.Mock()
mock_hook.get_records.return_value = [(10, "bad value")]
mock_get_hook.return_value = mock_hook
task = BaseDataQualityOperator(
task_id="one_result_task", conn_id="test_id", sql="SELECT COUNT(1) FROM test;"
)
with pytest.raises(ValueError):
task.get_sql_value(conn_id=task.conn_id, sql=task.sql)
|
py | b402d9df12ac943767b98c9435c1aac09c100668 | import sys
from nose.core import run_exit
if sys.argv[0].endswith('__main__.py'):
sys.argv[0] = '%s -m nose' % sys.executable
run_exit()
|
py | b402da8c763e3b55957db70f42129b5cbbd9b5c1 |
import numpy as np
class AcousticRecord(object):
def __init__(self, n_days, n_events, seed=None):
self.n_days = n_days
self.n_events = n_events
self.duration = n_days*24*3600
if(seed is not None):
np.random.seed(seed) # for static plots
# create attributes for traffic properties
# they're written such that they can be re-assigned if desired
self.Lmax_distribution = np.random.randint(200, 800, size=n_events)/10 # same precision as SLM
self.fwhm_duration_distribution = np.random.normal(100, 50, size=n_events)
self.center_times = np.random.randint(self.duration, size=n_events)
# initialize numpy arrays to hold noise, natural sound, and (eventually) the combination of both
self.event_record = None
self.ambient = None
self.full_record = None
# this attribute stores the total event duration WITHOUT ambience
self.total_event_duration = 0
# these attributes represent the temporal bounds of a SPLAT record
# they are intervals of noise and quietude, respectively
# initial values are None
self.noise_intervals = None
self.noise_free_intervals = None
# arrays to hold summary metrics
self.SPL_summary_metrics = None
self.duration_summary_metrics = None
self.nfi_list = None
def point_source(self, Lmax, duration_fwhm):
'''
Create a one-second resolution point source with center at time zero.
This is a helper function to simplify use of 'combine_point_sources()'
inputs
------
Lmax (float): the highest sound pressure level of the event in decibels
        duration_fwhm (float): full width at half-maximum of the event in seconds
        outputs
        -------
        event (numpy array): one-second resolution sound pressure levels of the event in decibels
'''
# an event will definitely be 'over' within ±10 times the full width at half-maximum!
sweep_times = np.arange(-10*duration_fwhm, 10*duration_fwhm, 1)
# calculate the gauss curve
event = Lmax*np.exp(-1*np.power(sweep_times, 2)/(2*np.power(duration_fwhm, 2)))
return event
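    # Worked example (illustrative): with Lmax = 60 dB and duration_fwhm = 100 s,
    # the event is evaluated for t in [-1000, 1000) s and the level at t = +/-100 s
    # is 60 * exp(-100**2 / (2 * 100**2)) ~= 36.4 dB. Note that duration_fwhm is
    # used directly as the Gaussian width parameter (sigma) in the formula above,
    # not converted from a true full width at half-maximum.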
def combine_point_sources(self):
'''
Generate a continuous record of noise for every overflight that occurs.
'''
# create an empty numpy array to hold the event record
# we'll use one-second time resolution throughout this model
self.event_record = np.zeros(shape=self.n_days*3600*24)
for Lmax, dur_fwhm, center_time in zip(self.Lmax_distribution, self.fwhm_duration_distribution, self.center_times):
point = self.point_source(Lmax, dur_fwhm)
# handle the fact that events can overlap the start/end of the record
# if the start of the event is less than the beginning, truncate it
if(-10*dur_fwhm + center_time < 0):
rec_start = 0
event_start = np.absolute(-10*dur_fwhm + center_time)
else:
rec_start = -10*dur_fwhm + center_time
event_start = 0
# likewise, if the end of the event is longer than the record
# it'll also need to be truncated
if(10*dur_fwhm + center_time >= self.event_record.size):
rec_end = self.event_record.size
event_end = self.event_record.size - (10*dur_fwhm + center_time) + 1
else:
rec_end = 10*dur_fwhm + center_time
event_end = point.size - 1
            # add this event's energy into the record; length mismatches are handled in the except clause
try:
# cast all the indices to integer
rec_start = int(rec_start)
rec_end = int(rec_end)
event_start = int(event_start)
event_end = int(event_end)
self.event_record[rec_start:rec_end] = 10*np.log10(np.power(10, self.event_record[rec_start:rec_end]/10)
+ np.power(10, point[event_start:event_end]/10))
# add the event duration to the total event duration
self.total_event_duration = self.total_event_duration + (event_end - event_start)
except ValueError:
if(np.absolute(self.event_record[rec_start:rec_end].size
- point[event_start:event_end].size) > 1):
self.n_events = self.n_events - 1
elif(np.absolute(self.event_record[rec_start:rec_end].size
- point[event_start:event_end].size) == 1):
event_end = point.size
try:
self.event_record[rec_start:rec_end] = 10*np.log10(np.power(10, self.event_record[rec_start:rec_end]/10)
+ np.power(10, point[event_start:event_end]/10))
# add the event duration to the total event duration
self.total_event_duration = self.total_event_duration + (event_end - event_start)
except ValueError:
pass
def adjust_noise_free_intervals(self, noise_free_intervals, noise_intervals):
'''
In this simulation our convention will be to have closed noise intervals.
To achieve this, we need to bound our noise free intervals.
'''
nfi_starts = self.noise_free_intervals.T[0]
nfi_ends = self.noise_free_intervals.T[1]
# ------- Account for different beginning conditions -----------------------------------------
# the record begins with noise...
if(self.noise_intervals[0, 0] == 0):
# ...the first noise free interval (and thus ALL intervals) need to start one second later
nfi_starts = nfi_starts + 1
# the record begins with quietude...
else:
# ...the first noise free interval stays the same, and equals zero
# the rest are + 1
nfi_starts = nfi_starts + 1
nfi_starts[0] = 0
# ------- Account for different ending conditions -----------------------------------------
# the record ends with noise...
if(self.noise_intervals[-1, 0] == 0):
# ...the last noise free interval (and thus ALL intervals) need to end one second earlier
nfi_ends = nfi_ends - 1
# the record ends with quietude...
else:
# ...the last noise free interval stays the same, and equals zero
# the rest are - 1
save = nfi_ends[-1]
nfi_ends = nfi_ends - 1
nfi_ends[-1] = save
# reset attribute to these updated, correct values
        self.noise_free_intervals = np.array([nfi_starts, nfi_ends]).T
def contiguous_regions(self, condition):
"""
Finds contiguous True regions of the boolean array "condition". Returns
a 2D array where the first column is the start index of the region and the
second column is the end index.
"""
# Find the indicies of changes in "condition"
d = np.diff(condition)
idx, = d.nonzero()
# We need to start things after the change in "condition". Therefore,
# we'll shift the index by 1 to the right.
idx += 1
if condition[0]:
# If the start of condition is True prepend a 0
idx = np.r_[0, idx]
if condition[-1]:
# If the end of condition is True, append the length of the array
idx = np.r_[idx, condition.size] # Edit
# Reshape the result into two columns
idx.shape = (-1,2)
return idx
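    # Illustrative example: for condition = np.array([False, True, True, False, True])
    # this returns np.array([[1, 3], [4, 5]]), i.e. [start, end) index pairs for
    # each contiguous run of True values.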
def annotate_events(self, audibility_buffer=0.0):
'''
This function divides self.full_record into binary categories: noise, and non-noise.
input
-----
Self
outputs
-------
(1) list SPLs for each event [use self.event_record for each section of self.full_record >= ambient]
(2) NFIs [use self.event_record < ambient to save arrays from self.full_record]
'''
# we can't annotate events that don't exist
if(self.event_record is None):
self.combine_point_sources()
if(self.ambient is not None):
# 'observe' the contiguous regions of noise
self.noise_intervals = self.contiguous_regions((self.event_record > self.ambient + audibility_buffer))
# likewise, 'observe' the contiguous regions of quiet, where pressure from events is less than the ambient level
self.noise_free_intervals = self.contiguous_regions((self.event_record < self.ambient + audibility_buffer))
# then, correct the noise free intervals to be bounded intervals
# this removes the overlapping seconds shared by noise and quiet (by convention, always in favor of noise)
self.adjust_noise_free_intervals(self.noise_free_intervals, self.noise_intervals)
elif(self.ambient is None):
raise AttributeError('Ambience is undefined. Use of .add_ambience() is prerequisite to .annotate_events()')
def add_ambience(self, Lp, audibility_buffer=0.0):
'''
Define and add ambience - an essential attribute of acoustic environments - to the full record.
Then, and only then, are simulated observations meaningful.
input
-----
Lp (numpy array): Sound pressure levels representing the natural energy of an environment.
If a numpy array shorter than the record is given, the first value will
be used as a constant. This function also accepts other types of signals
for ambience.
audibility_buffer (float): A scalar sound pressure level representing a modifier to the
annotation conditions of a given user. It will modify 'annotate_events'.
output
------
modifies self.full_record to include background
'''
# if you haven't combined the noise events yet, do that to generate the event record
if(self.event_record is None):
self.combine_point_sources()
# if the user gives only a single value
if((type(Lp)==float)|(type(Lp)==int)):
# handle input of scalar integers or floating point numbers
Lp_toUse = np.array(Lp)
# create a repeated numpy array of this value
self.ambient = np.repeat(Lp_toUse, self.event_record.size)
# raise and error if the array is too short
elif((len(Lp) < self.event_record.size)):
raise Exception("The ambient record is not as long as the entire record. Specify either a constant scalar value or a numpy array of shape ("+str(self.duration)+",).")
# if the user gives ambience defined over the entire record
elif(Lp.size == self.event_record.size):
self.ambient = Lp
# add the ambience to the energy from noise to get the full record
self.full_record = 10*np.log10( np.power(10, self.event_record/10) + np.power(10, self.ambient/10) )
# as soon as we have the full record, let's simulate
# the noise conditions we would measure/observe
self.annotate_events(audibility_buffer=audibility_buffer)
self.calculate_SPL_summary()
self.calculate_duration_summary()
self.calculate_nfi_summary()
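    # Illustrative usage sketch (not part of the original class): the intended
    # workflow is to build a record and then call add_ambience(), which also
    # triggers annotation and the summary calculations, e.g.
    #
    #     record = AcousticRecord(n_days=2, n_events=50, seed=1)
    #     record.add_ambience(30.0)            # constant 30 dB natural ambient
    #     record.nfi_list                      # noise-free interval durations (s)
    #     record.duration_summary_metrics[0]   # per-event noise durations (s)
    #
    # The values above are arbitrary demo numbers.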
def reset_ambience(self):
self.full_record = self.event_record
self.ambient = np.zeros(shape=self.n_days*3600*24)
def calculate_SPL_summary(self):
'''
This function computes sound pressure level metrics for each noise event in `noise_intervals`.
It's called as part of `add_ambience()` and works behind the scenes.
inputs
------
self
outputs
-------
a 2D numpy array of sound pressure level metrics as 'observed' in `self.full_record`:
[0] one-second broadband sound pressure levels for each noise event
[1] the equivalent sound pressure level over the duration of the event (Leq, *)
[2] the sound exposure level of the event
[3] the median sound pressure level of the event
[4] the maximum one-second sound pressure level of the event (maximum Leq, 1s)
[5] the time at which the maximum one-second sound pressure level occurred
'''
# the indices corresponding to each noise event (note: NOT each aircraft)
SPL_summary_indices = [np.arange(s, e+1) for s, e in self.noise_intervals]
# create a 2-D array: the full time series of each event, PLUS summary metrics
SPL_summary = []
for SPL_summary_index in SPL_summary_indices:
# clip out the one-second record for each event and add it to the full list
SPL_extract = self.event_record[SPL_summary_index]
# Equivalent Sound Pressure Level (Leq, *)
Leq = 10*np.log10((1/SPL_extract.size)*np.power(10, SPL_extract/10).sum())
# Sound Exposure Level (SEL)
SEL = Leq + 10*np.log10(SPL_extract.size)
# Median Sound Pressure Level of the Event (L50)
L50_event = np.percentile(SPL_extract, 50)
# Maximum Sound Pressure Level of the Event (maximum Leq, 1s)
# this metric also has precise timing, which we'll capture
Lmax_event = np.percentile(SPL_extract, 100)
Lmax_time = SPL_summary_index[np.argwhere(SPL_extract == Lmax_event)][0,0]
# add all these calculated metrics to the composite list
SPL_summary.append([SPL_extract, Leq, SEL, L50_event, Lmax_event, Lmax_time])
out = np.array(SPL_summary).T
# update the attribute
self.SPL_summary_metrics = out
# it's convenient for this function to return the results
return out
def calculate_duration_summary(self):
'''
This function computes the duration of noise event in `noise_intervals`.
It's called as part of `add_ambience()` and works behind the scenes.
inputs
------
self
outputs
-------
a 2D numpy array of sound pressure level metrics as 'observed' in `self.full_record`:
[0] a list of each event's duration
[1] the mean duration
[2] the standard deviation of the durations
[3] the median duration
[4] the median absolute deviation of the durations
'''
# the durations, themselves
duration_list = self.noise_intervals.T[1] - self.noise_intervals.T[0]
# mean duration
mean = np.mean(duration_list)
# standard deviation duration
stdev = np.std(duration_list)
# median duration
median = np.percentile(duration_list, 50)
# median absolute deviation of duration
mad = np.percentile(np.absolute(duration_list - median), 50)
# combine the results and update the class attribute
out = np.array([duration_list, mean, stdev, median, mad])
# update the attribute
self.duration_summary_metrics = out
# it's convenient to return the results
return out
def calculate_nfi_summary(self):
'''
A very simple function to calculate the length of each noise free interval.
'''
nfis = self.noise_free_intervals
# determine the duration of each interval and reassign the attribute
self.nfi_list = nfis.T[1] - nfis.T[0] |
py | b402db6ccafa8b26db9828a12c4f77e3791f0549 | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This package provides tools for saving docker images."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import io
import json
import os
import tarfile
import concurrent.futures
from containerregistry.client import docker_name
from containerregistry.client.v1 import docker_image as v1_image
from containerregistry.client.v1 import save as v1_save
from containerregistry.client.v2 import v1_compat
from containerregistry.client.v2_2 import docker_digest
from containerregistry.client.v2_2 import docker_http
from containerregistry.client.v2_2 import docker_image as v2_2_image
from containerregistry.client.v2_2 import v2_compat
import six
def _diff_id(v1_img, blob):
try:
return v1_img.diff_id(blob)
except ValueError:
unzipped = v1_img.uncompressed_layer(blob)
return docker_digest.SHA256(unzipped)
def multi_image_tarball(
tag_to_image,
tar,
tag_to_v1_image = None
):
"""Produce a "docker save" compatible tarball from the DockerImages.
Args:
tag_to_image: A dictionary of tags to the images they label.
tar: the open tarfile into which we are writing the image tarball.
tag_to_v1_image: A dictionary of tags to the v1 form of the images
they label. If this isn't provided, the image is simply converted.
"""
def add_file(filename, contents):
contents_bytes = contents.encode('utf8')
info = tarfile.TarInfo(filename)
info.size = len(contents_bytes)
tar.addfile(tarinfo=info, fileobj=io.BytesIO(contents_bytes))
tag_to_v1_image = tag_to_v1_image or {}
# The manifest.json file contains a list of the images to load
# and how to tag them. Each entry consists of three fields:
# - Config: the name of the image's config_file() within the
# saved tarball.
# - Layers: the list of filenames for the blobs constituting
# this image. The order is the reverse of the v1
# ancestry ordering.
# - RepoTags: the list of tags to apply to this image once it
# is loaded.
manifests = []
for (tag, image) in six.iteritems(tag_to_image):
# The config file is stored in a blob file named with its digest.
digest = docker_digest.SHA256(image.config_file().encode('utf8'), '')
add_file(digest + '.json', image.config_file())
cfg = json.loads(image.config_file())
diffs = set(cfg.get('rootfs', {}).get('diff_ids', []))
v1_img = tag_to_v1_image.get(tag)
if not v1_img:
v2_img = v2_compat.V2FromV22(image)
v1_img = v1_compat.V1FromV2(v2_img)
tag_to_v1_image[tag] = v1_img
# Add the manifests entry for this image.
manifest = {
'Config':
digest + '.json',
'Layers': [
layer_id + '/layer.tar'
# We don't just exclude the empty tar because we leave its diff_id
# in the set when coming through v2_compat.V22FromV2
for layer_id in reversed(v1_img.ancestry(v1_img.top()))
if _diff_id(v1_img, layer_id) in diffs and
not json.loads(v1_img.json(layer_id)).get('throwaway')
],
'RepoTags': [str(tag)]
}
layer_sources = {}
input_manifest = json.loads(image.manifest())
input_layers = input_manifest['layers']
for input_layer in input_layers:
if input_layer['mediaType'] == docker_http.FOREIGN_LAYER_MIME:
diff_id = image.digest_to_diff_id(input_layer['digest'])
layer_sources[diff_id] = input_layer
if layer_sources:
manifest['LayerSources'] = layer_sources
manifests.append(manifest)
# v2.2 tarballs are a superset of v1 tarballs, so delegate
# to v1 to save itself.
v1_save.multi_image_tarball(tag_to_v1_image, tar)
add_file('manifest.json', json.dumps(manifests, sort_keys=True))
def tarball(name, image,
tar):
"""Produce a "docker save" compatible tarball from the DockerImage.
Args:
name: The tag name to write into repositories and manifest.json
image: a docker image to save.
tar: the open tarfile into which we are writing the image tarball.
"""
multi_image_tarball({name: image}, tar, {})
def fast(image, directory,
threads = 1):
"""Produce a FromDisk compatible file layout under the provided directory.
After calling this, the following filesystem will exist:
directory/
config.json <-- only *.json, the image's config
001.tar.gz <-- the first layer's .tar.gz filesystem delta
001.sha256 <-- the sha256 of 1.tar.gz with a "sha256:" prefix.
...
N.tar.gz <-- the Nth layer's .tar.gz filesystem delta
N.sha256 <-- the sha256 of N.tar.gz with a "sha256:" prefix.
We pad layer indices to only 3 digits because of a known ceiling on the number
of filesystem layers Docker supports.
Args:
image: a docker image to save.
directory: an existing empty directory under which to save the layout.
threads: the number of threads to use when performing the upload.
Returns:
A tuple whose first element is the path to the config file, and whose second
element is an ordered list of tuples whose elements are the filenames
containing: (.sha256, .tar.gz) respectively.
"""
def write_file(name, accessor,
arg):
with io.open(name, u'wb') as f:
f.write(accessor(arg))
with concurrent.futures.ThreadPoolExecutor(max_workers=threads) as executor:
future_to_params = {}
config_file = os.path.join(directory, 'config.json')
f = executor.submit(write_file, config_file,
lambda unused: image.config_file().encode('utf8'),
'unused')
future_to_params[f] = config_file
idx = 0
layers = []
for blob in reversed(image.fs_layers()):
# Create a local copy
digest_name = os.path.join(directory, '%03d.sha256' % idx)
f = executor.submit(
write_file,
digest_name,
# Strip the sha256: prefix
lambda blob: blob[7:].encode('utf8'),
blob)
future_to_params[f] = digest_name
layer_name = os.path.join(directory, '%03d.tar.gz' % idx)
f = executor.submit(write_file, layer_name, image.blob, blob)
future_to_params[f] = layer_name
layers.append((digest_name, layer_name))
idx += 1
# Wait for completion.
for future in concurrent.futures.as_completed(future_to_params):
future.result()
return (config_file, layers)
def uncompressed(image,
directory,
threads = 1):
"""Produce a format similar to `fast()`, but with uncompressed blobs.
After calling this, the following filesystem will exist:
directory/
config.json <-- only *.json, the image's config
001.tar <-- the first layer's .tar filesystem delta
001.sha256 <-- the sha256 of 001.tar with a "sha256:" prefix.
...
NNN.tar <-- the NNNth layer's .tar filesystem delta
NNN.sha256 <-- the sha256 of NNN.tar with a "sha256:" prefix.
We pad layer indices to only 3 digits because of a known ceiling on the number
of filesystem layers Docker supports.
Args:
image: a docker image to save.
directory: an existing empty directory under which to save the layout.
threads: the number of threads to use when performing the upload.
Returns:
A tuple whose first element is the path to the config file, and whose second
element is an ordered list of tuples whose elements are the filenames
containing: (.sha256, .tar) respectively.
"""
def write_file(name, accessor,
arg):
with io.open(name, u'wb') as f:
f.write(accessor(arg))
with concurrent.futures.ThreadPoolExecutor(max_workers=threads) as executor:
future_to_params = {}
config_file = os.path.join(directory, 'config.json')
f = executor.submit(write_file, config_file,
lambda unused: image.config_file().encode('utf8'),
'unused')
future_to_params[f] = config_file
idx = 0
layers = []
for diff_id in reversed(image.diff_ids()):
# Create a local copy
digest_name = os.path.join(directory, '%03d.sha256' % idx)
f = executor.submit(
write_file,
digest_name,
# Strip the sha256: prefix
lambda diff_id: diff_id[7:].encode('utf8'),
diff_id)
future_to_params[f] = digest_name
layer_name = os.path.join(directory, '%03d.tar' % idx)
f = executor.submit(write_file, layer_name, image.uncompressed_layer,
diff_id)
future_to_params[f] = layer_name
layers.append((digest_name, layer_name))
idx += 1
# Wait for completion.
for future in concurrent.futures.as_completed(future_to_params):
future.result()
return (config_file, layers)
|
py | b402dc00dbfbf00f121c6a7ee5e4d3dde176a3dd | import cv2
from intervaltree.intervaltree import IntervalTree
import numpy as np
import pickle
from PIL import ImageFilter, Image
import os
import os.path
# import xml.etree.cElementTree as ET
import xml.etree.ElementTree as ET
import operator
import pytesseract
import re
from numpy.core.fromnumeric import sort
import xlwt
import statistics
text_read_path = 'result_text/'
row_pkl_read_path = 'result_row_pkl/'
col_pkl_read_path = 'result_col_pkl/'
image_read_path = 'gt_without_box/'
image_write_path = 'processed_jpg/'
image_aligned_write_path = 'processed_aligned_jpg/'
coordinates_write_path = 'processed_txt/'
xml_output_path = 'processed_xml/'
excel_output_path = 'processed_excel/'
PREPARE_CSV = False
ADD_CONTENT_IN_XML = False
EXECUTE_MERGE = False
try:
os.mkdir(image_write_path)
os.mkdir(coordinates_write_path)
os.mkdir(xml_output_path)
os.mkdir(excel_output_path)
except:
print("Directories already exist")
def read_text_file(read_text_path, min_score=0.8):
table_cells = []
skipped_indices = []
#x_mids = []
#y_mids = []
cell_id = 0
i = -1
with open(read_text_path, 'r') as file:
lines = file.readlines()
for line in lines:
i += 1
line = line.strip()
data = line.split()
caption = data[0]
score = data[1]
if float(score) < min_score:
skipped_indices.append(i)
continue
x1 = int(data[2])
y1 = int(data[3])
x2 = int(data[4])
y2 = int(data[5])
x_mid = (x1 + x2) / 2
y_mid = (y1 + y2) / 2
table_cells.append((x1, y1, x2, y2))
#x_mids.append(x1)
#y_mids.append(y_mid)
cell_id = cell_id + 1
return table_cells, skipped_indices
def remove_overlaps(cells,
overlapping_area_pct_threshold=0.25,
containment_area_pct_threshold=0.8,
removed_indices=[]):
removed_flag = False
x_interval_tree = IntervalTree()
y_interval_tree = IntervalTree()
for i in range(len(cells)):
bbox = cells[i]
x1 = bbox[0]
y1 = bbox[1]
x2 = bbox[2]
y2 = bbox[3]
y_interval_tree[y1:y2] = i
x_interval_tree[x1:x2] = i
for i in range(len(cells)):
cell = cells[i]
if i in removed_indices:
continue
x1, y1, x2, y2 = cell
y_overlapping_cells = set(
[j.data for j in y_interval_tree[y1:y2] if j.data != i])
x_overlapping_cells = set(
[j.data for j in x_interval_tree[x1:x2] if j.data != i])
overlapping_cells = x_overlapping_cells & y_overlapping_cells
overlapping_count = 0
for overlapping_cell_index in overlapping_cells:
if overlapping_cell_index in removed_indices:
continue
overlapping_cell = cells[overlapping_cell_index]
ox1, oy1, ox2, oy2 = overlapping_cell
cell_area = (y2 - y1) * (x2 - x1)
overlapping_cell_area = (oy2 - oy1) * (ox2 - ox1)
overlapping_area = max(
(min(oy2, y2) - max(oy1, y1)) * (min(ox2, x2) - max(ox1, x1)),
0)
overlapping_pct = overlapping_area / min(cell_area,
overlapping_cell_area)
if overlapping_pct >= overlapping_area_pct_threshold and overlapping_pct <= containment_area_pct_threshold:
overlapping_count = overlapping_count + 1
if overlapping_pct >= containment_area_pct_threshold:
if cell_area < overlapping_cell_area:
removed_indices.append(i)
else:
removed_indices.append(overlapping_cell_index)
removed_flag = True
if overlapping_count >= 2 and i not in removed_indices:
removed_indices.append(i)
removed_flag = True
return removed_indices, removed_flag
def recursively_remove_overlaps(cells,
overlapping_area_pct_threshold=0.25,
containment_area_pct_threshold=0.8,
removed_indices=[]):
removed_flag = True
while removed_flag == True:
removed_indices, removed_flag = remove_overlaps(
cells,
overlapping_area_pct_threshold=overlapping_area_pct_threshold,
containment_area_pct_threshold=containment_area_pct_threshold,
removed_indices=removed_indices)
return removed_indices
def remove_columnwise_unaligned_cells(cells,
containment_region_pct_threshold=0.7,
removed_indices=[]):
removed_flag = False
x_interval_tree = IntervalTree()
for i in range(len(cells)):
if i in removed_indices:
continue
bbox = cells[i]
x1 = bbox[0]
x2 = bbox[2]
x_interval_tree[x1:x2] = i
for i in range(len(cells)):
cell = cells[i]
if i in removed_indices:
continue
x1, y1, x2, y2 = cell
overlapping_cells = set(
[j.data for j in x_interval_tree[x1:x2] if j.data != i])
containment_count = 0
denominator_containment_count = 0
for overlapping_cell_index in overlapping_cells:
if overlapping_cell_index in removed_indices:
continue
overlapping_cell = cells[overlapping_cell_index]
ox1, oy1, ox2, oy2 = overlapping_cell
containment = float(min(x2, ox2) - max(x1, ox1)) / float(
min(x2 - x1, ox2 - ox1))
containment = max(0, containment)
if containment >= containment_region_pct_threshold:
containment_count = containment_count + 1
if containment >= 0.2:
denominator_containment_count = denominator_containment_count + 1
if denominator_containment_count >= 2 and containment_count < int(
0.34 *
(denominator_containment_count + 1)) and i not in removed_indices:
removed_indices.append(i)
removed_flag = True
return removed_indices, removed_flag
def remove_cells_min_height_criteria(cells,
threshold_pct=0.5,
remove_indices=[]):
heights = []
for cell in cells:
x1, y1, x2, y2 = cell
heights.append(y2 - y1)
height_threshold = int(
max(statistics.mean(heights), statistics.median(heights)) *
threshold_pct)
for i in range(len(cells)):
x1, y1, x2, y2 = cells[i]
if y2 - y1 < height_threshold:
remove_indices.append(i)
return remove_indices
def recursively_remove_columnwise_unaligned_cells(
cells, containment_region_pct_threshold=0.7, removed_indices=[]):
removed_flag = True
while removed_flag == True:
removed_indices, removed_flag = remove_columnwise_unaligned_cells(
cells,
containment_region_pct_threshold=containment_region_pct_threshold,
removed_indices=removed_indices)
return removed_indices
def remove_rowwise_unaligned_cells(cells,
containment_region_pct_threshold=0.7,
removed_indices=[]):
removed_flag = False
y_interval_tree = IntervalTree()
for i in range(len(cells)):
if i in removed_indices:
continue
bbox = cells[i]
y1 = bbox[1]
y2 = bbox[3]
y_interval_tree[y1:y2] = i
for i in range(len(cells)):
cell = cells[i]
if i in removed_indices:
continue
x1, y1, x2, y2 = cell
overlapping_cells = set(
[j.data for j in y_interval_tree[y1:y2] if j.data != i])
containment_count = 0
denominator_containment_count = 0
for overlapping_cell_index in overlapping_cells:
if overlapping_cell_index in removed_indices:
continue
overlapping_cell = cells[overlapping_cell_index]
ox1, oy1, ox2, oy2 = overlapping_cell
containment = float(min(y2, oy2) - max(y1, oy1)) / float(
min(y2 - y1, oy2 - oy1))
containment = max(0, containment)
if containment >= containment_region_pct_threshold:
containment_count = containment_count + 1
if containment >= 0.2:
denominator_containment_count = denominator_containment_count + 1
if denominator_containment_count >= 2 and containment_count < int(
0.34 *
(denominator_containment_count + 1)) and i not in removed_indices:
removed_indices.append(i)
removed_flag = True
return removed_indices, removed_flag
def recursively_remove_rowwise_unaligned_cells(
cells, containment_region_pct_threshold=0.7, removed_indices=[]):
removed_flag = True
while removed_flag == True:
removed_indices, removed_flag = remove_rowwise_unaligned_cells(
cells,
containment_region_pct_threshold=containment_region_pct_threshold,
removed_indices=removed_indices)
return removed_indices
def remove_extra_indices(indices_list):
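    # Descriptive summary: an index i is redundant when some other index j
    # appears in every assignment that contains i (so i never distinguishes
    # cells beyond j); the lowest such index is dropped and higher indices are
    # renumbered, repeating until no redundancy remains.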
def create_indices_dict(indices_list):
indices_dict = {}
for assignment in indices_list:
for index in assignment:
if index not in indices_dict:
indices_dict[index] = []
other_assignment_set = set(
[x for x in assignment if x != index])
indices_dict[index].append(other_assignment_set)
return indices_dict
def remove_extra(indices_list, remove_index):
indices = []
for assignment in indices_list:
new_assignment = []
for index in assignment:
if index == remove_index:
continue
elif index > remove_index:
new_assignment.append(index - 1)
else:
new_assignment.append(index)
indices.append(new_assignment)
return indices
# print(indices_list)
    while True:
indices_dict = create_indices_dict(indices_list)
remove_indices = []
for i in indices_dict:
redundant_indices = list(set.intersection(*indices_dict[i]))
# print(i, indices_dict[i], redundant_indices)
if len(redundant_indices) > 0:
remove_indices.append(i)
remove_indices = list(set(remove_indices))
remove_indices = sorted(remove_indices)
# print(indices_list, remove_indices)
if len(remove_indices) > 0:
indices_list = remove_extra(indices_list, remove_indices[0])
else:
break
return indices_list
def get_column_structure_indices(adj, coordinates):
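    # Descriptive summary: cells are visited in order of their right edge; an
    # interval tree over x-ranges finds horizontally overlapping cells, and
    # cells linked by the column adjacency matrix (or with strong overlap /
    # containment) share a running column index. A cell spanning several
    # columns collects a set of indices, later pruned by remove_extra_indices.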
def get_x_overlap_and_containment(cell1, cell2):
overlap = float(min(cell1[2], cell2[2]) -
max(cell1[0], cell2[0])) / float(
max(cell1[2] - cell1[0], cell2[2] - cell2[0]))
containment = float(min(cell1[2], cell2[2]) -
max(cell1[0], cell2[0])) / float(
min(cell1[2] - cell1[0], cell2[2] - cell2[0]))
return overlap, containment
coordinates = np.asarray(coordinates)
sorted_indices_end_x = np.argsort(coordinates.view('i8,i8,i8,i8'),
order=['f2', 'f0', 'f1'],
axis=0)[:, 0]
column_indexes = []
for i in range(len(coordinates)):
column_indexes.append(set())
cur_col_index = -1
x_interval_tree = IntervalTree()
for index in sorted_indices_end_x:
x1, y1, x2, y2 = coordinates[index]
x_interval_tree[x1:x2] = index
for i in sorted_indices_end_x:
        # include the cell itself in its overlaps
x1, y1, x2, y2 = coordinates[i]
x_overlapping_cells = set([j.data for j in x_interval_tree[x1:x2]])
condition_meeting_overlapping_cells = []
for j in x_overlapping_cells:
overlap_i_j, containment_i_j = get_x_overlap_and_containment(
coordinates[i], coordinates[j])
adj_i_j = max(adj[i, j], adj[j, i])
if adj_i_j == 1 and (overlap_i_j >= 0.5 or containment_i_j >= 0.7):
condition_meeting_overlapping_cells.append(j)
elif adj_i_j == 0 and (overlap_i_j >= 0.7
or containment_i_j >= 0.8):
condition_meeting_overlapping_cells.append(j)
column_indexes_np = np.array(column_indexes)
if len(column_indexes[i]) >= 1:
continue
num_common_assigned_indices = len(
set.intersection(
*column_indexes_np[condition_meeting_overlapping_cells]))
# print(*column_indexes_np[condition_meeting_overlapping_cells])
# print(i, condition_meeting_overlapping_cells,
# len(condition_meeting_overlapping_cells),
# num_common_assigned_indices)
if num_common_assigned_indices > 0:
continue
cur_col_index += 1
# print(cur_col_index)
for j in condition_meeting_overlapping_cells:
column_indexes[j].add(cur_col_index)
column_indexes = remove_extra_indices(column_indexes)
start_column_indices = []
end_column_indices = []
skipped = []
for i in range(len(column_indexes)):
if len(column_indexes[i]) == 0:
start_column_indices.append(-1)
end_column_indices.append(-1)
skipped.append(i)
continue
start_column_indices.append(min(column_indexes[i]))
end_column_indices.append(max(column_indexes[i]))
# print(column_indexes)
print("Num columns identified: " +
str(max(max(x) for x in column_indexes if len(x) > 0) + 1))
print("Num skipped: " + str(len(skipped)))
return start_column_indices, end_column_indices
def get_row_structure_indices(adj, coordinates):
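    # Mirrors get_column_structure_indices, but groups cells into rows using
    # y-axis overlap/containment and the row adjacency matrix.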
def get_y_overlap_and_containment(cell1, cell2):
overlap = float(min(cell1[3], cell2[3]) -
max(cell1[1], cell2[1])) / float(
max(cell1[3] - cell1[1], cell2[3] - cell2[1]))
containment = float(min(cell1[3], cell2[3]) -
max(cell1[1], cell2[1])) / float(
min(cell1[3] - cell1[1], cell2[3] - cell2[1]))
return overlap, containment
coordinates = np.asarray(coordinates)
sorted_indices_end_y = np.argsort(coordinates.view('i8,i8,i8,i8'),
order=['f3', 'f1', 'f0'],
axis=0)[:, 0]
row_indexes = []
for i in range(len(coordinates)):
row_indexes.append(set())
cur_row_index = -1
y_interval_tree = IntervalTree()
for index in sorted_indices_end_y:
x1, y1, x2, y2 = coordinates[index]
y_interval_tree[y1:y2] = index
for i in sorted_indices_end_y:
        # include the cell itself in its overlaps
x1, y1, x2, y2 = coordinates[i]
y_overlapping_cells = set([j.data for j in y_interval_tree[y1:y2]])
condition_meeting_overlapping_cells = []
for j in y_overlapping_cells:
overlap_i_j, containment_i_j = get_y_overlap_and_containment(
coordinates[i], coordinates[j])
adj_i_j = max(adj[i, j], adj[j, i])
if adj_i_j == 1 and (overlap_i_j >= 0.5
or containment_i_j >= 0.75):
condition_meeting_overlapping_cells.append(j)
elif adj_i_j == 0 and (overlap_i_j >= 0.7
or containment_i_j >= 0.85):
condition_meeting_overlapping_cells.append(j)
row_indexes_np = np.array(row_indexes)
if len(row_indexes[i]) >= 1:
continue
num_common_assigned_indices = len(
set.intersection(
*row_indexes_np[condition_meeting_overlapping_cells]))
# print(*row_indexes_np[condition_meeting_overlapping_cells])
# print(i, condition_meeting_overlapping_cells,
# len(condition_meeting_overlapping_cells),
# num_common_assigned_indices)
if num_common_assigned_indices > 0:
continue
cur_row_index += 1
# print(cur_row_index)
for j in condition_meeting_overlapping_cells:
row_indexes[j].add(cur_row_index)
start_row_indices = []
end_row_indices = []
skipped = []
row_indexes = remove_extra_indices(row_indexes)
# print(row_indexes)
for i in range(len(row_indexes)):
if len(row_indexes[i]) == 0:
start_row_indices.append(-1)
end_row_indices.append(-1)
skipped.append(i)
continue
start_row_indices.append(min(row_indexes[i]))
end_row_indices.append(max(row_indexes[i]))
print("Num rows identified: " +
str(max(max(x) for x in row_indexes if len(x) > 0) + 1))
print("Num skipped: " + str(len(skipped)))
return start_row_indices, end_row_indices
def get_aligned_column_coordinates(cells_coordinates, start_cols, end_cols):
column_starts = {}
column_ends = {}
for i in range(len(start_cols)):
col_index = start_cols[i]
cell = cells_coordinates[i]
if col_index not in column_starts:
column_starts[col_index] = []
column_starts[col_index].append(cell[0])
for i in range(len(end_cols)):
col_index = end_cols[i]
cell = cells_coordinates[i]
if col_index not in column_ends:
column_ends[col_index] = []
column_ends[col_index].append(cell[2])
min_col_starts = {}
max_col_ends = {}
for col_index in column_starts:
min_col_starts[col_index] = int(
statistics.median(column_starts[col_index]))
for col_index in column_ends:
max_col_ends[col_index] = int(statistics.median(
column_ends[col_index]))
col_starts = {}
col_ends = {}
for i in sorted(list(min_col_starts.keys())):
if i == 0:
col_starts[i] = 0
col_ends[max(max_col_ends)] = max_col_ends[max(max_col_ends)]
continue
gap = min_col_starts[i] - max_col_ends[i - 1]
col_starts[i] = min_col_starts[i] - int(float(gap) / 2.0)
col_ends[i - 1] = max_col_ends[i - 1] + int(float(gap) / 2.0)
return col_starts, col_ends
def get_aligned_row_coordinates(cells_coordinates, start_rows, end_rows):
row_starts = {}
row_ends = {}
for i in range(len(start_rows)):
row_index = start_rows[i]
cell = cells_coordinates[i]
if row_index not in row_starts:
row_starts[row_index] = []
row_starts[row_index].append(cell[1])
for i in range(len(end_rows)):
row_index = end_rows[i]
cell = cells_coordinates[i]
if row_index not in row_ends:
row_ends[row_index] = []
row_ends[row_index].append(cell[3])
min_row_starts = {}
max_row_ends = {}
for row_index in row_starts:
min_row_starts[row_index] = min(row_starts[row_index])
for row_index in row_ends:
max_row_ends[row_index] = max(row_ends[row_index])
row_starts = {}
row_ends = {}
for i in sorted(list(min_row_starts.keys())):
if i == 0:
row_starts[i] = 0
row_ends[max(max_row_ends)] = max_row_ends[max(max_row_ends)]
continue
gap = min_row_starts[i] - max_row_ends[i - 1]
row_starts[i] = min_row_starts[i] - int(float(gap) / 2.0)
row_ends[i - 1] = max_row_ends[i - 1] + int(float(gap) / 2.0)
return row_starts, row_ends
def get_final_table_details(table_details, row_starts, col_starts, row_ends,
col_ends):
final_table_details = []
located_regions = {}
for i in range(len(table_details)):
_, start_row, start_col, end_row, end_col, x1, y1, x2, y2 = table_details[
i]
new_cell_coords = [
int(col_starts[start_col]),
int(row_starts[start_row]),
int(col_ends[end_col]),
int(row_ends[end_row])
]
num_regions_in_cell = 0
for row in range(start_row, end_row + 1):
for col in range(start_col, end_col + 1):
num_regions_in_cell += 1
if (col, row, col, row) not in located_regions:
located_regions[(col, row, col, row)] = []
located_regions[(col, row, col, row)].append(
(i, num_regions_in_cell))
regions_processed = {}
for row in row_starts:
for col in col_starts:
region = (col, row, col, row)
if region in regions_processed:
continue
if region in located_regions:
best_cell = min(located_regions[region], key=lambda x: x[1])
i = best_cell[0]
_, start_row, start_col, end_row, end_col, x1, y1, x2, y2 = table_details[
i]
for row_id in range(start_row, end_row + 1):
for col_id in range(start_col, end_col + 1):
regions_processed[(col_id, row_id, col_id,
row_id)] = True
else:
start_row = row
start_col = col
end_row = row
end_col = col
regions_processed[region] = True
new_cell_coords = [
int(col_starts[start_col]),
int(row_starts[start_row]),
int(col_ends[end_col]),
int(row_ends[end_row])
]
final_table_details.append([
False, start_row, start_col, end_row, end_col,
new_cell_coords[0], new_cell_coords[1], new_cell_coords[2],
new_cell_coords[3]
])
return final_table_details
def get_final_table_details_without_merge(row_starts, col_starts, row_ends,
col_ends):
final_table_details = []
regions_processed = {}
for row in row_starts:
for col in col_starts:
region = (col, row, col, row)
if region in regions_processed:
continue
start_row = row
start_col = col
end_row = row
end_col = col
regions_processed[region] = True
new_cell_coords = [
int(col_starts[start_col]),
int(row_starts[start_row]),
int(col_ends[end_col]),
int(row_ends[end_row])
]
final_table_details.append([
False, start_row, start_col, end_row, end_col,
new_cell_coords[0], new_cell_coords[1], new_cell_coords[2],
new_cell_coords[3]
])
return final_table_details
def add_cells_to_img(img, final_cells, skipped_indices):
for i in range(len(final_cells)):
if i not in skipped_indices:
x1 = final_cells[i][5]
y1 = final_cells[i][6]
x2 = final_cells[i][7]
y2 = final_cells[i][8]
cv2.rectangle(img, (x1 - 1, y1 - 1), (x2 - 1, y2 - 1), (255, 0, 0),
3)
return img
def add_aligned_cells_to_img(img, col_starts, row_starts, col_ends, row_ends):
for i in col_starts:
for j in row_starts:
try:
x1 = int(col_starts[i])
y1 = int(row_starts[j])
x2 = int(col_ends[i])
y2 = int(row_ends[j])
cv2.rectangle(img, (x1 - 1, y1 - 1), (x2 - 1, y2 - 1),
(255, 0, 0), 3)
except:
continue
return img
def create_root(image_path, file_prefix, width, height, depth):
root = ET.Element("prediction")
ET.SubElement(root, "folder").text = "images"
ET.SubElement(root, "filename").text = "{}".format(file_prefix)
ET.SubElement(root, "path").text = image_path + "{}".format(file_prefix)
source = ET.SubElement(root, "source")
ET.SubElement(source, "database").text = "Unknown"
size = ET.SubElement(root, "size")
ET.SubElement(size, "width").text = str(width)
ET.SubElement(size, "height").text = str(height)
ET.SubElement(size, "depth").text = str(depth)
ET.SubElement(root, "segmentated").text = "0"
return root
def create_cell_annotation(root,
table_details,
table_information,
img,
add_content=False):
obj = ET.SubElement(root, "object")
ET.SubElement(obj, "name").text = "table"
ET.SubElement(obj, "pose").text = "Unspecified"
ET.SubElement(obj, "truncated").text = str(0)
ET.SubElement(obj, "difficult").text = str(0)
bbox = ET.SubElement(obj, "bndbox")
ET.SubElement(bbox, "xmin").text = str(table_information[0])
ET.SubElement(bbox, "ymin").text = str(table_information[1])
ET.SubElement(bbox, "xmax").text = str(table_information[2])
ET.SubElement(bbox, "ymax").text = str(table_information[3])
cells = ET.SubElement(obj, "cells")
for j in range(len(table_details)):
cell_detail = table_details[j]
cell = ET.SubElement(cells, "tablecell")
ET.SubElement(cell, "dont_care").text = str(cell_detail[0])
ET.SubElement(cell, "end_col").text = str(cell_detail[4])
ET.SubElement(cell, "end_row").text = str(cell_detail[3])
ET.SubElement(cell, "start_col").text = str(cell_detail[2])
ET.SubElement(cell, "start_row").text = str(cell_detail[1])
if add_content:
cell_img = Image.fromarray(
img[max(cell_detail[6] - 10, 0):min(cell_detail[8] +
10, img.shape[0]),
max(cell_detail[5] - 10, 0):min(cell_detail[7] +
10, img.shape[1]), :])
cell_content = str(
pytesseract.image_to_string(cell_img, lang='eng')).strip()
processed_cell_content = re.sub(r'[^a-zA-Z0-9 ]', r'',
cell_content)
ET.SubElement(
cell, "processed_content").text = str(processed_cell_content)
ET.SubElement(cell, "content").text = str(cell_content)
ET.SubElement(cell, "x0").text = str(cell_detail[5])
ET.SubElement(cell, "x1").text = str(cell_detail[7])
ET.SubElement(cell, "y0").text = str(cell_detail[6])
ET.SubElement(cell, "y1").text = str(cell_detail[8])
ET.SubElement(cell, "index").text = str(j)
return root
def create_output_excel(excel_write_path,
table_details,
table_information,
img,
add_content=False):
wb = xlwt.Workbook()
sheet = wb.add_sheet("digitized_table")
rows_processed = {}
cols_processed = {}
for j in range(len(table_details)):
cell_detail = table_details[j]
end_col = cell_detail[4]
end_row = cell_detail[3]
start_col = cell_detail[2]
start_row = cell_detail[1]
already_processed = False
# for i in (start_col, end_col + 1):
# if i in cols_processed:
# already_processed = True
# for i in (start_row, end_row + 1):
# if i in rows_processed:
# already_processed = True
# if already_processed:
# continue
if add_content:
cell_img = Image.fromarray(
img[max(cell_detail[6] - 10, 0):min(cell_detail[8] +
10, img.shape[0]),
max(cell_detail[5] - 10, 0):min(cell_detail[7] +
10, img.shape[1]), :])
cell_content = str(
pytesseract.image_to_string(cell_img, lang='eng')).strip()
processed_cell_content = re.sub(r'[^a-zA-Z0-9 ]', r'',
cell_content)
try:
sheet.write(start_row, start_col, str(cell_content))
except:
pass
# if start_row != end_row or start_col != end_col:
# sheet.merge(start_row, end_row, start_col, end_col)
# for i in (start_col, end_col + 1):
# cols_processed[i] = True
# for i in (start_row, end_row + 1):
# rows_processed[i] = True
wb.save(excel_write_path)
######################################################
def main():
index = 0
for text_file in os.listdir(text_read_path):
if text_file.endswith('txt'):
removed_indices = []
file_prefix = text_file.replace(".txt", "")
try:
if os.path.exists(xml_output_path + file_prefix +
".xml") and os.path.exists(image_write_path +
file_prefix +
".jpg"):
print("File " + str(file_prefix) +
" already exists. Proceeding to next.")
else:
print("File " + str(file_prefix) +
" does not exist. Processing.")
except:
pass
# if file_prefix != '0709.2961v1.1':
# continue
img_read_path = image_read_path + file_prefix + ".jpg"
img_write_path = image_write_path + file_prefix + ".jpg"
# aligned_img_write_path = image_aligned_write_path + file_prefix + ".jpg"
excel_write_path = excel_output_path + file_prefix + ".xls"
read_text_path = text_read_path + text_file
table_cells, skipped_indices = read_text_file(read_text_path)
removed_indices = remove_cells_min_height_criteria(
table_cells, threshold_pct=0.5, remove_indices=removed_indices)
removed_indices = recursively_remove_overlaps(
table_cells,
overlapping_area_pct_threshold=0.2,
containment_area_pct_threshold=0.75,
removed_indices=removed_indices)
print("Cells removed due to overlap : " + str(removed_indices))
removed_indices = recursively_remove_columnwise_unaligned_cells(
table_cells,
containment_region_pct_threshold=0.7,
removed_indices=removed_indices)
print("Cells removed due to column-wise misalignment : " +
str(removed_indices))
removed_indices = recursively_remove_rowwise_unaligned_cells(
table_cells,
containment_region_pct_threshold=0.7,
removed_indices=removed_indices)
print("After removing cells due to row-wise misalignment : " +
str(removed_indices))
row_adj = pickle.load(
open(row_pkl_read_path + file_prefix + ".pkl", 'rb'))
col_adj = pickle.load(
open(col_pkl_read_path + file_prefix + ".pkl", 'rb'))
row_adj = np.delete(row_adj, skipped_indices, axis=0)
row_adj = np.delete(row_adj, skipped_indices, axis=1)
col_adj = np.delete(col_adj, skipped_indices, axis=0)
col_adj = np.delete(col_adj, skipped_indices, axis=1)
table_cells = np.delete(table_cells, removed_indices, axis=0)
row_adj = np.delete(row_adj, removed_indices, axis=0)
row_adj = np.delete(row_adj, removed_indices, axis=1)
col_adj = np.delete(col_adj, removed_indices, axis=0)
col_adj = np.delete(col_adj, removed_indices, axis=1)
x_starts = np.asarray(table_cells)[:, 0]
x_ends = np.asarray(table_cells)[:, 2]
print("*******")
print(str(index + 1) + ":\t" + file_prefix)
img = cv2.imread(img_read_path)
# aligned_img = cv2.imread(img_read_path)
height, width, channel = img.shape
table_information = [0, 0, width, height]
table_details = []
root = create_root(img_read_path, file_prefix, width, height,
channel)
start_col_assignments, end_col_assignments = get_column_structure_indices(
col_adj, table_cells)
start_row_assignments, end_row_assignments = get_row_structure_indices(
row_adj, table_cells)
skipped_indices = []
for i in range(len(table_cells)):
if start_row_assignments[i] == -1 or start_col_assignments[
i] == -1:
skipped_indices.append(i)
if start_row_assignments[i] > end_row_assignments[i]:
end_row_assignments[i] = start_row_assignments[i]
if start_col_assignments[i] > end_col_assignments[i]:
end_col_assignments[i] = start_col_assignments[i]
if start_row_assignments[i] == 0:
table_cells[i][1] = 0
if end_row_assignments[i] == max(end_row_assignments):
table_cells[i][3] = height
if start_col_assignments[i] == 0:
table_cells[i][0] = 0
if end_col_assignments[i] == max(end_col_assignments):
table_cells[i][2] = width
table_details.append([
False, start_row_assignments[i], start_col_assignments[i],
end_row_assignments[i], end_col_assignments[i],
table_cells[i][0], table_cells[i][1], table_cells[i][2],
table_cells[i][3]
])
# print(
# str(table_cells[i]) + "\t" +
# str(start_row_assignments[i]) + "\t" +
# str(end_row_assignments[i]) + "\t" +
# str(start_col_assignments[i]) + "\t" +
# str(end_col_assignments[i]))
col_starts, col_ends = get_aligned_column_coordinates(
table_cells, start_col_assignments, end_col_assignments)
row_starts, row_ends = get_aligned_row_coordinates(
table_cells, start_row_assignments, end_row_assignments)
if EXECUTE_MERGE:
table_details = get_final_table_details(
table_details, row_starts, col_starts, row_ends, col_ends)
else:
table_details = get_final_table_details_without_merge(
row_starts, col_starts, row_ends, col_ends)
# aligned_img = add_aligned_cells_to_img(aligned_img, col_starts,
# row_starts, col_ends,
# row_ends)
root = create_cell_annotation(root,
table_details,
table_information,
img,
                                          add_content=ADD_CONTENT_IN_XML)
if PREPARE_CSV:
create_output_excel(excel_write_path,
table_details,
table_information,
img,
add_content=True)
tree = ET.ElementTree(root)
xml_file_path = xml_output_path + file_prefix + ".xml"
tree.write(xml_file_path)
img = add_cells_to_img(img, table_details, skipped_indices)
            # cv2.cvtColor returns a new array (it does not modify img in place),
            # and cv2.imwrite expects BGR anyway, so this call has no effect here.
            cv2.cvtColor(img, cv2.COLOR_BGR2RGB)
cv2.imwrite(img_write_path, img)
# cv2.imwrite(aligned_img_write_path, aligned_img)
index += 1
print("Processed Files : " + str(index))
if __name__ == "__main__":
main()
|
py | b402dc4972c93ea71ab6a1865dc0af1ea5b13f2a | from abc import ABC, abstractmethod
class ILaser(ABC):
@abstractmethod
def get_impressora_laser(self):
pass
class ImplaserHP(ILaser):
def __init__(self):
self.marca = 'HP'
self.model = 'HP laser'
def get_impressora_laser(self):
print("Marca:", self.marca)
print("Modelo:", self.model)
print("------------------------------------------------------------")
class ImplaserEpson(ILaser):
def __init__(self):
self.marca = 'Epson'
self.model = 'Epson laser'
def get_impressora_laser(self):
print("Marca:", self.marca)
print("Modelo:", self.model)
print("------------------------------------------------------------")
class LaserFactory:
@staticmethod
    def get_laser_hp():
return ImplaserHP().get_impressora_laser()
@staticmethod
    def get_laser_epson():
return ImplaserEpson().get_impressora_laser()
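# Usage sketch (illustrative; assumes the classes above are used as defined):
#   LaserFactory.get_laser_hp()     # prints the HP laser brand and model
#   LaserFactory.get_laser_epson()  # prints the Epson laser brand and model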
|
py | b402df081f611bbdfd3d06adc128a5fcf234be01 | from multiprocessing.managers import BaseManager
from typing import Dict, List, Union
import numpy as np
import pandas as pd
from peartree.toolkit import nan_helper
from peartree.utilities import log
class RouteProcessorManager(BaseManager):
pass
class RouteProcessor(object):
def __init__(
self,
target_time_start: int,
target_time_end: int,
feed_trips: pd.DataFrame,
stop_times: pd.DataFrame,
all_stops: pd.DataFrame):
# Initialize common parameters
self.target_time_start = target_time_start
self.target_time_end = target_time_end
self.stop_times = stop_times.copy()
        # We use route_id as the index so that subselection by route_id
        # (from target_route_ids) is more performant
self.trips = feed_trips.copy().set_index('route_id', drop=False)
# Ensure that stop_ids are cast as string
astops = all_stops.copy()
astops['stop_id'] = astops['stop_id'].astype(str)
self.all_stops = astops
def generate_route_costs(self, route_id: str):
# Get all the subset of trips that are related to this route
trips = self.trips.loc[route_id].copy()
# Pandas will try and make returned result a Series if there
# is only one result - prevent this from happening
if isinstance(trips, pd.Series):
trips = trips.to_frame().T
# Get just the stop times related to this trip
st_trip_id_mask = self.stop_times.trip_id.isin(trips.trip_id)
stimes_init = self.stop_times[st_trip_id_mask].copy()
# Then subset further by just the time period that we care about
start_time_mask = (stimes_init.arrival_time >= self.target_time_start)
end_time_mask = (stimes_init.arrival_time <= self.target_time_end)
stimes = stimes_init[start_time_mask & end_time_mask]
# Report on progress if requested
a = len(stimes_init.trip_id.unique())
b = len(stimes.trip_id.unique())
log('\tReduced selected trips on route {} from {} to {}.'.format(
route_id, a, b))
trips_and_stop_times = pd.merge(trips,
stimes,
how='inner',
on='trip_id')
trips_and_stop_times = pd.merge(trips_and_stop_times,
self.all_stops.copy(),
how='inner',
on='stop_id')
sort_list = ['stop_sequence',
'arrival_time',
'departure_time']
trips_and_stop_times = trips_and_stop_times.sort_values(sort_list)
# Check direction_id column value before using
# trips_and_stop_times to generate wait and edge costs
        # Note: the advantage of handling this at the route level is that
        # peartree avoids tossing direction_id when one route has the column
        # fully populated while another does not (which is possible).
if 'direction_id' in trips_and_stop_times:
# If there is such column then check if it contains NaN
has_nan = trips_and_stop_times['direction_id'].isnull()
if len(trips_and_stop_times[has_nan]) > 0:
# If it has no full coverage in direction_id, drop the column
trips_and_stop_times.drop('direction_id', axis=1, inplace=True)
wait_times = generate_wait_times(trips_and_stop_times)
# Look up wait time for each stop in wait_times for each direction
wait_zero = trips_and_stop_times['stop_id'].apply(lambda x: wait_times[0][x])
trips_and_stop_times['wait_dir_0'] = wait_zero
wait_one = trips_and_stop_times['stop_id'].apply(lambda x: wait_times[1][x])
trips_and_stop_times['wait_dir_1'] = wait_one
tst_sub = trips_and_stop_times[['stop_id',
'wait_dir_0',
'wait_dir_1']]
# Get all edge costs for this route and add to the running total
edge_costs = generate_all_observed_edge_costs(trips_and_stop_times)
return (tst_sub, edge_costs)
def calculate_average_wait(direction_times: pd.DataFrame) -> float:
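    # Model note: assuming passengers arrive uniformly at random, the expected
    # wait at a stop is half the average headway between consecutive arrivals,
    # hence the division by two below.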
# Exit early if we do not have enough values to calculate a mean
at = direction_times.arrival_time
if len(at) < 2:
return np.nan
first = at[1:].values
second = at[:-1].values
wait_seconds = (first - second)
# TODO: Can implement something more substantial here that takes into
# account divergent/erratic performance or intentional timing
# clusters that are not evenly dispersed
na = np.array(wait_seconds)
average_wait = na.mean() / 2 # half headway
return average_wait
def generate_wait_times(trips_and_stop_times: pd.DataFrame
                        ) -> Dict[int, Dict[str, float]]:
wait_times = {0: {}, 1: {}}
for stop_id in trips_and_stop_times.stop_id.unique():
# Handle both inbound and outbound directions
for direction in [0, 1]:
# Check if direction_id exists in source data
if 'direction_id' in trips_and_stop_times:
constraint_1 = (trips_and_stop_times.direction_id == direction)
constraint_2 = (trips_and_stop_times.stop_id == stop_id)
both_constraints = (constraint_1 & constraint_2)
direction_subset = trips_and_stop_times[both_constraints]
else:
direction_subset = trips_and_stop_times.copy()
# Only run if each direction is contained
# in the same trip id
if direction_subset.empty:
average_wait = np.nan
else:
average_wait = calculate_average_wait(direction_subset)
# Add according to which direction we are working with
wait_times[direction][stop_id] = average_wait
return wait_times
def generate_all_observed_edge_costs(trips_and_stop_times: pd.DataFrame
) -> Union[None, pd.DataFrame]:
# TODO: This edge case should be handled up stream. If there is
# no direction id upstream, when the trip and stop times
# dataframe is created, then it should be added there and all
# directions should be set to default 0 or 1.
# Make sure that the GTFS feed has a direction id
has_dir_col = 'direction_id' in trips_and_stop_times.columns.values
all_edge_costs = []
all_from_stop_ids = []
all_to_stop_ids = []
for trip_id in trips_and_stop_times.trip_id.unique():
tst_mask = (trips_and_stop_times.trip_id == trip_id)
tst_sub = trips_and_stop_times[tst_mask]
# Just in case both directions are under the same trip id
for direction in [0, 1]:
            # Support situations where direction_id is absent from the
# GTFS data. In such situations, include all trip and stop
# time data, instead of trying to split on that column
# (since it would not exist).
if has_dir_col:
dir_mask = (tst_sub.direction_id == direction)
tst_sub_dir = tst_sub[dir_mask]
else:
tst_sub_dir = tst_sub.copy()
tst_sub_dir = tst_sub_dir.sort_values('stop_sequence')
deps = tst_sub_dir.departure_time[:-1]
arrs = tst_sub_dir.arrival_time[1:]
# Use .values to strip existing indices
edge_costs = np.subtract(arrs.values, deps.values)
# TODO(kuanb): Negative values can result here!
# HACK: There are times when the arrival and departure data
# are "out of order" which results in negative values.
# From the values I've looked at, these are edge cases
# that have to do with start/end overlaps. I don't have
# a good answer for dealing with these but, since they
            # are possible noise, they can be overridden by taking
# their absolute value.
edge_costs = np.absolute(edge_costs)
# Add each resulting list to the running array totals
all_edge_costs += list(edge_costs)
fr_ids = tst_sub_dir.stop_id[:-1].values
all_from_stop_ids += list(fr_ids)
to_ids = tst_sub_dir.stop_id[1:].values
all_to_stop_ids += list(to_ids)
# Only return a dataframe if there is contents to populate
# it with
if len(all_edge_costs) > 0:
# Now place results in data frame
return pd.DataFrame({
'edge_cost': all_edge_costs,
'from_stop_id': all_from_stop_ids,
'to_stop_id': all_to_stop_ids})
# Otherwise a None value should be returned
else:
return None
def make_route_processor_manager():
manager = RouteProcessorManager()
manager.start()
return manager
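# Usage sketch (illustrative; argument names are assumptions): the manager
# exposes RouteProcessor as a proxy object that worker processes can share, e.g.
#   manager = make_route_processor_manager()
#   processor = manager.RouteProcessor(start, end, trips_df, stop_times_df, stops_df)
#   tst_sub, edge_costs = processor.generate_route_costs(route_id)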
class NonUniqueSequenceSet(Exception):
pass
class TripTimesInterpolatorManager(BaseManager):
pass
class TripTimesInterpolator(object):
def __init__(
self,
stop_times_original_df: pd.DataFrame):
# Initialize common parameters
stop_times = stop_times_original_df.copy()
        # Set index on trip id so we can quickly subset the dataframe
# during iteration of generate_infilled_times
stop_times = stop_times.set_index('trip_id')
# Also avoid having these be object column types
for col in ['arrival_time', 'departure_time']:
stop_times[col] = stop_times[col].astype(float)
# Now we can set to self
self.stop_times = stop_times
def generate_infilled_times(self, trip_id: str):
# Get all the subset of trips that are related to this route
sub_df = self.stop_times.loc[trip_id].copy()
# Pandas will try and make returned result a Series if there
# is only one result - prevent this from happening
if isinstance(sub_df, pd.Series):
sub_df = sub_df.to_frame().T
# We again want to make sure these columns are
# typed right and the pivot itself will just leave
# them as object type columns, which will cause errors
# when we check the row for NaN values later on
for col in ['arrival_time', 'departure_time']:
sub_df[col] = sub_df[col].astype(float)
# TODO: Should we be able to assume that this column is
# present by the time we arrive here? If so, we should
# be able to move this check upstream, earlier in tool
# Note: Make sure that there is a set of stop sequence
# numbers present in each of the trip_id sub-dataframes
if 'stop_sequence' not in sub_df.columns:
sub_df['stop_sequence'] = range(len(sub_df))
uniq_sequence_ids = sub_df.stop_sequence.unique()
if not len(uniq_sequence_ids) == len(sub_df):
raise NonUniqueSequenceSet(
'Expected there to be a unique set of '
'stop ids for each trip_id in stop_times.')
# Next, make sure that the subset dataframe is sorted
# stop sequence, incrementing upward
sub_df = sub_df.sort_values(by=['stop_sequence'])
# Extract the arrival and departure times as independent arrays
for col in ['arrival_time', 'departure_time']:
sub_df[col] = apply_interpolation(sub_df[col])
# Re-add the trip_id as column at this point
sub_df['trip_id'] = trip_id
# Also, we dump any index set on this subset to avoid issues
# when returned later
sub_df = sub_df.reset_index(drop=True)
# Now free to release/return
return sub_df
def apply_interpolation(orig_array: List) -> List:
target_col_array = orig_array.copy()
nans, x = nan_helper(target_col_array)
target_col_array[nans] = np.interp(x(nans),
x(~nans),
target_col_array[~nans])
return target_col_array
def make_trip_time_interpolator_manager():
manager = TripTimesInterpolatorManager()
manager.start()
return manager
TripTimesInterpolatorManager.register(
'TripTimesInterpolator', TripTimesInterpolator)
RouteProcessorManager.register('RouteProcessor', RouteProcessor)
|
py | b402dffcc79f7fb33ac845102fb2492128171929 | # encoding: utf-8
# module apt_pkg
# from /usr/lib/python3/dist-packages/apt_pkg.cpython-35m-x86_64-linux-gnu.so
# by generator 1.145
"""
Classes and functions wrapping the apt-pkg library.
The apt_pkg module provides several classes and functions for accessing
the functionality provided by the apt-pkg library. Typical uses might
include reading APT index files and configuration files and installing
or removing packages.
"""
# no imports
from .object import object
class Cache(object):
"""
Cache([progress]) -> Cache() object.
The APT cache file contains a hash table mapping names of binary
packages to their metadata. A Cache object is the in-core
representation of the same. It provides access to APT’s idea of the
list of available packages.
The optional parameter *progress* can be used to specify an
apt.progress.base.OpProgress() object (or similar) which reports
progress information while the cache is being opened. If this
parameter is not supplied, the progress will be reported in simple,
human-readable text to standard output. If it is None, no output
will be made.
The cache can be used like a mapping from package names to Package
objects (although only getting items is supported). Instead of a name,
a tuple of a name and an architecture may be used.
"""
def update(self, progress, sources, pulse_interval): # real signature unknown; restored from __doc__
"""
update(progress, sources: SourceList, pulse_interval: int) -> bool
Update the index files used by the cache. A call to this method
does not affect the current Cache object; instead, a new one
should be created in order to use the changed index files.
The parameter 'progress' can be used to specify an
apt.progress.base.AcquireProgress() object , which will report
progress information while the index files are being fetched.
The parameter 'sources', if provided, is an apt_pkg.SourcesList
object listing the remote repositories to be used.
The 'pulse_interval' parameter indicates how long (in microseconds)
to wait between calls to the pulse() method of the 'progress' object.
The default is 500000 microseconds.
"""
return False
def __contains__(self, *args, **kwargs): # real signature unknown
""" Return key in self. """
pass
def __getitem__(self, *args, **kwargs): # real signature unknown
""" Return self[key]. """
pass
def __init__(self, progress=None): # real signature unknown; restored from __doc__
pass
def __len__(self, *args, **kwargs): # real signature unknown
""" Return len(self). """
pass
@staticmethod # known case of __new__
def __new__(*args, **kwargs): # real signature unknown
""" Create and return a new object. See help(type) for accurate signature. """
pass
depends_count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""The number of apt_pkg.Dependency objects stored in the cache."""
file_list = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""A list of apt_pkg.PackageFile objects stored in the cache."""
groups = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""A list of Group objects in the cache"""
group_count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""The number of apt_pkg.Group objects stored in the cache."""
is_multi_arch = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Whether the cache supports multi-arch."""
packages = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""A list of apt_pkg.Package objects stored in the cache."""
package_count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""The number of apt_pkg.Package objects stored in the cache."""
package_file_count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""The number of apt_pkg.PackageFile objects stored in the cache."""
policy = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""The PkgPolicy for the cache"""
provides_count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""Number of Provides relations described in the cache."""
version_count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""The number of apt_pkg.Version objects stored in the cache."""
ver_file_count = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
"""The number of (Version, PackageFile) relations."""
|
py | b402e00d0828add04e155d5a1411ca5a11d4938b | import setuptools
setuptools.setup(
name="rasbeery",
version="0.0.0.1",
author="Ola Skavhaug",
author_email="[email protected]",
packages=setuptools.find_packages(),
entry_points={
"console_scripts": [
"mash_example=rasbeery.mash:mash_example"
]
}
) |
py | b402e08960863b912c7af2ad08599907eda06da3 | # Copyright (c) 2020 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from paddlenlp.embeddings import TokenEmbedding
from paddlehub.module.module import moduleinfo
from paddlehub.module.nlp_module import EmbeddingModule
@moduleinfo(
name="w2v_wiki_target_word-word_dim300",
version="1.0.1",
summary="",
author="paddlepaddle",
author_email="",
type="nlp/semantic_model",
meta=EmbeddingModule)
class Embedding(TokenEmbedding):
"""
Embedding model
"""
embedding_name = "w2v.wiki.target.word-word.dim300"
def __init__(self, *args, **kwargs):
super(Embedding, self).__init__(embedding_name=self.embedding_name, *args, **kwargs)
|
py | b402e1c7cb2ae0024cda1687ceb5f0418e134cdf | from .ensemble_classifier_chains import EnsembleClassifierChain
|
py | b402e24bd9b51d1289f4bae36a28542f5c66c8b0 | #!/usr/bin/env python
from setuptools import setup
from setuptools import find_packages
__version__ = '0.0.1'
__author__ = "Jordie Shier"
__contact__ = "[email protected]"
__url__ = ""
__license__ = "MIT"
with open("README.md", "r", encoding='utf-8') as f:
readme = f.read()
setup(
name='uvic-music-extractor',
version=__version__,
author=__author__,
author_email=__contact__,
description='Audio feature extractors for research on musical audio conducted at the University of Victoria',
long_description=readme,
long_description_content_type='text/markdown',
url=__url__,
    license=__license__,
packages=find_packages('src'),
package_dir={'': 'src'},
package_data={
'': [],
},
scripts=[
'scripts/uvic_music_extractor',
],
python_requires='>=3.6',
install_requires=[
'numpy',
'scipy',
'six',
'tqdm'
],
extras_require={
'dev': [
],
}
)
|
py | b402e33e4159725644dc4af4d90fb4053ccd541b | from typing import Union, List, Dict, Any, Optional
from app.utils import get_int, get_float
class BusVehicle:
def __init__(
self,
car_number: str,
index: int,
route_id: int,
station_id: str,
is_last: Optional[bool] = False,
is_full: Optional[bool] = None,
is_arrive: Optional[bool] = None,
vehicle_type: int = 0,
seat: Optional[int] = None,
congestion: Optional[int] = None,
section_distance: Optional[float] = None
):
self.car_number = car_number
self.index = index
self.route_id = route_id
self.station_id = station_id
self.is_last = is_last
self.is_full = is_full
self.is_arrival = is_arrive
self.type = vehicle_type
self.seat = seat
self.congestion = congestion
self.section_distance = section_distance
@staticmethod
def convert_seat(people: int) -> Optional[int]:
if people == -1:
return None
return people
@staticmethod
def convert_incheon(data: int) -> Optional[int]:
if data == 255:
return None
return data
@classmethod
def from_seoul(cls, payload: Dict[str, Any]):
print(payload)
congestion = get_int(payload.get('congetion'))
if congestion > 0:
congestion -= 2
section_distance = get_float(payload['sectDist'])
full_section_distance = get_float(payload['fullSectDist'])
return cls(
car_number=payload.get('plainNo'),
index=int(payload['sectOrd']),
route_id=payload['sectionId'],
station_id=payload['lastStnId'],
is_last=bool(int(payload.get('islastyn', 0))),
is_full=bool(int(payload.get('isFullFlag', False))),
is_arrive=bool(int(payload.get('stopFlag', False))),
vehicle_type=int(payload['busType']),
congestion=congestion,
section_distance=round(section_distance / full_section_distance * 100, 3),
)
@classmethod
def from_gyeonggi(cls, payload: Dict[str, Any]):
seat = cls.convert_seat(payload.get("remainSeatCnt", -1))
return cls(
car_number=payload.get('plateNo'),
index=int(payload['stationSeq']),
route_id=payload['routeId'],
station_id=payload['stationId'],
is_last=bool(int(payload.get('endBus', 0))),
is_full=True if seat == 0 else False,
seat=seat,
vehicle_type=int(payload['lowPlate'])
)
@classmethod
def from_incheon(cls, payload: Dict[str, Any]):
seat = cls.convert_incheon(int(payload.get("REMAIND_SEAT", 255)))
congestion = cls.convert_incheon(int(payload.get("CONGESTION", 255)))
return cls(
car_number=payload.get('BUS_NUM_PLATE'),
index=int(payload['LATEST_STOPSEQ']),
route_id=payload['ROUTEID'],
station_id=payload['LATEST_STOP_ID'],
is_last=bool(int(payload.get('LASTBUSYN', 0))),
is_full=True if seat == 0 else False,
seat=seat,
congestion=congestion,
vehicle_type=int(payload['LOW_TP_CD'])
)
def to_dict(self) -> Dict[str, Any]:
return {
"congestion": self.congestion,
"carNumber": self.car_number,
"lowBus": bool(self.type),
"index": self.index,
"station": self.station_id,
"routeId": self.route_id,
"seat": self.seat,
"isLast": self.is_last,
"isFull": self.is_full,
"isArrival": self.is_arrival,
"sectionDistance": self.section_distance
}
|
py | b402e439a073b33a9a53da14a12637d764eeda1f | # coding: utf-8
from django.db import models
import time
import datetime
MAX_CONTENT_LENGTH = 10240
PER_PAGE = 200
class EntryModel(models.Model):
feed_id = models.IntegerField()
url = models.URLField(max_length=256)
title = models.CharField(max_length=64)
updated = models.IntegerField()
content = models.TextField()
@property
def dict(self):
d = {
"id": self.id,
"url": self.url.encode("utf-8"),
"feed_id": self.feed_id,
"title": self.title.encode("utf-8"),
"updated": self.updated_stftime,
"content": self.content.encode("utf-8"),
}
return d
@property
def updated_stftime(self):
datetime_obj = datetime.datetime.fromtimestamp(self.updated)
return datetime_obj.strftime('%Y-%m-%d %H:%M')
@staticmethod
def count(feed_id, min_updated=0):
feed_id = int(feed_id)
count = EntryModel.objects.all().filter(
feed_id=feed_id
).filter(
updated__gt=min_updated
).order_by("-updated").count()
return count
@staticmethod
def get_entries(feed_id, page, min_updated=None):
"""reading"""
feed_id = int(feed_id)
page = int(page)
start_index = (page - 1) * PER_PAGE
end_index = (page) * PER_PAGE
try:
query = EntryModel.objects.all().filter(
feed_id=feed_id
)
if min_updated:
query = query.filter(
updated__gt=min_updated
)
entries = query.order_by("-updated")[start_index:end_index]
except EntryModel.DoesNotExist:
entries = []
return entries
@staticmethod
def get_timeline(feed_id, page):
feed_id = int(feed_id)
page = int(page)
start_index = (page - 1) * PER_PAGE
end_index = (page) * PER_PAGE
try:
query = EntryModel.objects.all()
if feed_id:
query = query.filter(
feed_id=feed_id
)
entries = query.order_by("-updated")[start_index:end_index]
except EntryModel.DoesNotExist:
entries = []
return entries
@staticmethod
def get_folder(feed_ids, page):
page = int(page)
start_index = (page - 1) * PER_PAGE
end_index = (page) * PER_PAGE
try:
query = EntryModel.objects.filter(
feed_id__in=feed_ids
)
entries = query.order_by("-updated")[start_index:end_index]
except EntryModel.DoesNotExist:
entries = []
return entries
@staticmethod
def get_content(entry):
if ("content" in entry and entry.content and
len(entry.content) >= 1 and len(entry.content[0]["value"]) <= MAX_CONTENT_LENGTH):
return entry.content[0]["value"]
elif "summary" in entry and entry.summary:
return entry.summary if len(entry.summary) <= MAX_CONTENT_LENGTH else ""
else:
return ""
@staticmethod
def add(feed_id, entry):
entry_model = EntryModel(
feed_id=feed_id,
url=entry.link,
title=entry.title,
updated=int(time.mktime(entry.updated_parsed)),
content=EntryModel.get_content(entry)
)
entry_model.save()
return entry_model
class Meta:
app_label = 'worker'
index_together = (
("feed_id", "updated"),
)
unique_together = (
("url", "updated", "feed_id"),
)
|
py | b402e48c544ac185f3620a58cc6a303061c40044 | """Helper functions for LogViewer
"""
## import os
## import glob
import pathlib
import datetime
import sqlite3
from contextlib import closing
try:
from http.client import responses
except ImportError:
from httplib import responses
LOGROOT = '/var/log/nginx'
DATABASE = '/tmp/loglines_{}.db'
# extra Nginx status codes
responses.update({444: 'No Response From Server',
494: 'Request Header Too Large',
495: 'SSL Certificate Error',
496: 'SSL Certificate Required',
497: 'HTTP Request Sent to HTTPS Port',
499: 'Client Closed Request'})
def listlogs():
"""bouw een lijst op van logfiles, meest recent aangepaste het eerst
"""
lijst = []
## for item in glob.glob(os.path.join(LOGROOT, '*.log')):
for item in (x for x in pathlib.Path(LOGROOT).iterdir() if x.suffix == '.log'):
## lijst.append((os.path.getctime(item), os.path.basename(item)))
lijst.append((item.stat().st_ctime, item.name))
lijst.sort()
lijst.reverse()
return [x[1] for x in lijst]
def connect_db(timestr):
"""get a connection to the database
"""
return sqlite3.connect(DATABASE.format(timestr))
def init_db(timestr):
"""initialiseer de tabel met sessieparameters
"""
with closing(connect_db(timestr)) as db:
cur = db.cursor()
cur.execute('DROP TABLE IF EXISTS parms;')
## db.commit()
cur.execute('CREATE TABLE parms (id INTEGER PRIMARY KEY, '
'logfile STRING NOT NULL, entries INTEGER NOT NULL, '
'current INTEGER NOT NULL, total INTEGER NOT NULL, '
'ordering STRING NOT NULL, mld STRING NOT NULL);')
db.commit()
cur.execute('INSERT INTO parms VALUES (?, ?, ?, ?, ?, ?, ?)', (1, '', 10,
0, 0, 'desc', ''))
db.commit()
def startswith_date(line):
"""return True if logline starts with a valid date
standard log lines start with yyyy/mm/dd followed by a space
cherrypy log lines start with [dd/mmm/yyyy followed by a colon
"""
if line.startswith('['):
test = line[1:].split(':', 1)
dateformat = '%d/%b/%Y'
else:
test = line.split(None, 1)
dateformat = '%Y/%m/%d'
if not len(test) == 2:
return False
try:
date = datetime.datetime.strptime(test[0], dateformat)
except ValueError:
return False
return True
def rereadlog(logfile, entries, order, timestr):
"""read the designated logfile and store in temporary database
"""
old_logfile, old_entries, old_order = logfile, entries, order
with closing(connect_db(timestr)) as db:
cur = db.cursor()
try:
data = cur.execute('SELECT logfile, entries, ordering FROM parms '
'where id == 1')
except sqlite3.OperationalError:
init_db(timestr)
else:
for row in data:
old_logfile, old_entries, old_order = row
break
if logfile == old_logfile and entries == old_entries and order == old_order:
return
with closing(connect_db(timestr)) as db:
cur = db.cursor()
cur.execute('UPDATE parms SET logfile = ?, entries = ? , ordering = ? '
'WHERE id == 1', (logfile, entries, order))
db.commit()
## fnaam = os.path.join(LOGROOT, logfile)
## with open(fnaam) as _in:
with (pathlib.Path(LOGROOT) / logfile).open() as _in:
data = _in.readlines()
if not data:
try:
with (pathlib.Path(LOGROOT) / (logfile + '.1')).open() as _in:
data = _in.readlines()
except FileNotFoundError: # no prior log generation found
pass
if 'error' in logfile:
        # check for tracebacks and merge each one into a single log line
newdata = []
traceback_data = []
for line in data:
if startswith_date(line):
if traceback_data:
newdata.append(''.join(traceback_data))
traceback_data = []
newdata.append(line)
else:
traceback_data.append(line)
if traceback_data:
newdata.append(''.join(traceback_data))
data = newdata
total = len(data)
with closing(connect_db(timestr)) as db:
cur = db.cursor()
parms = cur.execute('SELECT ordering FROM parms where id == 1')
for row in parms:
order = row[0]
break
cur.execute('DROP TABLE IF EXISTS log;')
cur.execute('CREATE TABLE log (id INTEGER PRIMARY KEY, '
'line varchar(1000) NOT NULL);')
db.commit()
if order == 'desc':
data.reverse()
for ix, line in enumerate(data):
cur.execute('INSERT INTO log VALUES (?, ?)', (ix + 1, line))
db.commit()
check = cur.execute('SELECT COUNT(*) FROM log;')
for item in check:
check = item[0]
break
if check != total:
            raise ValueError('Unexpected difference between {} and {}'.format(
                total, check))
else:
cur.execute('UPDATE parms SET total = ? WHERE id == 1', (total,))
db.commit()
def get_data(timestr, position='first'):
"""get a batch of lines from the collection of log lines
"""
outdict = {'loglist': listlogs(),
'logfile': '',
'order': '',
'errorlog': False,
'numentries': ('5', '10', '15', '20', '25', '30'),
'entries': '',
'mld': '',
'logdata': [],
'timestr': timestr}
with closing(connect_db(timestr)) as db:
cur = db.cursor()
try:
data = cur.execute('SELECT logfile, entries, current, total, ordering, '
'mld FROM parms where id == 1')
except sqlite3.OperationalError:
init_db(timestr)
outdict['mld'] = 'No data available, try refreshing the display'
else:
for row in data:
logfile, entries, current, total, order, mld = row
break
is_errorlog = True if 'error' in logfile else False
outdict['logfile'] = logfile
outdict['order'] = order
outdict['errorlog'] = is_errorlog
outdict['entries'] = str(entries)
outdict['mld'] = mld
if position == 'first':
current = 1
elif position == 'prev':
newtop = current - entries
if newtop > 0:
current = newtop
else:
                    outdict['mld'] = 'No previous page'
elif position == 'next':
newtop = current + entries
if newtop <= total:
current = newtop
else:
                    outdict['mld'] = 'No next page'
elif position == 'last':
current = int(total / entries) * entries + 1
cur.execute('UPDATE parms SET current = ? WHERE id == 1', (current,))
db.commit()
if logfile:
lines = cur.execute('SELECT line FROM log WHERE id BETWEEN {} '
'and {}'.format(current, current + entries - 1))
for line in lines:
if is_errorlog:
parts = showerror(line[0])
else:
parts = showaccess(line[0])
outdict['logdata'].append(parts)
start = len(outdict['logdata'])
for i in range(start, entries):
outdict['logdata'].append({'client': '', 'date': '', 'data': ''})
return outdict
def showerror(text):
"""format a line from an error log
"""
errortypes = ('[notice]', '[error]', '[crit]')
client, date, data = '', '', ''
for item in errortypes:
if item in text:
            date, data = text.split(item, 1)  # split on the first occurrence only
# add error type back to message
data = item + data
break
if not date:
# regular cherrypy error log lines start with the date between square brackets
test = text.split('] ', 1)
if len(test) == 2 and test[0].startswith('['):
date, text = test
date = date[1:]
data = text
if ', client' in data:
data, client = data.split(', client')
client = 'client' + client
parts = {"client": client, "date": date, "data": data}
return parts
def showaccess(text):
"""format a line from an access log
"""
parts = {'client': '', 'date': '', 'data': ''}
parsed = text.split(' -', 2) # client, date, data
parts['client'] = parsed[0]
if len(parsed) < 2:
return parts
parsed = parsed[-1].split(' [', 1) # strip off opening bracket for date
if len(parsed) < 2:
return parts
parsed = parsed[1].split('] "', 1) # date, data
parts['date'] = parsed[0]
if len(parsed) < 2:
return parts
parsed = parsed[1].split('" ', 1)
if len(parsed) < 2:
return parts
command = parsed[0] # verb address protocol = command.split()
parsed = parsed[1].split(' ', 1)
try:
text = responses[int(parsed[0])]
except KeyError:
text = 'unknown status'
parts['data'] = '{} {}: {}'.format(parsed[0], text, command)
return parts
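# Rough example for showaccess() (hypothetical input line), assuming `responses` maps
# status codes to reason phrases as used above:
#   showaccess('127.0.0.1 - - [01/May/2023:12:00:00 +0200] "GET /index.html HTTP/1.1" 200 1234')
#   -> {'client': '127.0.0.1', 'date': '01/May/2023:12:00:00 +0200',
#       'data': '200 OK: GET /index.html HTTP/1.1'}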
|
py | b402e4eea71bcf003ecc90b206605af85073b0ba | #!/usr/bin/env python
# -*- coding: utf-8
# Copyright 2017-2019 The FIAAS Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import itertools
import os
import re
import subprocess
import uuid
import pytest
from xdist.scheduler import LoadScopeScheduling
DOCKER_FOR_E2E_OPTION = "--use-docker-for-e2e"
pytest_plugins = ['helpers_namespace']
@pytest.fixture(autouse=True)
def prometheus_registry():
from prometheus_client.core import REGISTRY
yield REGISTRY
for c in REGISTRY._collector_to_names.keys():
REGISTRY.unregister(c)
@pytest.helpers.register
def assert_any_call(mockk, first, *args, **kwargs):
__tracebackhide__ = True
def _assertion():
mockk.assert_any_call(first, *args, **kwargs)
_add_useful_error_message(_assertion, mockk, first, args)
@pytest.helpers.register
def assert_no_calls(mockk, uri=None):
__tracebackhide__ = True
def _assertion():
calls = [call[0] for call in mockk.call_args_list if (uri is None or call[0][0] == uri)]
assert len(calls) == 0
_add_useful_error_message(_assertion, mockk, None, None)
@pytest.helpers.register
def assert_dicts(actual, expected):
__tracebackhide__ = True
try:
assert actual == expected
except AssertionError as ae:
raise AssertionError(ae.message + _add_argument_diff(actual, expected))
def _add_useful_error_message(assertion, mockk, first, args):
"""
If an AssertionError is raised in the assert, find any other calls on mock where the first parameter is uri and
append those calls to the AssertionErrors message to more easily find the cause of the test failure.
"""
__tracebackhide__ = True
try:
assertion()
except AssertionError as ae:
other_calls = [call[0] for call in mockk.call_args_list if (first is None or call[0][0] == first)]
if other_calls:
extra_info = '\n\nURI {} got the following other calls:\n{}\n'.format(first, '\n'.join(
_format_call(call) for call in other_calls))
if len(other_calls) == 1 and len(other_calls[0]) == 2 and args is not None:
extra_info += _add_argument_diff(other_calls[0][1], args[0])
raise AssertionError(ae.message + extra_info)
else:
raise
def _add_argument_diff(actual, expected, indent=0, acc=None):
first = False
if not acc:
acc = ["Actual vs Expected"]
first = True
if type(actual) != type(expected):
acc.append("{}{!r} {} {!r}".format(" " * indent * 2, actual, "==" if actual == expected else "!=", expected))
elif isinstance(actual, dict):
for k in set(actual.keys() + expected.keys()):
acc.append("{}{}:".format(" " * indent * 2, k))
a = actual.get(k)
e = expected.get(k)
if a != e:
_add_argument_diff(a, e, indent + 1, acc)
elif isinstance(actual, list):
for a, e in itertools.izip_longest(actual, expected):
acc.append("{}-".format(" " * indent * 2))
if a != e:
_add_argument_diff(a, e, indent + 1, acc)
else:
acc.append("{}{!r} {} {!r}".format(" " * indent * 2, actual, "==" if actual == expected else "!=", expected))
if first:
return "\n".join(acc)
def _format_call(call):
if len(call) > 1:
return 'call({}, {})'.format(call[0], call[1])
else:
return 'call({})'.format(call[0])
class FixtureScheduling(LoadScopeScheduling):
def __init__(self, config, log=None):
LoadScopeScheduling.__init__(self, config, log)
self._assigned_scope = {}
def _split_scope(self, nodeid):
if nodeid in self._assigned_scope:
return self._assigned_scope[nodeid]
m = re.search(r".*\[(.*)\].*", nodeid)
if not m:
scope = LoadScopeScheduling._split_scope(self, nodeid)
else:
fixture_values = m.group(1).split("-")
if "test_e2e" in nodeid:
scope = "-".join(fixture_values[:2])
else:
scope = self._select_scope(fixture_values)
self._assigned_scope[nodeid] = scope
return scope
def _select_scope(self, fixture_values):
groups = itertools.izip_longest(fillvalue="", *([iter(fixture_values)] * 3))
return "-".join(next(groups))
@pytest.mark.tryfirst
def pytest_xdist_make_scheduler(config, log):
return FixtureScheduling(config, log)
def pytest_addoption(parser):
parser.addoption(DOCKER_FOR_E2E_OPTION, action="store_true",
help="Run FDD using the latest docker container when executing E2E tests")
@pytest.fixture(scope="session")
def use_docker_for_e2e(request):
def dockerize(test_request, cert_path, service_type, k8s_version, port, apiserver_ip):
container_name = "fdd_{}_{}_{}".format(service_type, k8s_version, str(uuid.uuid4()))
test_request.addfinalizer(lambda: subprocess.call(["docker", "stop", container_name]))
args = [
"docker", "run",
"-i", "--rm",
"-e", "NAMESPACE",
"--name", container_name,
"--publish", "{port}:{port}".format(port=port),
"--mount", "type=bind,src={},dst={},ro".format(cert_path, cert_path),
# make `kubernetes` resolve to the apiserver's IP to make it possible to validate its TLS cert
"--add-host", "kubernetes:{}".format(apiserver_ip),
]
if not _is_macos():
# Linux needs host networking to make the fiaas-deploy-daemon port available on localhost when running it
# in a container. To do the same thing on Docker for mac it is enough to use --publish, and enabling host
# networking will make it impossible to connect to the port.
args += ["--network", "host"]
return args + ["fiaas/fiaas-deploy-daemon:latest"]
if request.config.getoption(DOCKER_FOR_E2E_OPTION):
return dockerize
else:
return lambda *args, **kwargs: []
def _is_macos():
return os.uname()[0] == 'Darwin'
|
py | b402e5fe4d28450b930bd53140727b19e0f2209c | import datetime
from datetime import date
from datetime import datetime, timedelta
import time
from time import strftime
# django settings for script
from django.conf import settings
# from djequis.core.utils import sendmail
# from djzbar.utils.informix import do_sql
# from djzbar.utils.informix import get_engine
from djimix.core.utils import get_connection, xsql
# Imports for additional modules and functions written as part of this project
from djlabour.core.utilities import fn_validate_field, fn_convert_date, \
fn_calculate_age, fn_write_error, fn_write_log
DEBUG = settings.INFORMIX_DEBUG
# set up command-line options
desc = """
Upload ADP data to CX
"""
# write out the .sql file
scr = open("apdtocx_output.sql", "a")
def fn_format_race(race):
''' AM American Indian/Alaskan
AS Asian
BL Black/African American
HI Hispanic of any race
IS Native Hawaiian/Othr Isl
MU Two or more races
NO Non resident Alien
UN Race/Ethnicity Unknown
WH White
Blank
AP Native Asian/Pacific Isl'''
if race == "White (United States of America)":
return "WH"
elif race == "Asian (United States of America)":
return "AS"
elif race == "Black or African American (United States of America)":
return "BL"
elif race == "Two or More Races (United States of America)":
return "MU"
elif race == "American Indian or Alaska Native (United States of America)":
return "AM"
elif race == "Native Hawaiian or Other Pacific Islander" \
" (United States of America)":
return "IS"
else:
return ""
def fn_process_profile_rec(id, ethnicity, sex, race, birth_date,
prof_last_upd_date, EARL):
# engine = get_engine(EARL)
try:
##########################################################
# Find out if record exists to determine update vs insert
##########################################################
prof_rslt = fn_validate_field(id, "id", "id",
"profile_rec", "integer", EARL)
print("Prof Result = " + str(prof_rslt))
# create race dictionary
v_race = fn_format_race(race)
# create ethnicity dictionary
if ethnicity is None:
is_hispanic = 'N'
# elif ethnicity == '':
# is_hispanic = 'N'
else:
is_hispanic = ethnicity
# print(is_hispanic)
if birth_date is None or birth_date.strip() == "" or len(birth_date) == 0:
b_date = None
print ("Empty Birthdate")
age = None
else:
age = fn_calculate_age(birth_date)
b_date = birth_date
# print("Age = " + str(age))
if prof_rslt is None or prof_rslt == 0:
# Insert or update as needed
q_insert_prof_rec = '''
INSERT INTO profile_rec (id, sex, race, hispanic, birth_date,
age, prof_last_upd_date)
VALUES (?, ?, ?, ?, ?, ?, ?) '''
q_ins_prof_args=(id, sex, v_race, is_hispanic, b_date, age,
prof_last_upd_date)
# print(q_insert_prof_rec)
# print(q_ins_prof_args)
# engine.execute(q_insert_prof_rec, q_ins_prof_args)
# fn_write_log("Inserted into profile_rec table values " + str(id)
# + ", " + v_race + ", " + str(is_hispanic));
# print("Inserted into profile_rec table values " + str(id) + ","
# + v_race + ", " + str(is_hispanic))
scr.write(q_insert_prof_rec + '\n' + str(q_ins_prof_args) + '\n')
else:
q_update_prof_rec = '''
UPDATE profile_rec SET sex = ?,
hispanic = ?, race = ?,
birth_date = ?, age = ?,
prof_last_upd_date = ?
WHERE id = ?'''
q_upd_prof_args = (sex, is_hispanic, v_race,
b_date, age, prof_last_upd_date, id)
# print(q_update_prof_rec)
# print(q_upd_prof_args)
# engine.execute(q_update_prof_rec, q_upd_prof_args)
# fn_write_log("Updated profile_rec table values " + str(id) + ","
# + v_race + ", " + str(is_hispanic));
scr.write(q_update_prof_rec + '\n' + str(q_upd_prof_args) + '\n')
return 1
except Exception as e:
print(e)
fn_write_error("Error in profilerec.py for ID " + str(id)
+ ", Error = " + repr(e))
return 0
# finally:
# logging.shutdown() |
py | b402e6e7b327e86db62a1551108fdd63a3a7b799 | # Copyright (c) 2019, NVIDIA CORPORATION. All rights reserved.
#
# Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# =============================================================================
import argparse
import os
import logging
import time
import pprint
from functools import partial
from tensorflow import keras
import numpy as np
import tensorflow as tf
from tensorflow.python.compiler.tensorrt import trt_convert as trt
from tensorflow.python.saved_model import signature_constants
from tensorflow.python.saved_model import tag_constants
from tensorflow.python.framework import convert_to_constants
import preprocessing
def deserialize_image_record(record):
feature_map = {'image/encoded': tf.io.FixedLenFeature([], tf.string, ''),
'image/class/label': tf.io.FixedLenFeature([1], tf.int64, -1),
'image/class/text': tf.io.FixedLenFeature([], tf.string, ''),
'image/object/bbox/xmin': tf.io.VarLenFeature(
dtype=tf.float32),
'image/object/bbox/ymin': tf.io.VarLenFeature(
dtype=tf.float32),
'image/object/bbox/xmax': tf.io.VarLenFeature(
dtype=tf.float32),
'image/object/bbox/ymax': tf.io.VarLenFeature(
dtype=tf.float32)}
with tf.compat.v1.name_scope('deserialize_image_record'):
obj = tf.io.parse_single_example(serialized=record, features=feature_map)
imgdata = obj['image/encoded']
label = tf.cast(obj['image/class/label'], tf.int32)
return imgdata, label
def get_preprocess_fn(preprocess_method, input_size, mode='validation'):
"""Creates a function to parse and process a TFRecord
preprocess_method: string
input_size: int
mode: string, which mode to use (validation or benchmark)
returns: function, the preprocessing function for a record
"""
if preprocess_method == 'vgg':
preprocess_fn = preprocessing.vgg_preprocess
elif preprocess_method == 'inception':
preprocess_fn = preprocessing.inception_preprocess
else:
raise ValueError(
'Invalid preprocessing method {}'.format(preprocess_method))
def validation_process(record):
# Parse TFRecord
imgdata, label = deserialize_image_record(record)
label -= 1 # Change to 0-based (don't use background class)
try:
image = tf.image.decode_jpeg(
imgdata, channels=3, fancy_upscaling=False, dct_method='INTEGER_FAST')
except:
image = tf.image.decode_png(imgdata, channels=3)
# Use model's preprocessing function
image = preprocess_fn(image, input_size, input_size)
# type of image = <class 'tensorflow.python.framework.ops.Tensor'>
return image, label
def benchmark_process(path):
image = tf.io.read_file(path)
image = tf.image.decode_jpeg(image, channels=3)
image = preprocess_fn(image, input_size, input_size)
return image
if mode == 'validation':
return validation_process
if mode == 'benchmark':
return benchmark_process
raise ValueError("Mode must be either 'validation' or 'benchmark'")
def get_fashion_mnist_data(batch_size):
fashion_mnist = keras.datasets.fashion_mnist
(train_images, train_labels), (test_images,
test_labels) = fashion_mnist.load_data()
# Only going to deal with the 'test_images' since the model should already be trained
# might have to revisit the below in case I get something screwy
test_images = test_images.astype(np.float32) / 255.0
dataset = tf.data.Dataset.from_tensor_slices((test_images, test_labels))
dataset = dataset.batch(batch_size=batch_size)
dataset = dataset.repeat(count=1)
return dataset
def get_dataset(data_files,
batch_size,
use_synthetic,
preprocess_method,
input_size,
mode='validation'):
if use_synthetic:
features = np.random.normal(
loc=112, scale=70,
size=(batch_size, input_size, input_size, 3)).astype(np.float32)
features = np.clip(features, 0.0, 255.0)
features = tf.convert_to_tensor(value=tf.compat.v1.get_variable(
"features", dtype=tf.float32, initializer=tf.constant(features)))
dataset = tf.data.Dataset.from_tensor_slices([features])
dataset = dataset.repeat()
else:
# preprocess function for input data
preprocess_fn = get_preprocess_fn(
preprocess_method=preprocess_method,
input_size=input_size,
mode=mode)
print("")
print("")
print("")
print(mode)
print("")
print("")
print("")
if mode == 'validation':
dataset = tf.data.TFRecordDataset(data_files)
dataset = dataset.map(map_func=preprocess_fn, num_parallel_calls=8)
dataset = dataset.batch(batch_size=batch_size)
dataset = dataset.prefetch(buffer_size=tf.data.experimental.AUTOTUNE)
dataset = dataset.repeat(count=1)
print("HEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEEERE")
print(type(dataset))
print(dataset)
elif mode == 'benchmark':
dataset = tf.data.Dataset.from_tensor_slices(data_files)
dataset = dataset.map(map_func=preprocess_fn, num_parallel_calls=8)
dataset = dataset.batch(batch_size=batch_size)
dataset = dataset.repeat(count=1)
else:
raise ValueError("Mode must be either 'validation' or 'benchmark'")
print("vvvvvvVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV")
print(type(dataset))
return dataset
def get_func_from_saved_model(saved_model_dir):
saved_model_loaded = tf.saved_model.load(
saved_model_dir, tags=[tag_constants.SERVING])
graph_func = saved_model_loaded.signatures[
signature_constants.DEFAULT_SERVING_SIGNATURE_DEF_KEY]
graph_func = convert_to_constants.convert_variables_to_constants_v2(graph_func)
return graph_func
def get_graph_func(input_saved_model_dir,
preprocess_method,
input_size,
output_saved_model_dir=None,
conversion_params=trt.DEFAULT_TRT_CONVERSION_PARAMS,
use_trt=False,
calib_files=None,
num_calib_inputs=None,
use_synthetic=False,
batch_size=None,
optimize_offline=False):
"""Retreives a frozen SavedModel and applies TF-TRT
use_trt: bool, if true use TensorRT
precision: str, floating point precision (FP32, FP16, or INT8)
batch_size: int, batch size for TensorRT optimizations
returns: TF function that is ready to run for inference
"""
start_time = time.time()
graph_func = get_func_from_saved_model(input_saved_model_dir)
if use_trt:
converter = trt.TrtGraphConverterV2(
input_saved_model_dir=input_saved_model_dir,
conversion_params=conversion_params,
)
def input_fn(input_files, num_iterations):
dataset = get_dataset(data_files=input_files,
batch_size=batch_size,
use_synthetic=False,
preprocess_method=preprocess_method,
input_size=input_size,
mode='validation')
            for i, (batch_images, _) in enumerate(dataset):
                if i >= num_iterations:
                    break
                print(" step %d/%d" % (i + 1, num_iterations))
                yield (batch_images,)
if conversion_params.precision_mode != 'INT8':
print('Graph conversion...')
converter.convert()
if optimize_offline:
print('Building TensorRT engines...')
print("ANDYYYYY WE SHOULD NOT SEE THIS!!!!")
converter.build(input_fn=partial(input_fn, data_files, 1))
converter.save(output_saved_model_dir=output_saved_model_dir)
graph_func = get_func_from_saved_model(output_saved_model_dir)
else:
print('Graph conversion and INT8 calibration...')
converter.convert(calibration_input_fn=partial(
input_fn, calib_files, num_calib_inputs//batch_size))
if optimize_offline:
print('Building TensorRT engines...')
print("INSTANCE 2 OF WE SHOULD NOT SEE THIS")
converter.build(input_fn=partial(input_fn, data_files, 1))
converter.save(output_saved_model_dir=output_saved_model_dir)
graph_func = get_func_from_saved_model(output_saved_model_dir)
return graph_func, {'conversion': time.time() - start_time}
def eval_fn(preds, labels, adjust):
"""Measures number of correct predicted labels in a batch.
Assumes preds and labels are numpy arrays.
"""
preds = np.argmax(preds, axis=1).reshape(-1) - adjust
return np.sum((labels.reshape(-1) == preds))
def run_inference(graph_func,
data_files,
batch_size,
preprocess_method,
input_size,
num_classes,
num_iterations,
num_warmup_iterations,
use_synthetic,
display_every=100,
mode='validation',
target_duration=None,
use_fashion_mnist_data=False):
"""Run the given graph_func on the data files provided. In validation mode,
it consumes TFRecords with labels and reports accuracy. In benchmark mode, it
times inference on real data (.jpgs).
"""
results = {}
corrects = 0
iter_times = []
adjust = 1 if num_classes == 1001 else 0
initial_time = time.time()
if use_fashion_mnist_data:
dataset = get_fashion_mnist_data(batch_size=batch_size)
else:
dataset = get_dataset(data_files=data_files,
batch_size=batch_size,
use_synthetic=use_synthetic,
input_size=input_size,
preprocess_method=preprocess_method,
mode=mode)
if mode == 'validation':
print("EYE CATCHER")
print(type(dataset))
print(dataset)
print("EYE CATCHER")
for i, (batch_images, batch_labels) in enumerate(dataset):
start_time = time.time()
batch_preds = graph_func(batch_images)[0].numpy()
end_time = time.time()
iter_times.append(end_time - start_time)
if i % display_every == 0:
print(" step %d/%d, iter_time(ms)=%.0f" %
(i+1, 50000//batch_size, iter_times[-1]*1000))
corrects += eval_fn(
batch_preds, batch_labels.numpy(), adjust)
if i > 1 and target_duration is not None and \
time.time() - initial_time > target_duration:
break
print("batch_size = %d" % batch_size)
print("i = %d" % i)
print("corrects = %d" % corrects)
accuracy = corrects / (batch_size * i)
results['accuracy'] = accuracy
elif mode == 'benchmark':
for i, batch_images in enumerate(dataset):
if i >= num_warmup_iterations:
start_time = time.time()
batch_preds = list(graph_func(batch_images).values())[0].numpy()
iter_times.append(time.time() - start_time)
if i % display_every == 0:
print(" step %d/%d, iter_time(ms)=%.0f" %
(i+1, num_iterations, iter_times[-1]*1000))
else:
batch_preds = list(graph_func(batch_images).values())[0].numpy()
if i > 0 and target_duration is not None and \
time.time() - initial_time > target_duration:
break
if num_iterations is not None and i >= num_iterations:
break
if not iter_times:
return results
iter_times = np.array(iter_times)
iter_times = iter_times[num_warmup_iterations:]
results['total_time'] = np.sum(iter_times)
results['images_per_sec'] = np.mean(batch_size / iter_times)
results['99th_percentile'] = np.percentile(
iter_times, q=99, interpolation='lower') * 1000
results['latency_mean'] = np.mean(iter_times) * 1000
results['latency_median'] = np.median(iter_times) * 1000
results['latency_min'] = np.min(iter_times) * 1000
return results
def config_gpu_memory(gpu_mem_cap):
gpus=tf.config.experimental.list_physical_devices('GPU')
if not gpus:
return
print('Found the following GPUs:')
for gpu in gpus:
print(' ', gpu)
for gpu in gpus:
try:
if not gpu_mem_cap:
tf.config.experimental.set_memory_growth(gpu, True)
else:
tf.config.experimental.set_virtual_device_configuration(
gpu,
[tf.config.experimental.VirtualDeviceConfiguration(
memory_limit=gpu_mem_cap)])
except RuntimeError as e:
print('Can not set GPU memory config', e)
def get_trt_conversion_params(max_workspace_size_bytes,
precision_mode,
minimum_segment_size,
max_batch_size):
conversion_params = trt.DEFAULT_TRT_CONVERSION_PARAMS
conversion_params = conversion_params._replace(
max_workspace_size_bytes=max_workspace_size_bytes)
conversion_params = conversion_params._replace(precision_mode=precision_mode)
conversion_params = conversion_params._replace(
minimum_segment_size=minimum_segment_size)
conversion_params = conversion_params._replace(
use_calibration=precision_mode == 'INT8')
conversion_params = conversion_params._replace(
max_batch_size=max_batch_size)
return conversion_params
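# Illustrative call of the helper above:
#   params = get_trt_conversion_params(1 << 30, 'FP16', 2, 8)
# returns DEFAULT_TRT_CONVERSION_PARAMS with max_workspace_size_bytes=1<<30,
# precision_mode='FP16', minimum_segment_size=2, use_calibration=False and max_batch_size=8.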
if __name__ == '__main__':
logging.getLogger("tensorflow").setLevel(logging.INFO)
parser = argparse.ArgumentParser(description='Evaluate model')
parser.add_argument('--input_saved_model_dir', type=str, default=None,
help='Directory containing the input saved model.')
parser.add_argument('--output_saved_model_dir', type=str, default=None,
help='Directory in which the converted model is saved')
parser.add_argument('--preprocess_method', type=str,
choices=['vgg', 'inception'], default='vgg',
help='The image preprocessing method')
parser.add_argument('--input_size', type=int, default=224,
help='Size of input images expected by the model')
parser.add_argument('--num_classes', type=int, default=1001,
help='Number of classes used when training the model')
parser.add_argument('--data_dir', type=str, default=None,
                        help='Directory containing validation set '
'TFRecord files.')
parser.add_argument('--calib_data_dir', type=str,
                        help='Directory containing TFRecord files for '
'calibrating INT8.')
parser.add_argument('--use_trt', action='store_true',
                        help='If set, the graph will be converted to a '
'TensorRT graph.')
parser.add_argument('--optimize_offline', action='store_true',
                        help='If set, TensorRT engines are built '
'before runtime.')
parser.add_argument('--precision', type=str,
choices=['FP32', 'FP16', 'INT8'], default='FP32',
                        help='Precision mode to use. FP16 and INT8 only '
'work in conjunction with --use_trt')
parser.add_argument('--batch_size', type=int, default=8,
help='Number of images per batch.')
parser.add_argument('--minimum_segment_size', type=int, default=2,
help='Minimum number of TF ops in a TRT engine.')
parser.add_argument('--num_iterations', type=int, default=2048,
                        help='How many iterations (batches) to evaluate. '
'If not supplied, the whole set will be evaluated.')
parser.add_argument('--display_every', type=int, default=100,
                        help='Number of iterations executed between '
                             'two consecutive displays of metrics')
parser.add_argument('--use_synthetic', action='store_true',
                        help='If set, one batch of random data is '
'generated and used at every iteration.')
parser.add_argument('--num_warmup_iterations', type=int, default=50,
help='Number of initial iterations skipped from timing')
parser.add_argument('--num_calib_inputs', type=int, default=500,
                        help='Number of inputs (e.g. images) used for '
                             'calibration (last batch is skipped in case '
'it is not full)')
parser.add_argument('--gpu_mem_cap', type=int, default=0,
                        help='Upper bound for GPU memory in MB. '
'Default is 0 which means allow_growth will be used.')
parser.add_argument('--max_workspace_size', type=int, default=(1<<30),
help='workspace size in bytes')
parser.add_argument('--mode', choices=['validation', 'benchmark'],
default='validation',
help='Which mode to use (validation or benchmark)')
parser.add_argument('--target_duration', type=int, default=None,
                        help='If set, script will run for specified '
'number of seconds.')
parser.add_argument('--use_fashion_mnist_data', action='store_true',
help='If set, script will use Keras fashion_mnist dataset.')
args = parser.parse_args()
if args.precision != 'FP32' and not args.use_trt:
        raise ValueError('TensorRT must be enabled for FP16 '
'or INT8 modes (--use_trt).')
if (args.precision == 'INT8' and not args.calib_data_dir
and not args.use_synthetic):
raise ValueError('--calib_data_dir is required for INT8 mode')
if (args.num_iterations is not None
and args.num_iterations <= args.num_warmup_iterations):
raise ValueError(
'--num_iterations must be larger than --num_warmup_iterations '
'({} <= {})'.format(args.num_iterations, args.num_warmup_iterations))
if args.num_calib_inputs < args.batch_size:
raise ValueError(
            '--num_calib_inputs must not be smaller than --batch_size '
'({} <= {})'.format(args.num_calib_inputs, args.batch_size))
if args.mode == 'validation' and args.use_synthetic:
raise ValueError('Cannot use both validation mode and synthetic dataset')
if args.data_dir is None and not args.use_synthetic and not args.use_fashion_mnist_data:
raise ValueError("--data_dir required if you are not using synthetic data or fashion mnist data")
if args.use_synthetic and args.num_iterations is None:
raise ValueError("--num_iterations is required for --use_synthetic")
if args.use_trt and not args.output_saved_model_dir:
raise ValueError("--output_saved_model_dir must be set if use_trt=True")
calib_files = []
data_files = []
def get_files(data_dir, filename_pattern):
if data_dir is None:
return []
files = tf.io.gfile.glob(os.path.join(data_dir, filename_pattern))
if files == []:
raise ValueError('Can not find any files in {} with '
'pattern "{}"'.format(data_dir, filename_pattern))
return files
if not args.use_synthetic and not args.use_fashion_mnist_data:
if args.mode == "validation":
data_files = get_files(args.data_dir, 'validation*')
elif args.mode == "benchmark":
data_files = [os.path.join(path, name) for path, _, files
in os.walk(args.data_dir) for name in files]
else:
raise ValueError("Mode must be either 'validation' or 'benchamark'")
if args.precision == 'INT8':
calib_files = get_files(args.calib_data_dir, 'train*')
config_gpu_memory(args.gpu_mem_cap)
params = get_trt_conversion_params(
args.max_workspace_size,
args.precision,
args.minimum_segment_size,
args.batch_size,)
graph_func, times = get_graph_func(
input_saved_model_dir=args.input_saved_model_dir,
output_saved_model_dir=args.output_saved_model_dir,
preprocess_method=args.preprocess_method,
input_size=args.input_size,
conversion_params=params,
use_trt=args.use_trt,
calib_files=calib_files,
batch_size=args.batch_size,
num_calib_inputs=args.num_calib_inputs,
use_synthetic=args.use_synthetic,
optimize_offline=args.optimize_offline)
def print_dict(input_dict, prefix=' ', postfix=''):
for k, v in sorted(input_dict.items()):
print('{}{}: {}{}'.format(prefix, k, '%.1f'%v if isinstance(v, float) else v, postfix))
print('Benchmark arguments:')
print_dict(vars(args))
print('TensorRT Conversion Params:')
print_dict(dict(params._asdict()))
print('Conversion times:')
print_dict(times, postfix='s')
if args.use_fashion_mnist_data:
print('USING FASHION MNIST DATA')
results = run_inference(graph_func,
data_files=data_files,
batch_size=args.batch_size,
num_iterations=args.num_iterations,
num_warmup_iterations=args.num_warmup_iterations,
preprocess_method=args.preprocess_method,
input_size=args.input_size,
num_classes=args.num_classes,
use_synthetic=args.use_synthetic,
display_every=args.display_every,
mode=args.mode,
target_duration=args.target_duration,
use_fashion_mnist_data=args.use_fashion_mnist_data)
if args.mode == 'validation':
print(' accuracy: %.2f' % (results['accuracy'] * 100))
print(' images/sec: %d' % results['images_per_sec'])
print(' 99th_percentile(ms): %.2f' % results['99th_percentile'])
print(' total_time(s): %.1f' % results['total_time'])
print(' latency_mean(ms): %.2f' % results['latency_mean'])
print(' latency_median(ms): %.2f' % results['latency_median'])
print(' latency_min(ms): %.2f' % results['latency_min'])
|
py | b402e70d35efd10f28acfa0a0db137221dcc3199 | import _plotly_utils.basevalidators
class TicktextValidator(_plotly_utils.basevalidators.DataArrayValidator):
def __init__(
self, plotly_name="ticktext", parent_name="indicator.gauge.axis", **kwargs
):
super(TicktextValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
**kwargs,
)
|
py | b402e7905b352be0aa5b4439d6570dc54902f00a | from typing import FrozenSet, Tuple
import pysmt.typing as types
from pysmt.environment import Environment as PysmtEnv
from pysmt.fnode import FNode
from utils import symb_to_next
from hint import Hint, Location
def transition_system(env: PysmtEnv) -> Tuple[FrozenSet[FNode], FNode, FNode,
FNode]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
z = mgr.Symbol("z", types.INT)
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
x_z = symb_to_next(mgr, z)
symbols = frozenset([pc, x, y, z])
n_locs = 5
int_bound = n_locs
pcs = []
x_pcs = []
ints = [mgr.Int(i) for i in range(int_bound)]
for l in range(n_locs):
n = ints[l]
pcs.append(mgr.Equals(pc, n))
x_pcs.append(mgr.Equals(x_pc, n))
m_1 = mgr.Int(-1)
pcend = mgr.Equals(pc, m_1)
x_pcend = mgr.Equals(x_pc, m_1)
# initial location.
init = pcs[0]
# control flow graph.
cfg = mgr.And(
# pc = -1 : -1,
mgr.Implies(pcend, x_pcend),
# pc = 0 & !(y >= 1) : -1,
mgr.Implies(mgr.And(pcs[0], mgr.Not(mgr.GE(y, ints[1]))), x_pcend),
# pc = 0 & y >= 1 : 1,
mgr.Implies(mgr.And(pcs[0], mgr.GE(y, ints[1])), x_pcs[1]),
# pc = 1 & !(z >= 1) : -1,
mgr.Implies(mgr.And(pcs[1], mgr.Not(mgr.GE(z, ints[1]))), x_pcend),
# pc = 1 & z >= 1 : 2,
mgr.Implies(mgr.And(pcs[1], mgr.GE(z, ints[1])), x_pcs[2]),
# pc = 2 & !(x >= 0) : -1,
mgr.Implies(mgr.And(pcs[2], mgr.Not(mgr.GE(x, ints[0]))), x_pcend),
# pc = 2 & x >= 0 : 3,
mgr.Implies(mgr.And(pcs[2], mgr.GE(x, ints[0])), x_pcs[3]),
# pc = 3 : 4,
mgr.Implies(pcs[3], x_pcs[4]),
# pc = 4 : 2,
mgr.Implies(pcs[4], x_pcs[2]))
# transition labels.
labels = mgr.And(
# (pc = -1 & pc' = -1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcend, x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 0 & pc' = -1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[0], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 0 & pc' = 1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[0], x_pcs[1]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 1 & pc' = -1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[1], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 1 & pc' = 2) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[1], x_pcs[2]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 2 & pc' = -1) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[2], x_pcend),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 2 & pc' = 3) -> (x' = x & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[2], x_pcs[3]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, y),
mgr.Equals(x_z, z))),
# (pc = 3 & pc' = 4) -> (x' = y*z - 1 & y' = y & z' = z),
mgr.Implies(
mgr.And(pcs[3], x_pcs[4]),
mgr.And(mgr.Equals(x_x, mgr.Minus(mgr.Times(y, z), ints[1])),
mgr.Equals(x_y, y), mgr.Equals(x_z, z))),
# (pc = 4 & pc' = 2) -> (x' = x & y' = y+1 & z' = z),
mgr.Implies(
mgr.And(pcs[4], x_pcs[2]),
mgr.And(mgr.Equals(x_x, x), mgr.Equals(x_y, mgr.Plus(y, ints[1])),
mgr.Equals(x_z, z))))
# transition relation.
trans = mgr.And(cfg, labels)
# fairness.
fairness = mgr.Not(pcend)
return symbols, init, trans, fairness
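# The transition system above encodes, roughly, the following loop (pc is the location):
#   0: if not (y >= 1): exit       3: x = y*z - 1
#   1: if not (z >= 1): exit       4: y = y + 1; goto 2
#   2: if not (x >= 0): exit
# fairness = (pc != -1), i.e. a fair run never reaches the exit location -1.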
def hints(env: PysmtEnv) -> FrozenSet[Hint]:
assert isinstance(env, PysmtEnv)
mgr = env.formula_manager
pc = mgr.Symbol("pc", types.INT)
x = mgr.Symbol("x", types.INT)
y = mgr.Symbol("y", types.INT)
z = mgr.Symbol("z", types.INT)
symbs = frozenset([pc, x, y, z])
x_pc = symb_to_next(mgr, pc)
x_x = symb_to_next(mgr, x)
x_y = symb_to_next(mgr, y)
x_z = symb_to_next(mgr, z)
res = []
i_0 = mgr.Int(0)
i_1 = mgr.Int(1)
i_2 = mgr.Int(2)
i_3 = mgr.Int(3)
loc0 = Location(env, mgr.Equals(pc, i_1))
loc0.set_progress(1, mgr.Equals(x_pc, i_2))
loc1 = Location(env, mgr.Equals(pc, i_2))
loc1.set_progress(2, mgr.Equals(x_pc, i_3))
loc2 = Location(env, mgr.Equals(pc, i_3))
loc2.set_progress(0, mgr.Equals(x_pc, i_1))
h_pc = Hint("h_pc0", env, frozenset([pc]), symbs)
h_pc.set_locs([loc0, loc1, loc2])
res.append(h_pc)
loc0 = Location(env, mgr.GE(z, i_3))
loc0.set_progress(0, mgr.GT(x_z, z))
h_z = Hint("h_z1", env, frozenset([z]), symbs)
h_z.set_locs([loc0])
res.append(h_z)
loc0 = Location(env, mgr.GE(y, i_3))
loc0.set_progress(1, mgr.Equals(x_y, mgr.Plus(y, i_1)))
loc1 = Location(env, mgr.GE(y, i_3), mgr.GE(x, i_2))
loc1.set_progress(0, mgr.Equals(x_y, mgr.Plus(y, x)))
h_y = Hint("h_y3", env, frozenset([y]), symbs)
h_y.set_locs([loc0, loc1])
res.append(h_y)
loc0 = Location(env, mgr.GT(x, i_3), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1)))
loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1)))
loc1 = Location(env, mgr.GT(x, i_0), mgr.GE(y, i_1))
loc1.set_progress(0, mgr.Equals(x_x, mgr.Plus(x, y)))
h_x = Hint("h_x3", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1])
res.append(h_x)
loc0 = Location(env, mgr.Equals(pc, i_2))
loc0.set_progress(1, mgr.GT(x_pc, i_2))
loc1 = Location(env, mgr.GE(pc, i_3))
loc1.set_progress(0, mgr.Equals(x_pc, i_2))
h_pc = Hint("h_pc3", env, frozenset([pc]), symbs)
h_pc.set_locs([loc0, loc1])
res.append(h_pc)
loc0 = Location(env, mgr.GT(x, i_3), mgr.And(mgr.GT(y, i_1), mgr.GT(z, i_1)))
loc0.set_progress(1, mgr.GE(x_x, mgr.Minus(mgr.Times(y, z), i_1)))
loc1 = Location(env, mgr.GT(x, i_0), mgr.GE(y, i_1))
loc1.set_progress(2, mgr.Equals(x_x, mgr.Plus(x, y)))
loc2 = Location(env, mgr.GT(x, i_3))
loc2.set_progress(2, mgr.Equals(x_x, x))
h_x = Hint("h_x4", env, frozenset([x]), symbs)
h_x.set_locs([loc0, loc1, loc2])
res.append(h_x)
return frozenset(res)
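# Rough usage sketch (assuming the benchmark driver that imports this module):
#   env = PysmtEnv()
#   symbols, init, trans, fairness = transition_system(env)
#   candidate_hints = hints(env)
# The driver then searches for an infinite run of `trans` from `init` along which
# `fairness` holds infinitely often, using the Hint locations above to guide the search.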
|
py | b402e857276a29afa355f9d9f2de6ff4e4b93de5 | """~dict <search_term> will return the meaning and usage of <search_term>"""
import urllib, json
import requests
try:
from urllib import quote
except ImportError:
from urllib.request import quote
import re
def dict(word):
query = quote(word)
url = "http://api.urbandictionary.com/v0/define?term={0}".format(query)
    # urllib.urlopen() only exists on Python 2; use the already-imported requests instead
    data = requests.get(url).json()
try:
example = data["list"][0]["example"]
definition = data["list"][0]["definition"]
    except (KeyError, IndexError):
return "Sorry, this word doesn't exist!!"
answer = "definition : " + definition + "\n" + "example : " + example
return answer
def on_message(msg, server):
text = msg.get("text", "")
match = re.findall(r"~dict (.*)", text)
if not match:
return
return dict(match[0].encode("utf8"))
on_bot_message = on_message
|
py | b402e9bbaf7ff00342b097d9304edea625a09fac | # -*- coding: utf-8 -*-
"""
urlresolver XBMC Addon
Copyright (C) 2013 Bstrdsmkr
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
Adapted for use in xbmc from:
https://github.com/einars/js-beautify/blob/master/python/jsbeautifier/unpackers/packer.py
usage:
if detect(some_string):
unpacked = unpack(some_string)
Unpacker for Dean Edward's p.a.c.k.e.r
"""
import re
def detect(source):
"""Detects whether `source` is P.A.C.K.E.R. coded."""
source = source.replace(' ', '')
if re.search('eval\(function\(p,a,c,k,e,(?:r|d)', source):
return True
else:
return False
def unpack(source):
"""Unpacks P.A.C.K.E.R. packed js code."""
payload, symtab, radix, count = _filterargs(source)
if count != len(symtab):
raise UnpackingError('Malformed p.a.c.k.e.r. symtab.')
try:
unbase = Unbaser(radix)
except TypeError:
raise UnpackingError('Unknown p.a.c.k.e.r. encoding.')
def lookup(match):
"""Look up symbols in the synthetic symtab."""
word = match.group(0)
return symtab[unbase(word)] or word
source = re.sub(r'\b\w+\b', lookup, payload)
return _replacestrings(source)
def _filterargs(source):
"""Juice from a source file the four args needed by decoder."""
argsregex = (r"}\s*\('(.*)',\s*(.*?),\s*(\d+),\s*'(.*?)'\.split\('\|'\)")
args = re.search(argsregex, source, re.DOTALL).groups()
try:
payload, radix, count, symtab = args
radix = 36 if not radix.isdigit() else int(radix)
return payload, symtab.split('|'), radix, int(count)
except ValueError:
raise UnpackingError('Corrupted p.a.c.k.e.r. data.')
def _replacestrings(source):
"""Strip string lookup table (list) and replace values in source."""
match = re.search(r'var *(_\w+)\=\["(.*?)"\];', source, re.DOTALL)
if match:
varname, strings = match.groups()
startpoint = len(match.group(0))
lookup = strings.split('","')
variable = '%s[%%d]' % varname
for index, value in enumerate(lookup):
source = source.replace(variable % index, '"%s"' % value)
return source[startpoint:]
return source
class Unbaser(object):
"""Functor for a given base. Will efficiently convert
strings to natural numbers."""
ALPHABET = {
62: '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ',
95: (' !"#$%&\'()*+,-./0123456789:;<=>?@ABCDEFGHIJKLMNOPQRSTUVWXYZ'
'[\]^_`abcdefghijklmnopqrstuvwxyz{|}~')
}
def __init__(self, base):
self.base = base
# If base can be handled by int() builtin, let it do it for us
if 2 <= base <= 36:
self.unbase = lambda string: int(string, base)
else:
if base < 62:
self.ALPHABET[base] = self.ALPHABET[62][0:base]
elif 62 < base < 95:
self.ALPHABET[base] = self.ALPHABET[95][0:base]
# Build conversion dictionary cache
try:
self.dictionary = dict((cipher, index) for index, cipher in enumerate(self.ALPHABET[base]))
except KeyError:
raise TypeError('Unsupported base encoding.')
self.unbase = self._dictunbaser
def __call__(self, string):
return self.unbase(string)
def _dictunbaser(self, string):
"""Decodes a value to an integer."""
ret = 0
for index, cipher in enumerate(string[::-1]):
ret += (self.base ** index) * self.dictionary[cipher]
return ret
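# Sanity check (illustrative): an Unbaser matches int(s, base) for bases int() supports
# and extends the same idea to bases 62/95 via the ALPHABET table, e.g.
#   Unbaser(36)('10') == 36   (delegates to int('10', 36))
#   Unbaser(62)('10') == 62   (digit '1' in the 62s place, '0' in the units place)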
class UnpackingError(Exception):
"""Badly packed source or general error. Argument is a
meaningful description."""
pass
if __name__ == "__main__":
# test = '''eval(function(p,a,c,k,e,d){while(c--)if(k[c])p=p.replace(new RegExp('\\b'+c.toString(a)+'\\b','g'),k[c]);return p}('4(\'30\').2z({2y:\'5://a.8.7/i/z/y/w.2x\',2w:{b:\'2v\',19:\'<p><u><2 d="20" c="#17">2u 19.</2></u><16/><u><2 d="18" c="#15">2t 2s 2r 2q.</2></u></p>\',2p:\'<p><u><2 d="20" c="#17">2o 2n b.</2></u><16/><u><2 d="18" c="#15">2m 2l 2k 2j.</2></u></p>\',},2i:\'2h\',2g:[{14:"11",b:"5://a.8.7/2f/13.12"},{14:"2e",b:"5://a.8.7/2d/13.12"},],2c:"11",2b:[{10:\'2a\',29:\'5://v.8.7/t-m/m.28\'},{10:\'27\'}],26:{\'25-3\':{\'24\':{\'23\':22,\'21\':\'5://a.8.7/i/z/y/\',\'1z\':\'w\',\'1y\':\'1x\'}}},s:\'5://v.8.7/t-m/s/1w.1v\',1u:"1t",1s:"1r",1q:\'1p\',1o:"1n",1m:"1l",1k:\'5\',1j:\'o\',});l e;l k=0;l 6=0;4().1i(9(x){f(6>0)k+=x.r-6;6=x.r;f(q!=0&&k>=q){6=-1;4().1h();4().1g(o);$(\'#1f\').j();$(\'h.g\').j()}});4().1e(9(x){6=-1});4().1d(9(x){n(x)});4().1c(9(){$(\'h.g\').j()});9 n(x){$(\'h.g\').1b();f(e)1a;e=1;}',36,109,'||font||jwplayer|http|p0102895|me|vidto|function|edge3|file|color|size|vvplay|if|video_ad|div||show|tt102895|var|player|doPlay|false||21600|position|skin|test||static|1y7okrqkv4ji||00020|01|type|360p|mp4|video|label|FFFFFF|br|FF0000||deleted|return|hide|onComplete|onPlay|onSeek|play_limit_box|setFullscreen|stop|onTime|dock|provider|391|height|650|width|over|controlbar|5110|duration|uniform|stretching|zip|stormtrooper|213|frequency|prefix||path|true|enabled|preview|timeslidertooltipplugin|plugins|html5|swf|src|flash|modes|hd_default|3bjhohfxpiqwws4phvqtsnolxocychumk274dsnkblz6sfgq6uz6zt77gxia|240p|3bjhohfxpiqwws4phvqtsnolxocychumk274dsnkba36sfgq6uzy3tv2oidq|hd|original|ratio|broken|is|link|Your|such|No|nofile|more|any|availabe|Not|File|OK|previw|jpg|image|setup|flvplayer'.split('|')))'''
# test = '''eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('y.x(A(\'%0%f%b%9%1%d%8%8%o%e%B%c%0%e%d%0%f%w%1%7%3%2%p%d%1%n%2%1%c%0%t%0%f%7%8%8%d%5%6%1%7%e%b%l%7%1%2%e%9%q%c%0%6%1%z%2%0%f%b%1%9%c%0%s%6%6%l%G%4%4%5%5%5%k%b%7%5%8%o%i%2%k%6%i%4%2%3%p%2%n%4%5%7%6%9%s%4%j%q%a%h%a%3%a%E%a%3%D%H%9%K%C%I%m%r%g%h%L%v%g%u%F%r%g%3%J%3%j%3%m%h%4\'));',48,48,'22|72|65|6d|2f|77|74|61|6c|63|4e|73|3d|6f|6e|20|4d|32|76|59|2e|70|51|64|69|62|79|31|68|30|7a|34|66|write|document|75|unescape|67|4f|5a|57|55|3a|44|47|4a|78|49'.split('|'),0,{}))'''
# test = '''eval(function(p,a,c,k,e,d){e=function(c){return(c<a?'':e(parseInt(c/a)))+((c=c%a)>35?String.fromCharCode(c+29):c.toString(36))};if(!''.replace(/^/,String)){while(c--){d[e(c)]=k[c]||e(c)}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('x.w(z(\'%1%f%9%b%0%d%7%7%m%e%A%c%1%e%d%1%f%v%0%3%i%2%o%d%0%s%2%0%c%1%q%1%f%3%7%7%d%6%5%0%3%e%9%l%3%0%2%e%b%g%c%1%5%0%y%2%1%f%9%0%b%c%1%r%5%5%l%E%4%4%6%6%6%n%9%3%6%7%m%k%2%n%5%k%4%2%i%o%2%s%4%6%3%5%b%r%4%8%D%h%C%a%F%8%H%B%I%h%i%a%g%8%u%a%q%j%t%j%g%8%t%h%p%j%p%a%G%4\'));',45,45,'72|22|65|61|2f|74|77|6c|5a|73|55|63|3d|6f|6e|20|79|59|6d|4d|76|70|69|2e|62|7a|30|68|64|44|54|66|write|document|75|unescape|67|51|32|6a|3a|35|5f|47|34'.split('|'),0,{}))'''
test = '''eval(function(p,a,c,k,e,d){e=function(c){return c};if(!''.replace(/^/,String)){while(c--){d[c]=k[c]||c}k=[function(e){return d[e]}];e=function(){return'\\w+'};c=1};while(c--){if(k[c]){p=p.replace(new RegExp('\\b'+e(c)+'\\b','g'),k[c])}}return p}('7.6=8(1){9 1.3(1.4-2,1.4)+1.3(0,5)}',10,10,'|input||slice|length||k|cf|function|return'.split('|'),0,{}))'''
print(unpack(test))
|
py | b402e9c5d3e82ca0c9c85c5ad00e482b119a7bf0 | class Componente(object):
"""docstring for Componente"""
def __init__(self, nombre=''):
self.nombre = nombre
def __str__(self):
return 'Componente: {}'.format(self.nombre)
class Computadora(object):
"""docstring for Computadora"""
componentes = []
def __init__(self, marca, componentes=[]):
self.marca = marca
self.componentes = [Componente(i) for i in componentes]
def __str__(self):
return 'Computadora marca: {}\nEspecificaciones: {}'.format(self.marca, '\n'.join([str(i) for i in self.componentes]))
def set_componente(self, componente):
self.componentes.append(Componente(componente))
def presiona_boton(self):
print('Click!')
def manda_luz(self):
print('Pzzzzzzzt!')
def haz_sonido(self):
print('Beep Beep!')
def inicializa(self):
print('Inicializando componentes...\n' + '\n'.join([str(i) for i in self.componentes]))
def notifica(self):
print('Listo!')
def salir(self):
print("Apagando...")
def apaga_pantalla(self):
print("Se apaga pantalla")
def reiniciar(self):
print("Reiniciando...")
class ComputadoraMethFacade:
def __init__(self, marca, componentes):
self.computadora = Computadora(marca, componentes)
        # Start with the operations our machine will support; each operation chains the
        # corresponding Computadora calls, for example self.computadora.inicializa()
def encender(self):
print("Encendiendo computadora...\n")
self.computadora.presiona_boton()
self.computadora.manda_luz()
self.computadora.haz_sonido()
self.computadora.inicializa()
self.computadora.notifica()
def apagar(self):
print("Apagando computadora...\n")
self.computadora.salir()
self.computadora.haz_sonido()
self.computadora.apaga_pantalla()
self.computadora.presiona_boton()
def reiniciar(self):
print("Reiniciando computadora...\n")
self.computadora.reiniciar()
self.computadora.haz_sonido()
self.computadora.apaga_pantalla()
self.computadora.manda_luz()
self.computadora.haz_sonido()
self.computadora.inicializa()
self.computadora.notifica()
pass
def func(self, do):
self.encender() if do.get('encender') else "Fallo de operacion de encendido"
self.apagar() if do.get('apagar') else "Fallo de operacion de apagado"
self.reiniciar() if do.get('reiniciar') else "Fallo de operacion de reinicio"
def main():
marca='HP'
componentes=['RAM 4GB','OS 64bits']
do = { 'encender': False, 'apagar': True, 'reiniciar': True }
facade = ComputadoraMethFacade(marca, componentes)
facade.func(do)
if __name__ == "__main__":
main() |
py | b402ea494e72747cbd5494281c8dba8191d92157 | import time
import sys
commands = {
'OUTPUT_RESET': 'A2',
'OUTPUT_STOP': 'A3',
'OUTPUT_POWER': 'A4', # seems to max out around 0x1F with 0x20 backwards
'OUTPUT_SPEED': 'A5',
'OUTPUT_START': 'A6',
'OUTPUT_POLARITY': 'A7', # 0x01 forwards, 0x00 toggle, 0xFF backwards
}
motors = {
'A': 1,
'B': 2,
'C': 4,
'D': 8
}
def ev3motor(cmd,m,pwr):
motorhx = 0
for i in list(m):
motorhx += motors[i]
motorhx = "%0.2X" % motorhx
cmdhx = commands[cmd]
cmdstr = cmdhx + '00' + motorhx
print(cmdstr)
ev3motor('OUTPUT_START','AB','')
sys.exit()
# command to start motor on port A at speed 20
# 0C 00 00 00 80 00 00 A4 00 01 14 A6 00 01
# 12 0 0 0 128 0 0 164 0 1 20 166 0 1
#
# Length: 0C 00 -> 12
# Counter: 00 00 -> 0
# Reply: 80 -> No reply
# Variables: 00 00 -> None (?)
# Command: A4 -> opOUTPUT_POWER
# 00: Null block
# Motor: 01 -> A
# Value: 14 -> 20
# Command: A6 -> opOUTPUT_START
# 00: Null block
# Motor: 01 -> A
start_motor_str = '0C000000800000A400061FA60006'
start_motor = bytes.fromhex(start_motor_str)
change_motor_power_str = '09000000800000A70006FF'
change_motor_power = bytes.fromhex(change_motor_power_str)
# command to stop motor on port A
# 09 00 01 00 80 00 00 A3 00 01 00
# 9 0 1 0 128 0 0 163 0 1 0
#
# Length: 09 00 -> 9
# Counter: 01 00 -> 1
# Reply: 80 -> No reply
# Variables: 00 00 -> None (?)
# Command: A3 -> opOUTPUT_STOP
# 00: Null block
# Motor: 01 -> A
# Value: 00 -> Float
stop_motor_str = '09000100800000A3000600'
stop_motor = bytes.fromhex(stop_motor_str)
# send commands to EV3 via bluetooth
with open('/dev/tty.EV3-SerialPort', 'wb', 0) as bt:
bt.write(start_motor)
time.sleep(5)
bt.write(change_motor_power)
time.sleep(5)
bt.write(stop_motor)
|
py | b402ea56041e1962a600ebd1f066c57235aa7dd9 | """
MIT License
Copyright (c) 2020 André Lousa Marques <andre.lousa.marques at gmail.com>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
class PageModel():
create_task_button_id = "create_task"
task_table_id = "tasks"
|
py | b402eafc95326949af3f885131f14fc1214ade59 | # Copyright 2014 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
import mock
from six.moves import http_client
import requests
class Test__request(unittest.TestCase):
@staticmethod
def _call_fut(*args, **kwargs):
from google.cloud.datastore._http import _request
return _request(*args, **kwargs)
def test_success(self):
from google.cloud import _http as connection_module
from google.cloud.datastore._http import _CLIENT_INFO
project = "PROJECT"
method = "METHOD"
data = b"DATA"
base_url = "http://api-url"
response_data = "CONTENT"
http = _make_requests_session([_make_response(content=response_data)])
# Call actual function under test.
response = self._call_fut(http, project, method, data, base_url)
self.assertEqual(response, response_data)
# Check that the mocks were called as expected.
expected_url = _build_expected_url(base_url, project, method)
expected_headers = {
"Content-Type": "application/x-protobuf",
"User-Agent": connection_module.DEFAULT_USER_AGENT,
connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO,
}
http.request.assert_called_once_with(
method="POST", url=expected_url, headers=expected_headers, data=data
)
def test_failure(self):
from google.cloud.exceptions import BadRequest
from google.rpc import code_pb2
from google.rpc import status_pb2
project = "PROJECT"
method = "METHOD"
data = "DATA"
uri = "http://api-url"
error = status_pb2.Status()
error.message = "Entity value is indexed."
error.code = code_pb2.FAILED_PRECONDITION
http = _make_requests_session(
[_make_response(http_client.BAD_REQUEST, content=error.SerializeToString())]
)
with self.assertRaises(BadRequest) as exc:
self._call_fut(http, project, method, data, uri)
expected_message = "400 Entity value is indexed."
self.assertEqual(str(exc.exception), expected_message)
class Test__rpc(unittest.TestCase):
@staticmethod
def _call_fut(*args, **kwargs):
from google.cloud.datastore._http import _rpc
return _rpc(*args, **kwargs)
def test_it(self):
from google.cloud.datastore_v1.proto import datastore_pb2
http = object()
project = "projectOK"
method = "beginTransaction"
base_url = "test.invalid"
request_pb = datastore_pb2.BeginTransactionRequest(project_id=project)
response_pb = datastore_pb2.BeginTransactionResponse(transaction=b"7830rmc")
patch = mock.patch(
"google.cloud.datastore._http._request",
return_value=response_pb.SerializeToString(),
)
with patch as mock_request:
result = self._call_fut(
http,
project,
method,
base_url,
request_pb,
datastore_pb2.BeginTransactionResponse,
)
self.assertEqual(result, response_pb)
mock_request.assert_called_once_with(
http, project, method, request_pb.SerializeToString(), base_url
)
class TestHTTPDatastoreAPI(unittest.TestCase):
@staticmethod
def _get_target_class():
from google.cloud.datastore._http import HTTPDatastoreAPI
return HTTPDatastoreAPI
def _make_one(self, *args, **kwargs):
return self._get_target_class()(*args, **kwargs)
@staticmethod
def _make_query_pb(kind):
from google.cloud.datastore_v1.proto import query_pb2
return query_pb2.Query(kind=[query_pb2.KindExpression(name=kind)])
def test_constructor(self):
client = object()
ds_api = self._make_one(client)
self.assertIs(ds_api.client, client)
def test_lookup_single_key_empty_response(self):
from google.cloud.datastore_v1.proto import datastore_pb2
project = "PROJECT"
key_pb = _make_key_pb(project)
rsp_pb = datastore_pb2.LookupResponse()
read_options = datastore_pb2.ReadOptions()
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.lookup(project, [key_pb], read_options=read_options)
# Check the result and verify the callers.
self.assertEqual(response, rsp_pb)
uri = _build_expected_url(client._base_url, project, "lookup")
self.assertEqual(len(response.found), 0)
self.assertEqual(len(response.missing), 0)
self.assertEqual(len(response.deferred), 0)
request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest())
self.assertEqual(list(request.keys), [key_pb])
self.assertEqual(request.read_options, read_options)
def test_lookup_single_key_empty_response_w_eventual(self):
from google.cloud.datastore_v1.proto import datastore_pb2
project = "PROJECT"
key_pb = _make_key_pb(project)
rsp_pb = datastore_pb2.LookupResponse()
read_options = datastore_pb2.ReadOptions(
read_consistency=datastore_pb2.ReadOptions.EVENTUAL
)
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.lookup(project, [key_pb], read_options=read_options)
# Check the result and verify the callers.
self.assertEqual(response, rsp_pb)
uri = _build_expected_url(client._base_url, project, "lookup")
self.assertEqual(len(response.found), 0)
self.assertEqual(len(response.missing), 0)
self.assertEqual(len(response.deferred), 0)
request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest())
self.assertEqual(list(request.keys), [key_pb])
self.assertEqual(request.read_options, read_options)
def test_lookup_single_key_empty_response_w_transaction(self):
from google.cloud.datastore_v1.proto import datastore_pb2
project = "PROJECT"
transaction = b"TRANSACTION"
key_pb = _make_key_pb(project)
rsp_pb = datastore_pb2.LookupResponse()
read_options = datastore_pb2.ReadOptions(transaction=transaction)
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.lookup(project, [key_pb], read_options=read_options)
# Check the result and verify the callers.
self.assertEqual(response, rsp_pb)
uri = _build_expected_url(client._base_url, project, "lookup")
self.assertEqual(len(response.found), 0)
self.assertEqual(len(response.missing), 0)
self.assertEqual(len(response.deferred), 0)
request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest())
self.assertEqual(list(request.keys), [key_pb])
self.assertEqual(request.read_options, read_options)
def test_lookup_single_key_nonempty_response(self):
from google.cloud.datastore_v1.proto import datastore_pb2
from google.cloud.datastore_v1.proto import entity_pb2
project = "PROJECT"
key_pb = _make_key_pb(project)
rsp_pb = datastore_pb2.LookupResponse()
entity = entity_pb2.Entity()
entity.key.CopyFrom(key_pb)
rsp_pb.found.add(entity=entity)
read_options = datastore_pb2.ReadOptions()
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.lookup(project, [key_pb], read_options=read_options)
# Check the result and verify the callers.
self.assertEqual(response, rsp_pb)
uri = _build_expected_url(client._base_url, project, "lookup")
self.assertEqual(len(response.found), 1)
self.assertEqual(len(response.missing), 0)
self.assertEqual(len(response.deferred), 0)
found = response.found[0].entity
self.assertEqual(found.key.path[0].kind, "Kind")
self.assertEqual(found.key.path[0].id, 1234)
request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest())
self.assertEqual(list(request.keys), [key_pb])
self.assertEqual(request.read_options, read_options)
def test_lookup_multiple_keys_empty_response(self):
from google.cloud.datastore_v1.proto import datastore_pb2
project = "PROJECT"
key_pb1 = _make_key_pb(project)
key_pb2 = _make_key_pb(project, id_=2345)
rsp_pb = datastore_pb2.LookupResponse()
read_options = datastore_pb2.ReadOptions()
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.lookup(project, [key_pb1, key_pb2], read_options=read_options)
# Check the result and verify the callers.
self.assertEqual(response, rsp_pb)
uri = _build_expected_url(client._base_url, project, "lookup")
self.assertEqual(len(response.found), 0)
self.assertEqual(len(response.missing), 0)
self.assertEqual(len(response.deferred), 0)
request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest())
self.assertEqual(list(request.keys), [key_pb1, key_pb2])
self.assertEqual(request.read_options, read_options)
def test_lookup_multiple_keys_w_missing(self):
from google.cloud.datastore_v1.proto import datastore_pb2
project = "PROJECT"
key_pb1 = _make_key_pb(project)
key_pb2 = _make_key_pb(project, id_=2345)
rsp_pb = datastore_pb2.LookupResponse()
er_1 = rsp_pb.missing.add()
er_1.entity.key.CopyFrom(key_pb1)
er_2 = rsp_pb.missing.add()
er_2.entity.key.CopyFrom(key_pb2)
read_options = datastore_pb2.ReadOptions()
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.lookup(project, [key_pb1, key_pb2], read_options=read_options)
# Check the result and verify the callers.
self.assertEqual(response, rsp_pb)
uri = _build_expected_url(client._base_url, project, "lookup")
self.assertEqual(len(response.found), 0)
self.assertEqual(len(response.deferred), 0)
missing_keys = [result.entity.key for result in response.missing]
self.assertEqual(missing_keys, [key_pb1, key_pb2])
request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest())
self.assertEqual(list(request.keys), [key_pb1, key_pb2])
self.assertEqual(request.read_options, read_options)
def test_lookup_multiple_keys_w_deferred(self):
from google.cloud.datastore_v1.proto import datastore_pb2
project = "PROJECT"
key_pb1 = _make_key_pb(project)
key_pb2 = _make_key_pb(project, id_=2345)
rsp_pb = datastore_pb2.LookupResponse()
rsp_pb.deferred.add().CopyFrom(key_pb1)
rsp_pb.deferred.add().CopyFrom(key_pb2)
read_options = datastore_pb2.ReadOptions()
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.lookup(project, [key_pb1, key_pb2], read_options=read_options)
# Check the result and verify the callers.
self.assertEqual(response, rsp_pb)
uri = _build_expected_url(client._base_url, project, "lookup")
self.assertEqual(len(response.found), 0)
self.assertEqual(len(response.missing), 0)
self.assertEqual(list(response.deferred), [key_pb1, key_pb2])
request = _verify_protobuf_call(http, uri, datastore_pb2.LookupRequest())
self.assertEqual(list(request.keys), [key_pb1, key_pb2])
self.assertEqual(request.read_options, read_options)
def test_run_query_w_eventual_no_transaction(self):
from google.cloud.datastore_v1.proto import datastore_pb2
from google.cloud.datastore_v1.proto import entity_pb2
from google.cloud.datastore_v1.proto import query_pb2
project = "PROJECT"
kind = "Nonesuch"
cursor = b"\x00"
query_pb = self._make_query_pb(kind)
partition_id = entity_pb2.PartitionId(project_id=project)
read_options = datastore_pb2.ReadOptions(
read_consistency=datastore_pb2.ReadOptions.EVENTUAL
)
rsp_pb = datastore_pb2.RunQueryResponse(
batch=query_pb2.QueryResultBatch(
entity_result_type=query_pb2.EntityResult.FULL,
end_cursor=cursor,
more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS,
)
)
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.run_query(project, partition_id, read_options, query=query_pb)
# Check the result and verify the callers.
self.assertEqual(response, rsp_pb)
uri = _build_expected_url(client._base_url, project, "runQuery")
request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest())
self.assertEqual(request.partition_id, partition_id)
self.assertEqual(request.query, query_pb)
self.assertEqual(request.read_options, read_options)
def test_run_query_wo_eventual_w_transaction(self):
from google.cloud.datastore_v1.proto import datastore_pb2
from google.cloud.datastore_v1.proto import entity_pb2
from google.cloud.datastore_v1.proto import query_pb2
project = "PROJECT"
kind = "Nonesuch"
cursor = b"\x00"
transaction = b"TRANSACTION"
query_pb = self._make_query_pb(kind)
partition_id = entity_pb2.PartitionId(project_id=project)
read_options = datastore_pb2.ReadOptions(transaction=transaction)
rsp_pb = datastore_pb2.RunQueryResponse(
batch=query_pb2.QueryResultBatch(
entity_result_type=query_pb2.EntityResult.FULL,
end_cursor=cursor,
more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS,
)
)
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.run_query(project, partition_id, read_options, query=query_pb)
# Check the result and verify the callers.
self.assertEqual(response, rsp_pb)
uri = _build_expected_url(client._base_url, project, "runQuery")
request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest())
self.assertEqual(request.partition_id, partition_id)
self.assertEqual(request.query, query_pb)
self.assertEqual(request.read_options, read_options)
def test_run_query_wo_namespace_empty_result(self):
from google.cloud.datastore_v1.proto import datastore_pb2
from google.cloud.datastore_v1.proto import entity_pb2
from google.cloud.datastore_v1.proto import query_pb2
project = "PROJECT"
kind = "Nonesuch"
cursor = b"\x00"
query_pb = self._make_query_pb(kind)
partition_id = entity_pb2.PartitionId(project_id=project)
read_options = datastore_pb2.ReadOptions()
rsp_pb = datastore_pb2.RunQueryResponse(
batch=query_pb2.QueryResultBatch(
entity_result_type=query_pb2.EntityResult.FULL,
end_cursor=cursor,
more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS,
)
)
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.run_query(project, partition_id, read_options, query=query_pb)
# Check the result and verify the callers.
self.assertEqual(response, rsp_pb)
uri = _build_expected_url(client._base_url, project, "runQuery")
request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest())
self.assertEqual(request.partition_id, partition_id)
self.assertEqual(request.query, query_pb)
self.assertEqual(request.read_options, read_options)
def test_run_query_w_namespace_nonempty_result(self):
from google.cloud.datastore_v1.proto import datastore_pb2
from google.cloud.datastore_v1.proto import entity_pb2
from google.cloud.datastore_v1.proto import query_pb2
project = "PROJECT"
kind = "Kind"
namespace = "NS"
query_pb = self._make_query_pb(kind)
partition_id = entity_pb2.PartitionId(
project_id=project, namespace_id=namespace
)
read_options = datastore_pb2.ReadOptions()
rsp_pb = datastore_pb2.RunQueryResponse(
batch=query_pb2.QueryResultBatch(
entity_result_type=query_pb2.EntityResult.FULL,
entity_results=[query_pb2.EntityResult(entity=entity_pb2.Entity())],
more_results=query_pb2.QueryResultBatch.NO_MORE_RESULTS,
)
)
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.run_query(project, partition_id, read_options, query=query_pb)
# Check the result and verify the callers.
self.assertEqual(response, rsp_pb)
uri = _build_expected_url(client._base_url, project, "runQuery")
request = _verify_protobuf_call(http, uri, datastore_pb2.RunQueryRequest())
self.assertEqual(request.partition_id, partition_id)
self.assertEqual(request.query, query_pb)
def test_begin_transaction(self):
from google.cloud.datastore_v1.proto import datastore_pb2
project = "PROJECT"
transaction = b"TRANSACTION"
rsp_pb = datastore_pb2.BeginTransactionResponse()
rsp_pb.transaction = transaction
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.begin_transaction(project)
# Check the result and verify the callers.
self.assertEqual(response, rsp_pb)
uri = _build_expected_url(client._base_url, project, "beginTransaction")
request = _verify_protobuf_call(
http, uri, datastore_pb2.BeginTransactionRequest()
)
# The RPC-over-HTTP request does not set the project in the request.
self.assertEqual(request.project_id, u"")
def test_commit_wo_transaction(self):
from google.cloud.datastore_v1.proto import datastore_pb2
from google.cloud.datastore.helpers import _new_value_pb
project = "PROJECT"
key_pb = _make_key_pb(project)
rsp_pb = datastore_pb2.CommitResponse()
req_pb = datastore_pb2.CommitRequest()
mutation = req_pb.mutations.add()
insert = mutation.upsert
insert.key.CopyFrom(key_pb)
value_pb = _new_value_pb(insert, "foo")
value_pb.string_value = u"Foo"
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
rq_class = datastore_pb2.CommitRequest
ds_api = self._make_one(client)
mode = rq_class.NON_TRANSACTIONAL
result = ds_api.commit(project, mode, [mutation])
# Check the result and verify the callers.
self.assertEqual(result, rsp_pb)
uri = _build_expected_url(client._base_url, project, "commit")
request = _verify_protobuf_call(http, uri, rq_class())
self.assertEqual(request.transaction, b"")
self.assertEqual(list(request.mutations), [mutation])
self.assertEqual(request.mode, rq_class.NON_TRANSACTIONAL)
def test_commit_w_transaction(self):
from google.cloud.datastore_v1.proto import datastore_pb2
from google.cloud.datastore.helpers import _new_value_pb
project = "PROJECT"
key_pb = _make_key_pb(project)
rsp_pb = datastore_pb2.CommitResponse()
req_pb = datastore_pb2.CommitRequest()
mutation = req_pb.mutations.add()
insert = mutation.upsert
insert.key.CopyFrom(key_pb)
value_pb = _new_value_pb(insert, "foo")
value_pb.string_value = u"Foo"
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
rq_class = datastore_pb2.CommitRequest
ds_api = self._make_one(client)
mode = rq_class.TRANSACTIONAL
result = ds_api.commit(project, mode, [mutation], transaction=b"xact")
# Check the result and verify the callers.
self.assertEqual(result, rsp_pb)
uri = _build_expected_url(client._base_url, project, "commit")
request = _verify_protobuf_call(http, uri, rq_class())
self.assertEqual(request.transaction, b"xact")
self.assertEqual(list(request.mutations), [mutation])
self.assertEqual(request.mode, rq_class.TRANSACTIONAL)
def test_rollback_ok(self):
from google.cloud.datastore_v1.proto import datastore_pb2
project = "PROJECT"
transaction = b"xact"
rsp_pb = datastore_pb2.RollbackResponse()
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.rollback(project, transaction)
# Check the result and verify the callers.
self.assertEqual(response, rsp_pb)
uri = _build_expected_url(client._base_url, project, "rollback")
request = _verify_protobuf_call(http, uri, datastore_pb2.RollbackRequest())
self.assertEqual(request.transaction, transaction)
def test_allocate_ids_empty(self):
from google.cloud.datastore_v1.proto import datastore_pb2
project = "PROJECT"
rsp_pb = datastore_pb2.AllocateIdsResponse()
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.allocate_ids(project, [])
# Check the result and verify the callers.
self.assertEqual(response, rsp_pb)
self.assertEqual(list(response.keys), [])
uri = _build_expected_url(client._base_url, project, "allocateIds")
request = _verify_protobuf_call(http, uri, datastore_pb2.AllocateIdsRequest())
self.assertEqual(list(request.keys), [])
def test_allocate_ids_non_empty(self):
from google.cloud.datastore_v1.proto import datastore_pb2
project = "PROJECT"
before_key_pbs = [
_make_key_pb(project, id_=None),
_make_key_pb(project, id_=None),
]
after_key_pbs = [_make_key_pb(project), _make_key_pb(project, id_=2345)]
rsp_pb = datastore_pb2.AllocateIdsResponse()
rsp_pb.keys.add().CopyFrom(after_key_pbs[0])
rsp_pb.keys.add().CopyFrom(after_key_pbs[1])
# Create mock HTTP and client with response.
http = _make_requests_session(
[_make_response(content=rsp_pb.SerializeToString())]
)
client = mock.Mock(
_http=http, _base_url="test.invalid", spec=["_http", "_base_url"]
)
# Make request.
ds_api = self._make_one(client)
response = ds_api.allocate_ids(project, before_key_pbs)
# Check the result and verify the callers.
self.assertEqual(list(response.keys), after_key_pbs)
self.assertEqual(response, rsp_pb)
uri = _build_expected_url(client._base_url, project, "allocateIds")
request = _verify_protobuf_call(http, uri, datastore_pb2.AllocateIdsRequest())
self.assertEqual(len(request.keys), len(before_key_pbs))
for key_before, key_after in zip(before_key_pbs, request.keys):
self.assertEqual(key_before, key_after)
def _make_response(status=http_client.OK, content=b"", headers={}):
response = requests.Response()
response.status_code = status
response._content = content
response.headers = headers
response.request = requests.Request()
return response
def _make_requests_session(responses):
session = mock.create_autospec(requests.Session, instance=True)
session.request.side_effect = responses
return session
def _build_expected_url(api_base_url, project, method):
from google.cloud.datastore._http import API_VERSION
return "/".join([api_base_url, API_VERSION, "projects", project + ":" + method])
def _make_key_pb(project, id_=1234):
from google.cloud.datastore.key import Key
path_args = ("Kind",)
if id_ is not None:
path_args += (id_,)
return Key(*path_args, project=project).to_protobuf()
def _verify_protobuf_call(http, expected_url, pb):
from google.cloud import _http as connection_module
from google.cloud.datastore._http import _CLIENT_INFO
expected_headers = {
"Content-Type": "application/x-protobuf",
"User-Agent": connection_module.DEFAULT_USER_AGENT,
connection_module.CLIENT_INFO_HEADER: _CLIENT_INFO,
}
http.request.assert_called_once_with(
method="POST", url=expected_url, headers=expected_headers, data=mock.ANY
)
data = http.request.mock_calls[0][2]["data"]
pb.ParseFromString(data)
return pb
|
py | b402ebc66e50f9540f200322bd8e5e4aa0089300 | from typing import (
TYPE_CHECKING,
Any,
Callable,
Dict,
)
from eth_utils.curried import (
apply_formatter_at_index,
apply_formatter_if,
apply_formatters_to_dict,
is_null,
is_string,
)
from eth_utils.toolz import (
complement,
compose,
curry,
dissoc,
)
from hexbytes import (
HexBytes,
)
from web3._utils.formatters import (
hex_to_integer,
)
from web3._utils.rpc_abi import (
RPC,
)
from web3.exceptions import (
ExtraDataLengthError,
ValidationError,
)
from web3.middleware.formatting import (
async_construct_web3_formatting_middleware,
construct_web3_formatting_middleware,
)
from web3.types import (
AsyncMiddleware,
Formatters,
FormattersDict,
RPCEndpoint,
TxParams,
)
if TYPE_CHECKING:
from web3 import Web3 # noqa: F401
MAX_EXTRADATA_LENGTH = 32
is_not_null = complement(is_null)
to_integer_if_hex = apply_formatter_if(is_string, hex_to_integer)
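# Illustrative note (not part of the original module): `to_integer_if_hex` only
# touches string inputs, so hex-encoded JSON-RPC quantities become ints while
# values that are already ints pass through unchanged, e.g.
#     to_integer_if_hex("0x2a")  # -> 42
#     to_integer_if_hex(42)      # -> 42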
@curry
def _validate_chain_id(web3_chain_id: int, chain_id: int) -> int:
chain_id_int = to_integer_if_hex(chain_id)
if chain_id_int == web3_chain_id:
return chain_id
else:
raise ValidationError(
f"The transaction declared chain ID {chain_id_int!r}, "
f"but the connected node is on {web3_chain_id!r}"
)
def _check_extradata_length(val: Any) -> Any:
if not isinstance(val, (str, int, bytes)):
return val
result = HexBytes(val)
if len(result) > MAX_EXTRADATA_LENGTH:
raise ExtraDataLengthError(
f"The field extraData is {len(result)} bytes, but should be "
f"{MAX_EXTRADATA_LENGTH}. It is quite likely that you are "
"connected to a POA chain. Refer to "
"http://web3py.readthedocs.io/en/stable/middleware.html#geth-style-proof-of-authority "
f"for more details. The full extraData is: {result!r}"
)
return val
def _transaction_normalizer(transaction: TxParams) -> TxParams:
return dissoc(transaction, "chainId")
def _transaction_param_validator(web3_chain_id: int) -> Callable[..., Any]:
transactions_params_validators = {
"chainId": apply_formatter_if(
# Bypass `validate_chain_id` if chainId can't be determined
lambda _: is_not_null(web3_chain_id),
_validate_chain_id(web3_chain_id),
),
}
return apply_formatter_at_index(
apply_formatters_to_dict(transactions_params_validators), 0
)
BLOCK_VALIDATORS = {
"extraData": _check_extradata_length,
}
block_validator = apply_formatter_if(
is_not_null, apply_formatters_to_dict(BLOCK_VALIDATORS)
)
METHODS_TO_VALIDATE = [RPC.eth_sendTransaction, RPC.eth_estimateGas, RPC.eth_call]
def _chain_id_validator(web3_chain_id: int) -> Callable[..., Any]:
return compose(
apply_formatter_at_index(_transaction_normalizer, 0),
_transaction_param_validator(web3_chain_id),
)
def _build_formatters_dict(
request_formatters: Dict[RPCEndpoint, Any]
) -> FormattersDict:
return dict(
request_formatters=request_formatters,
result_formatters={
RPC.eth_getBlockByHash: block_validator,
RPC.eth_getBlockByNumber: block_validator,
},
)
# -- sync -- #
def build_method_validators(w3: "Web3", method: RPCEndpoint) -> FormattersDict:
request_formatters = {}
if RPCEndpoint(method) in METHODS_TO_VALIDATE:
w3_chain_id = w3.eth.chain_id
for method in METHODS_TO_VALIDATE:
request_formatters[method] = _chain_id_validator(w3_chain_id)
return _build_formatters_dict(request_formatters)
validation_middleware = construct_web3_formatting_middleware(build_method_validators)
# -- async --- #
async def async_build_method_validators(
async_w3: "Web3", method: RPCEndpoint
) -> FormattersDict:
request_formatters: Formatters = {}
if RPCEndpoint(method) in METHODS_TO_VALIDATE:
w3_chain_id = await async_w3.eth.chain_id # type: ignore
for method in METHODS_TO_VALIDATE:
request_formatters[method] = _chain_id_validator(w3_chain_id)
return _build_formatters_dict(request_formatters)
async def async_validation_middleware(
make_request: Callable[[RPCEndpoint, Any], Any], w3: "Web3"
) -> AsyncMiddleware:
middleware = await async_construct_web3_formatting_middleware(
async_build_method_validators
)
return await middleware(make_request, w3)
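# Usage sketch (an illustrative assumption, not part of the original module): the
# validation middleware ships in web3.py's default middleware stack, but it can
# also be attached explicitly to a provider-backed instance:
#
#     from web3 import Web3
#     w3 = Web3(Web3.HTTPProvider("http://localhost:8545"))  # hypothetical endpoint
#     w3.middleware_onion.add(validation_middleware, name="chain-id-validation")
#
# after which eth_sendTransaction/eth_call/eth_estimateGas requests have their
# `chainId` checked against the connected node and then stripped before dispatch.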
|
py | b402ed555807170683e926f7a5d108c0cc4964df | #
# CAMP
#
# Copyright (C) 2017 -- 2019 SINTEF Digital
# All rights reserved.
#
# This software may be modified and distributed under the terms
# of the MIT license. See the LICENSE file for details.
#
from camp.entities.model import Configuration, Instance
from camp.util import redirect_stderr_to
from ozepy import load_all_classes, DefineObject, ObjectVar, \
get_all_meta_facts, get_all_config_facts, cast_all_objects, \
generate_config_constraints, generate_meta_constraints, start_over, \
ObjectConst
from pkgutil import get_data
from yaml import load as load_yaml
from z3 import Optimize, sat
class Z3Problem(object):
@staticmethod
def from_model(model):
start_over()
context = Context()
context.load_metamodel()
context.load_model(model)
solver = Optimize()
generate_meta_constraints()
generate_config_constraints()
solver.add(*get_all_meta_facts())
solver.add(*get_all_config_facts())
context.declare(INTEGRITY_VARIABLES)
context.declare_helper_functions()
for each_constraint in INTEGRITY_CONSTRAINTS:
solver.add(context.evaluate(each_constraint))
for each_running_service in model.goals.services:
constraint = RUNNING_SERVICE.format(each_running_service.name)
solver.add(context.evaluate(constraint))
for each_constraint in model.constraints:
solver.add(context.evaluate(each_constraint))
for each_constraint in context.value_constraints:
solver.add(context.evaluate(each_constraint))
#print solver.sexpr()
return Z3Problem(model, context, solver)
def __init__(self, model, context, solver):
self._model = model
self._context = context
self._solver = solver
@redirect_stderr_to("z3_errors.log")
def all_solutions(self):
self._solver.push()
while self.has_solution():
yield self._solve()
@redirect_stderr_to("z3_errors.log")
def coverage(self):
self._solver.push()
while self.has_solution():
yield self._cover()
def has_solution(self):
return self._solver.check() == sat
def _cover(self):
z3_solution = cast_all_objects(self._solver.model())
#import pprint; pprint.pprint(z3_solution)
self._context.mark_as_covered(z3_solution)
self._solver.pop()
self._solver.push()
self._solver.add(self._context.evaluate(self._as_constraint(z3_solution)))
self._solver.push()
self._solver.add(self._context.coverage_constraint())
self._solver.maximize(self._context.coverage_gain())
return self._extract_from(z3_solution)
def _solve(self):
z3_solution = cast_all_objects(self._solver.model())
#import pprint; pprint.pprint(z3_solution)
self._solver.add(self._context.evaluate(self._as_constraint(z3_solution)))
return self._extract_from(z3_solution)
@staticmethod
def _as_constraint(z3_solution):
clauses = []
for key, item in z3_solution.items():
if "use_feature" in item:
if item["use_feature"] is not None:
clauses.append("%s.use_feature != %s" % \
(key, item["use_feature"]))
if item["partners"]:
for each_partner in item["partners"]:
clauses.append("%s.endpoint != %s" %
(each_partner, z3_solution[each_partner]["endpoint"]))
if item["configuration"]:
for each_value in item["configuration"]:
clauses.append("Not(%s.value == %s)" %\
(each_value, z3_solution[each_value]["value"]))
return "Or(" + ",".join(clauses) + ")"
def _extract_from(self, z3_solution):
instances = []
for _, item in z3_solution.items():
if "definition" in item:
component = self._model.resolve(item["definition"])
instances.append(Instance(item["name"], component))
result = Configuration(self._model, instances)
for _, item in z3_solution.items():
if "definition" in item:
instance = result.resolve(item["name"])
if "use_feature" in item and item["use_feature"]:
provider = result.resolve(item["use_feature"])
instance.feature_provider = provider
if "partners" in item:
providers = [result.resolve(z3_solution[each]["endpoint"]) \
for each in item["partners"]]
instance.service_providers = providers
if "configuration" in item:
configuration = []
for each_value in item["configuration"]:
variable_name = z3_solution[each_value]["variable"]
variable = None
for any_variable in instance.definition.variables:
if variable_name.endswith(any_variable.name):
variable = any_variable
break
else:
raise KeyError("Component '%s' has no variable named '%s'" %
(instance.definition.name,
variable_name))
value = variable.value_at(z3_solution[each_value]["value"])
configuration.append((variable, value))
instance.configuration = configuration
return result
class Context(object):
def __init__(self):
self._definitions = {}
self._definitions[self.COVERED_VALUES] = []
self._definitions[self.COVERED_COMPONENTS] = []
exec("from ozepy import Not, Implies, Or, And", self._definitions)
self._value_constraints = []
COVERED_VALUES = "covered_values"
COVERED_COMPONENTS = "covered_components"
@property
def value_constraints(self):
return self._value_constraints
def evaluate(self, constraint):
#print constraint.strip()
return eval(constraint.strip(), self._definitions)
def declare(self, declarations):
for type_name, variables in declarations:
for variable_name in variables:
self.define(variable_name, ObjectVar(self.find(type_name),
variable_name))
def declare_helper_functions(self):
def variable_helper(component_name, variable_name):
variable_name = self.qualified_name(component_name, variable_name)
variable_object = self.find(variable_name)
return variable_object
self.define("variable", variable_helper)
def load_metamodel(self):
data = get_data('camp', 'data/metamodel.yml')
metamodel = load_yaml(data)
metaclasses = load_all_classes(metamodel)
for each in metaclasses:
self._definitions[each.name] = each
def load_model(self, model):
self._define_all_services(model)
self._define_all_features(model)
self._define_all_components(model)
def _define_all_services(self, model):
for each_service in model.services:
z3_service = DefineObject(each_service.name,
self.find("Service"), suspended=True)
self.define(each_service.name, z3_service)
def _define_all_features(self, model):
for each_feature in model.features:
z3_feature = DefineObject(each_feature.name,
self.find("Feature"))
self.define(each_feature.name, z3_feature)
def _define_all_components(self, model):
for each_component in model.components:
z3_component = DefineObject(each_component.name,
self.find("Component"))
self.define(each_component.name, z3_component)
self._define_all_variables(each_component)
provide_services = [self.find(each.name) \
for each in each_component.provided_services]
z3_component.force_value("provide_services", provide_services)
require_services = [self.find(each.name) \
for each in each_component.required_services]
z3_component.force_value("require_services", require_services)
provide_features = [self.find(each.name) \
for each in each_component.provided_features]
z3_component.force_value("provide_features", provide_features)
require_features = [self.find(each.name) \
for each in each_component.required_features]
z3_component.force_value("require_features", require_features)
settings = [self.find(self.qualified_name(each_component.name,
each.name)) \
for each in each_component.variables]
z3_component.force_value("settings", settings)
self._instantiate(each_component, 1)
def _define_all_variables(self, component):
for each_variable in component.variables:
qualified_name = self.qualified_name(component.name,
each_variable.name)
z3_variable = DefineObject(qualified_name,
self.find("Variable"))
self.define(qualified_name, z3_variable)
@staticmethod
def qualified_name(*parts):
return "_".join(parts)
def _instantiate(self, component, count=1):
for index in range(count):
instance_name = component.name.lower() + "_%d" % index
z3_instance = DefineObject(instance_name,
self.find("CInstance"),
suspended=True)
self.define(instance_name, z3_instance)
z3_instance.force_value("definition", self.find(component.name))
# define partners
partners = []
for each_required_service in component.required_services:
partner_name = self.qualified_name(instance_name,
each_required_service.name)
z3_partner = DefineObject(partner_name,
self.find("Partner"),
suspended=True)
self.define(partner_name, z3_partner)
partners.append(z3_partner)
z3_partner.force_value("service",
self.find(each_required_service.name))
z3_instance.force_value("partners", partners)
values = []
for each_variable in component.variables:
qualified_variable_name = "%s_%s" % (component.name, each_variable.name)
value_name = "%s_%s" % (instance_name, each_variable.name)
z3_value = DefineObject(value_name,
self.find("Value"),
suspended=True)
self.define(value_name, z3_value)
if each_variable.domain:
if each_variable.value_type != "Integer":
self._value_constraints.append(
"And([%s.value >= 0, %s.value < %d])" % (value_name,
value_name,
len(each_variable.domain)))
else:
clauses = ",".join("%s.value == %d" %
(value_name, v) for v in each_variable.domain)
self._value_constraints.append("Or([" + clauses + "])")
z3_value.force_value("variable", self.find(qualified_variable_name))
values.append(z3_value)
z3_instance.force_value("configuration", values)
def define(self, key, value):
if key in self._definitions:
raise AssertionError("'%s' has already been defined!" % key)
self._definitions[key] = value
def find(self, identifier):
return self._definitions[identifier]
def __contains__(self, identifier):
return identifier in self._definitions
@property
def covered_components(self):
return self.find(self.COVERED_COMPONENTS)
@property
def covered_values(self):
return self.find(self.COVERED_VALUES)
def mark_as_covered(self, z3_solution):
for _, item in z3_solution.items():
if "definition" in item:
definition = self.find(item["definition"])
if not definition in self.covered_components:
self.covered_components.append(definition)
if "configuration" in item:
for each_value in item["configuration"]:
value = z3_solution[each_value]
if not value in self.covered_values:
self.covered_values.append((value["variable"], value["value"]))
def coverage_constraint(self):
template = ("Or([CInstance.exists(ci, ci.configuration.exists(val, And([%s]))),"
"CInstance.exists(ci, And([%s]))])")
values = ", ".join("Implies(val.variable == %s, val.value != %d)" % (variable, value) \
for variable, value in self.covered_values)
components = ", ".join("ci.definition != %s" % each \
for each in self.covered_components)
constraint = template % (values, components)
return self.evaluate(constraint)
def coverage_gain(self):
constraint = "0"
if self.covered_values:
constraint = "CInstance.filter(ci, ci.configuration.exists(val, And([%s]))).count()"
values = ", ".join("Implies(val.variable != %s, val.value != %d)" % (variable, value) \
for variable, value in self.covered_values)
constraint = constraint % values
constraint += " + CInstance.filter(ci, And([%s])).count()"
components = ", ".join("ci.definition != %s" % each for each in self.covered_components)
constraint = constraint % components
return self.evaluate(constraint)
INTEGRITY_VARIABLES = [
("CInstance", ["ci", "ci1", "ci2", "ci3", "ci4", "ci5", "spi"]),
("Feature", ["fr", "fp", "f1", "f2", "f3"]),
("Partner", ["partner"]),
("Service", ["service", "sr", "sp"]),
("Variable", ["var"]),
("Value", ["val", "val1", "val2"]),
("Component", ["cp"])
]
INTEGRITY_CONSTRAINTS = [
# There must be at least one instance
"""
CInstance.all_instances().count() > 0
""",
# -----
# DEFINITION of CInstance::stack
# No feature provider, no stack
"""
CInstance.forall(ci1,
Implies(
ci1.use_feature.undefined(),
ci1.stack.count() == 0))
""",
# The feature provider must be in the stack
"""
CInstance.forall(ci1,
Implies(
Not(ci1.use_feature.undefined()),
ci1.stack.exists(ci2, ci2 == ci1.use_feature)))
""",
# Stack Correctness: Any element in the stack is either the
# underlying feature provider or somewhere further down the stack
"""
CInstance.forall(ci1,
ci1.stack.forall(ci2,
Or(ci1.use_feature == ci2,
ci1.use_feature.stack.exists(ci3, ci3 == ci2))))
""",
    # Stack Completeness: Every element in my stack is also in the stack of
# the element above me in the stack
"""
CInstance.forall(ci1,
CInstance.forall(ci2,
Implies(
ci2.use_feature == ci1,
And(
ci2.stack.exists(ci3, ci3 == ci1),
ci1.stack.forall(ci4,
ci2.stack.exists(ci5, ci5 == ci4))))))
""",
# No cycle in the deployment structure
"""
CInstance.forall(ci1,
Not(ci1.stack.exists(ci2, ci2 == ci1)))
""",
# Service bindings
# An instance cannot use its own services
"""
CInstance.forall(ci, Not(ci.partners.exists(
partner, partner.endpoint == ci)))
""",
# STACK CONSTRUCTION THROUGH FEATURES
# Instances that do not require features cannot have a feature_provider
"""
CInstance.forall(ci,
Implies(
ci.definition.require_features.count() == 0,
ci.use_feature.undefined()))
""",
# Instances that do require features must have one feature_provider that
# provides all the required features
"""
CInstance.forall(ci1,
ci1.definition.require_features.forall(f1,
CInstance.exists(ci2,
And(
ci2 == ci1.use_feature,
Or(
ci2.definition.provide_features.exists(f2, f2 == f1),
ci2.stack.exists(ci3,
ci3.definition.provide_features.exists(f3, f3 == f1)))))))
""",
# There shall not be two components providing the same feature in a
# given stack (See Issue 75)
"""
CInstance.forall(ci1,
ci1.definition.provide_features.forall(f1,
Not(
ci1.stack.exists(ci2,
ci2.definition.provide_features.exists(f2, f2 == f1)))))
""",
# All partner shall connect to an endpoint that provides the requested
# service
"""
Partner.forall(partner,
partner.endpoint.definition.provide_services.exists(service,
service == partner.service))
""",
# Instances that do not require services cannot have any
# service provider
"""
CInstance.forall(ci, Implies(
ci["definition"]["require_services"].count() == 0,
ci["partners"].count() == 0))
""",
# Only one pending service
"""
CInstance.filter(ci1,
And([ci1.definition.provide_services.count() > 0,
CInstance.forall(ci2, ci2.partners.forall(partner,
partner.endpoint != ci1))])).count() == 1
""",
# No pending instances
# """
# CInstance.forall(ci1,
# Or([
# Not(ci1.use_feature.undefined()),
# ci1.partners.count() > 0,
# CInstance.exists(ci2,
# Or([ci2.use_feature == ci1,
# ci2.partners.exists(partner, partner.endpoint == ci1)]))]))
# """
]
RUNNING_SERVICE = """CInstance.filter(ci, ci["definition"].provide_services.exists
( sp, sp == {})).count() == 1"""
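# Usage sketch (an assumption about the calling code, not part of this module):
# given a `model` produced by the CAMP front-end, the solver is typically driven as
#
#     problem = Z3Problem.from_model(model)
#     for configuration in problem.coverage():   # or problem.all_solutions()
#         print(configuration)
#
# where each yielded object is a camp.entities.model.Configuration built by
# _extract_from() above.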
|
py | b402ee0d26492f2a156349dbbea23012e84197a9 | #!/usr/bin/env python
#
# Copyright 2018 Espressif Systems (Shanghai) PTE LTD
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import argparse
import os
import re
import sys
try:
import pkg_resources
except Exception:
print('pkg_resources cannot be imported probably because the pip package is not installed and/or using a '
'legacy Python interpreter. Please refer to the Get Started section of the ESP-IDF Programming Guide for '
'setting up the required packages.')
sys.exit(1)
def escape_backslash(path):
if sys.platform == "win32":
# escaped backslashes are necessary in order to be able to copy-paste the printed path
return path.replace("\\", "\\\\")
else:
return path
if __name__ == "__main__":
idf_path = os.getenv("IDF_PATH")
default_requirements_path = os.path.join(idf_path, 'requirements.txt')
parser = argparse.ArgumentParser(description='ESP-IDF Python package dependency checker')
parser.add_argument('--requirements', '-r',
help='Path to the requirements file',
default=default_requirements_path)
args = parser.parse_args()
not_satisfied = []
with open(args.requirements) as f:
for line in f:
line = line.strip()
# pkg_resources.require() cannot handle the full requirements file syntax so we need to make
# adjustments for options which we use.
if line.startswith('file://'):
line = os.path.basename(line)
if line.startswith('-e') and '#egg=' in line: # version control URLs, take the egg= part at the end only
line = re.search(r'#egg=([^\s]+)', line).group(1)
try:
pkg_resources.require(line)
except Exception:
not_satisfied.append(line)
if len(not_satisfied) > 0:
print('The following Python requirements are not satisfied:')
for requirement in not_satisfied:
print(requirement)
if os.path.realpath(args.requirements) != os.path.realpath(default_requirements_path):
# we're using this script to check non-default requirements.txt, so tell the user to run pip
print('Please check the documentation for the feature you are using, or run "%s -m pip install -r %s"' % (sys.executable, args.requirements))
elif os.environ.get('IDF_PYTHON_ENV_PATH'):
# We are running inside a private virtual environment under IDF_TOOLS_PATH,
# ask the user to run install.bat again.
if sys.platform == "win32" and not os.environ.get("MSYSTEM"):
install_script = 'install.bat'
else:
install_script = 'install.sh'
print('To install the missing packages, please run "%s"' % os.path.join(idf_path, install_script))
elif sys.platform == "win32" and os.environ.get("MSYSTEM", None) == "MINGW32" and "/mingw32/bin/python" in sys.executable:
print("The recommended way to install a packages is via \"pacman\". Please run \"pacman -Ss <package_name>\" for"
" searching the package database and if found then "
"\"pacman -S mingw-w64-i686-python{}-<package_name>\" for installing it.".format(sys.version_info[0],))
print("NOTE: You may need to run \"pacman -Syu\" if your package database is older and run twice if the "
"previous run updated \"pacman\" itself.")
print("Please read https://github.com/msys2/msys2/wiki/Using-packages for further information about using "
"\"pacman\"")
# Special case for MINGW32 Python, needs some packages
# via MSYS2 not via pip or system breaks...
for requirement in not_satisfied:
if requirement.startswith('cryptography'):
print("WARNING: The cryptography package have dependencies on system packages so please make sure "
"you run \"pacman -Syu\" followed by \"pacman -S mingw-w64-i686-python{}-cryptography\"."
"".format(sys.version_info[0],))
continue
elif requirement.startswith('setuptools'):
print("Please run the following command to install MSYS2's MINGW Python setuptools package:")
print("pacman -S mingw-w64-i686-python{}-setuptools".format(sys.version_info[0],))
continue
else:
print('Please follow the instructions found in the "Set up the tools" section of '
'ESP-IDF Getting Started Guide')
print('Diagnostic information:')
idf_python_env_path = os.environ.get('IDF_PYTHON_ENV_PATH')
print(' IDF_PYTHON_ENV_PATH: {}'.format(idf_python_env_path or '(not set)'))
print(' Python interpreter used: {}'.format(sys.executable))
        if not idf_python_env_path or idf_python_env_path not in sys.executable:
print(' Warning: python interpreter not running from IDF_PYTHON_ENV_PATH')
print(' PATH: {}'.format(os.getenv('PATH')))
sys.exit(1)
print('Python requirements from {} are satisfied.'.format(args.requirements))
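# Example invocation (illustrative; the paths are assumptions, not from the original
# script). With IDF_PATH pointing at an ESP-IDF checkout, the checker can be run
# against the default or a custom requirements file:
#
#     export IDF_PATH=~/esp/esp-idf
#     python tools/check_python_dependencies.py -r "$IDF_PATH/requirements.txt"
#
# The script exits with status 1 when any listed requirement is unsatisfied.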
|
py | b402ee3115bbf775463bf738e1d05a5f9ed6318b | from extruct.w3cmicrodata import MicrodataExtractor
import scrapy
from scrapy.spiders import SitemapSpider
import datetime
class BioschemasSpider(SitemapSpider):
target_types = ['http://schema.org/Event']
name = 'https://tess.elixir-europe.org/events'
sitemap_urls = ['https://tess.elixir-europe.org/sitemaps/events.xml']
custom_settings = {
'ITEM_PIPELINES': {
'bioschemas_scraper.pipelines.ElasticPipeline': 100
}
}
def parse(self, response):
mde = MicrodataExtractor()
data = mde.extract(response.body)
for item in data:
if item['type'] in self.target_types:
record = {'indexed_date': datetime.date.today().isoformat(), 'url': response.url, 'body': item}
yield record
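# Run sketch (an assumption, not part of the original spider): from within the
# bioschemas_scraper project the crawl can be started with Scrapy's CLI using the
# URL-style spider name defined above, e.g.
#
#     scrapy crawl "https://tess.elixir-europe.org/events"
#
# Extracted schema.org Event items are then forwarded to the ElasticPipeline
# configured in custom_settings.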
|
py | b402ee3b44b83dc7855c8bdba9b8c73aec9c9065 | # -*- coding: utf-8 -*-
"""
TencentBlueKing is pleased to support the open source community by making 蓝鲸智云-节点管理(BlueKing-BK-NODEMAN) available.
Copyright (C) 2017-2021 THL A29 Limited, a Tencent company. All rights reserved.
Licensed under the MIT License (the "License"); you may not use this file except in compliance with the License.
You may obtain a copy of the License at https://opensource.org/licenses/MIT
Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
specific language governing permissions and limitations under the License.
"""
|
py | b402ef3acd5d89f9ff117f4f7590ac6326ab69b4 | # Copyright 2019 The Feast Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from collections import OrderedDict
from typing import Dict
from typing import List, Optional
import pandas as pd
import pyarrow as pa
from feast.core.FeatureSet_pb2 import FeatureSet as FeatureSetProto
from feast.core.FeatureSet_pb2 import FeatureSetMeta as FeatureSetMetaProto
from feast.core.FeatureSet_pb2 import FeatureSetSpec as FeatureSetSpecProto
from feast.entity import Entity
from feast.feature import Feature, Field
from feast.loaders import yaml as feast_yaml
from feast.source import Source
from feast.type_map import DATETIME_COLUMN
from feast.type_map import pa_to_feast_value_type
from feast.type_map import python_type_to_feast_value_type
from google.protobuf import json_format
from google.protobuf.duration_pb2 import Duration
from google.protobuf.json_format import MessageToJson
from pandas.api.types import is_datetime64_ns_dtype
from pyarrow.lib import TimestampType
class FeatureSet:
"""
Represents a collection of features and associated metadata.
"""
def __init__(
self,
name: str,
features: List[Feature] = None,
entities: List[Entity] = None,
source: Source = None,
max_age: Optional[Duration] = None
):
self._name = name
self._fields = OrderedDict() # type: Dict[str, Field]
if features is not None:
self.features = features
if entities is not None:
self.entities = entities
if source is None:
self._source = None
else:
self._source = source
self._max_age = max_age
self._version = None
self._client = None
self._status = None
self._created_timestamp = None
def __eq__(self, other):
if not isinstance(other, FeatureSet):
return NotImplemented
for key in self.fields.keys():
if key not in other.fields.keys() or self.fields[key] != other.fields[key]:
return False
if self.name != other.name or self.max_age != other.max_age:
return False
return True
def __str__(self):
return str(MessageToJson(self.to_proto()))
def __repr__(self):
shortname = "" + self._name
if self._version:
shortname += ":" + str(self._version).strip()
return shortname
@property
def fields(self) -> Dict[str, Field]:
"""
Returns a dict of fields from this feature set
"""
return self._fields
@property
def features(self) -> List[Feature]:
"""
Returns a list of features from this feature set
"""
return [field for field in self._fields.values() if isinstance(field, Feature)]
@features.setter
def features(self, features: List[Feature]):
"""
Sets the active features within this feature set
Args:
features: List of feature objects
"""
for feature in features:
if not isinstance(feature, Feature):
raise Exception("object type is not a Feature: " + str(type(feature)))
for key in list(self._fields.keys()):
if isinstance(self._fields[key], Feature):
del self._fields[key]
if features is not None:
self._add_fields(features)
@property
def entities(self) -> List[Entity]:
"""
Returns list of entities from this feature set
"""
return [field for field in self._fields.values() if isinstance(field, Entity)]
@entities.setter
def entities(self, entities: List[Entity]):
"""
Sets the active entities within this feature set
Args:
            entities: List of Entity objects
"""
for entity in entities:
if not isinstance(entity, Entity):
raise Exception("object type is not na Entity: " + str(type(entity)))
for key in list(self._fields.keys()):
if isinstance(self._fields[key], Entity):
del self._fields[key]
if entities is not None:
self._add_fields(entities)
@property
def name(self):
"""
Returns the name of this feature set
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this feature set
"""
self._name = name
@property
def source(self):
"""
Returns the source of this feature set
"""
return self._source
@source.setter
def source(self, source: Source):
"""
Sets the source of this feature set
"""
self._source = source
@property
def version(self):
"""
Returns the version of this feature set
"""
return self._version
@version.setter
def version(self, version):
"""
Sets the version of this feature set
"""
self._version = version
@property
def max_age(self):
"""
Returns the maximum age of this feature set. This is the total maximum
amount of staleness that will be allowed during feature retrieval for
each specific feature row that is looked up.
"""
return self._max_age
@max_age.setter
def max_age(self, max_age):
"""
Set the maximum age for this feature set
"""
self._max_age = max_age
@property
def status(self):
"""
Returns the status of this feature set
"""
return self._status
@status.setter
def status(self, status):
"""
Sets the status of this feature set
"""
self._status = status
@property
def created_timestamp(self):
"""
Returns the created_timestamp of this feature set
"""
return self._created_timestamp
@created_timestamp.setter
def created_timestamp(self, created_timestamp):
"""
Sets the status of this feature set
"""
self._created_timestamp = created_timestamp
def add(self, resource):
"""
Adds a resource (Feature, Entity) to this Feature Set.
Does not register the updated Feature Set with Feast Core
Args:
resource: A resource can be either a Feature or an Entity object
"""
if resource.name in self._fields.keys():
raise ValueError(
'could not add field "'
+ resource.name
+ '" since it already exists in feature set "'
+ self._name
+ '"'
)
if issubclass(type(resource), Field):
return self._set_field(resource)
raise ValueError("Could not identify the resource being added")
def _set_field(self, field: Field):
self._fields[field.name] = field
return
def drop(self, name: str):
"""
Removes a Feature or Entity from a Feature Set. This does not apply
any changes to Feast Core until the apply() method is called.
Args:
name: Name of Feature or Entity to be removed
"""
if name not in self._fields:
raise ValueError("Could not find field " + name + ", no action taken")
        del self._fields[name]
return
def _add_fields(self, fields: List[Field]):
"""
Adds multiple Fields to a Feature Set
Args:
fields: List of Field (Feature or Entity) Objects
"""
for field in fields:
self.add(field)
def infer_fields_from_df(
self,
df: pd.DataFrame,
entities: Optional[List[Entity]] = None,
features: Optional[List[Feature]] = None,
replace_existing_features: bool = False,
replace_existing_entities: bool = False,
discard_unused_fields: bool = False,
rows_to_sample: int = 100,
):
"""
Adds fields (Features or Entities) to a feature set based on the schema
        of a DataFrame. Only pandas DataFrames are supported. All columns are
detected as features, so setting at least one entity manually is
advised.
Args:
df: Pandas dataframe to read schema from
entities: List of entities that will be set manually and not
inferred. These will take precedence over any existing entities
or entities found in the dataframe.
features: List of features that will be set manually and not
inferred. These will take precedence over any existing feature
or features found in the dataframe.
replace_existing_features: If true, will replace
existing features in this feature set with features found in
dataframe. If false, will skip conflicting features.
replace_existing_entities: If true, will replace existing entities
in this feature set with features found in dataframe. If false,
will skip conflicting entities.
discard_unused_fields: Boolean flag. Setting this to True will
discard any existing fields that are not found in the dataset or
provided by the user
rows_to_sample: Number of rows to sample to infer types. All rows
must have consistent types, even values within list types must
be homogeneous
"""
if entities is None:
entities = list()
if features is None:
features = list()
# Validate whether the datetime column exists with the right name
if DATETIME_COLUMN not in df:
raise Exception("No column 'datetime'")
# Validate the data type for the datetime column
if not is_datetime64_ns_dtype(df.dtypes[DATETIME_COLUMN]):
raise Exception(
"Column 'datetime' does not have the correct type: datetime64[ns]"
)
# Create dictionary of fields that will not be inferred (manually set)
provided_fields = OrderedDict()
for field in entities + features:
if not isinstance(field, Field):
raise Exception(f"Invalid field object type provided {type(field)}")
if field.name not in provided_fields:
provided_fields[field.name] = field
else:
raise Exception(f"Duplicate field name detected {field.name}.")
new_fields = self._fields.copy()
output_log = ""
# Add in provided fields
for name, field in provided_fields.items():
            if name in new_fields.keys():
                upsert_message = "updated (replacing an existing field)"
            else:
                upsert_message = "created"
output_log += (
f"{type(field).__name__} {field.name}"
f"({field.dtype}) manually {upsert_message}.\n"
)
new_fields[name] = field
# Iterate over all of the columns and create features
for column in df.columns:
column = column.strip()
# Skip datetime column
if DATETIME_COLUMN in column:
continue
# Skip user provided fields
if column in provided_fields.keys():
continue
# Only overwrite conflicting fields if replacement is allowed
if column in new_fields:
if (
isinstance(self._fields[column], Feature)
and not replace_existing_features
):
continue
if (
isinstance(self._fields[column], Entity)
and not replace_existing_entities
):
continue
# Store this field as a feature
new_fields[column] = Feature(
name=column,
                dtype=self._infer_pd_column_type(column, df[column], rows_to_sample),
)
output_log += f"{type(new_fields[column]).__name__} {new_fields[column].name} ({new_fields[column].dtype}) added from dataframe.\n"
# Discard unused fields from feature set
if discard_unused_fields:
keys_to_remove = []
for key in new_fields.keys():
if not (key in df.columns or key in provided_fields.keys()):
output_log += f"{type(new_fields[key]).__name__} {new_fields[key].name} ({new_fields[key].dtype}) removed because it is unused.\n"
keys_to_remove.append(key)
for key in keys_to_remove:
del new_fields[key]
# Update feature set
self._fields = new_fields
print(output_log)
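    # Usage sketch (illustrative, not from the original source); the Entity and
    # ValueType import paths are assumptions about the surrounding SDK:
    #
    #     import pandas as pd
    #     from feast.entity import Entity
    #     from feast.value_type import ValueType
    #
    #     df = pd.DataFrame({
    #         "datetime": pd.to_datetime(["2019-01-01T00:00:00"]),
    #         "customer_id": [1001],
    #         "total_spend": [12.5],
    #     })
    #     fs = FeatureSet("customer_transactions")
    #     fs.infer_fields_from_df(
    #         df, entities=[Entity("customer_id", ValueType.INT64)]
    #     )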
def infer_fields_from_pa(
self, table: pa.lib.Table,
entities: Optional[List[Entity]] = None,
features: Optional[List[Feature]] = None,
replace_existing_features: bool = False,
replace_existing_entities: bool = False,
discard_unused_fields: bool = False
) -> None:
"""
Adds fields (Features or Entities) to a feature set based on the schema
of a PyArrow table. Only PyArrow tables are supported. All columns are
detected as features, so setting at least one entity manually is
advised.
Args:
table (pyarrow.lib.Table):
PyArrow table to read schema from.
entities (Optional[List[Entity]]):
List of entities that will be set manually and not inferred.
These will take precedence over any existing entities or
entities found in the PyArrow table.
features (Optional[List[Feature]]):
List of features that will be set manually and not inferred.
These will take precedence over any existing feature or features
found in the PyArrow table.
replace_existing_features (bool):
Boolean flag. If true, will replace existing features in this
feature set with features found in dataframe. If false, will
skip conflicting features.
replace_existing_entities (bool):
Boolean flag. If true, will replace existing entities in this
feature set with features found in dataframe. If false, will
skip conflicting entities.
discard_unused_fields (bool):
Boolean flag. Setting this to True will discard any existing
fields that are not found in the dataset or provided by the
user.
Returns:
None:
None
"""
if entities is None:
entities = list()
if features is None:
features = list()
# Validate whether the datetime column exists with the right name
if DATETIME_COLUMN not in table.column_names:
raise Exception("No column 'datetime'")
        # Validate the data type for the datetime column
if not isinstance(table.column(DATETIME_COLUMN).type, TimestampType):
raise Exception(
"Column 'datetime' does not have the correct type: datetime64[ms]"
)
# Create dictionary of fields that will not be inferred (manually set)
provided_fields = OrderedDict()
for field in entities + features:
if not isinstance(field, Field):
raise Exception(f"Invalid field object type provided {type(field)}")
if field.name not in provided_fields:
provided_fields[field.name] = field
else:
raise Exception(f"Duplicate field name detected {field.name}.")
new_fields = self._fields.copy()
output_log = ""
# Add in provided fields
for name, field in provided_fields.items():
            if name in new_fields.keys():
                upsert_message = "updated (replacing an existing field)"
            else:
                upsert_message = "created"
output_log += (
f"{type(field).__name__} {field.name}"
f"({field.dtype}) manually {upsert_message}.\n"
)
new_fields[name] = field
# Iterate over all of the column names and create features
for column in table.column_names:
column = column.strip()
# Skip datetime column
if DATETIME_COLUMN in column:
continue
# Skip user provided fields
if column in provided_fields.keys():
continue
# Only overwrite conflicting fields if replacement is allowed
if column in new_fields:
if (
isinstance(self._fields[column], Feature)
and not replace_existing_features
):
continue
if (
isinstance(self._fields[column], Entity)
and not replace_existing_entities
):
continue
            # Store this field as a feature
# TODO: (Minor) Change the parameter name from dtype to patype
new_fields[column] = Feature(
name=column,
dtype=self._infer_pa_column_type(table.column(column))
)
output_log += f"{type(new_fields[column]).__name__} {new_fields[column].name} ({new_fields[column].dtype}) added from PyArrow Table.\n"
# Discard unused fields from feature set
if discard_unused_fields:
keys_to_remove = []
for key in new_fields.keys():
if not (key in table.column_names or key in provided_fields.keys()):
output_log += f"{type(new_fields[key]).__name__} {new_fields[key].name} ({new_fields[key].dtype}) removed because it is unused.\n"
keys_to_remove.append(key)
for key in keys_to_remove:
del new_fields[key]
# Update feature set
self._fields = new_fields
print(output_log)
def _infer_pd_column_type(self, column, series, rows_to_sample):
dtype = None
sample_count = 0
# Loop over all rows for this column to infer types
for key, value in series.iteritems():
sample_count += 1
# Stop sampling at the row limit
if sample_count > rows_to_sample:
continue
# Infer the specific type for this row
current_dtype = python_type_to_feast_value_type(name=column, value=value)
# Make sure the type is consistent for column
if dtype:
if dtype != current_dtype:
raise ValueError(
f"Type mismatch detected in column {column}. Both "
f"the types {current_dtype} and {dtype} "
f"have been found."
)
else:
# Store dtype in field to type map if it isn't already
dtype = current_dtype
return dtype
def _infer_pa_column_type(self, column: pa.lib.ChunkedArray):
"""
Infers the PyArrow column type.
:param column: Column from a PyArrow table
:type column: pa.lib.ChunkedArray
:return:
:rtype:
"""
# Validates the column to ensure that value types are consistent
column.validate()
return pa_to_feast_value_type(column)
def _update_from_feature_set(self, feature_set):
"""
Deep replaces one feature set with another
Args:
feature_set: Feature set to use as a source of configuration
"""
self.name = feature_set.name
self.version = feature_set.version
self.source = feature_set.source
self.max_age = feature_set.max_age
self.features = feature_set.features
self.entities = feature_set.entities
self.source = feature_set.source
self.status = feature_set.status
self.created_timestamp = feature_set.created_timestamp
def get_kafka_source_brokers(self) -> str:
"""
Get the broker list for the source in this feature set
"""
if self.source and self.source.source_type == "Kafka":
return self.source.brokers
raise Exception("Source type could not be identified")
def get_kafka_source_topic(self) -> str:
"""
Get the topic that this feature set has been configured to use as source
"""
if self.source and self.source.source_type == "Kafka":
return self.source.topic
raise Exception("Source type could not be identified")
def is_valid(self):
"""
Validates the state of a feature set locally. Raises an exception
if feature set is invalid.
"""
if len(self.entities) == 0:
raise ValueError(f"No entities found in feature set {self.name}")
@classmethod
def from_yaml(cls, yml: str):
"""
Creates a feature set from a YAML string body or a file path
Args:
yml: Either a file path containing a yaml file or a YAML string
Returns:
Returns a FeatureSet object based on the YAML file
"""
return cls.from_dict(feast_yaml.yaml_loader(yml, load_single=True))
@classmethod
def from_dict(cls, fs_dict):
"""
Creates a feature set from a dict
Args:
fs_dict: A dict representation of a feature set
Returns:
Returns a FeatureSet object based on the feature set dict
"""
if ("kind" not in fs_dict) and (fs_dict["kind"].strip() != "feature_set"):
raise Exception(f"Resource kind is not a feature set {str(fs_dict)}")
feature_set_proto = json_format.ParseDict(
fs_dict, FeatureSetProto(), ignore_unknown_fields=True
)
return cls.from_proto(feature_set_proto)
@classmethod
def from_proto(cls, feature_set_proto: FeatureSetProto):
"""
Creates a feature set from a protobuf representation of a feature set
Args:
feature_set_proto: A protobuf representation of a feature set
Returns:
Returns a FeatureSet object based on the feature set protobuf
"""
feature_set = cls(
name=feature_set_proto.spec.name,
features=[
Feature.from_proto(feature)
for feature in feature_set_proto.spec.features
],
entities=[
Entity.from_proto(entity) for entity in feature_set_proto.spec.entities
],
max_age=feature_set_proto.spec.max_age,
source=(
None
if feature_set_proto.spec.source.type == 0
else Source.from_proto(feature_set_proto.spec.source)
)
)
feature_set._version = feature_set_proto.spec.version
feature_set._status = feature_set_proto.meta.status
feature_set._created_timestamp = feature_set_proto.meta.created_timestamp
return feature_set
def to_proto(self) -> FeatureSetProto:
"""
Converts a feature set object to its protobuf representation
Returns:
FeatureSetProto protobuf
"""
meta = FeatureSetMetaProto(
created_timestamp=self.created_timestamp, status=self.status
)
spec = FeatureSetSpecProto(
name=self.name,
version=self.version,
max_age=self.max_age,
source=self.source.to_proto() if self.source is not None else None,
features=[
field.to_proto()
for field in self._fields.values()
if type(field) == Feature
],
entities=[
field.to_proto()
for field in self._fields.values()
if type(field) == Entity
],
)
return FeatureSetProto(spec=spec, meta=meta)
def _infer_pd_column_type(column, series, rows_to_sample):
dtype = None
sample_count = 0
# Loop over all rows for this column to infer types
for key, value in series.iteritems():
sample_count += 1
# Stop sampling at the row limit
if sample_count > rows_to_sample:
continue
# Infer the specific type for this row
current_dtype = python_type_to_feast_value_type(name=column, value=value)
# Make sure the type is consistent for column
if dtype:
if dtype != current_dtype:
raise ValueError(
f"Type mismatch detected in column {column}. Both "
f"the types {current_dtype} and {dtype} "
f"have been found."
)
else:
# Store dtype in field to type map if it isn't already
dtype = current_dtype
return dtype
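
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): one way the PyArrow
# schema inference above might be exercised. It assumes the enclosing class is
# named FeatureSet, that the inference method is infer_fields_from_pa, that
# DATETIME_COLUMN is "datetime", and that ValueType lives in
# feast.value_type -- treat these as assumptions, not the authoritative API.
if __name__ == "__main__":
    import pyarrow as pa

    from feast.value_type import ValueType  # assumed import path

    # A tiny table with the required timestamp column plus two data columns.
    table = pa.Table.from_pydict(
        {
            "datetime": pa.array([1_600_000_000_000], type=pa.timestamp("ms")),
            "driver_id": pa.array([42], type=pa.int64()),
            "trips_today": pa.array([3], type=pa.int32()),
        }
    )

    fs = FeatureSet(name="driver_stats")
    # driver_id is pinned as an entity; remaining non-datetime columns are
    # inferred as features.
    fs.infer_fields_from_pa(
        table,
        entities=[Entity(name="driver_id", dtype=ValueType.INT64)],
        replace_existing_features=True,
    )
    print(fs.features, fs.entities)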
|
py | b402ef71c817432f6d781e787e9d4d366d379cb7 | import numpy as np
import matplotlib.pyplot as plt
import sys
plt.style.use('classic')
filename = sys.argv[1]
nx = int(sys.argv[2])
ny = int(sys.argv[3])
nz = int(sys.argv[4])
dx = 0.1
dy = 0.1
dz = 0.5
xmax = dx*((nx-1)/2)
ymax = dy*((ny-1)/2)
x, y, z, v = np.loadtxt(filename, unpack=True)
xnew = np.linspace(-xmax, xmax, num=nx)
ynew = np.linspace(-ymax, ymax, num=ny)
vnew = v.reshape(nz, ny, nx).transpose()
front = vnew[:, :, 0].transpose()
back = vnew[:, :, -1].transpose()
middle = vnew[:, :, (nz-1)//2].transpose()
fig, ax = plt.subplots()
cset1 = ax.pcolormesh(xnew, ynew, middle)
plt.xlabel('x [fm]')
plt.ylabel('y [fm]')
plt.title('transverse energy density')
cbar = plt.colorbar(cset1)
cbar.ax.set_ylabel('energy density [fm^-4]')
plt.gca().set_aspect('equal', adjustable='box')
plt.show()
|
py | b402f0e25c0f93a651c26a30a726c3987667ff1a | import pandas as pd
import exifread
import os
from pathlib import Path
from typing import List
from multiprocessing import Pool
from .clean import clean_exif_data
picture_globs = ['*.jpg', '*.jpeg', '*.png', '*.JPG', '*.JPEG', '*.PNG']
PROCESSES_DEFAULT = 5
def get_extension(filename):
filename, file_extension = os.path.splitext(filename)
return file_extension.lower()
def get_pictures(directory: Path):
pics = []
for glob in picture_globs:
pics.extend(directory.rglob(glob))
return pics
def get_exif(path):
with open(path, 'rb') as f:
return clean_exif_data(path, exifread.process_file(f))
def multiprocess_extract_exif(fnames: List[Path], processes: int):
with Pool(processes) as pool:
return pool.map(get_exif, fnames)
def get_panda_df(folder_names, processes=PROCESSES_DEFAULT):
pics_filenames = []
for folder in folder_names:
abs_path = Path(folder).resolve()
pics_filenames.extend(get_pictures(abs_path))
cleaned_data = multiprocess_extract_exif(pics_filenames, processes)
return pd.DataFrame(cleaned_data)
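
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): building the EXIF
# DataFrame for a couple of photo folders. The folder names are made up, and
# because of the relative import above this module would need to be run as
# part of its package (python -m <package>.<module>).
if __name__ == "__main__":
    df = get_panda_df(["./photos", "./more_photos"], processes=PROCESSES_DEFAULT)
    print(df.head())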
|
py | b402f1c81a575223c208d33bb6b4f0ef9b2f744a | import json
import logging
import unittest
import os
from ...commandModule import CommandModule
class TestReadingCorrectFromPOGIFiles(unittest.TestCase):
def setUp(self):
logging.basicConfig(level=logging.DEBUG)
self.logger = logging.getLogger(__name__)
self.pogiData = dict()
self.pigoFile = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "testJSONs", "testPigo.json")
self.pogiFile = os.path.join(os.path.dirname(os.path.realpath(__file__)), "..", "testJSONs", "testPogi.json")
self.commandModule = CommandModule(pigoFileDirectory=self.pigoFile, pogiFileDirectory=self.pogiFile)
def tearDown(self):
self.pogiData = dict()
def __value_instantiate(self, key, value):
with open(self.pogiFile, "w") as file:
temp = {key: value}
json.dump(temp, file, ensure_ascii=False, indent=4, sort_keys=True)
def test_pass_if_get_airspeed_equals_correct(self):
self.__value_instantiate("currentAirspeed", 1.23)
self.assertEqual(1.23, self.commandModule.get_current_airspeed())
def test_pass_if_get_is_landed_equals_correct(self):
self.__value_instantiate("isLanded", True)
self.assertEqual(True, self.commandModule.get_is_landed())
def test_pass_if_get_euler_camera_equals_correct(self):
euler_camera = {"roll": 1.023, "pitch": 123.1, "yaw": 9.12}
self.__value_instantiate("eulerAnglesOfCamera", euler_camera)
self.assertEqual(euler_camera, self.commandModule.get_euler_angles_of_camera())
def test_pass_if_get_euler_plane_equals_correct(self):
euler_plane = {"roll": 1.023, "pitch": 123.1, "yaw": 9.12}
self.__value_instantiate("eulerAnglesOfPlane", euler_plane)
self.assertEqual(euler_plane, self.commandModule.get_euler_angles_of_plane())
def test_pass_if_get_gps_equals_correct(self):
gps = {"latitude": 1.212, "longitude": 2.134, "altitude": 1.234}
self.__value_instantiate("gpsCoordinates", gps)
self.assertEqual(gps, self.commandModule.get_gps_coordinates())
|
py | b402f337e3e8f20da62a40a6d3a9ba97177cfab3 | from rx import Observable
from rx.observer import AbstractObserver
from rx.anonymousobservable import AnonymousObservable
from rx.internal import extensionmethod
@extensionmethod(Observable, alias="tap")
def do_action(self, on_next=None, on_error=None, on_completed=None,
observer=None):
"""Invokes an action for each element in the observable sequence and
invokes an action upon graceful or exceptional termination of the
observable sequence. This method can be used for debugging, logging,
etc. of query behavior by intercepting the message stream to run
arbitrary actions for messages on the pipeline.
1 - observable.do_action(observer)
2 - observable.do_action(on_next)
3 - observable.do_action(on_next, on_error)
4 - observable.do_action(on_next, on_error, on_completed)
observer -- [Optional] Observer whose on_next, on_error and on_completed
handlers are invoked for the corresponding notifications.
on_next -- [Optional] Action to invoke for each element in the
observable sequence.
on_error -- [Optional] Action to invoke upon exceptional termination
of the observable sequence.
on_completed -- [Optional] Action to invoke upon graceful termination
of the observable sequence.
Returns the source sequence with the side-effecting behavior applied.
"""
source = self
if isinstance(observer, AbstractObserver):
on_next = observer.on_next
on_error = observer.on_error
on_completed = observer.on_completed
elif isinstance(on_next, AbstractObserver):
on_error = on_next.on_error
on_completed = on_next.on_completed
on_next = on_next.on_next
def subscribe(observer):
def _on_next(x):
try:
on_next(x)
except Exception as e:
observer.on_error(e)
observer.on_next(x)
def _on_error(exception):
if not on_error:
observer.on_error(exception)
else:
try:
on_error(exception)
except Exception as e:
observer.on_error(e)
observer.on_error(exception)
def _on_completed():
if not on_completed:
observer.on_completed()
else:
try:
on_completed()
except Exception as e:
observer.on_error(e)
observer.on_completed()
return source.subscribe(_on_next, _on_error, _on_completed)
return AnonymousObservable(subscribe)
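
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): using do_action/tap
# purely for side effects such as logging, leaving the stream itself
# unchanged. Observable.from_ is assumed to be available as a creation
# operator in this RxPY version.
if __name__ == "__main__":
    source = Observable.from_([1, 2, 3]).do_action(
        on_next=lambda x: print("passing through:", x),
        on_completed=lambda: print("stream completed"),
    )
    source.subscribe(lambda x: print("received:", x))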
|
py | b402f352654833d5f19a88fd3e62d38c662fcd4e | import sys
import logging
import numpy as np
from nltk.tokenize import word_tokenize
import nltk
#nltk.download('punkt') # Resource punkt not found. Please use the NLTK Downloader to obtain the resource
FORMAT = '%(asctime)-15s %(levelname)-10s %(filename)-10s %(funcName)-15s %(message)s'
logging.basicConfig(format=FORMAT, level=logging.DEBUG)
def _get_stats(token_count, filename):
median = np.median(token_count)
mean = np.mean(token_count)
min_val = np.amin(token_count)
max_val = np.amax(token_count)
quantile_075 = np.quantile(token_count, 0.75)
quantile_09 = np.quantile(token_count, 0.9)
quantile_08 = np.quantile(token_count, 0.8)
logging.info('token_count_stats file={} median={}, mean={}, min_value={}, max_value={}, 0.75-quantil={}, 0.8-quantil={}, 0.9-quantil={} \n\n'
.format(filename, str(median), str(mean), str(min_val), str(max_val), str(quantile_075), str(quantile_08), str(quantile_09)))
return median, mean, min_val, max_val
def main():
"""
"""
token_count = []
# compute token-count statistics for the document collection and the training queries
try:
docs_file = 'data/document_collection/collection.tsv'
query_file = 'data/training_data/triples.train.tsv'
logging.info('Obtaining token count of doc_texts from file "{}"'.format(docs_file))
count_line = 0
with open(docs_file, 'r') as doc_col_file:
for line in doc_col_file:
# e.g. 0 doc_text
line_components = line.rstrip('\n').split('\t')
doc_text = line_components[1]
doc_text_tokens = word_tokenize(doc_text)
len_doc_text_tokens = len(doc_text_tokens)
token_count.append(len_doc_text_tokens)
count_line += 1
if count_line % 100000 == 0:
logging.info('processed {} lines'.format(count_line))
_get_stats(token_count, docs_file)
logging.info('Obtaining token count of query_texts from file "{}"'.format(query_file))
token_count = []
count_line = 0
with open(query_file, 'r') as query_f:
for line in query_f:
# <query> <passage_1> <passage_2>
line_components = line.rstrip('\n').split('\t')
query_text = line_components[0]
query_text_tokens = word_tokenize(query_text)
len_query_text_tokens = len(query_text_tokens)
token_count.append(len_query_text_tokens)
count_line += 1
if count_line % 100000 == 0:
logging.info('processed {} lines'.format(count_line))
_get_stats(token_count, query_file)
except Exception as e:
logging.error('Failure occured ', exc_info=True)
exit()
if __name__ == '__main__':
main(); |
py | b402f3a1506d08169900518e64a27a6d8cd9b94b | #
from __future__ import print_function
import psutil
import sys
print('cpu_percent:\t', psutil.cpu_percent(interval=1), '%')
print('memory_used:\t', psutil.virtual_memory()[2], '%')
print('cpu_count:\t\t', psutil.cpu_count(logical=False), 'cores')
print('cpu_freq:\t\t', psutil.cpu_freq())
print('disk_usage C:\t', psutil.disk_usage('C:/')[3], '%')
print('disk_usage D:\t', psutil.disk_usage('D:/')[3], '%')
|
py | b402f3ebfe81152e01a96be3f5a192b4b04c303f | password="c41893a88b5b1cf29dc287327223ba12"
|
py | b402f436e300b268fc38c59b619a44715bd7f7e5 | """
Copyright (c) 2022 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import re
from nncf.tensorflow.graph.utils import get_original_name_and_instance_idx
def is_ignored(node_name, ignored_scopes):
original_name, _ = get_original_name_and_instance_idx(node_name)
return any(re.fullmatch(ignored.replace('{re}', ''), original_name) if ignored.startswith('{re}')
else ignored == original_name
for ignored in ignored_scopes)
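
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): ignored_scopes mixes
# exact original-node names with regex entries marked by a literal "{re}"
# prefix. The scope names below are made up.
#
#     ignored_scopes = [
#         "MobileNetV2/last_fc",          # exact match on the original name
#         "{re}.*depthwise_conv2d.*",     # regex, checked with re.fullmatch
#     ]
#     skip_node = is_ignored(node_name, ignored_scopes)
#
# Exact entries must equal the original node name (instance index stripped by
# get_original_name_and_instance_idx); "{re}" entries must fully match it.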
|
py | b402f441cd3b5dc4eb97b0307f7ef2bd7236f452 | # coding=utf-8
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for RevBlock."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=g-direct-tensorflow-import
import tensorflow.compat.v1 as tf
from tf_slim.layers import layers
from tf_slim.layers import rev_block_lib
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.layers import convolutional
from tensorflow.python.layers import core as core_layers
from tensorflow.python.layers import normalization as normalization_layers
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import init_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import variable_scope
from tensorflow.python.ops import variables
from tensorflow.python.platform import test
def setUpModule():
tf.disable_eager_execution()
class RevBlockTest(test.TestCase):
CHANNELS = 8
NUM_LAYERS = 4
BATCH_SIZE = 16
def testForwardBackward(self):
def f(x):
return core_layers.dense(x, self.CHANNELS // 2, use_bias=True)
def g(x):
return core_layers.dense(x, self.CHANNELS // 2, use_bias=True)
x = random_ops.random_uniform(
[self.BATCH_SIZE, self.CHANNELS], dtype=dtypes.float32)
x1, x2 = array_ops.split(x, 2, axis=-1)
block = rev_block_lib.RevBlock(f, g, num_layers=3)
y1, y2 = block.forward(x1, x2)
x1_inv, x2_inv = block.backward(y1, y2)
with self.cached_session() as sess:
sess.run(variables.global_variables_initializer())
x1, x2, x1_inv, x2_inv = sess.run([x1, x2, x1_inv, x2_inv])
self.assertAllClose(x1, x1_inv, atol=1e-5)
self.assertAllClose(x2, x2_inv, atol=1e-5)
def testBackwardForward(self):
def f(x):
return core_layers.dense(x, self.CHANNELS // 2, use_bias=True)
def g(x):
return core_layers.dense(x, self.CHANNELS // 2, use_bias=True)
y = random_ops.random_uniform(
[self.BATCH_SIZE, self.CHANNELS], dtype=dtypes.float32)
y1, y2 = array_ops.split(y, 2, axis=-1)
block = rev_block_lib.RevBlock(f, g, num_layers=3)
x1, x2 = block.backward(y1, y2)
y1_inv, y2_inv = block.forward(x1, x2)
with self.cached_session() as sess:
sess.run(variables.global_variables_initializer())
y1, y2, y1_inv, y2_inv = sess.run([y1, y2, y1_inv, y2_inv])
self.assertAllClose(y1, y1_inv, rtol=1e-5)
self.assertAllClose(y2, y2_inv, rtol=1e-5)
def _testRevBlock(self,
x=None,
f=None,
g=None,
f_side_input=None,
g_side_input=None):
random_seed.set_random_seed(1234)
if f is None:
def f(x): # pylint: disable=function-redefined
return core_layers.dense(x, self.CHANNELS // 2, use_bias=True)
if g is None:
def g(x): # pylint: disable=function-redefined
return core_layers.dense(x, self.CHANNELS // 2, use_bias=True)
if f_side_input is None:
f_side_input = []
if g_side_input is None:
g_side_input = []
if x is None:
x = random_ops.random_uniform(
[self.BATCH_SIZE, self.CHANNELS], dtype=dtypes.float32)
x1, x2 = array_ops.split(x, 2, axis=-1)
with variable_scope.variable_scope("rev_test") as vs:
y1_rev, y2_rev = rev_block_lib.rev_block(
x1,
x2,
f,
g,
f_side_input=f_side_input,
g_side_input=g_side_input,
num_layers=self.NUM_LAYERS)
y_rev = array_ops.concat([y1_rev, y2_rev], axis=1)
fg_vars = vs.trainable_variables()
num_vars = len(variables.global_variables())
with variable_scope.variable_scope(vs, reuse=True):
y1, y2 = rev_block_lib.rev_block(
x1,
x2,
f,
g,
f_side_input=f_side_input,
g_side_input=g_side_input,
num_layers=self.NUM_LAYERS,
is_training=False)
y = array_ops.concat([y1, y2], axis=1)
# Ensure no new vars were created - full reuse
assert len(variables.global_variables()) == num_vars
loss_rev = math_ops.reduce_mean(y_rev + 10.)
loss = math_ops.reduce_mean(y + 10.)
wrt = [x] + f_side_input + g_side_input + fg_vars
grads_rev = gradients_impl.gradients(loss_rev, wrt)
grads = gradients_impl.gradients(loss, wrt)
with self.cached_session() as sess:
sess.run(variables.global_variables_initializer())
y_val, yd_val, gd_val, g_val = sess.run([y, y_rev, grads_rev, grads])
self.assertAllClose(y_val, yd_val, rtol=1e-5)
for g1, g2 in zip(gd_val, g_val):
self.assertAllClose(g1, g2, rtol=1e-5)
def testRevBlock(self):
self._testRevBlock()
def testSideInput(self):
f_side_input = random_ops.random_uniform(
[self.BATCH_SIZE, self.CHANNELS // 2])
def f(x, side_input):
return core_layers.dense(
x, self.CHANNELS // 2, use_bias=True) + side_input[0]
self._testRevBlock(f=f, f_side_input=[f_side_input])
def testMultipleFns(self):
def f1(x):
return core_layers.dense(x, self.CHANNELS // 2)
def f2(x):
return core_layers.dense(x, self.CHANNELS // 2, activation=nn_ops.relu)
self._testRevBlock(f=[f1, f2, f1, f2])
def testConvAndBatchNorm(self):
x = random_ops.random_uniform(
[self.BATCH_SIZE, 10, self.CHANNELS], dtype=dtypes.float32)
def f(x):
x = convolutional.conv1d(x, self.CHANNELS // 2, 3, padding="same")
x = layers.batch_norm(x, is_training=False)
x = convolutional.conv1d(x, self.CHANNELS // 2, 3, padding="same")
x = layers.batch_norm(x, is_training=False)
return x
self._testRevBlock(x=x, f=f)
def testReuse(self):
def f(x):
return core_layers.dense(x, self.CHANNELS // 2)
def g(x):
return core_layers.dense(x, self.CHANNELS // 2)
x = random_ops.random_uniform(
[self.BATCH_SIZE, self.CHANNELS], dtype=dtypes.float32)
x1, x2 = array_ops.split(x, 2, axis=-1)
with variable_scope.variable_scope("test"):
y1, y2 = rev_block_lib.rev_block(x1, x2, f, g, num_layers=self.NUM_LAYERS)
num_vars_before = len(variables.global_variables())
with variable_scope.variable_scope("test", reuse=True):
y1, y2 = rev_block_lib.rev_block(x1, x2, f, g, num_layers=self.NUM_LAYERS)
num_vars_after = len(variables.global_variables())
self.assertEqual(num_vars_before, num_vars_after)
loss = math_ops.reduce_mean(y1 + y2)
_ = gradients_impl.gradients(loss,
[x] + variables.trainable_variables())
with variable_scope.variable_scope("test", reuse=True):
y1, y2 = rev_block_lib.rev_block(x1, x2, f, g, num_layers=self.NUM_LAYERS)
num_vars_after = len(variables.global_variables())
self.assertEqual(num_vars_before, num_vars_after)
class RecomputeTest(test.TestCase):
def testRecompute(self):
def layer(x, name=None):
with variable_scope.variable_scope(name, default_name="layer"):
x = layers.layer_norm(x)
x = convolutional.conv1d(
x,
10,
1,
use_bias=False,
kernel_initializer=init_ops.constant_initializer(42.42))
x = nn_ops.relu(x)
return x
def fn(x):
out = x
for _ in range(3):
out = layer(out)
return out
@rev_block_lib.recompute_grad
def fn_recompute(x):
return fn(x)
@rev_block_lib.recompute_grad(use_data_dep=True)
def fn_use_data_dep(x):
return fn(x)
@rev_block_lib.recompute_grad(tupleize_grads=True)
def fn_tupleize(x):
return fn(x)
@rev_block_lib.recompute_grad(use_data_dep=True, tupleize_grads=True)
def fn_both(x):
return fn(x)
x = random_ops.random_uniform((3, 1, 3))
names_and_fns = [
("recompute", fn_recompute),
("regular", fn),
("use_data_dep", fn_use_data_dep),
("tupleize", fn_tupleize),
("tuple_and_data_dep", fn_both),
]
outputs_and_vars = []
for name, wrapped_fn in names_and_fns:
with variable_scope.variable_scope(name, use_resource=True) as vs:
out = math_ops.reduce_sum(wrapped_fn(x))
outputs_and_vars.append((out, vs.trainable_variables()))
all_grads = []
for out, scope_vars in outputs_and_vars:
all_grads.append(gradients_impl.gradients(out, scope_vars))
with self.cached_session() as sess:
sess.run(variables.global_variables_initializer())
outputs = list(zip(*outputs_and_vars))[0]
outs, all_grads_val = sess.run([outputs, all_grads])
# All outputs are the same
current = outs[0]
for out in outs[1:]:
self.assertAllClose(current, out)
current = out
# All gradients are the same
for grads in zip(*all_grads_val):
current = grads[0]
for g in grads[1:]:
self.assertAllClose(current, g)
current = g
def testDoubleCallInSameScopeFails(self):
@rev_block_lib.recompute_grad
def layer_with_recompute(inputs):
return core_layers.dense(inputs, 2)
with variable_scope.variable_scope("layer", use_resource=True):
inputs = array_ops.ones((2, 4), dtypes.float32)
out1 = layer_with_recompute(inputs)
out2 = layer_with_recompute(inputs) + out1
out = math_ops.reduce_sum(out2)
tvars = variables.trainable_variables()
assert len(tvars) == 4
with self.assertRaisesWithPredicateMatch(
ValueError, "called twice in the same enclosing scope"):
gradients_impl.gradients(out, [inputs] + tvars)
def testDoubleCallInUniqueScope(self):
@rev_block_lib.recompute_grad
def layer_with_recompute(inputs):
with variable_scope.variable_scope("inner", use_resource=True):
return core_layers.dense(inputs, 2)
with variable_scope.variable_scope("layer", use_resource=True):
inputs = array_ops.ones((2, 4), dtypes.float32)
with variable_scope.variable_scope("layer1", use_resource=True):
out1 = layer_with_recompute(inputs)
with variable_scope.variable_scope("layer2", use_resource=True):
out2 = layer_with_recompute(inputs) + out1
out = math_ops.reduce_sum(out2)
tvars = variables.trainable_variables()
assert len(tvars) == 4
grads = gradients_impl.gradients(out, [inputs] + tvars)
for grad in grads:
self.assertIsNotNone(grad)
def testWithIsRecomputeKwarg(self):
kwarg_values = []
@rev_block_lib.recompute_grad
def layer_with_recompute(inputs, is_recomputing=False):
kwarg_values.append(is_recomputing)
out = core_layers.dense(inputs, 2)
out = normalization_layers.batch_normalization(out, training=True)
if is_recomputing:
# Ensure that the updates are not duplicated by popping off the latest
# 2 additions.
update_ops = ops.get_collection_ref(ops.GraphKeys.UPDATE_OPS)
update_ops.pop()
update_ops.pop()
return out
x = array_ops.ones((2, 4), dtypes.float32)
with variable_scope.variable_scope("layer1", use_resource=True):
y = layer_with_recompute(x)
loss = math_ops.reduce_sum(y)
tvars = variables.trainable_variables()
gradients_impl.gradients(loss, [x] + tvars)
update_ops = ops.get_collection(ops.GraphKeys.UPDATE_OPS)
self.assertEqual(2, len(update_ops))
self.assertEqual([False, True], kwarg_values)
def testWithoutVariables(self):
def concat_n(layer_list, num_inputs):
return math_ops.reduce_sum(
array_ops.concat([x for x in layer_list[-num_inputs:]], axis=-1),
axis=1, keepdims=True)
@rev_block_lib.recompute_grad
def concat_n_wrap(*args):
return concat_n(args, 3)
# DenseNet-style layers
layer_list = [random_ops.random_uniform((4, 8))]
for _ in range(5):
layer_list.append(math_ops.sqrt(concat_n_wrap(*layer_list)))
grads = gradients_impl.gradients(layer_list[-1], layer_list[0])
with self.cached_session() as sess:
sess.run(grads)
def testErrorOnClosedOverTensor(self):
x = random_ops.random_uniform((4, 8))
y = random_ops.random_uniform((4, 8))
z = x * y
with self.assertRaisesWithPredicateMatch(ValueError, "closes over"):
@rev_block_lib.recompute_grad
def fn_with_capture(a): # pylint: disable=unused-variable
return a * z
if __name__ == "__main__":
test.main()
|
py | b402f478b14ae36540a6859e01aba2a7f458bb71 | """
The main window class that all object-oriented applications should
derive from.
"""
from typing import Tuple
from numbers import Number
from arcade.window_commands import set_viewport
from arcade.window_commands import get_viewport
import pyglet
MOUSE_BUTTON_LEFT = 1
MOUSE_BUTTON_MIDDLE = 2
MOUSE_BUTTON_RIGHT = 4
class Window(pyglet.window.Window):
"""
Window class
>>> import arcade
>>> window = arcade.Window(200, 100, resizable=True)
>>> window.set_update_rate(1/20)
>>> window.set_mouse_visible(True)
>>> window.on_mouse_motion(0, 0, 0, 0)
>>> window.on_mouse_press(0, 0, 0, 0)
>>> window.on_mouse_release(0, 0, 0, 0)
>>> window.on_key_press(0, 0)
>>> window.on_key_release(0, 0)
>>> window.on_mouse_drag(0, 0, 1, 1, 1, 0)
>>> window.on_mouse_scroll(1, 1, 1, 1)
>>> window.on_draw()
>>> window.on_resize(500, 500)
>>> window.set_size(500, 500)
>>> window.update(1/20)
>>> window.set_visible(True)
>>> window.close()
"""
def __init__(self, width: float = 800, height: float = 600,
title: str = 'Arcade Window', fullscreen: bool = False,
resizable: bool = False):
super().__init__(width=width, height=height, caption=title,
resizable=resizable)
self.set_update_rate(1 / 60)
super().set_fullscreen(fullscreen)
self.invalid = False
# set_viewport(0, self.width, 0, self.height)
def update(self, delta_time: float):
"""
Move everything. For better consistency in naming, use ``on_update`` instead.
Args:
:delta_time (float): Time interval since the last time the function was called.
"""
pass
def on_update(self, delta_time: float):
"""
Move everything.
Args:
:delta_time (float): Time interval since the last time the function was called.
"""
pass
def set_update_rate(self, rate: float):
"""
Set how often the screen should be updated.
For example, self.set_update_rate(1 / 20) will set the update rate to 20 fps
"""
pyglet.clock.unschedule(self.update)
pyglet.clock.schedule_interval(self.update, rate)
pyglet.clock.unschedule(self.on_update)
pyglet.clock.schedule_interval(self.on_update, rate)
def on_mouse_motion(self, x: float, y: float, dx: float, dy: float):
""" Override this function to add mouse functionality. """
pass
def on_mouse_press(self, x: float, y: float, button: int, modifiers: int):
""" Override this function to add mouse button functionality. """
pass
def on_mouse_drag(self, x: float, y: float, dx: float, dy: float, buttons: int, modifiers: int):
""" Override this function to add mouse button functionality. """
self.on_mouse_motion(x, y, dx, dy)
def on_mouse_release(self, x: float, y: float, button: int,
modifiers: int):
""" Override this function to add mouse button functionality. """
pass
def on_mouse_scroll(self, x: int, y: int, scroll_x: int, scroll_y: int):
""" User moves the scroll wheel. """
pass
def set_mouse_visible(self, visible=True):
""" If true, user can see the mouse cursor while it is over the window. Set false,
the mouse is not visible. Default is true. """
super().set_mouse_visible(visible)
def on_key_press(self, symbol: int, modifiers: int):
""" Override this function to add key press functionality. """
pass
def on_key_release(self, symbol: int, modifiers: int):
""" Override this function to add key release functionality. """
pass
def on_draw(self):
""" Override this function to add your custom drawing code. """
pass
def on_resize(self, width, height):
""" Override this function to add custom code to be called any time the window
is resized. """
super().on_resize(width, height)
def set_min_size(self, width: float, height: float):
""" Wrap the Pyglet window call to set minimum size
Args:
:width: width in pixels.
:height: height in pixels.
Example:
>>> import arcade
>>> window = arcade.Window(200, 100, resizable=True)
>>> window.set_min_size(200, 200)
>>> window.close()
"""
if self._resizable:
super().set_minimum_size(width, height)
else:
raise ValueError('Cannot set min size on non-resizable window')
def set_max_size(self, width: float, height: float):
""" Wrap the Pyglet window call to set maximum size
Args:
:width: width in pixels.
:height: height in pixels.
Returns:
None
Raises:
ValueError
Example:
>>> import arcade
>>> window = arcade.Window(200, 100, resizable=True)
>>> window.set_max_size(200, 200)
>>> window.close()
"""
if self._resizable:
super().set_maximum_size(width, height)
else:
raise ValueError('Cannot set max size on non-resizable window')
def set_size(self, width: float, height: float):
""" Ignore the resizable flag and set the size """
super().set_size(width, height)
def get_size(self):
""" Get the size of the window. """
return super().get_size()
def get_location(self) -> Tuple[int, int]:
""" Return the X/Y coordinates of the window """
return super().get_location()
def set_visible(self, visible=True):
""" Set if the window is visible or not. Normally, a program's window is visible. """
super().set_visible(visible)
def set_viewport(self, left: Number, right: Number, bottom: Number, top: Number):
""" Set the viewport. (What coordinates we can see.
Used to scale and/or scroll the screen.) """
set_viewport(left, right, bottom, top)
def get_viewport(self) -> Tuple[float, float, float, float]:
""" Get the viewport. (What coordinates we can see.) """
return get_viewport()
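
# ---------------------------------------------------------------------------
# Illustrative sketch (not part of the original module): the docstrings above
# describe an override-based API, so typical usage subclasses Window and
# overrides the on_* hooks. The class name and window size below are made up.
class _ExampleGame(Window):
    """Minimal subclass demonstrating the override pattern."""

    def on_update(self, delta_time: float):
        # Per-frame game logic; delta_time is the elapsed time in seconds.
        pass

    def on_draw(self):
        # Custom drawing code goes here.
        pass

    def on_key_press(self, symbol: int, modifiers: int):
        # React to keyboard input here.
        pass


if __name__ == "__main__":
    _ExampleGame(640, 480, title="Example window")
    pyglet.app.run()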
|