# Source: googleapis/python-datastore / google/cloud/datastore_v1/services/datastore/async_client.py
# -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import functools
import re
from typing import Dict, Sequence, Tuple, Type, Union
import pkg_resources
import google.api_core.client_options as ClientOptions # type: ignore
from google.api_core import exceptions # type: ignore
from google.api_core import gapic_v1 # type: ignore
from google.api_core import retry as retries # type: ignore
from google.auth import credentials # type: ignore
from google.oauth2 import service_account # type: ignore
from google.cloud.datastore_v1.types import datastore
from google.cloud.datastore_v1.types import entity
from google.cloud.datastore_v1.types import query
from .transports.base import DatastoreTransport, DEFAULT_CLIENT_INFO
from .transports.grpc_asyncio import DatastoreGrpcAsyncIOTransport
from .client import DatastoreClient
class DatastoreAsyncClient:
"""Each RPC normalizes the partition IDs of the keys in its
input entities, and always returns entities with keys with
normalized partition IDs. This applies to all keys and entities,
including those in values, except keys with both an empty path
and an empty or unset partition ID. Normalization of input keys
sets the project ID (if not already set) to the project ID from
the request.
"""
_client: DatastoreClient
DEFAULT_ENDPOINT = DatastoreClient.DEFAULT_ENDPOINT
DEFAULT_MTLS_ENDPOINT = DatastoreClient.DEFAULT_MTLS_ENDPOINT
common_billing_account_path = staticmethod(
DatastoreClient.common_billing_account_path
)
parse_common_billing_account_path = staticmethod(
DatastoreClient.parse_common_billing_account_path
)
common_folder_path = staticmethod(DatastoreClient.common_folder_path)
parse_common_folder_path = staticmethod(DatastoreClient.parse_common_folder_path)
common_organization_path = staticmethod(DatastoreClient.common_organization_path)
parse_common_organization_path = staticmethod(
DatastoreClient.parse_common_organization_path
)
common_project_path = staticmethod(DatastoreClient.common_project_path)
parse_common_project_path = staticmethod(DatastoreClient.parse_common_project_path)
common_location_path = staticmethod(DatastoreClient.common_location_path)
parse_common_location_path = staticmethod(
DatastoreClient.parse_common_location_path
)
from_service_account_file = DatastoreClient.from_service_account_file
from_service_account_json = from_service_account_file
@property
def transport(self) -> DatastoreTransport:
"""Return the transport used by the client instance.
Returns:
DatastoreTransport: The transport used by the client instance.
"""
return self._client.transport
get_transport_class = functools.partial(
type(DatastoreClient).get_transport_class, type(DatastoreClient)
)
def __init__(
self,
*,
credentials: credentials.Credentials = None,
transport: Union[str, DatastoreTransport] = "grpc_asyncio",
client_options: ClientOptions = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiate the datastore client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ~.DatastoreTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (ClientOptions): Custom options for the client. It
won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
Raises:
google.auth.exceptions.MutualTlsChannelError: If mutual TLS transport
creation failed for any reason.
"""
self._client = DatastoreClient(
credentials=credentials,
transport=transport,
client_options=client_options,
client_info=client_info,
)
async def lookup(
self,
request: datastore.LookupRequest = None,
*,
project_id: str = None,
read_options: datastore.ReadOptions = None,
keys: Sequence[entity.Key] = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> datastore.LookupResponse:
r"""Looks up entities by key.
Args:
request (:class:`~.datastore.LookupRequest`):
The request object. The request for
[Datastore.Lookup][google.datastore.v1.Datastore.Lookup].
project_id (:class:`str`):
Required. The ID of the project
against which to make the request.
This corresponds to the ``project_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
read_options (:class:`~.datastore.ReadOptions`):
The options for this lookup request.
This corresponds to the ``read_options`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
keys (:class:`Sequence[~.entity.Key]`):
Required. Keys of entities to look
up.
This corresponds to the ``keys`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.datastore.LookupResponse:
The response for
[Datastore.Lookup][google.datastore.v1.Datastore.Lookup].
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([project_id, read_options, keys])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = datastore.LookupRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if project_id is not None:
request.project_id = project_id
if read_options is not None:
request.read_options = read_options
if keys:
request.keys.extend(keys)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.lookup,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def run_query(
self,
request: datastore.RunQueryRequest = None,
*,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> datastore.RunQueryResponse:
r"""Queries for entities.
Args:
request (:class:`~.datastore.RunQueryRequest`):
The request object. The request for
[Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery].
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.datastore.RunQueryResponse:
The response for
[Datastore.RunQuery][google.datastore.v1.Datastore.RunQuery].
"""
# Create or coerce a protobuf request object.
request = datastore.RunQueryRequest(request)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.run_query,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
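# A sketch of a run_query call (hypothetical GQL string; RunQueryRequest and
# GqlQuery come from google.cloud.datastore_v1.types):
#
#     request = datastore.RunQueryRequest(
#         project_id="my-project",
#         gql_query=query.GqlQuery(query_string="SELECT * FROM Task"),
#     )
#     response = await client.run_query(request)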
async def begin_transaction(
self,
request: datastore.BeginTransactionRequest = None,
*,
project_id: str = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> datastore.BeginTransactionResponse:
r"""Begins a new transaction.
Args:
request (:class:`~.datastore.BeginTransactionRequest`):
The request object. The request for
[Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction].
project_id (:class:`str`):
Required. The ID of the project
against which to make the request.
This corresponds to the ``project_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.datastore.BeginTransactionResponse:
The response for
[Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction].
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([project_id])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = datastore.BeginTransactionRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if project_id is not None:
request.project_id = project_id
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.begin_transaction,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def commit(
self,
request: datastore.CommitRequest = None,
*,
project_id: str = None,
mode: datastore.CommitRequest.Mode = None,
transaction: bytes = None,
mutations: Sequence[datastore.Mutation] = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> datastore.CommitResponse:
r"""Commits a transaction, optionally creating, deleting
or modifying some entities.
Args:
request (:class:`~.datastore.CommitRequest`):
The request object. The request for
[Datastore.Commit][google.datastore.v1.Datastore.Commit].
project_id (:class:`str`):
Required. The ID of the project
against which to make the request.
This corresponds to the ``project_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
mode (:class:`~.datastore.CommitRequest.Mode`):
The type of commit to perform. Defaults to
``TRANSACTIONAL``.
This corresponds to the ``mode`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
transaction (:class:`bytes`):
The identifier of the transaction associated with the
commit. A transaction identifier is returned by a call
to
[Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction].
This corresponds to the ``transaction`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
mutations (:class:`Sequence[~.datastore.Mutation]`):
The mutations to perform.
When mode is ``TRANSACTIONAL``, mutations affecting a
single entity are applied in order. The following
sequences of mutations affecting a single entity are not
permitted in a single ``Commit`` request:
- ``insert`` followed by ``insert``
- ``update`` followed by ``insert``
- ``upsert`` followed by ``insert``
- ``delete`` followed by ``update``
When mode is ``NON_TRANSACTIONAL``, no two mutations may
affect a single entity.
This corresponds to the ``mutations`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.datastore.CommitResponse:
The response for
[Datastore.Commit][google.datastore.v1.Datastore.Commit].
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([project_id, mode, transaction, mutations])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = datastore.CommitRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if project_id is not None:
request.project_id = project_id
if mode is not None:
request.mode = mode
if transaction is not None:
request.transaction = transaction
if mutations:
request.mutations.extend(mutations)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.commit,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
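# A sketch of the full transaction lifecycle using the two calls above
# (task_entity is a hypothetical entity.Entity):
#
#     txn = await client.begin_transaction(project_id="my-project")
#     await client.commit(
#         project_id="my-project",
#         mode=datastore.CommitRequest.Mode.TRANSACTIONAL,
#         transaction=txn.transaction,
#         mutations=[datastore.Mutation(upsert=task_entity)],
#     )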
async def rollback(
self,
request: datastore.RollbackRequest = None,
*,
project_id: str = None,
transaction: bytes = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> datastore.RollbackResponse:
r"""Rolls back a transaction.
Args:
request (:class:`~.datastore.RollbackRequest`):
The request object. The request for
[Datastore.Rollback][google.datastore.v1.Datastore.Rollback].
project_id (:class:`str`):
Required. The ID of the project
against which to make the request.
This corresponds to the ``project_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
transaction (:class:`bytes`):
Required. The transaction identifier, returned by a call
to
[Datastore.BeginTransaction][google.datastore.v1.Datastore.BeginTransaction].
This corresponds to the ``transaction`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.datastore.RollbackResponse:
The response for
[Datastore.Rollback][google.datastore.v1.Datastore.Rollback].
(an empty message).
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([project_id, transaction])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = datastore.RollbackRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if project_id is not None:
request.project_id = project_id
if transaction is not None:
request.transaction = transaction
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.rollback,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
async def allocate_ids(
self,
request: datastore.AllocateIdsRequest = None,
*,
project_id: str = None,
keys: Sequence[entity.Key] = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> datastore.AllocateIdsResponse:
r"""Allocates IDs for the given keys, which is useful for
referencing an entity before it is inserted.
Args:
request (:class:`~.datastore.AllocateIdsRequest`):
The request object. The request for
[Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds].
project_id (:class:`str`):
Required. The ID of the project
against which to make the request.
This corresponds to the ``project_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
keys (:class:`Sequence[~.entity.Key]`):
Required. A list of keys with
incomplete key paths for which to
allocate IDs. No key may be
reserved/read-only.
This corresponds to the ``keys`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.datastore.AllocateIdsResponse:
The response for
[Datastore.AllocateIds][google.datastore.v1.Datastore.AllocateIds].
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([project_id, keys])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = datastore.AllocateIdsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if project_id is not None:
request.project_id = project_id
if keys:
request.keys.extend(keys)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.allocate_ids,
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
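# A sketch of allocate_ids: the keys must have incomplete paths, i.e. a final
# path element with a kind but no id/name (hypothetical kind shown):
#
#     incomplete = entity.Key(
#         partition_id=entity.PartitionId(project_id="my-project"),
#         path=[entity.Key.PathElement(kind="Task")],
#     )
#     response = await client.allocate_ids(
#         project_id="my-project", keys=[incomplete])
#     # response.keys holds the same keys with numeric IDs filled in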
async def reserve_ids(
self,
request: datastore.ReserveIdsRequest = None,
*,
project_id: str = None,
keys: Sequence[entity.Key] = None,
retry: retries.Retry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> datastore.ReserveIdsResponse:
r"""Prevents the supplied keys' IDs from being auto-
llocated by Cloud Datastore.
Args:
request (:class:`~.datastore.ReserveIdsRequest`):
The request object. The request for
[Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds].
project_id (:class:`str`):
Required. The ID of the project
against which to make the request.
This corresponds to the ``project_id`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
keys (:class:`Sequence[~.entity.Key]`):
Required. A list of keys with
complete key paths whose numeric IDs
should not be auto-allocated.
This corresponds to the ``keys`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
~.datastore.ReserveIdsResponse:
The response for
[Datastore.ReserveIds][google.datastore.v1.Datastore.ReserveIds].
"""
# Create or coerce a protobuf request object.
# Sanity check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([project_id, keys])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
request = datastore.ReserveIdsRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if project_id is not None:
request.project_id = project_id
if keys:
request.keys.extend(keys)
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = gapic_v1.method_async.wrap_method(
self._client._transport.reserve_ids,
default_retry=retries.Retry(
initial=0.1,
maximum=60.0,
multiplier=1.3,
predicate=retries.if_exception_type(
exceptions.DeadlineExceeded, exceptions.ServiceUnavailable,
),
),
default_timeout=60.0,
client_info=DEFAULT_CLIENT_INFO,
)
# Send the request.
response = await rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution("google-cloud-datastore",).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("DatastoreAsyncClient",)

# Source: takeflight/wagtailtestutils / tests/settings.py (license: BSD-2-Clause)
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
SECRET_KEY = 'not needed'
ROOT_URLCONF = 'wagtail.tests.urls'
STATIC_URL = '/static/'
STATIC_ROOT = 'static/'
COMPRESS_ENABLED = False
STATICFILES_FINDERS = (
'django.contrib.staticfiles.finders.AppDirectoriesFinder',
'compressor.finders.CompressorFinder',
)
USE_TZ = True
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.auth.middleware.SessionAuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
'wagtail.wagtailcore.middleware.SiteMiddleware',
)
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
'django.template.context_processors.request',
'wagtail.tests.context_processors.do_not_use_static_url',
],
},
},
]
INSTALLED_APPS = (
'tests',
'wagtail.wagtailsites',
'wagtail.wagtailusers',
'wagtail.wagtailadmin',
'wagtail.wagtailcore',
'wagtail.wagtailimages',
'wagtail.wagtaildocs',
'wagtail.wagtailsearch',
'taggit',
'compressor',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
)
# Using DatabaseCache to make sure that the cache is cleared between tests.
# This prevents false-positives in some wagtail core tests where we are
# changing the 'wagtail_root_paths' key which may cause future tests to fail.
CACHES = {
'default': {
'BACKEND': 'django.core.cache.backends.db.DatabaseCache',
'LOCATION': 'cache',
}
}
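# Note: DatabaseCache requires its backing table to exist. Outside of tests it
# is created with `manage.py createcachetable`; Django's test runner creates
# it automatically while building the test database.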
PASSWORD_HASHERS = (
'django.contrib.auth.hashers.MD5PasswordHasher', # don't use the intentionally slow default password hasher
)
WAGTAIL_SITE_NAME = "Test Site"

# Source: sergiocallegari/PyDSM / cvxpy_tinoco/functions/abs.py
#***********************************************************************#
# Copyright (C) 2010-2012 Tomas Tinoco De Rubira #
# #
# This file is part of CVXPY #
# #
# CVXPY is free software: you can redistribute it and/or modify #
# it under the terms of the GNU General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# CVXPY is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU General Public License for more details. #
# #
# You should have received a copy of the GNU General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
#***********************************************************************#
import numpy as np
from ..defs import *
from ..utils import *
from ..interface import *
from ..arrays import cvxpy_array
from ..arrays import cvxpy_matrix
# abs
def abs(x):
"""
| :math:`\mbox{abs} :
\mathbb{R}^{m \\times n} \\to \mathbb{R}^{m \\times n},
\ \mbox{abs}(X)_{ij} = |X_{ij}|`.
| Convex.
:param x: number,
:ref:`scalar object<scalar_ref>` or
:ref:`multidimensional object<multi_ref>`.
:return: number,
:ref:`tree<tree_obj>`,
:ref:`matrix<matrix_obj>` or
:ref:`array<array_obj>`.
"""
# Prepare input
if (np.isscalar(x) or
type(x).__name__ in SCALAR_OBJS):
arg = vstack([x])
elif (type(x) is cvxpy_matrix or
type(x).__name__ in ARRAY_OBJS):
arg = x
else:
raise TypeError('Invalid argument')
# Prepare output
if type(arg) is cvxpy_matrix:
output = zeros(arg.shape)
else:
output = cvxpy_array(arg.shape[0],arg.shape[1])
# Construct program
for i in range(0,arg.shape[0],1):
for j in range(0,arg.shape[1],1):
t = variable()
v = variable()
p = program(minimize(t),
[less_equals(v,t),less_equals(-t,v)],
[v],
name='abs')
output[i,j] = p(arg[i,j])
# Return output
if output.shape == (1,1):
return output[0,0]
else:
return output
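# Example usage (a sketch; `variable` is provided by this package's imports,
# and the matrix values are hypothetical):
#
#     x = variable()
#     f = abs(x - 1) # convex scalar expression, usable in a program
#     B = abs(A) # elementwise absolute value of a cvxpy_matrix A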

# Source: cortical-io/retina-sdk.py / retinasdk/client/expressions_api.py
"""
/*******************************************************************************
* Copyright (c) cortical.io GmbH. All rights reserved.
*
* This software is confidential and proprietary information.
* You shall use it only in accordance with the terms of the
* license agreement you entered into with cortical.io GmbH.
******************************************************************************/
"""
from retinasdk.model import context
from retinasdk.model import fingerprint
from retinasdk.model import term
class ExpressionsApi(object):
def __init__(self, apiClient):
self.apiClient = apiClient
def resolveExpression(self, retina_name, body, sparsity=1.0):
"""Resolve an expression
Args:
retina_name, str: The retina name (required)
body, ExpressionOperation: The JSON encoded expression to be evaluated (required)
sparsity, float: Sparsify the resulting expression to this percentage (optional)
Returns: Fingerprint
"""
resourcePath = '/expressions'
method = 'POST'
queryParams = {}
headerParams = {'Accept': 'Application/json', 'Content-Type': 'application/json'}
postData = None
queryParams['retina_name'] = retina_name
queryParams['sparsity'] = sparsity
postData = body
response = self.apiClient._callAPI(resourcePath, method, queryParams, postData, headerParams)
return fingerprint.Fingerprint(**response.json())
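# A sketch of a call (hypothetical retina name and client; the body is the
# JSON-encoded expression documented above):
#
#     api = ExpressionsApi(api_client)
#     fp = api.resolveExpression("en_associative", '{"term": "apple"}',
#                                sparsity=0.5)
#     print(fp.positions[:10])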
def getContextsForExpression(self, retina_name, body, get_fingerprint=None, start_index=0, max_results=5, sparsity=1.0):
"""Get semantic contexts for the input expression
Args:
retina_name, str: The retina name (required)
body, ExpressionOperation: The JSON encoded expression to be evaluated (required)
get_fingerprint, bool: Configure if the fingerprint should be returned as part of the results (optional)
start_index, int: The start-index for pagination (optional)
max_results, int: Max results per page (optional)
sparsity, float: Sparsify the resulting expression to this percentage (optional)
Returns: Array[Context]
"""
resourcePath = '/expressions/contexts'
method = 'POST'
queryParams = {}
headerParams = {'Accept': 'Application/json', 'Content-Type': 'application/json'}
postData = None
queryParams['retina_name'] = retina_name
queryParams['start_index'] = start_index
queryParams['max_results'] = max_results
queryParams['sparsity'] = sparsity
queryParams['get_fingerprint'] = get_fingerprint
postData = body
response = self.apiClient._callAPI(resourcePath, method, queryParams, postData, headerParams)
return [context.Context(**r) for r in response.json()]
def getSimilarTermsForExpressionContext(self, retina_name, body, context_id=None, pos_type=None, get_fingerprint=None, start_index=0, max_results=10, sparsity=1.0):
"""Get similar terms for the contexts of an expression
Args:
retina_name, str: The retina name (required)
body, ExpressionOperation: The JSON encoded expression to be evaluated (required)
context_id, int: The identifier of a context (optional)
pos_type, str: Part of speech (optional)
get_fingerprint, bool: Configure if the fingerprint should be returned as part of the results (optional)
start_index, int: The start-index for pagination (optional)
max_results, int: Max results per page (optional)
sparsity, float: Sparsify the resulting expression to this percentage (optional)
Returns: Array[Term]
"""
resourcePath = '/expressions/similar_terms'
method = 'POST'
queryParams = {}
headerParams = {'Accept': 'Application/json', 'Content-Type': 'application/json'}
postData = None
queryParams['retina_name'] = retina_name
queryParams['context_id'] = context_id
queryParams['start_index'] = start_index
queryParams['max_results'] = max_results
queryParams['pos_type'] = pos_type
queryParams['sparsity'] = sparsity
queryParams['get_fingerprint'] = get_fingerprint
postData = body
response = self.apiClient._callAPI(resourcePath, method, queryParams, postData, headerParams)
return [term.Term(**r) for r in response.json()]
def resolveBulkExpression(self, retina_name, body, sparsity=1.0):
"""Bulk resolution of expressions
Args:
retina_name, str: The retina name (required)
body, ExpressionOperation: The JSON encoded expression to be evaluated (required)
sparsity, float: Sparsify the resulting expression to this percentage (optional)
Returns: Array[Fingerprint]
"""
resourcePath = '/expressions/bulk'
method = 'POST'
queryParams = {}
headerParams = {'Accept': 'Application/json', 'Content-Type': 'application/json'}
postData = None
queryParams['retina_name'] = retina_name
queryParams['sparsity'] = sparsity
postData = body
response = self.apiClient._callAPI(resourcePath, method, queryParams, postData, headerParams)
return [fingerprint.Fingerprint(**r) for r in response.json()]
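# A sketch of a bulk call: one Fingerprint is returned per input expression
# (the body shape shown here is a hypothetical JSON list of expressions):
#
#     body = '[{"term": "apple"}, {"term": "orange"}]'
#     fingerprints = api.resolveBulkExpression("en_associative", body)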
def getContextsForBulkExpression(self, retina_name, body, get_fingerprint=None, start_index=0, max_results=5, sparsity=1.0):
"""Bulk get contexts for input expressions
Args:
retina_name, str: The retina name (required)
body, ExpressionOperation: The JSON encoded expression to be evaluated (required)
get_fingerprint, bool: Configure if the fingerprint should be returned as part of the results (optional)
start_index, int: The start-index for pagination (optional)
max_results, int: Max results per page (optional)
sparsity, float: Sparsify the resulting expression to this percentage (optional)
Returns: Array[Context]
"""
resourcePath = '/expressions/contexts/bulk'
method = 'POST'
queryParams = {}
headerParams = {'Accept': 'Application/json', 'Content-Type': 'application/json'}
postData = None
queryParams['retina_name'] = retina_name
queryParams['start_index'] = start_index
queryParams['max_results'] = max_results
queryParams['sparsity'] = sparsity
queryParams['get_fingerprint'] = get_fingerprint
postData = body
response = self.apiClient._callAPI(resourcePath, method, queryParams, postData, headerParams)
return [[context.Context(**c) for c in r] for r in response.json()]
def getSimilarTermsForBulkExpressionContext(self, retina_name, body, context_id=None, pos_type=None, get_fingerprint=None, start_index=0, max_results=10, sparsity=1.0):
"""Bulk get similar terms for input expressions
Args:
retina_name, str: The retina name (required)
body, ExpressionOperation: The JSON encoded expression to be evaluated (required)
context_id, int: The identifier of a context (optional)
pos_type, str: Part of speech (optional)
get_fingerprint, bool: Configure if the fingerprint should be returned as part of the results (optional)
start_index, int: The start-index for pagination (optional)
max_results, int: Max results per page (optional)
sparsity, float: Sparsify the resulting expression to this percentage (optional)
Returns: Array[Term]
"""
resourcePath = '/expressions/similar_terms/bulk'
method = 'POST'
queryParams = {}
headerParams = {'Accept': 'Application/json', 'Content-Type': 'application/json'}
postData = None
queryParams['retina_name'] = retina_name
queryParams['context_id'] = context_id
queryParams['start_index'] = start_index
queryParams['max_results'] = max_results
queryParams['pos_type'] = pos_type
queryParams['sparsity'] = sparsity
queryParams['get_fingerprint'] = get_fingerprint
postData = body
response = self.apiClient._callAPI(resourcePath, method, queryParams, postData, headerParams)
return [[term.Term(**t) for t in r] for r in response.json()]

# Source: bluelightning32/coquille / autoload/coquille.py (license: ISC)
from __future__ import print_function
from __future__ import unicode_literals
from __future__ import division
import vim
import re
import xml.etree.ElementTree as ET
import coqtop as CT
import project_file
from collections import deque
import vimbufsync
vimbufsync.check_version([0,1,0], who="coquille")
# Define unicode in python 3
if isinstance(__builtins__, dict):
unicode = __builtins__.get('unicode', str)
else:
unicode = getattr(__builtins__, 'unicode', str)

# Define long in python 3 (it was merged into int) so that the isinstance
# check in vim_repr below does not raise a NameError.
try:
    long
except NameError:
    long = int
# Cache whether vim has a bool type. vim.eval() returns its result as a
# string ("0" or "1"), which is always truthy, so convert to int before use.
vim_has_bool = int(vim.eval("exists('v:false')"))
def vim_repr(value):
"Converts a python value into a vim value"
if isinstance(value, bool):
if value:
if vim_has_bool:
return "v:true"
else:
return "1"
else:
if vim_has_bool:
return "v:false"
else:
return "0"
if isinstance(value, int) or isinstance(value, long):
return str(value)
if isinstance(value, bytes):
value = value.decode("utf-8")
if isinstance(value, unicode):
return value.replace("'", "''")
return "unknown"
# Convert 0-based (line, col, byte) tuples into 1-based lists in the form
# [line, byte]
def make_vim_range(start, stop):
return [[start[0] + 1, start[2] + 1], [stop[0] + 1, stop[2] + 1]]
# Return a list of all windows that are displaying the buffer, along with their
# current cursor positions.
def get_cursors_for_buffer(vim_buffer):
result = []
for win in vim.windows:
if win.buffer is vim_buffer:
result.append((win, win.cursor))
return result
# Takes the list of window cursor positions from get_cursor_for_buffer. If the
# cursor position is now lower for any of the windows, they are entered to
# rescroll the window.
def fix_scroll(cursors):
refresh_now = None
for win, (row, col) in cursors:
if win.cursor[0] < row or win.cursor[1] < col:
win.vars['coquille_needs_scroll_fix'] = 1
if win.tabpage is vim.current.tabpage:
vim.command("call coquille#FixWindowScrollTabWin(%d, %d)" %
(win.tabpage.number, win.number))
# All the python side state associated with the vim source buffer
class BufferState(object):
# Dict mapping source buffer id to BufferState
source_mapping = {}
@classmethod
def lookup_bufid(cls, bufid):
# For convenience, the vim script passes vim.eval("l:bufid") to this
# function, and vim.eval() returns a string.
bufid = int(bufid)
if bufid in cls.source_mapping:
state = cls.source_mapping[bufid]
else:
state = BufferState(vim.buffers[bufid])
cls.source_mapping[bufid] = state
if state.sync_vars():
return state
else:
del cls.source_mapping[bufid]
return None
def __init__(self, source_buffer):
self.source_buffer = source_buffer
self.info_buffer = None
self.goal_buffer = None
#: See vimbufsync ( https://github.com/def-lkb/vimbufsync )
self.saved_sync = None
self.coq_top = CT.CoqTop()
def sync_vars(self):
"Updates python member variables based on the vim variables"
if not self.source_buffer.valid:
return False
if self.source_buffer.options["filetype"] != b"coq":
return False
goal_bufid = self.source_buffer.vars.get("coquille_goal_bufid", -1)
if goal_bufid != -1:
self.goal_buffer = vim.buffers[goal_bufid]
else:
self.goal_buffer = None
info_bufid = self.source_buffer.vars.get("coquille_info_bufid", -1)
if info_bufid != -1:
self.info_buffer = vim.buffers[info_bufid]
else:
self.info_buffer = None
return True
###################
# synchronization #
###################
def sync(self):
curr_sync = vimbufsync.sync(self.source_buffer)
if not self.saved_sync or curr_sync.buf() != self.saved_sync.buf():
if self.coq_top.get_active_command_count() > 1:
self._reset()
else:
(line, col) = self.saved_sync.pos()
# vim indexes from lines 1, coquille from 0
self.rewind_to(line - 1, col - 1)
self.saved_sync = curr_sync
def _reset(self):
self.coq_top.kill_coqtop()
self.saved_sync = None
self.reset_color()
#####################
# exported commands #
#####################
def kill_coqtop(self):
if self is None:
return
self._reset()
def goto_last_sent_dot(self):
last = self.coq_top.get_last_active_command()
(line, col) = ((0,1) if not last else last.end)
vim.current.window.cursor = (line + 1, col)
def coq_rewind(self, steps=1):
self.clear_info()
# Do not allow the root state to be rewound
if steps < 1 or self.coq_top.get_active_command_count() < 2:
return
if self.coq_top.coqtop is None:
print("Error: Coqtop isn't running. Are you sure you called :CoqLaunch?")
return
response = self.coq_top.rewind(steps)
if response is None:
vim.command("call coquille#KillSession()")
print('ERROR: the Coq process died')
return
self.refresh()
# steps != 1 means that either the user called "CoqToCursor" or just started
# editing in the "locked" zone. In both these cases we don't want to move
# the cursor.
if (steps == 1 and vim.eval('g:coquille_auto_move') == 'true'):
self.goto_last_sent_dot()
def coq_to_cursor(self):
if self.coq_top.coqtop is None:
print("Error: Coqtop isn't running. Are you sure you called :CoqLaunch?")
return
self.sync()
(cline, ccol) = vim.current.window.cursor
cline -= 1
last = self.coq_top.get_last_active_command()
last_sent = ((0,0,0) if not last else last.end)
(line, col, byte) = last_sent
if cline < line or (cline == line and ccol < col):
# Add 1 to the column to leave whatever is at the
# cursor as sent.
self.rewind_to(cline, ccol + 1)
else:
send_queue = deque([])
while True:
r = self._get_message_range(last_sent)
if (r is not None
and (r[1][0], r[1][1]) <= (cline, ccol + 1)):
last_sent = r[1]
send_queue.append(r)
else:
break
self.send_until_fail(send_queue)
def coq_next(self):
if self.coq_top.coqtop is None:
print("Error: Coqtop isn't running. Are you sure you called :CoqLaunch?")
return
self.sync()
last = self.coq_top.get_last_active_command()
last_sent = ((0,0,0) if not last else last.end)
message_range = self._get_message_range(last_sent)
if message_range is None: return
send_queue = deque([])
send_queue.append(message_range)
self.send_until_fail(send_queue)
if (vim.eval('g:coquille_auto_move') == 'true'):
self.goto_last_sent_dot()
def coq_raw_query(self, *args):
self.clear_info()
if self.coq_top.coqtop is None:
print("Error: Coqtop isn't running. Are you sure you called :CoqLaunch?")
return
raw_query = ' '.join(args)
response = self.coq_top.query(raw_query)
if response is None:
vim.command("call coquille#KillSession()")
print('ERROR: the Coq process died')
return
info_msg = self.coq_top.get_messages()
self.show_info(info_msg)
def launch_coq(self, *args):
use_project_args = self.source_buffer.vars.get(
"coquille_append_project_args",
vim.vars.get("coquille_append_project_args", 0))
if use_project_args:
# Vim passes the args as a tuple
args = list(args)
args.extend(project_file.find_and_parse_file(
self.source_buffer.name))
return self.coq_top.restart_coq(*args)
def debug(self):
commands = self.coq_top.get_active_commands()
print("encountered dots = [")
for (line, col) in commands:
print(" (%d, %d) ; " % (line, col))
print("]")
#####################################
# IDE tools: Goal, Infos and colors #
#####################################
def refresh(self):
last_info = [None]
def update():
self.reset_color()
vim.command('redraw')
new_info = self.coq_top.get_messages()
if last_info[0] != new_info:
self.show_info(new_info)
last_info[0] = new_info
# It seems that coqtop needs some kind of call like Status or Goal to
# trigger it to start processing all the commands that have been added.
# So show_goal needs to be called before waiting for all the unchecked
# commands finished.
response = self.coq_top.goals(update)
if self.show_goal(response):
while self.coq_top.has_unchecked_commands():
self.coq_top.process_response()
update()
update()
def show_goal(self, response):
# Temporarily make the goal buffer modifiable
modifiable = self.goal_buffer.options["modifiable"]
self.goal_buffer.options["modifiable"] = True
try:
cursors = get_cursors_for_buffer(self.goal_buffer)
del self.goal_buffer[:]
if response is None:
return False
goals = response.val
if goals is None:
self.goal_buffer[0] = 'No goals.'
return True
sub_goals = goals.fg
msg_format = '{0} subgoal{1}'
show_hyps = True
if not sub_goals:
show_hyps = False
sub_goals = []
for (before, after) in goals.bg:
sub_goals.extend(reversed(before))
sub_goals.extend(after)
if sub_goals:
msg_format = ('This subproof is complete, but there {2} {0}'
' unfocused goal{1}')
if not sub_goals:
msg_format = 'No more subgoals.'
nb_subgoals = len(sub_goals)
self.goal_buffer[0] = msg_format.format(nb_subgoals,
'' if nb_subgoals == 1 else 's',
'is' if nb_subgoals == 1 else 'are')
self.goal_buffer.append([''])
for idx, sub_goal in enumerate(sub_goals):
_id = sub_goal.id
hyps = sub_goal.hyp
ccl = sub_goal.ccl
if show_hyps:
# we print the environment only for the current subgoal
for hyp in hyps:
lst = map(lambda s: s.encode('utf-8'), hyp.split('\n'))
self.goal_buffer.append(list(lst))
show_hyps = False
self.goal_buffer.append('')
self.goal_buffer.append('======================== ( %d / %d )' % (idx+1 , nb_subgoals))
lines = map(lambda s: s.encode('utf-8'), ccl.split('\n'))
self.goal_buffer.append(list(lines))
self.goal_buffer.append('')
fix_scroll(cursors)
finally:
self.goal_buffer.options["modifiable"] = modifiable
return True
def show_info(self, message):
# Temporarily make the info buffer modifiable
modifiable = self.info_buffer.options["modifiable"]
self.info_buffer.options["modifiable"] = True
try:
cursors = get_cursors_for_buffer(self.info_buffer)
del self.info_buffer[:]
lst = []
if message is not None:
lst = list(map(lambda s: s.encode('utf-8'),
message.split('\n')))
if len(lst) >= 1:
# If self.info_buffers was a regular list, the del statement
# above would have deleted all the lines. However with a vim
# buffer, that actually leaves 1 blank line. So now for setting
# the new contents, the very first line has to be overwritten,
# then the rest can be appended.
#
# Also note that if info_buffer was a list, extend would be the
# appropriate function. However info_buffer does not have an
# extend function, and its append mostly behaves like extend.
self.info_buffer[0] = lst[0]
self.info_buffer.append(lst[1:])
fix_scroll(cursors)
finally:
self.info_buffer.options["modifiable"] = modifiable
def clear_info(self):
self.coq_top.clear_messages()
self.show_info(None)
def convert_offset(self, range_start, offset, range_end):
message = self._between(range_start, range_end)
(line, col, byte) = _pos_from_offset(range_start[1], range_start[2],
message, offset)
return (line + range_start[0], col, byte)
def reset_color(self):
sent = []
checked = []
warnings = []
errors = []
prev_end = None
sent_start = None
checked_start = None
commands = self.coq_top.get_commands()
for c in commands:
if c.state in (CT.Command.REVERTED, CT.Command.ABANDONED):
break
if c.state == CT.Command.SENT:
if sent_start is None:
# Start a sent range
sent_start = prev_end
elif sent_start is not None:
# Finish a sent range
sent.append(make_vim_range(sent_start, prev_end))
sent_start = None
# Include all the processed commands as checked, even if they
# produced a warning or error message. A subrange will also be
# marked as a warning or error, but that will override the checked
# group.
if c.state == CT.Command.PROCESSED:
if checked_start is None:
# Start a checked range
checked_start = prev_end
elif checked_start is not None:
# Finish a checked range
checked.append(make_vim_range(checked_start, prev_end))
checked_start = None
prev_end = c.end
if sent_start is not None:
# Finish a sent range
sent.append(make_vim_range(sent_start, prev_end))
if checked_start is not None:
# Finish a checked range
checked.append(make_vim_range(checked_start, prev_end))
prev_end = None
for c in commands:
if c.msg_type != CT.Command.NONE:
# Normalize the start and stop positions, if it hasn't been done yet.
if c.msg_start_offset is not None and c.msg_start is None:
c.msg_start = self.convert_offset(prev_end,
c.msg_start_offset,
c.end)
if c.msg_stop_offset is not None and c.msg_stop is None:
c.msg_stop = self.convert_offset(prev_end,
c.msg_stop_offset,
c.end)
start = c.msg_start
stop = c.msg_stop
if start == stop:
start = prev_end
stop = c.end
if c.msg_type == CT.Command.WARNING:
warnings.append(make_vim_range(start, stop))
else:
errors.append(make_vim_range(start, stop))
prev_end = c.end
self.source_buffer.vars['coquille_sent'] = sent
self.source_buffer.vars['coquille_checked'] = checked
self.source_buffer.vars['coquille_warnings'] = warnings
self.source_buffer.vars['coquille_errors'] = errors
vim.command("call coquille#SyncBufferColors(%d)" %
self.source_buffer.number)
def rewind_to(self, line, col):
""" Go backwards to the specified position
line and col are 0-based and point to the first position to
remove from the sent region.
"""
if self.coq_top.coqtop is None:
print('Internal error: vimbufsync is still being called '
'but coqtop appears to be down.')
print('Please report.')
return
last = self.coq_top.get_last_active_command()
if (last and (last.end[0], last.end[1]) <= (line, col)):
# The caller asked to rewind to a position after what has been
# processed. This quick path exits without having to search the
# state list.
return
predicate = lambda x: (x.end[0], x.end[1]) <= (line, col)
commands = self.coq_top.get_active_commands()
lst = filter(predicate, commands)
steps = len(commands) - len(list(lst))
if steps != 0:
self.coq_rewind(steps)
#############################
# Communication with Coqtop #
#############################
def send_until_fail(self, send_queue):
"""
Tries to send every message in [send_queue] to Coq, stops at the first
error.
When this function returns, [send_queue] is empty.
"""
self.clear_info()
# Start sending on a background thread
self.coq_top.send_async(send_queue)
# Redraw the screen when the background thread makes progress
while True:
result = self.coq_top.wait_for_result()
if result & CT.CoqTop.COMMAND_CHANGED:
self.reset_color()
vim.command('redraw')
if result & CT.CoqTop.MESSAGE_RECEIVED:
new_info = self.coq_top.get_messages()
self.show_info(new_info)
if result & CT.CoqTop.SEND_DONE:
break
self.coq_top.finish_send()
self.refresh()
#################
# Miscellaneous #
#################
# col_offset is a character offset, not byte offset
def _get_remaining_line(self, line, col_offset):
s = self.source_buffer[line]
if not isinstance(s, unicode):
s = s.decode("utf-8")
return s[col_offset:]
def _between(self, begin, end):
"""
Returns a string corresponding to the portion of the buffer between the
[begin] and [end] positions.
"""
(bline, bcol, bbyte) = begin
(eline, ecol, ebyte) = end
acc = ""
for line, str in enumerate(self.source_buffer[bline:eline + 1]):
if not isinstance(str, unicode):
str = str.decode("utf-8")
start = bcol if line == 0 else 0
stop = ecol + 1 if line == eline - bline else len(str)
acc += str[start:stop] + '\n'
return acc
# Convert a pos from (line, col) to (line, col, byte_offset)
#
# The byte_offset is relative to the start of the line. It is the same as
# col, unless there are non-ascii characters.
#
# line, col, and byte_offset are all 0-indexed.
def _add_byte_offset(self, pos):
(line, col) = pos
s = self.source_buffer[line]
if not isinstance(s, unicode):
s = s.decode("utf-8")
return (line, col, len(s[:col].encode("utf-8")))
def _get_message_range(self, after):
""" See [_find_next_chunk] """
(line, col, byte) = after
end_pos = self._find_next_chunk(line, col)
if end_pos is None:
return None
else:
end_pos = self._add_byte_offset(end_pos)
(eline, ecol, ebyte) = end_pos
message = self._between(after,
(eline, ecol - 1, ebyte - 1))
return (message, end_pos)
# A bullet is:
# - One or more '-'
# - One or more '+'
# - One or more '*'
# - Exactly 1 '{' (additional ones are parsed as separate statements)
# - Exactly 1 '}' (additional ones are parsed as separate statements)
bullets = re.compile(r"-+|\++|\*+|{|}")
def _find_next_chunk(self, line, col):
"""
Returns the position of the next chunk dot after a certain position.
That can either be a bullet if we are in a proof, or "a string" terminated
by a dot (outside of a comment, and not denoting a path).
"""
blen = len(self.source_buffer)
# We start by stripping all whitespace (including \n) from the beginning of
# the chunk.
while line < blen:
line_val = self.source_buffer[line]
if not isinstance(line_val, unicode):
line_val = line_val.decode("utf-8")
while col < len(line_val) and line_val[col] in (' ', '\t'):
col += 1
if col < len(line_val) and line_val[col] not in (' ', '\t'):
break
line += 1
col = 0
if line >= blen: return
# Then we check if the first character of the chunk is a bullet.
# Initially I did that only when I was sure to be in a proof (by looking in
# [encountered_dots] whether I was after a "collapsable" chunk or not), but
# 1/ that didn't play well with coq_to_cursor (as the "collapsable chunk"
# might not have been sent/detected yet).
# 2/ The bullet chars can never be used at the *beginning* of a chunk
#    outside of a proof. So the check was unnecessary.
bullet_match = self.bullets.match(line_val, col)
if bullet_match:
return (line, bullet_match.end())
# We might have a comment before the bullet; we should be skipping it and
# keep on looking.
tail_len = len(line_val) - col
if ((tail_len - 1 > 0) and line_val[col] == '('
and line_val[col + 1] == '*'):
com_end = self._skip_comment(line, col + 2, 1)
if not com_end: return
(line, col) = com_end
return self._find_next_chunk(line, col)
# If the chunk doesn't start with a bullet, we look for a dot.
dot = self._find_dot_after(line, col)
if dot:
# Return the position one after the dot
return (dot[0], dot[1] + 1)
else:
return None
def _find_dot_after(self, line, col):
"""
Returns the position of the next "valid" dot after a certain position.
Valid here means: recognized by Coq as terminating an input, so dots in
comments, strings or ident paths are not valid.
"""
if line >= len(self.source_buffer): return
s = self._get_remaining_line(line, col)
dot_pos = s.find('.')
com_pos = s.find('(*')
str_pos = s.find('"')
if com_pos == -1 and dot_pos == -1 and str_pos == -1:
# Nothing on this line
return self._find_dot_after(line + 1, 0)
elif dot_pos == -1 or (com_pos > - 1 and dot_pos > com_pos) or (str_pos > - 1 and dot_pos > str_pos):
if str_pos == -1 or (com_pos > -1 and str_pos > com_pos):
# We see a comment opening before the next dot
com_end = self._skip_comment(line, com_pos + 2 + col, 1)
if not com_end: return
(line, col) = com_end
return self._find_dot_after(line, col)
else:
# We see a string starting before the next dot
str_end = self._skip_str(line, str_pos + col + 1)
if not str_end: return
(line, col) = str_end
return self._find_dot_after(line, col)
elif dot_pos < len(s) - 1 and s[dot_pos + 1] != ' ':
# Sometimes dots are used to access module fields; we don't want to stop
# just after the module name.
# Example: [Require Import Coq.Arith]
return self._find_dot_after(line, col + dot_pos + 1)
elif dot_pos + col > 0 and self._get_remaining_line(line, col + dot_pos - 1)[0] == '.':
# FIXME? There might be a cleaner way to express this.
# We don't want to capture ".."
if dot_pos + col > 1 and self._get_remaining_line(line, col + dot_pos - 2)[0] == '.':
# But we want to capture "..."
return (line, dot_pos + col)
else:
return self._find_dot_after(line, col + dot_pos + 1)
else:
return (line, dot_pos + col)
# TODO? factorize [_skip_str] and [_skip_comment]
def _skip_str(self, line, col):
"""
Used when we encounter the start of a string before a valid dot (see
[_find_dot_after]).
Returns the position of the end of the string.
"""
while True:
if line >= len(self.source_buffer): return
s = self._get_remaining_line(line, col)
str_end = s.find('"')
if str_end > -1:
return (line, col + str_end + 1)
line += 1
col = 0
def _skip_comment(self, line, col, nb_left):
"""
Used when we encounter the start of a comment before a valid dot (see
[_find_dot_after]).
Returns the position of the end of the comment.
"""
while nb_left > 0:
if line >= len(self.source_buffer): return None
s = self._get_remaining_line(line, col)
com_start = s.find('(*')
com_end = s.find('*)')
if com_end > -1 and (com_end < com_start or com_start == -1):
col += com_end + 2
nb_left -= 1
elif com_start > -1:
col += com_start + 2
nb_left += 1
else:
line += 1
col = 0
return (line, col)
def _empty_range():
return [ { 'line': 0, 'col': 0}, { 'line': 0, 'col': 0} ]
# Converts a byte offset into a message into a (line, col, byte) tuple
#
# msg is a unicode string the offset is relative to. col is the column where
# msg starts, and byte is the byte offset where it starts.
#
# All indecies are 0 based.
def _pos_from_offset(col, byte, msg, offset):
str = msg.encode("utf-8")[:offset].decode("utf-8")
lst = str.split('\n')
line = len(lst) - 1
col = len(lst[-1]) + (col if line == 0 else 0)
byte = len(lst[-1].encode("utf-8")) + (byte if line == 0 else 0)
return (line, col, byte)
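# Worked example: with msg = u"héllo\nworld" and col = 2, byte = 2, a byte
# offset of 7 covers "héllo\n" (the accented character is 2 bytes in UTF-8),
# so _pos_from_offset(2, 2, msg, 7) returns (1, 0, 0): line 1, start of "world".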

# Source: LabKey/labkey-api-python / samples/query_examples.py
#
# Copyright (c) 2015-2018 LabKey Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
Examples using the Query.py API
Sample data from the New Study tutorial on labkey.org:
https://www.labkey.org/Documentation/wiki-page.view?name=studySetupManual
"""
from labkey.api_wrapper import APIWrapper
from labkey.exceptions import (
RequestError,
QueryNotFoundError,
ServerContextError,
ServerNotFoundError,
)
from labkey.query import Pagination, QueryFilter
from requests.exceptions import Timeout
import copy
print("Create a server context")
labkey_server = "localhost:8080"
project_name = "ModuleAssayTest" # Project folder name
context_path = "labkey"
api = APIWrapper(labkey_server, project_name, context_path, use_ssl=False)
schema = "lists"
table = "Demographics"
column1 = "Group Assignment"
column2 = "Participant ID"
###################
# Test basic select_rows
###################
result = api.query.select_rows(schema, table)
if result is not None:
print(result["rows"][0])
print("select_rows: There are " + str(result["rowCount"]) + " rows.")
else:
print("select_rows: Failed to load results from " + schema + "." + table)
###################
# Test error handling
###################
# catch base error
try:
result = api.query.select_rows(schema, "badtable")
print(result)
except RequestError:
print("Caught base error")
# catch table not found error
try:
result = api.query.select_rows(schema, "badtable")
print(result)
except QueryNotFoundError:
print("Caught bad table")
# catch schema error
try:
result = api.query.select_rows("badSchema", table)
print(result)
except QueryNotFoundError:
print("Caught bad schema")
# catch SSL error
ssl_api = APIWrapper(labkey_server, project_name, context_path, use_ssl=True)
try:
result = ssl_api.query.select_rows(schema, table)
print(result)
except ServerContextError:
print("Caught SSL Error")
# catch bad context path
bad_api = APIWrapper(labkey_server, project_name, "", use_ssl=False)
try:
result = bad_api.query.select_rows(schema, table)
print(result)
except ServerNotFoundError:
print("Caught context path")
# catch bad folder path error
bad_api = APIWrapper(labkey_server, "bad_project_name", context_path, use_ssl=False)
try:
result = bad_api.query.select_rows(schema, table)
print(result)
except ServerNotFoundError:
print("Caught bad folder name")
###################
# Test some parameters of select_rows
###################
result = api.query.select_rows(
schema,
table,
max_rows=5,
offset=10,
include_total_count=True,
include_details_column=True,
include_update_column=True,
)
if result is not None:
print("select_rows: There are " + str(len(result["rows"])) + " rows.")
print("select_rows: There are " + str(result["rowCount"]) + " total rows.")
print("select_rows: Response API version [" + str(result["formatVersion"]) + "].")
column_statement = "select_rows: Included columns: "
for column in result["columnModel"]:
column_statement = column_statement + " " + column["header"] + ", "
print(column_statement)
row = result["rows"][0]
dataIndex = result["metaData"]["id"]
print("select_rows: The first row Key is: " + str(row[dataIndex]))
else:
print("select_rows: Failed to load results from " + schema + "." + table)
###################
# Test get all results
###################
result = api.query.select_rows(schema, table, show_rows=Pagination.ALL, include_total_count=True)
if result is not None:
print("select_rows: There are " + str(len(result["rows"])) + " rows.")
print("select_rows: There are " + str(result["rowCount"]) + " total rows.")
else:
print("select_rows: Failed to load results from " + schema + "." + table)
###################
# Test sort and select columns
###################
result = api.query.select_rows(
schema,
table,
max_rows=5,
offset=10,
include_total_count=False,
columns=",".join([column1, column2]),
sort=column1 + ", -" + column2,
) # use '-' to sort descending
if result is not None:
print("select_rows: There are " + str(result["rowCount"]) + " rows.")
print("select_rows: " + table)
for row in result["rows"]:
print("\t" + str(row[column1]) + ", " + str(row[column2]))
else:
print("select_rows: Failed to load results from " + schema + "." + table)
###################
# Test basic filters
###################
filters = [
QueryFilter(column1, "Group 2: HIV-1 Negative"),
QueryFilter("Height (inches)", "50, 70", QueryFilter.Types.BETWEEN),
QueryFilter("Country", "Germany;Uganda", QueryFilter.Types.IN),
]
result = api.query.select_rows(schema, table, filter_array=filters)
if result is not None:
print("select_rows: There are " + str(result["rowCount"]) + " rows.")
else:
print("select_rows: Failed to load results from " + schema + "." + table)
###################
# Test update_rows
###################
rows = result["rows"]
test_row_idx = 1
original_value = rows[test_row_idx]
column3 = "Country"
test_row = {"Key": original_value["Key"], column3: "Pangea"}
print("update_rows: original value [ " + original_value[column3] + " ]")
update_result = api.query.update_rows(schema, table, [test_row])
print("update_rows: updated value [ " + update_result["rows"][0][column3] + " ]")
update_result = api.query.update_rows(schema, table, [original_value])
print("update_rows: reset value [ " + update_result["rows"][0][column3] + " ]")
###################
# Test insert_rows & delete_rows
###################
test_row = copy.copy(original_value)
test_row["Key"] = None
test_row["Country"] = "Antarctica"
all_rows = api.query.select_rows(schema, table)
print("Insert Rows: Initials row count [ " + str(all_rows["rowCount"]) + " ]")
insert_result = api.query.select_rows(schema, table, [test_row])
print("Insert Rows: New rowId [ " + str(insert_result["rows"][0]["Key"]) + " ]")
all_rows = api.query.select_rows(schema, table)
print("Insert Rows: after row count [ " + str(all_rows["rowCount"]) + " ]")
test_row = insert_result["rows"][0]
delete_result = api.query.delete_rows(schema, table, [test_row])
print("Delete Rows: deleted rowId [ " + str(delete_result["rows"][0]["Key"]) + " ]")
all_rows = api.query.select_rows(schema, table)
print("Delete Rows: after row count [ " + str(all_rows["rowCount"]) + " ]")
###################
# Test truncate_table
###################
truncate_info = api.query.truncate_table(schema, table)
print("Delete all rows in table: [ " + str(truncate_info["deletedRows"]) + " ] rows deleted")
###################
# Test execute_sql
###################
sql = "select * from lists.demographics"
# base execute_sql
sql_result = api.query.execute_sql(schema, sql)
if sql_result is not None:
print("execute_sql: There are " + str(sql_result["rowCount"]) + " rows.")
else:
print("execute_sql: Failed to load results from " + schema + "." + table)
# paging
sql_result = api.query.execute_sql(
schema, sql, max_rows=5, offset=10, sort=(column1 + ", -" + column2)
)
if sql_result is not None:
print("execute_sql: There are " + str(len(sql_result["rows"])) + " rows.")
print("execute_sql: There are " + str(sql_result["rowCount"]) + " total rows.")
print("execute_sql: " + table)
for row in sql_result["rows"]:
print("\t" + str(row[column1]) + ", " + str(row[column2]))
else:
print("execute_sql: Failed to load results from " + schema + "." + table)
# Save query within the session
sql_result = api.query.execute_sql(schema, sql, max_rows=5, offset=10, save_in_session=True)
print("execute_sql: query saved as [ " + sql_result["queryName"] + " ]")
# set timeout
try:
sql_result = api.query.execute_sql(schema, sql, timeout=0.001)
print("execute_sql did not timeout")
except Timeout:
print("Caught Timeout")
###################
# Test QC State Definitions
###################
# Create new QC state definitions
qc_states = [
{
"label": "needs verification",
"description": "please look at this",
"publicData": False,
},
{"label": "approved", "publicData": True},
]
result = api.query.insert_rows("core", "qcstate", qc_states)
for row in result["rows"]:
print("Created QC state: " + row["label"])
result = api.query.select_rows("core", "qcstate")
# Update a QC state definition
original_value = result["rows"][1]
test_row = {"RowId": original_value["RowId"], "label": "Updated Label"}
update_result = api.query.update_rows("core", "qcstate", [test_row])
print("Updated label: approved -> " + update_result["rows"][0]["label"])
# Delete all unused QC state definitions
result = api.query.select_rows("core", "qcstate")
for row in result["rows"]:
print("Deleting QC state: " + row["Label"])
try:
api.query.delete_rows("core", "qcstate", [row])
except ServerContextError as e:
print(e.message)
| apache-2.0 | -4,380,558,403,375,946,000 | 29.921569 | 97 | 0.643733 | false |
8l/beri | cheritest/trunk/tests/fpu/test_raw_fpu_cvt_l_s_d64.py | 2 | 2312 | #-
# Copyright (c) 2013 Michael Roe
# All rights reserved.
#
# This software was developed by SRI International and the University of
# Cambridge Computer Laboratory under DARPA/AFRL contract FA8750-10-C-0237
# ("CTSRD"), as part of the DARPA CRASH research programme.
#
# @BERI_LICENSE_HEADER_START@
#
# Licensed to BERI Open Systems C.I.C. (BERI) under one or more contributor
# license agreements. See the NOTICE file distributed with this work for
# additional information regarding copyright ownership. BERI licenses this
# file to you under the BERI Hardware-Software License, Version 1.0 (the
# "License"); you may not use this file except in compliance with the
# License. You may obtain a copy of the License at:
#
# http://www.beri-open-systems.org/legal/license-1-0.txt
#
# Unless required by applicable law or agreed to in writing, Work distributed
# under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR
# CONDITIONS OF ANY KIND, either express or implied. See the License for the
# specific language governing permissions and limitations under the License.
#
# @BERI_LICENSE_HEADER_END@
#
#
# Test single-precision round to long word when the FPU is in 64 bit mode
#
from beritest_tools import BaseBERITestCase
from nose.plugins.attrib import attr
class test_raw_fpu_cvt_l_s_d64(BaseBERITestCase):
@attr('float64')
def test_raw_fpu_cvt_l_s_d64_1(self):
'''Test single precision convert of -0.75'''
self.assertRegisterEqual(self.MIPS.a0, 0xffffffffffffffff, "-0.75 did not round down to -1")
@attr('float64')
def test_raw_fpu_cvt_l_s_d64_2(self):
'''Test single precision convert of -0.5'''
self.assertRegisterEqual(self.MIPS.a1, 0, "-0.5 did not round up to 0")
@attr('float64')
def test_raw_fpu_cvt_l_s_d64_3(self):
'''Test single precision convert of -0.25'''
self.assertRegisterEqual(self.MIPS.a2, 0, "-0.25 did not round up to 0")
@attr('float64')
def test_raw_fpu_cvt_l_s_d64_4(self):
'''Test single precision convert of 0.5'''
self.assertRegisterEqual(self.MIPS.a3, 0, "0.5 did not round down to 0")
@attr('float64')
def test_raw_fpu_cvt_l_s_d64_5(self):
'''Test single precision convert of 1.5'''
self.assertRegisterEqual(self.MIPS.a4, 2, "1.5 did not round up to 2")
| apache-2.0 | -3,856,973,387,873,070,600 | 37.533333 | 94 | 0.713668 | false |
blink1073/image_inspector | setup.py | 1 | 1488 | """Setup script for image_inspector package.
"""
DISTNAME = 'iminspector'
DESCRIPTION = 'Image Interaction widgets and viewer.'
LONG_DESCRIPTION = open('README.rst', 'rb').read().decode('utf-8')
MAINTAINER = 'Steven Silvester'
MAINTAINER_EMAIL = '[email protected]'
URL = 'http://github.com/blink1073/image_inspector'
LICENSE = 'MIT'
REQUIRES = ["numpy (>= 1.7.1)", "matplotlib (>= 1.4)"]
CLASSIFIERS = """\
Intended Audience :: Developers
Intended Audience :: Science/Research
License :: OSI Approved :: MIT License
Operating System :: OS Independent
Programming Language :: Python
Programming Language :: Python :: 2.7
Programming Language :: Python :: 3.3
Programming Language :: Python :: 3.4
Topic :: Scientific/Engineering
Topic :: Software Development
"""
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('iminspector/__init__.py', 'rb') as fid:
for line in fid:
line = line.decode('utf-8')
if line.startswith('__version__'):
version = line.strip().split()[-1][1:-1]
break
setup(
name=DISTNAME,
version=version,
maintainer=MAINTAINER,
maintainer_email=MAINTAINER_EMAIL,
url=URL,
download_url=URL,
license=LICENSE,
platforms=["Any"],
description=DESCRIPTION,
long_description=LONG_DESCRIPTION,
classifiers=list(filter(None, CLASSIFIERS.split('\n'))),
requires=REQUIRES
)
| mit | -5,003,442,439,908,984,000 | 27.76 | 66 | 0.66129 | false |
gwax/nikola | nikola/plugins/task/sections.py | 1 | 7188 | # -*- coding: utf-8 -*-
# Copyright © 2012-2017 Roberto Alsina and others.
# Permission is hereby granted, free of charge, to any
# person obtaining a copy of this software and associated
# documentation files (the "Software"), to deal in the
# Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the
# Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice
# shall be included in all copies or substantial portions of
# the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY
# KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE
# WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR
# PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS
# OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
# OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
# OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
"""Render the blog indexes."""
from nikola.plugin_categories import Taxonomy
from nikola import utils
class ClassifySections(Taxonomy):
"""Classify the posts by sections."""
name = "classify_sections"
classification_name = "section_index"
overview_page_variable_name = "sections"
more_than_one_classifications_per_post = False
has_hierarchy = False
generate_atom_feeds_for_post_lists = False
template_for_classification_overview = None
apply_to_posts = True
apply_to_pages = False
omit_empty_classifications = True
also_create_classifications_from_other_languages = False
add_other_languages_variable = True
path_handler_docstrings = {
'section_index_index': False,
'section_index': """Link to the index for a section.
Example:
link://section_index/cars => /cars/index.html""",
'section_index_atom': """Link to the Atom index for a section.
Example:
link://section_index_atom/cars => /cars/index.atom""",
'section_index_rss': """Link to the RSS feed for a section.
Example:
link://section_index_rss/cars => /cars/rss.xml""",
}
def set_site(self, site):
"""Set Nikola site."""
super(ClassifySections, self).set_site(site)
self.show_list_as_index = site.config["POSTS_SECTIONS_ARE_INDEXES"]
self.template_for_single_list = "sectionindex.tmpl" if self.show_list_as_index else "list.tmpl"
self.enable_for_lang = {}
self.translation_manager = utils.ClassificationTranslationManager()
def is_enabled(self, lang=None):
"""Return True if this taxonomy is enabled, or False otherwise."""
if not self.site.config['POSTS_SECTIONS']:
return False
if lang is not None:
return self.enable_for_lang.get(lang, False)
return True
def classify(self, post, lang):
"""Classify the given post for the given language."""
return [post.section_slug(lang)]
def _get_section_name(self, section, lang):
# Check whether we have a name for this section
if section in self.site.config['POSTS_SECTION_NAME'](lang):
return self.site.config['POSTS_SECTION_NAME'](lang)[section]
else:
return section.replace('-', ' ').title()
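# A minimal sketch of the fallback branch above (hypothetical slug, not
# taken from any real site config):
# >>> 'how-to-guides'.replace('-', ' ').title()
# 'How To Guides'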
def get_classification_friendly_name(self, section, lang, only_last_component=False):
"""Extract a friendly name from the classification."""
return self._get_section_name(section, lang)
def get_path(self, section, lang, dest_type='page'):
"""Return a path for the given classification."""
result = [_f for _f in [self.site.config['SECTION_PATH'](lang), section] if _f]
if dest_type == 'rss':
return result + ['rss.xml'], 'never'
return result, 'always'
def provide_context_and_uptodate(self, classification, lang, node=None):
"""Provide data for the context and the uptodate list for the list of the given classifiation."""
kw = {
"messages": self.site.MESSAGES,
}
section_name = self._get_section_name(classification, lang)
# Compose section title
section_title = section_name
posts_section_title = self.site.config['POSTS_SECTION_TITLE'](lang)
if isinstance(posts_section_title, dict):
if classification in posts_section_title:
section_title = posts_section_title[classification]
elif isinstance(posts_section_title, (utils.bytes_str, utils.unicode_str)):
section_title = posts_section_title
section_title = section_title.format(name=section_name)
# Compose context
context = {
"title": section_title,
"description": self.site.config['POSTS_SECTION_DESCRIPTIONS'](lang)[classification] if classification in self.site.config['POSTS_SECTION_DESCRIPTIONS'](lang) else "",
"pagekind": ["section_page", "index" if self.show_list_as_index else "list"],
"section": classification,
}
kw.update(context)
return context, kw
def postprocess_posts_per_classification(self, posts_per_classification_per_language, flat_hierarchy_per_lang=None, hierarchy_lookup_per_lang=None):
"""Rearrange, modify or otherwise use the list of posts per classification and per language."""
for lang, posts_per_section in posts_per_classification_per_language.items():
# Don't build sections when there is only one, a.k.a. default setups
sections = set()
for section, posts in posts_per_section.items():
for post in posts:
if not self.site.config["SHOW_UNTRANSLATED_POSTS"] and not post.is_translation_available(lang):
continue
sections.add(section)
self.enable_for_lang[lang] = (len(sections) > 1)
self.translation_manager.read_from_config(self.site, 'POSTS_SECTION', posts_per_classification_per_language, False)
def should_generate_classification_page(self, classification, post_list, lang):
"""Only generates list of posts for classification if this function returns True."""
short_destination = classification + '/' + self.site.config['INDEX_FILE']
# If there is an index.html pending to be created from a page, do not generate the section page.
# The section page would be useless anyways. (via Issue #2613)
for post in self.site.timeline:
if not self.site.config["SHOW_UNTRANSLATED_POSTS"] and not post.is_translation_available(lang):
continue
if post.destination_path(lang, sep='/') == short_destination:
return False
return True
def get_other_language_variants(self, classification, lang, classifications_per_language):
"""Return a list of variants of the same section in other languages."""
return self.translation_manager.get_translations_as_list(classification, lang, classifications_per_language)
| mit | -7,825,555,232,465,642,000 | 44.201258 | 178 | 0.669542 | false |
vnleonenko/Influenza | experiments/draw_speedup.py | 1 | 2138 | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Auxiliary script for the DGTS conference paper.
Draws the speedup graph.
"""
import json
import time
import matplotlib
import matplotlib.pyplot as plt
from common import RESULTS_PATH
SPEEDUP_FILE = RESULTS_PATH + '/dgts/speedup.json'
OUTPUT_FILE = RESULTS_PATH + '/dgts/speedup.pdf'
def main():
data = {}
with open(SPEEDUP_FILE) as f:
data = json.load(f)
speedups = dict()
for size, measurements in data.items():
if int(size) == 1:
continue # pass trivial case
one_process = float(measurements["1"])
for process_count, seconds in measurements.items():
if int(process_count) == 1:
continue # speedup for 1 process === 1.0
try:
speedups[int(process_count)][int(size)] = one_process / float(seconds)
except KeyError:
speedups[int(process_count)] = {int(size): one_process / float(seconds)}
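# Illustrative numbers (not from the measured data): if one process took
# 120 s and four processes took 40 s for the same size, the stored
# speedup for 4 processes is 120 / 40 = 3.0.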
fig = plt.figure(figsize=(10, 6))
matplotlib.rcParams.update({'font.size': 20})
ax = fig.add_subplot(111)
sizes = next(iter(speedups.values())).keys()
x_axis = [i for i in range(min(sizes), max(sizes) + 1)]
colors = {'c', 'm', 'y', 'k'}
opt_color = {
2: 'b', 4: 'r', 8: 'g',
}
for process_count, measurements in speedups.items():
speedup_list = [measurements[key] for key in sorted(measurements.keys())]
if process_count not in opt_color:
opt_color[process_count] = colors.pop()
plt.plot(x_axis, speedup_list, opt_color[process_count] + "o-",
label='%d processes speedup' % (process_count),
linewidth=2.0)
plt.xlabel('Time periods')
plt.ylabel('Speedup')
plt.legend(loc='lower right', numpoints=1,
prop={'size': 16}, fancybox=True, shadow=True)
plt.grid()
plt.savefig(OUTPUT_FILE, dpi=450, format='pdf', bbox_inches='tight')
plt.show()
if __name__ == '__main__':
t0 = time.time()
main()
print('Total elapsed: %d seconds' % (time.time() - t0))
| gpl-3.0 | 9,161,574,696,086,953,000 | 27.506667 | 88 | 0.581384 | false |
fangxingli/hue | desktop/libs/notebook/src/notebook/connectors/hiveserver2.py | 1 | 19836 | #!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import copy
import logging
import re
import StringIO
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext as _
from desktop.conf import USE_DEFAULT_CONFIGURATION
from desktop.lib.conf import BoundConfig
from desktop.lib.exceptions import StructuredException
from desktop.lib.exceptions_renderable import PopupException
from desktop.lib.i18n import force_unicode
from desktop.models import DefaultConfiguration
from notebook.connectors.base import Api, QueryError, QueryExpired, OperationTimeout
LOG = logging.getLogger(__name__)
try:
from beeswax import data_export
from beeswax.api import _autocomplete, _get_sample_data
from beeswax.conf import CONFIG_WHITELIST as hive_settings
from beeswax.data_export import upload
from beeswax.design import hql_query, strip_trailing_semicolon, split_statements
from beeswax import conf as beeswax_conf
from beeswax.models import QUERY_TYPES, HiveServerQueryHandle, HiveServerQueryHistory, QueryHistory, Session
from beeswax.server import dbms
from beeswax.server.dbms import get_query_server_config, QueryServerException
from beeswax.views import _parse_out_hadoop_jobs
except ImportError, e:
LOG.warn('Hive and HiveServer2 interfaces are not enabled')
hive_settings = None
try:
from impala import api # Force checking if Impala is enabled
from impala.conf import CONFIG_WHITELIST as impala_settings
except ImportError, e:
LOG.warn("Impala app is not enabled")
impala_settings = None
DEFAULT_HIVE_ENGINE = 'mr'
def query_error_handler(func):
def decorator(*args, **kwargs):
try:
return func(*args, **kwargs)
except StructuredException, e:
message = force_unicode(str(e))
if 'timed out' in message:
raise OperationTimeout(e)
else:
raise QueryError(message)
except QueryServerException, e:
message = force_unicode(str(e))
if 'Invalid query handle' in message or 'Invalid OperationHandle' in message:
raise QueryExpired(e)
else:
raise QueryError(message)
return decorator
def is_hive_enabled():
return hive_settings is not None and type(hive_settings) == BoundConfig
def is_impala_enabled():
return impala_settings is not None and type(impala_settings) == BoundConfig
class HiveConfiguration(object):
APP_NAME = 'hive'
PROPERTIES = [
{
"multiple": True,
"defaultValue": [],
"value": [],
"nice_name": _("Files"),
"key": "files",
"help_text": _("Add one or more files, jars, or archives to the list of resources."),
"type": "hdfs-files"
}, {
"multiple": True,
"defaultValue": [],
"value": [],
"nice_name": _("Functions"),
"key": "functions",
"help_text": _("Add one or more registered UDFs (requires function name and fully-qualified class name)."),
"type": "functions"
}, {
"multiple": True,
"defaultValue": [],
"value": [],
"nice_name": _("Settings"),
"key": "settings",
"help_text": _("Hive and Hadoop configuration properties."),
"type": "settings",
"options": [config.lower() for config in hive_settings.get()] if is_hive_enabled() and hasattr(hive_settings, 'get') else []
}
]
class ImpalaConfiguration(object):
APP_NAME = 'impala'
PROPERTIES = [
{
"multiple": True,
"defaultValue": [],
"value": [],
"nice_name": _("Settings"),
"key": "settings",
"help_text": _("Impala configuration properties."),
"type": "settings",
"options": [config.lower() for config in impala_settings.get()] if is_impala_enabled() else []
}
]
class HS2Api(Api):
@staticmethod
def get_properties(lang='hive'):
return ImpalaConfiguration.PROPERTIES if lang == 'impala' else HiveConfiguration.PROPERTIES
@query_error_handler
def create_session(self, lang='hive', properties=None):
application = 'beeswax' if lang == 'hive' else lang
session = Session.objects.get_session(self.user, application=application)
if session is None:
session = dbms.get(self.user, query_server=get_query_server_config(name=lang)).open_session(self.user)
response = {
'type': lang,
'id': session.id
}
if not properties:
config = None
if USE_DEFAULT_CONFIGURATION.get():
config = DefaultConfiguration.objects.get_configuration_for_user(app=lang, user=self.user)
if config is not None:
properties = config.properties_list
else:
properties = self.get_properties(lang)
response['properties'] = properties
if lang == 'impala':
impala_settings = session.get_formatted_properties()
http_addr = next((setting['value'] for setting in impala_settings if setting['key'].lower() == 'http_addr'), None)
response['http_addr'] = http_addr
return response
@query_error_handler
def close_session(self, session):
app_name = session.get('type')
session_id = session.get('id')
query_server = get_query_server_config(name=app_name)
response = {'status': -1, 'message': ''}
try:
filters = {'id': session_id, 'application': query_server['server_name']}
if not self.user.is_superuser:
filters['owner'] = self.user
session = Session.objects.get(**filters)
except Session.DoesNotExist:
response['message'] = _('Session does not exist or you do not have permissions to close the session.')
if session:
session = dbms.get(self.user, query_server).close_session(session)
response['status'] = 0
response['message'] = _('Session successfully closed.')
response['session'] = {'id': session_id, 'application': session.application, 'status': session.status_code}
return response
@query_error_handler
def execute(self, notebook, snippet):
db = self._get_db(snippet)
statement = self._get_current_statement(db, snippet)
session = self._get_session(notebook, snippet['type'])
query = self._prepare_hql_query(snippet, statement['statement'], session)
try:
db.use(query.database)
handle = db.client.query(query)
except QueryServerException, ex:
raise QueryError(ex.message, handle=statement)
# All good
server_id, server_guid = handle.get()
response = {
'secret': server_id,
'guid': server_guid,
'operation_type': handle.operation_type,
'has_result_set': handle.has_result_set,
'modified_row_count': handle.modified_row_count,
'log_context': handle.log_context,
}
response.update(statement)
return response
@query_error_handler
def check_status(self, notebook, snippet):
response = {}
db = self._get_db(snippet)
handle = self._get_handle(snippet)
operation = db.get_operation_status(handle)
status = HiveServerQueryHistory.STATE_MAP[operation.operationState]
if status.index in (QueryHistory.STATE.failed.index, QueryHistory.STATE.expired.index):
if operation.errorMessage and 'transition from CANCELED to ERROR' in operation.errorMessage: # Hive case on canceled query
raise QueryExpired()
else:
raise QueryError(operation.errorMessage)
response['status'] = 'running' if status.index in (QueryHistory.STATE.running.index, QueryHistory.STATE.submitted.index) else 'available'
return response
@query_error_handler
def fetch_result(self, notebook, snippet, rows, start_over):
db = self._get_db(snippet)
handle = self._get_handle(snippet)
results = db.fetch(handle, start_over=start_over, rows=rows)
# No escaping...
return {
'has_more': results.has_more,
'data': results.rows(),
'meta': [{
'name': column.name,
'type': column.type,
'comment': column.comment
} for column in results.data_table.cols()],
'type': 'table'
}
@query_error_handler
def fetch_result_metadata(self):
pass
@query_error_handler
def cancel(self, notebook, snippet):
db = self._get_db(snippet)
handle = self._get_handle(snippet)
db.cancel_operation(handle)
return {'status': 0}
@query_error_handler
def get_log(self, notebook, snippet, startFrom=None, size=None):
db = self._get_db(snippet)
handle = self._get_handle(snippet)
return db.get_log(handle, start_over=startFrom == 0)
@query_error_handler
def close_statement(self, snippet):
if snippet['type'] == 'impala':
from impala import conf as impala_conf
if (snippet['type'] == 'hive' and beeswax_conf.CLOSE_QUERIES.get()) or (snippet['type'] == 'impala' and impala_conf.CLOSE_QUERIES.get()):
db = self._get_db(snippet)
handle = self._get_handle(snippet)
db.close_operation(handle)
return {'status': 0}
else:
return {'status': -1} # skipped
@query_error_handler
def download(self, notebook, snippet, format):
try:
db = self._get_db(snippet)
handle = self._get_handle(snippet)
# Test handle to verify if still valid
db.fetch(handle, start_over=True, rows=1)
return data_export.download(handle, format, db, id=snippet['id'])
except Exception, e:
title = 'The query result cannot be downloaded.'
LOG.exception(title)
if hasattr(e, 'message') and e.message:
message = e.message
else:
message = e
raise PopupException(_(title), detail=message)
@query_error_handler
def progress(self, snippet, logs):
if snippet['type'] == 'hive':
match = re.search('Total jobs = (\d+)', logs, re.MULTILINE)
total = int(match.group(1)) if match else 1
started = logs.count('Starting Job')
ended = logs.count('Ended Job')
progress = int((started + ended) * 100 / (total * 2))
return max(progress, 5) # Return 5% progress as a minimum
elif snippet['type'] == 'impala':
match = re.findall('(\d+)% Complete', logs, re.MULTILINE)
# Retrieve the last reported progress percentage if it exists
return int(match[-1]) if match and isinstance(match, list) else 0
else:
return 50
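# Worked example of the Hive heuristic above (hypothetical log contents):
# with 'Total jobs = 2', two 'Starting Job' lines and one 'Ended Job'
# line, progress = int((2 + 1) * 100 / (2 * 2)) = 75.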
@query_error_handler
def get_jobs(self, notebook, snippet, logs):
jobs = []
if snippet['type'] == 'hive':
engine = self._get_hive_execution_engine(notebook, snippet)
jobs_with_state = _parse_out_hadoop_jobs(logs, engine=engine, with_state=True)
jobs = [{
'name': job.get('job_id', ''),
'url': reverse('jobbrowser.views.single_job', kwargs={'job': job.get('job_id', '')}),
'started': job.get('started', False),
'finished': job.get('finished', False)
} for job in jobs_with_state]
return jobs
@query_error_handler
def autocomplete(self, snippet, database=None, table=None, column=None, nested=None):
db = self._get_db(snippet)
return _autocomplete(db, database, table, column, nested)
@query_error_handler
def get_sample_data(self, snippet, database=None, table=None, column=None):
db = self._get_db(snippet)
return _get_sample_data(db, database, table, column)
@query_error_handler
def explain(self, notebook, snippet):
db = self._get_db(snippet)
response = self._get_current_statement(db, snippet)
session = self._get_session(notebook, snippet['type'])
query = self._prepare_hql_query(snippet, response.pop('statement'), session)
try:
explanation = db.explain(query)
except QueryServerException, ex:
raise QueryError(ex.message)
return {
'status': 0,
'explanation': explanation.textual,
'statement': query.get_query_statement(0),
}
@query_error_handler
def export_data_as_hdfs_file(self, snippet, target_file, overwrite):
db = self._get_db(snippet)
handle = self._get_handle(snippet)
upload(target_file, handle, self.request.user, db, self.request.fs)
return '/filebrowser/view=%s' % target_file
def export_data_as_table(self, notebook, snippet, destination):
db = self._get_db(snippet)
response = self._get_current_statement(db, snippet)
session = self._get_session(notebook, snippet['type'])
query = self._prepare_hql_query(snippet, response.pop('statement'), session)
if 'select' not in query.hql_query.strip().lower():
raise Exception(_('Only SELECT statements can be saved. Provided statement: %(query)s') % {'query': query.hql_query})
database = snippet.get('database') or 'default'
table = destination
if '.' in table:
database, table = table.split('.', 1)
db.use(query.database)
hql = 'CREATE TABLE `%s`.`%s` AS %s' % (database, table, query.hql_query)
success_url = reverse('metastore:describe_table', kwargs={'database': database, 'table': table})
return hql, success_url
def export_large_data_to_hdfs(self, notebook, snippet, destination):
db = self._get_db(snippet)
response = self._get_current_statement(db, snippet)
session = self._get_session(notebook, snippet['type'])
query = self._prepare_hql_query(snippet, response.pop('statement'), session)
if 'select' not in query.hql_query.strip().lower():
raise Exception(_('Only SELECT statements can be saved. Provided statement: %(query)s') % {'query': query.hql_query})
db.use(query.database)
hql = "INSERT OVERWRITE DIRECTORY '%s' %s" % (destination, query.hql_query)
success_url = '/filebrowser/view=%s' % destination
return hql, success_url
def upgrade_properties(self, lang='hive', properties=None):
upgraded_properties = copy.deepcopy(self.get_properties(lang))
# Check that the current properties are a list of dictionaries with 'key' and 'value' keys
if not isinstance(properties, list) or \
not all(isinstance(prop, dict) for prop in properties) or \
not all('key' in prop for prop in properties) or not all('value' in prop for prop in properties):
LOG.warn('Current properties are not formatted correctly, will replace with defaults.')
return upgraded_properties
valid_props_dict = dict((prop["key"], prop) for prop in upgraded_properties)
curr_props_dict = dict((prop['key'], prop) for prop in properties)
# Upgrade based on valid properties as needed
if set(valid_props_dict.keys()) != set(curr_props_dict.keys()):
settings = next((prop for prop in upgraded_properties if prop['key'] == 'settings'), None)
if settings is not None and isinstance(properties, list):
settings['value'] = properties
else: # No upgrade needed so return existing properties
upgraded_properties = properties
return upgraded_properties
def _get_session(self, notebook, type='hive'):
session = next((session for session in notebook['sessions'] if session['type'] == type), None)
return session
def _get_hive_execution_engine(self, notebook, snippet):
# Get hive.execution.engine from the snippet properties; if absent, fall back to the session
properties = snippet['properties']
settings = properties.get('settings', [])
if not settings:
session = self._get_session(notebook, 'hive')
if not session:
LOG.warn('Cannot get jobs, failed to find active HS2 session for user: %s' % self.user.username)
else:
properties = session['properties']
settings = next((prop['value'] for prop in properties if prop['key'] == 'settings'), None)
if settings:
engine = next((setting['value'] for setting in settings if setting['key'] == 'hive.execution.engine'), DEFAULT_HIVE_ENGINE)
else:
engine = DEFAULT_HIVE_ENGINE
return engine
def _get_statements(self, hql_query):
hql_query = strip_trailing_semicolon(hql_query)
hql_query_sio = StringIO.StringIO(hql_query)
statements = []
for (start_row, start_col), (end_row, end_col), statement in split_statements(hql_query_sio.read()):
statements.append({
'start': {
'row': start_row,
'column': start_col
},
'end': {
'row': end_row,
'column': end_col
},
'statement': strip_trailing_semicolon(statement.strip())
})
return statements
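# Sketch of the returned shape for a two-statement snippet (the exact
# row/column values come from split_statements; these are illustrative):
# _get_statements('SELECT 1;\nSELECT 2') ->
# [{'start': {'row': 0, 'column': 0}, 'end': {'row': 0, 'column': 8},
# 'statement': 'SELECT 1'},
# {'start': {'row': 1, 'column': 0}, 'end': {'row': 1, 'column': 8},
# 'statement': 'SELECT 2'}]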
def _get_current_statement(self, db, snippet):
# Multi-statement query: advance to the next statement, or reset once the last one has run
statement_id = snippet['result']['handle'].get('statement_id', 0)
statements_count = snippet['result']['handle'].get('statements_count', 1)
if snippet['result']['handle'].get('has_more_statements'):
try:
handle = self._get_handle(snippet)
db.close_operation(handle) # Close all the time past multi queries
except:
LOG.warn('Could not close previous multiquery query')
statement_id += 1
else:
statement_id = 0
statements = self._get_statements(snippet['statement'])
resp = {
'statement_id': statement_id,
'has_more_statements': statement_id < len(statements) - 1,
'statements_count': len(statements)
}
if statements_count != len(statements):
statement_id = 0
resp.update(statements[statement_id])
return resp
def _prepare_hql_query(self, snippet, statement, session):
settings = snippet['properties'].get('settings', None)
file_resources = snippet['properties'].get('files', None)
functions = snippet['properties'].get('functions', None)
properties = session['properties'] if session else []
# Get properties from session if not defined in snippet
if not settings:
settings = next((prop['value'] for prop in properties if prop['key'] == 'settings'), None)
if not file_resources:
file_resources = next((prop['value'] for prop in properties if prop['key'] == 'files'), None)
if not functions:
functions = next((prop['value'] for prop in properties if prop['key'] == 'functions'), None)
database = snippet.get('database') or 'default'
return hql_query(
statement,
query_type=QUERY_TYPES[0],
settings=settings,
file_resources=file_resources,
functions=functions,
database=database
)
def get_select_star_query(self, snippet, database, table):
db = self._get_db(snippet)
table = db.get_table(database, table)
return db.get_select_star_query(database, table)
def _get_handle(self, snippet):
snippet['result']['handle']['secret'], snippet['result']['handle']['guid'] = HiveServerQueryHandle.get_decoded(snippet['result']['handle']['secret'], snippet['result']['handle']['guid'])
for key in snippet['result']['handle'].keys():
if key not in ('log_context', 'secret', 'has_result_set', 'operation_type', 'modified_row_count', 'guid'):
snippet['result']['handle'].pop(key)
return HiveServerQueryHandle(**snippet['result']['handle'])
def _get_db(self, snippet):
if snippet['type'] == 'hive':
name = 'beeswax'
elif snippet['type'] == 'impala':
name = 'impala'
else:
name = 'spark-sql'
return dbms.get(self.user, query_server=get_query_server_config(name=name))
| apache-2.0 | -1,149,335,945,032,415,200 | 31.518033 | 190 | 0.661877 | false |
LaurensScheldeman/TwitchBot | src/lib/gui.py | 1 | 2462 | import string
from datetime import datetime
import Tkinter as tk
import ttk
import tkFont
import webbrowser
import socket
import src.lib.fileHandler as fileHandler
import src.lib.twitchHandler as twitchHandler
from src.lib.irc import irc as irc_
from src.lib.variables import global_variables
from src.lib.gui_botsettings import Botsettings
from src.lib.gui_commands import Commands
class GUI():
def __init__(self):
# GUI
self.__ROOT = tk.Tk()
self.__ROOT.withdraw() # Makes gui invisible
# Loading window
loading = tk.Tk()
loading.wm_title(' ')
loading.iconbitmap('src/images/santaBot_icon.ico')
tk.Label(loading, text='Loading SantaBot...', padx=20, pady=10).grid(row=1,column=0)
loading.update()
self.__ROOT.wm_title('SantaBot v0.2.0')
self.__ROOT.iconbitmap('src/images/santaBot_icon.ico')
self.__active = True
self.__ROOT.protocol("WM_DELETE_WINDOW", self.__quit)
self.__notebook = ttk.Notebook(self.__ROOT, width=1120, height=690)
# Tab1: Botsettings
self.__botsettings = Botsettings(self.__notebook)
self.__config = fileHandler.read_json('data/SantaBot/config.json')
# Tab2: commands
self.__commands = Commands(self.__notebook)
self.__notebook.grid(row=1, column=0, columnspan=10, sticky='wen', padx=15, pady=15)
# Buttons
button_frame = tk.Frame(self.__ROOT)
button_frame.grid(row=2, column = 9)
tk.Button(button_frame, text='Save changes', command=self.__save, width=13).grid(row=0, column=0, padx=5, pady=(0,20))
tk.Button(button_frame, text='Quit', command=self.__quit, width=13).grid(row=0, column=1, padx=5, pady=(0,20))
# Save initial state
self.__save()
self.__ROOT.deiconify() # Makes gui visible
loading.destroy() # Delete loading window
def update(self):
self.__ROOT.update() # Update the GUI itself
def check_active(self):
return self.__active
def add_chatmessage(self, user, message):
self.__botsettings.add_chatmessage(user, message)
def get_irc_connection_status(self):
return self.__botsettings.irc_connection
def __save(self):
# config.json
self.__botsettings.save()
# config_commands.json
self.__commands.save()
def __quit(self):
self.__active = False
self.__ROOT.destroy()
| gpl-3.0 | 2,424,998,428,401,631,000 | 28.662651 | 126 | 0.632006 | false |
aelarabawy/hostap | tests/hwsim/test_ap_ciphers.py | 1 | 6880 | # Cipher suite tests
# Copyright (c) 2013, Jouni Malinen <[email protected]>
#
# This software may be distributed under the terms of the BSD license.
# See README for more details.
import time
import subprocess
import logging
logger = logging.getLogger()
import os.path
import hwsim_utils
import hostapd
def check_cipher(dev, ap, cipher):
if cipher not in dev.get_capability("pairwise"):
return "skip"
params = { "ssid": "test-wpa2-psk",
"wpa_passphrase": "12345678",
"wpa": "2",
"wpa_key_mgmt": "WPA-PSK",
"rsn_pairwise": cipher }
hapd = hostapd.add_ap(ap['ifname'], params)
dev.connect("test-wpa2-psk", psk="12345678",
pairwise=cipher, group=cipher, scan_freq="2412")
hwsim_utils.test_connectivity(dev, hapd)
def test_ap_cipher_tkip(dev, apdev):
"""WPA2-PSK/TKIP connection"""
return check_cipher(dev[0], apdev[0], "TKIP")
def test_ap_cipher_tkip_countermeasures_ap(dev, apdev):
"""WPA-PSK/TKIP countermeasures (detected by AP)"""
testfile = "/sys/kernel/debug/ieee80211/%s/netdev:%s/tkip_mic_test" % (dev[0].get_driver_status_field("phyname"), dev[0].ifname)
if not os.path.exists(testfile):
return "skip"
params = { "ssid": "tkip-countermeasures",
"wpa_passphrase": "12345678",
"wpa": "1",
"wpa_key_mgmt": "WPA-PSK",
"wpa_pairwise": "TKIP" }
hapd = hostapd.add_ap(apdev[0]['ifname'], params)
dev[0].connect("tkip-countermeasures", psk="12345678",
pairwise="TKIP", group="TKIP", scan_freq="2412")
dev[0].dump_monitor()
cmd = subprocess.Popen(["sudo", "tee", testfile],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
cmd.stdin.write(apdev[0]['bssid'])
cmd.stdin.close()
cmd.stdout.read()
cmd.stdout.close()
ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=1)
if ev is not None:
raise Exception("Unexpected disconnection on first Michael MIC failure")
cmd = subprocess.Popen(["sudo", "tee", testfile],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
cmd.stdin.write("ff:ff:ff:ff:ff:ff")
cmd.stdin.close()
cmd.stdout.read()
cmd.stdout.close()
ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=10)
if ev is None:
raise Exception("No disconnection after two Michael MIC failures")
if "reason=14" not in ev:
raise Exception("Unexpected disconnection reason: " + ev)
ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=1)
if ev is not None:
raise Exception("Unexpected connection during TKIP countermeasures")
def test_ap_cipher_tkip_countermeasures_sta(dev, apdev):
"""WPA-PSK/TKIP countermeasures (detected by STA)"""
params = { "ssid": "tkip-countermeasures",
"wpa_passphrase": "12345678",
"wpa": "1",
"wpa_key_mgmt": "WPA-PSK",
"wpa_pairwise": "TKIP" }
hapd = hostapd.add_ap(apdev[0]['ifname'], params)
testfile = "/sys/kernel/debug/ieee80211/%s/netdev:%s/tkip_mic_test" % (hapd.get_driver_status_field("phyname"), apdev[0]['ifname'])
if not os.path.exists(testfile):
return "skip"
dev[0].connect("tkip-countermeasures", psk="12345678",
pairwise="TKIP", group="TKIP", scan_freq="2412")
dev[0].dump_monitor()
cmd = subprocess.Popen(["sudo", "tee", testfile],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
cmd.stdin.write(dev[0].p2p_dev_addr())
cmd.stdin.close()
cmd.stdout.read()
cmd.stdout.close()
ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=1)
if ev is not None:
raise Exception("Unexpected disconnection on first Michael MIC failure")
cmd = subprocess.Popen(["sudo", "tee", testfile],
stdin=subprocess.PIPE, stdout=subprocess.PIPE)
cmd.stdin.write("ff:ff:ff:ff:ff:ff")
cmd.stdin.close()
cmd.stdout.read()
cmd.stdout.close()
ev = dev[0].wait_event(["CTRL-EVENT-DISCONNECTED"], timeout=10)
if ev is None:
raise Exception("No disconnection after two Michael MIC failures")
if "reason=14 locally_generated=1" not in ev:
raise Exception("Unexpected disconnection reason: " + ev)
ev = dev[0].wait_event(["CTRL-EVENT-CONNECTED"], timeout=1)
if ev is not None:
raise Exception("Unexpected connection during TKIP countermeasures")
def test_ap_cipher_ccmp(dev, apdev):
"""WPA2-PSK/CCMP connection"""
return check_cipher(dev[0], apdev[0], "CCMP")
def test_ap_cipher_gcmp(dev, apdev):
"""WPA2-PSK/GCMP connection"""
return check_cipher(dev[0], apdev[0], "GCMP")
def test_ap_cipher_ccmp_256(dev, apdev):
"""WPA2-PSK/CCMP-256 connection"""
return check_cipher(dev[0], apdev[0], "CCMP-256")
def test_ap_cipher_gcmp_256(dev, apdev):
"""WPA2-PSK/GCMP-256 connection"""
return check_cipher(dev[0], apdev[0], "GCMP-256")
def test_ap_cipher_mixed_wpa_wpa2(dev, apdev):
"""WPA2-PSK/CCMP/ and WPA-PSK/TKIP mixed configuration"""
ssid = "test-wpa-wpa2-psk"
passphrase = "12345678"
params = { "ssid": ssid,
"wpa_passphrase": passphrase,
"wpa": "3",
"wpa_key_mgmt": "WPA-PSK",
"rsn_pairwise": "CCMP",
"wpa_pairwise": "TKIP" }
hapd = hostapd.add_ap(apdev[0]['ifname'], params)
dev[0].connect(ssid, psk=passphrase, proto="WPA2",
pairwise="CCMP", group="TKIP", scan_freq="2412")
status = dev[0].get_status()
if status['key_mgmt'] != 'WPA2-PSK':
raise Exception("Incorrect key_mgmt reported")
if status['pairwise_cipher'] != 'CCMP':
raise Exception("Incorrect pairwise_cipher reported")
if status['group_cipher'] != 'TKIP':
raise Exception("Incorrect group_cipher reported")
bss = dev[0].get_bss(apdev[0]['bssid'])
if bss['ssid'] != ssid:
raise Exception("Unexpected SSID in the BSS entry")
if "[WPA-PSK-TKIP]" not in bss['flags']:
raise Exception("Missing BSS flag WPA-PSK-TKIP")
if "[WPA2-PSK-CCMP]" not in bss['flags']:
raise Exception("Missing BSS flag WPA2-PSK-CCMP")
hwsim_utils.test_connectivity(dev[0], hapd)
dev[1].connect(ssid, psk=passphrase, proto="WPA",
pairwise="TKIP", group="TKIP", scan_freq="2412")
status = dev[1].get_status()
if status['key_mgmt'] != 'WPA-PSK':
raise Exception("Incorrect key_mgmt reported")
if status['pairwise_cipher'] != 'TKIP':
raise Exception("Incorrect pairwise_cipher reported")
if status['group_cipher'] != 'TKIP':
raise Exception("Incorrect group_cipher reported")
hwsim_utils.test_connectivity(dev[1], hapd)
hwsim_utils.test_connectivity(dev[0], dev[1])
| gpl-2.0 | 2,582,933,525,782,652,000 | 39 | 135 | 0.616279 | false |
janist7/udacity-movie-site-development | python/templates/template_main.py | 1 | 2546 | """Contains parts of the main template - head, nav and footer"""
# Uses code from this repository:
#https://github.com/adarsh0806/ud036_StarterCode/blob/master/fresh_tomatoes.py
def get_template():
'''Contains main template'''
# Styles and scripting for the page
main_page_head = '''
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<title>Fresh Tomatoes!</title>
<meta name="viewport" content="width=device-width, initial-scale=1">
<!-- Bootstrap 3 -->
<link rel="stylesheet" href="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap.min.css">
<link rel="stylesheet" href="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/css/bootstrap-theme.min.css">
<script src="https://code.jquery.com/jquery-1.10.1.min.js"></script>
<script src="https://netdna.bootstrapcdn.com/bootstrap/3.1.0/js/bootstrap.min.js"></script>
<!-- Roboto -->
<link href="https://fonts.googleapis.com/css?family=Roboto:100,100i,300,300i,400,400i,500,500i,700,700i,900,900i" rel="stylesheet">
<!-- Css/Js -->
<link rel="stylesheet" href="css/main.css">
<script src="js/main.js"></script>
</head>
'''
# Page navigation
main_page_navigation = '''
<nav class="navbar navbar-inverse navbar-fixed-top" role="navigation">
<div class="container-fluid">
<div class="navbar-header">
<button type="button" class="navbar-toggle" data-toggle="collapse" data-target="#myNavbar">
<span class="icon-bar"></span>
<span class="icon-bar"></span>
<span class="icon-bar"></span>
</button>
<a class="navbar-brand" href="index.html">Movie Trailers</a>
</div>
<div class="collapse navbar-collapse" id="myNavbar">
<ul class="nav navbar-nav">
<li><a class="navbar-nav" href="index.html">Main</a></li>
<li><a class="navbar-nav" href="fresh_tomatoes_upload.html">Upload Movie</a></li>
</ul>
</div>
</div>
</nav>
'''
# Page footer
main_page_footer = '''
<footer class="container-fluid bg-4 text-center">
<p>© Copyright: Janis Tidrikis</p>
</footer>
'''
main_template_subparts = {
"main_page_head":main_page_head,
"main_page_navigation":main_page_navigation,
"main_page_footer":main_page_footer
}
return main_template_subparts
| gpl-3.0 | -7,364,092,432,295,013,000 | 41.433333 | 139 | 0.584446 | false |
Yhgenomics/dcos-cli | dcos/package.py | 1 | 45936 | import abc
import base64
import collections
import copy
import hashlib
import json
import os
import re
import shutil
import stat
import subprocess
import zipfile
from distutils.version import LooseVersion
import git
import portalocker
import pystache
import six
from dcos import (constants, emitting, errors, http, marathon, mesos,
subcommand, util)
from dcos.errors import DCOSException
from six.moves import urllib
logger = util.get_logger(__name__)
emitter = emitting.FlatEmitter()
PACKAGE_METADATA_KEY = 'DCOS_PACKAGE_METADATA'
PACKAGE_NAME_KEY = 'DCOS_PACKAGE_NAME'
PACKAGE_VERSION_KEY = 'DCOS_PACKAGE_VERSION'
PACKAGE_SOURCE_KEY = 'DCOS_PACKAGE_SOURCE'
PACKAGE_FRAMEWORK_KEY = 'DCOS_PACKAGE_IS_FRAMEWORK'
PACKAGE_RELEASE_KEY = 'DCOS_PACKAGE_RELEASE'
PACKAGE_COMMAND_KEY = 'DCOS_PACKAGE_COMMAND'
PACKAGE_REGISTRY_VERSION_KEY = 'DCOS_PACKAGE_REGISTRY_VERSION'
PACKAGE_FRAMEWORK_NAME_KEY = 'DCOS_PACKAGE_FRAMEWORK_NAME'
def install_app(pkg, revision, init_client, options, app_id):
"""Installs a package's application
:param pkg: the package to install
:type pkg: Package
:param revision: the package revision to install
:type revision: str
:param init_client: the program to use to run the package
:type init_client: object
:param options: package parameters
:type options: dict
:param app_id: app ID for installation of this package
:type app_id: str
:rtype: None
"""
# Insert option parameters into the init template
init_desc = pkg.marathon_json(revision, options)
if app_id is not None:
logger.debug('Setting app ID to "%s" (was "%s")',
app_id,
init_desc['id'])
init_desc['id'] = app_id
# Send the descriptor to init
init_client.add_app(init_desc)
def _make_package_labels(pkg, revision, options):
"""Returns Marathon app labels for a package.
:param pkg: The package to install
:type pkg: Package
:param revision: The package revision to install
:type revision: str
:param options: package parameters
:type options: dict
:returns: Marathon app labels
:rtype: dict
"""
metadata = pkg.package_json(revision)
encoded_metadata = _base64_encode(metadata)
is_framework = metadata.get('framework')
if not is_framework:
is_framework = False
package_registry_version = pkg.registry.get_version()
package_labels = {
PACKAGE_METADATA_KEY: encoded_metadata,
PACKAGE_NAME_KEY: metadata['name'],
PACKAGE_VERSION_KEY: metadata['version'],
PACKAGE_SOURCE_KEY: pkg.registry.source.url,
PACKAGE_FRAMEWORK_KEY: json.dumps(is_framework),
PACKAGE_REGISTRY_VERSION_KEY: package_registry_version,
PACKAGE_RELEASE_KEY: revision
}
if pkg.has_command_definition(revision):
command = pkg.command_json(revision, options)
package_labels[PACKAGE_COMMAND_KEY] = _base64_encode(command)
# Run a heuristic that determines the hint for the framework name
framework_name = _find_framework_name(pkg.name(), options)
if framework_name:
package_labels[PACKAGE_FRAMEWORK_NAME_KEY] = framework_name
return package_labels
def _find_framework_name(package_name, options):
"""
:param package_name: the name of the package
:type package_name: str
:param options: the options object
:type options: dict
:returns: the name of framework if found; None otherwise
:rtype: str
"""
return options.get(package_name, {}).get('framework-name', None)
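# A doctest-style sketch (hypothetical options dict):
# >>> _find_framework_name('kafka', {'kafka': {'framework-name': 'kafka-dev'}})
# 'kafka-dev'
# With no matching entry the lookup falls back to None.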
def _base64_encode(dictionary):
"""Returns base64(json(dictionary)).
:param dictionary: dict to encode
:type dictionary: dict
:returns: base64 encoding
:rtype: str
"""
json_str = json.dumps(dictionary, sort_keys=True)
str_bytes = six.b(json_str)
return base64.b64encode(str_bytes).decode('utf-8')
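# A doctest-style sketch of the encoding (the value is the base64 of the
# sorted-key JSON b'{"a": 1}'):
# >>> _base64_encode({'a': 1})
# 'eyJhIjogMX0='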
def uninstall(package_name, remove_all, app_id, cli, app):
"""Uninstalls a package.
:param package_name: The package to uninstall
:type package_name: str
:param remove_all: Whether to remove all instances of the named app
:type remove_all: boolean
:param app_id: App ID of the app instance to uninstall
:type app_id: str
:param cli: Whether to remove the CLI subcommand
:type cli: boolean
:param app: Whether to remove the app
:type app: boolean
:rtype: None
"""
if cli is False and app is False:
cli = app = True
uninstalled = False
if cli:
if subcommand.uninstall(package_name):
uninstalled = True
if app:
num_apps = uninstall_app(
package_name,
remove_all,
app_id,
marathon.create_client(),
mesos.DCOSClient())
if num_apps > 0:
uninstalled = True
if uninstalled:
return None
else:
msg = 'Package [{}]'.format(package_name)
if app_id is not None:
msg += " with id [{}]".format(app_id)
msg += " is not installed."
raise DCOSException(msg)
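# Behaviour sketch (hypothetical invocation): calling
# uninstall('chronos', False, None, False, False) removes both the CLI
# subcommand and the app, because neither cli nor app was requested
# explicitly and the function then defaults to both.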
def uninstall_subcommand(distribution_name):
"""Uninstalls a subcommand.
:param distribution_name: the name of the package
:type distribution_name: str
:returns: True if the subcommand was uninstalled
:rtype: bool
"""
return subcommand.uninstall(distribution_name)
def uninstall_app(app_name, remove_all, app_id, init_client, dcos_client):
"""Uninstalls an app.
:param app_name: The app to uninstall
:type app_name: str
:param remove_all: Whether to remove all instances of the named app
:type remove_all: boolean
:param app_id: App ID of the app instance to uninstall
:type app_id: str
:param init_client: The program to use to run the app
:type init_client: object
:param dcos_client: the DCOS client
:type dcos_client: dcos.mesos.DCOSClient
:returns: number of apps uninstalled
:rtype: int
"""
apps = init_client.get_apps()
def is_match(app):
encoding = 'utf-8' # We normalize encoding for byte-wise comparison
name_label = app.get('labels', {}).get(PACKAGE_NAME_KEY, u'')
name_label_enc = name_label.encode(encoding)
app_name_enc = app_name.encode(encoding)
name_matches = name_label_enc == app_name_enc
if app_id is not None:
pkg_app_id = app.get('id', '')
normalized_app_id = init_client.normalize_app_id(app_id)
return name_matches and pkg_app_id == normalized_app_id
else:
return name_matches
matching_apps = [a for a in apps if is_match(a)]
if not remove_all and len(matching_apps) > 1:
app_ids = [a.get('id') for a in matching_apps]
raise DCOSException(
("Multiple apps named [{}] are installed: [{}].\n" +
"Please use --app-id to specify the ID of the app to uninstall," +
" or use --all to uninstall all apps.").format(
app_name,
', '.join(app_ids)))
for app in matching_apps:
# First, remove the app from Marathon
init_client.remove_app(app['id'], force=True)
# Second, shutdown the framework with Mesos
framework_name = app.get('labels', {}).get(PACKAGE_FRAMEWORK_NAME_KEY)
if framework_name is not None:
logger.info(
'Trying to shutdown framework {}'.format(framework_name))
frameworks = mesos.Master(dcos_client.get_master_state()) \
.frameworks(inactive=True)
# Look up all the framework names
framework_ids = [
framework['id']
for framework in frameworks
if framework['name'] == framework_name
]
logger.info(
'Found the following frameworks: {}'.format(framework_ids))
if len(framework_ids) == 1:
dcos_client.shutdown_framework(framework_ids[0])
elif len(framework_ids) > 1:
raise DCOSException(
"Unable to shutdown the framework for [{}] because there "
"are multiple frameworks with the same name: [{}]. "
"Manually shut them down using 'dcos service "
"shutdown'.".format(
framework_name,
', '.join(framework_ids)))
return len(matching_apps)
class InstalledPackage(object):
"""Represents an intalled DCOS package. One of `app` and
`subcommand` must be supplied.
:param apps: A dictionary representing a marathon app. Of the
format returned by `installed_apps()`
:type apps: [dict]
:param subcommand: Installed subcommand
:type subcommand: subcommand.InstalledSubcommand
"""
def __init__(self, apps=[], subcommand=None):
assert apps or subcommand
self.apps = apps
self.subcommand = subcommand
def name(self):
"""
:returns: The name of the package
:rtype: str
"""
if self.subcommand:
return self.subcommand.name
else:
return self.apps[0]['name']
def dict(self):
""" A dictionary representation of the package. Used by `dcos package
list`.
:returns: A dictionary representation of the package.
:rtype: dict
"""
ret = {}
if self.subcommand:
ret['command'] = {'name': self.subcommand.name}
if self.apps:
ret['apps'] = [app['appId'] for app in self.apps]
if self.subcommand:
package_json = self.subcommand.package_json()
ret.update(package_json)
ret['packageSource'] = self.subcommand.package_source()
ret['releaseVersion'] = self.subcommand.package_revision()
else:
ret.update(self.apps[0])
ret.pop('appId')
return ret
def installed_packages(init_client, endpoints):
"""Returns all installed packages in the format:
[{
'apps': [<id>],
'command': {
'name': <name>
}
...<metadata>...
}]
:param init_client: The program to use to list packages
:type init_client: object
:param endpoints: Whether to include a list of
endpoints as port-host pairs
:type endpoints: boolean
:returns: A list of installed packages
:rtype: [InstalledPackage]
"""
apps = installed_apps(init_client, endpoints)
subcommands = installed_subcommands()
dicts = collections.defaultdict(lambda: {'apps': [], 'command': None})
for app in apps:
key = (app['name'], app['releaseVersion'], app['packageSource'])
dicts[key]['apps'].append(app)
for subcmd in subcommands:
package_revision = subcmd.package_revision()
package_source = subcmd.package_source()
key = (subcmd.name, package_revision, package_source)
dicts[key]['command'] = subcmd
return [
InstalledPackage(pkg['apps'], pkg['command']) for pkg in dicts.values()
]
def installed_subcommands():
"""Returns all installed subcommands.
:returns: all installed subcommands
:rtype: [InstalledSubcommand]
"""
return [subcommand.InstalledSubcommand(name) for name in
subcommand.distributions()]
def installed_apps(init_client, endpoints=False):
"""
Returns all installed apps. An app is of the format:
{
'appId': <appId>,
'packageSource': <source>,
'registryVersion': <app_version>,
'releaseVersion': <release_version>
'endpoints' (optional): [{
'host': <host>,
'ports': <ports>,
}]
..<package.json properties>..
}
:param init_client: The program to use to list packages
:type init_client: object
:param endpoints: Whether to include a list of
endpoints as port-host pairs
:type endpoints: boolean
:returns: all installed apps
:rtype: [dict]
"""
apps = init_client.get_apps()
encoded_apps = [(a['id'], a['labels'])
for a in apps
if a.get('labels', {}).get(PACKAGE_METADATA_KEY)]
def decode_and_add_context(pair):
app_id, labels = pair
encoded = labels.get(PACKAGE_METADATA_KEY, {})
decoded = base64.b64decode(six.b(encoded)).decode()
decoded_json = util.load_jsons(decoded)
decoded_json['appId'] = app_id
decoded_json['packageSource'] = labels.get(PACKAGE_SOURCE_KEY)
decoded_json['releaseVersion'] = labels.get(PACKAGE_RELEASE_KEY)
return decoded_json
# Filter elements that failed to parse correctly as JSON
valid_apps = []
for encoded in encoded_apps:
try:
decoded = decode_and_add_context(encoded)
except Exception:
logger.exception(
'Unable to decode package metadata during install: %s',
encoded[0])
valid_apps.append(decoded)
if endpoints:
for app in valid_apps:
tasks = init_client.get_tasks(app["appId"])
app['endpoints'] = [{"host": t["host"], "ports": t["ports"]}
for t in tasks]
return valid_apps
def search(query, cfg):
"""Returns a list of index entry collections, one for each registry in
the supplied config.
:param query: The search term
:type query: str
:param cfg: Configuration dictionary
:type cfg: dcos.config.Toml
:rtype: [IndexEntries]
"""
threshold = 0.5 # Minimum rank required to appear in results
results = []
def clean_package_entry(entry):
result = entry.copy()
result.update({
'versions': list(entry['versions'].keys())
})
return result
for registry in registries(cfg):
source_results = []
index = registry.get_index()
for pkg in index['packages']:
rank = _search_rank(pkg, query)
if rank >= threshold:
source_results.append(clean_package_entry(pkg))
entries = IndexEntries(registry.source, source_results)
results.append(entries)
return results
def _search_rank(pkg, query):
"""
:param pkg: Index entry to rank for affinity with the search term
:type pkg: object
:param query: Search term
:type query: str
:rtype: float
"""
result = 0.0
wildcard_symbol = '*'
regex_pattern = '.*'
q = query.lower()
if wildcard_symbol in q:
q = q.replace(wildcard_symbol, regex_pattern)
if q.endswith(wildcard_symbol):
q = '^{}'.format(q)
else:
q = '{}$'.format(q)
if re.match(q, pkg['name'].lower()):
result += 2.0
return result
if q in pkg['name'].lower():
result += 2.0
for tag in pkg['tags']:
if q in tag.lower():
result += 1.0
if q in pkg['description'].lower():
result += 0.5
return result
def _extract_default_values(config_schema):
"""
:param config_schema: A json-schema describing configuration options.
:type config_schema: dict
:returns: a dictionary with the default specified by the schema
:rtype: dict | None
"""
defaults = {}
if 'properties' not in config_schema:
return None
for key, value in config_schema['properties'].items():
if isinstance(value, dict) and 'default' in value:
defaults[key] = value['default']
elif isinstance(value, dict) and value.get('type', '') == 'object':
# Generate the default value from the embedded schema
defaults[key] = _extract_default_values(value)
return defaults
def _merge_options(first, second):
"""Merges the :code:`second` dictionary into the :code:`first` dictionary.
If both dictionaries have the same key and both values are dictionaries
then it recursively merges those two dictionaries.
:param first: first dictionary
:type first: dict
:param second: second dictionary
:type second: dict
:returns: merged dictionary
:rtype: dict
"""
result = copy.deepcopy(first)
for key, second_value in second.items():
if key in first:
first_value = first[key]
if (isinstance(first_value, collections.Mapping) and
isinstance(second_value, collections.Mapping)):
result[key] = _merge_options(first_value, second_value)
else:
result[key] = second_value
else:
result[key] = second_value
return result
def resolve_package(package_name, config=None):
"""Returns the first package with the supplied name found by looking at
the configured sources in the order they are defined.
:param package_name: The name of the package to resolve
:type package_name: str
:param config: dcos config
:type config: dcos.config.Toml | None
:returns: The named package, if found
:rtype: Package
"""
if not config:
config = util.get_config()
for registry in registries(config):
package = registry.get_package(package_name)
if package:
return package
return None
def registries(config):
"""Returns configured cached package registries.
:param config: Configuration dictionary
:type config: dcos.config.Toml
:returns: The list of registries, in resolution order
:rtype: [Registry]
"""
sources = list_sources(config)
return [Registry(source, source.local_cache(config)) for source in sources]
def list_sources(config):
"""List configured package sources.
:param config: Configuration dictionary
:type config: dcos.config.Toml
:returns: The list of sources, in resolution order
:rtype: [Source]
"""
source_uris = util.get_config_vals(['package.sources'], config)[0]
sources = [url_to_source(s) for s in source_uris]
errs = [source for source in sources if isinstance(source, Error)]
if errs:
raise DCOSException('\n'.join(err.error() for err in errs))
return sources
def url_to_source(url):
"""Creates a package source from the supplied URL.
:param url: Location of the package source
:type url: str
:returns: A Source backed by the supplied URL
:rtype: Source | Error
"""
parse_result = urllib.parse.urlparse(url)
scheme = parse_result.scheme
if scheme == 'file':
return FileSource(url)
elif scheme == 'http' or scheme == 'https':
return HttpSource(url)
elif scheme == 'git':
return GitSource(url)
else:
return Error("Source URL uses unsupported protocol [{}]".format(url))
def _acquire_file_lock(lock_file_path):
"""Acquires an exclusive lock on the supplied file.
:param lock_file_path: Path to the lock file
:type lock_file_path: str
:returns: Lock file
:rtype: File
"""
try:
lock_file = open(lock_file_path, 'w')
except IOError as e:
logger.exception('Failed to open lock file: %s', lock_file_path)
raise util.io_exception(lock_file_path, e.errno)
acquire_mode = portalocker.LOCK_EX | portalocker.LOCK_NB
try:
portalocker.lock(lock_file, acquire_mode)
return lock_file
except portalocker.LockException:
logger.exception(
'Failure while tring to aquire file lock: %s',
lock_file_path)
lock_file.close()
raise DCOSException('Unable to acquire the package cache lock')
def update_sources(config, validate=False):
"""Overwrites the local package cache with the latest source data.
:param config: Configuration dictionary
:type config: dcos.config.Toml
:rtype: None
"""
errors = []
# ensure the cache directory is properly configured
cache_dir = os.path.expanduser(
util.get_config_vals(['package.cache'], config)[0])
# ensure the cache directory exists
if not os.path.exists(cache_dir):
os.makedirs(cache_dir)
if not os.path.isdir(cache_dir):
raise DCOSException(
'Cache directory does not exist! [{}]'.format(cache_dir))
# obtain an exclusive file lock on $CACHE/.lock
lock_path = os.path.join(cache_dir, '.lock')
with _acquire_file_lock(lock_path):
# list sources
sources = list_sources(config)
for source in sources:
emitter.publish('Updating source [{}]'.format(source))
# create a temporary staging directory
with util.tempdir() as tmp_dir:
stage_dir = os.path.join(tmp_dir, source.hash())
# copy to the staging directory
try:
source.copy_to_cache(stage_dir)
except DCOSException as e:
logger.exception(
'Failed to copy universe source %s to cache %s',
source.url,
stage_dir)
errors.append(e.message)
continue
# check version
# TODO(jsancio): move this to the validation when it is forced
Registry(source, stage_dir).check_version(
LooseVersion('1.0'),
LooseVersion('2.0'))
# validate content
if validate:
validation_errors = Registry(source, stage_dir).validate()
if len(validation_errors) > 0:
errors += validation_errors
continue # keep updating the other sources
# remove the $CACHE/source.hash() directory
target_dir = os.path.join(cache_dir, source.hash())
try:
if os.path.exists(target_dir):
shutil.rmtree(target_dir,
onerror=_rmtree_on_error,
ignore_errors=False)
except OSError:
logger.exception(
'Error removing target directory before move: %s',
target_dir)
err = "Could not remove directory [{}]".format(target_dir)
errors.append(err)
continue # keep updating the other sources
# move the staging directory to $CACHE/source.hash()
shutil.move(stage_dir, target_dir)
if errors:
raise DCOSException(util.list_to_err(errors))
class Source:
"""A source of DCOS packages."""
@property
@abc.abstractmethod
def url(self):
"""
:returns: Location of the package source
:rtype: str
"""
raise NotImplementedError
def hash(self):
"""Returns a cryptographically secure hash derived from this source.
:returns: a hexadecimal string
:rtype: str
"""
return hashlib.sha1(self.url.encode('utf-8')).hexdigest()
def local_cache(self, config):
"""Returns the file system path to this source's local cache.
:param config: Configuration dictionary
:type config: dcos.config.Toml
:returns: Path to this source's local cache on disk
:rtype: str or None
"""
cache_dir = os.path.expanduser(
util.get_config_vals(['package.cache'], config)[0])
return os.path.join(cache_dir, self.hash())
def copy_to_cache(self, target_dir):
"""Copies the source content to the supplied local directory.
:param target_dir: Path to the destination directory.
:type target_dir: str
:rtype: None
"""
raise NotImplementedError
def __repr__(self):
return self.url
class FileSource(Source):
"""A registry of DCOS packages.
:param url: Location of the package source
:type url: str
"""
def __init__(self, url):
self._url = url
@property
def url(self):
"""
:returns: Location of the package source
:rtype: str
"""
return self._url
def copy_to_cache(self, target_dir):
"""Copies the source content to the supplied local directory.
:param target_dir: Path to the destination directory.
:type target_dir: str
:rtype: None
"""
# copy the source to the target_directory
parse_result = urllib.parse.urlparse(self._url)
source_dir = parse_result.path
try:
shutil.copytree(source_dir, target_dir)
return None
except OSError:
logger.exception(
'Error copying source director [%s] to target directory [%s].',
source_dir,
target_dir)
raise DCOSException(
'Unable to fetch packages from [{}]'.format(self.url))
class HttpSource(Source):
"""A registry of DCOS packages.
:param url: Location of the package source
:type url: str
"""
def __init__(self, url):
self._url = url
@property
def url(self):
"""
:returns: Location of the package source
:rtype: str
"""
return self._url
def copy_to_cache(self, target_dir):
"""Copies the source content to the supplied local directory.
:param target_dir: Path to the destination directory.
:type target_dir: str
:returns: The error, if one occurred
:rtype: None
"""
try:
with util.tempdir() as tmp_dir:
tmp_file = os.path.join(tmp_dir, 'packages.zip')
# Download the zip file.
req = http.get(self.url)
if req.status_code == 200:
with open(tmp_file, 'wb') as f:
for chunk in req.iter_content(1024):
f.write(chunk)
else:
raise Exception(
'HTTP GET for {} did not return 200: {}'.format(
self.url,
req.status_code))
# Unzip the downloaded file.
packages_zip = zipfile.ZipFile(tmp_file, 'r')
packages_zip.extractall(tmp_dir)
# Move the enclosing directory to the target directory
enclosing_dirs = [item
for item in os.listdir(tmp_dir)
if os.path.isdir(
os.path.join(tmp_dir, item))]
# There should only be one directory present after extracting.
assert(len(enclosing_dirs) is 1)
enclosing_dir = os.path.join(tmp_dir, enclosing_dirs[0])
shutil.copytree(enclosing_dir, target_dir)
# Set appropriate file permissions on the scripts.
x_mode = (stat.S_IRUSR | stat.S_IWUSR | stat.S_IXUSR |
stat.S_IRGRP | stat.S_IWGRP | stat.S_IXGRP)
scripts_dir = os.path.join(target_dir, 'scripts')
scripts = os.listdir(scripts_dir)
for script in scripts:
script_path = os.path.join(scripts_dir, script)
if os.path.isfile(script_path):
os.chmod(script_path, x_mode)
return None
except Exception:
logger.exception('Unable to fetch packages from URL: %s', self.url)
raise DCOSException(
'Unable to fetch packages from [{}]'.format(self.url))
class GitSource(Source):
"""A registry of DCOS packages.
:param url: Location of the package source
:type url: str
"""
def __init__(self, url):
self._url = url
@property
def url(self):
"""
:returns: Location of the package source
:rtype: str
"""
return self._url
def copy_to_cache(self, target_dir):
"""Copies the source content to the supplied local directory.
:param target_dir: Path to the destination directory.
:type target_dir: str
:returns: The error, if one occurred
:rtype: None
"""
try:
# TODO(SS): add better url parsing
# Ensure git is installed properly.
git_program = util.which('git')
if git_program is None:
raise DCOSException("""Could not locate the git program. Make sure \
it is installed and on the system search path.
PATH = {}""".format(os.environ[constants.PATH_ENV]))
# Clone git repo into the supplied target directory.
git.Repo.clone_from(self._url,
to_path=target_dir,
progress=None,
branch='master')
# Remove .git directory to save space.
shutil.rmtree(os.path.join(target_dir, ".git"),
onerror=_rmtree_on_error)
return None
except git.exc.GitCommandError:
logger.exception('Unable to fetch packages from git: %s', self.url)
raise DCOSException(
'Unable to fetch packages from [{}]'.format(self.url))
def _rmtree_on_error(func, path, exc_info):
"""Error handler for ``shutil.rmtree``.
If the error is due to an access error (read only file)
it attempts to add write permission and then retries.
If the error is for another reason it re-raises the error.
Usage : ``shutil.rmtree(path, onerror=onerror)``.
:param func: Function which raised the exception.
:type func: function
:param path: The path name passed to ``shutil.rmtree`` function.
:type path: str
:param exc_info: Information about the last raised exception.
:type exc_info: tuple
:rtype: None
"""
import stat
if not os.access(path, os.W_OK):
os.chmod(path, stat.S_IRWXU | stat.S_IRWXG | stat.S_IRWXO)
func(path)
else:
raise
class Error(errors.Error):
"""Class for describing errors during packaging operations.
:param message: Error message
:type message: str
"""
def __init__(self, message):
self._message = message
def error(self):
"""Return error message
:returns: The error message
:rtype: str
"""
return self._message
class Registry():
"""Represents a package registry on disk.
:param base_path: Path to the registry
:type base_path: str
:param source: The associated package source
:type source: Source
"""
def __init__(self, source, base_path):
self._base_path = base_path
self._source = source
def validate(self):
"""Validates a package registry.
:returns: Validation errors
:rtype: [str]
"""
# TODO(CD): implement these checks in pure Python?
scripts_dir = os.path.join(self._base_path, 'scripts')
if util.is_windows_platform():
validate_script = os.path.join(scripts_dir,
'1-validate-packages.ps1')
cmd = ['powershell', '-ExecutionPolicy',
'ByPass', '-File', validate_script]
result = subprocess.call(cmd)
else:
validate_script = os.path.join(scripts_dir,
'1-validate-packages.sh')
result = subprocess.call(validate_script)
if result is not 0:
return ["Source tree is not valid [{}]".format(self._base_path)]
else:
return []
@property
def source(self):
"""Returns the associated upstream package source for this registry.
:rtype: Source
"""
return self._source
def check_version(self, min_version, max_version):
"""Checks that the version is [min_version, max_version)
:param min_version: the min version inclusive
:type min_version: LooseVersion
:param max_version: the max version exclusive
:type max_version: LooseVersion
:returns: None
"""
version = LooseVersion(self.get_version())
if not (version >= min_version and
version < max_version):
raise DCOSException((
'Unable to update source [{}] because version {} is '
'not supported. Supported versions are between {} and '
'{}. Please update your DCOS CLI.').format(
self._source.url,
version,
min_version,
max_version))
def get_version(self):
"""Returns the version of this registry.
:rtype: str
"""
# The package version is found in $BASE/repo/meta/version.json
index_path = os.path.join(
self._base_path,
'repo',
'meta',
'version.json')
if not os.path.isfile(index_path):
raise DCOSException('Path [{}] is not a file'.format(index_path))
try:
with util.open_file(index_path) as fd:
version_json = json.load(fd)
return version_json.get('version')
except ValueError:
logger.exception('Unable to parse JSON: %s', index_path)
raise DCOSException('Unable to parse [{}]'.format(index_path))
def get_index(self):
"""Retuprns the index of packages in this registry.
:rtype: dict
"""
# The package index is found in $BASE/repo/meta/index.json
index_path = os.path.join(
self._base_path,
'repo',
'meta',
'index.json')
if not os.path.isfile(index_path):
raise DCOSException('Path [{}] is not a file'.format(index_path))
try:
with util.open_file(index_path) as fd:
return json.load(fd)
except ValueError:
logger.exception('Unable to parse JSON: %s', index_path)
raise DCOSException('Unable to parse [{}]'.format(index_path))
def get_package(self, package_name):
"""Returns the named package, if it exists.
:param package_name: The name of the package to fetch
:type package_name: str
:returns: The requested package
:rtype: Package
"""
if len(package_name) is 0:
raise DCOSException('Package name must not be empty.')
# Packages are found in $BASE/repo/package/<first_character>/<pkg_name>
first_character = package_name[0].title()
package_path = os.path.join(
self._base_path,
'repo',
'packages',
first_character,
package_name)
if not os.path.isdir(package_path):
return None
try:
return Package(self, package_path)
except:
logger.exception('Unable to read package: %s', package_path)
raise DCOSException(
'Could not read package [{}]'.format(package_name))
class Package():
"""Interface to a package on disk.
:param registry: The containing registry for this package.
:type registry: Registry
:param path: Path to the package description on disk
:type path: str
"""
def __init__(self, registry, path):
assert os.path.isdir(path)
self._registry = registry
self.path = path
def name(self):
"""Returns the package name.
:returns: The name of this package
:rtype: str
"""
return os.path.basename(self.path)
def options(self, revision, user_options):
"""Merges package options with user supplied options, validates, and
returns the result.
:param revision: the package revision to install
:type revision: str
:param user_options: package parameters
:type user_options: dict
:returns: a dictionary with the user supplied options
:rtype: dict
"""
if user_options is None:
user_options = {}
config_schema = self.config_json(revision)
default_options = _extract_default_values(config_schema)
if default_options is None:
pkg = self.package_json(revision)
msg = ("An object in the package's config.json is missing the "
"required 'properties' feature:\n {}".format(config_schema))
if 'maintainer' in pkg:
msg += "\nPlease contact the project maintainer: {}".format(
pkg['maintainer'])
raise DCOSException(msg)
logger.info('Generated default options: %r', default_options)
# Merge option overrides
options = _merge_options(default_options, user_options)
logger.info('Merged options: %r', options)
# Validate options with the config schema
errs = util.validate_json(options, config_schema)
if len(errs) != 0:
raise DCOSException(
"{}\n\n{}".format(
util.list_to_err(errs),
'Please create a JSON file with the appropriate options, '
'and pass the /path/to/file as an --options argument.'))
return options
@property
def registry(self):
"""Returns the containing registry for this package.
:rtype: Registry
"""
return self._registry
def has_definition(self, revision, filename):
"""Returns true if the package defines filename; false otherwise.
:param revision: package revision
:type revision: str
:param filename: file in package definition
:type filename: str
:returns: whether filename is defined
:rtype: bool
"""
return os.path.isfile(
os.path.join(
self.path,
os.path.join(revision, filename)))
def has_command_definition(self, revision):
"""Returns true if the package defines a command; false otherwise.
:param revision: package revision
:type revision: str
:rtype: bool
"""
return self.has_definition(revision, 'command.json')
def has_marathon_definition(self, revision):
"""Returns true if the package defines a Marathon json. false otherwise.
:param revision: package revision
:type revision: str
:rtype: bool
"""
return self.has_definition(revision, 'marathon.json')
def config_json(self, revision):
"""Returns the JSON content of the config.json file.
:param revision: package revision
:type revision: str
:returns: Package config schema
:rtype: dict
"""
return self._json(revision, 'config.json')
def package_json(self, revision):
"""Returns the JSON content of the package.json file.
:param revision: the package revision
:type revision: str
:returns: Package data
:rtype: dict
"""
return self._json(revision, 'package.json')
def marathon_json(self, revision, options):
"""Returns the JSON content of the marathon.json template, after
rendering it with options.
:param revision: the package revision
:type revision: str
:param options: the template options to use in rendering
:type options: dict
:rtype: dict
"""
init_desc = self._render_template(
'marathon.json',
revision,
options)
# Add package metadata
package_labels = _make_package_labels(self, revision, options)
# Preserve existing labels
labels = init_desc.get('labels', {})
labels.update(package_labels)
init_desc['labels'] = labels
return init_desc
def command_json(self, revision, options):
"""Returns the JSON content of the comand.json template, after
rendering it with options.
:param revision: the package revision
:type revision: str
:param options: the template options to use in rendering
:type options: dict
:returns: Package data
:rtype: dict
"""
template = self._data(revision, 'command.json')
rendered = pystache.render(template, options)
return json.loads(rendered)
def marathon_template(self, revision):
""" Returns raw data from marathon.json
:param revision: the package revision
:type revision: str
:returns: raw data from marathon.json
:rtype: str
"""
return self._data(revision, 'marathon.json')
def command_template(self, revision):
""" Returns raw data from command.json
:param revision: the package revision
:type revision: str
:returns: raw data from command.json
:rtype: str
"""
return self._data(revision, 'command.json')
def _render_template(self, name, revision, options):
"""Render a template.
:param name: the file name of the template
:type name: str
:param revision: the package revision
:type revision: str
:param options: the template options to use in rendering
:type options: dict
:rtype: dict
"""
template = self._data(revision, name)
return util.render_mustache_json(template, options)
def _json(self, revision, name):
"""Returns the json content of the file named `name` in the directory
named `revision`
:param revision: the package revision
:type revision: str
:param name: file name
:type name: str
:rtype: dict
"""
data = self._data(revision, name)
return util.load_jsons(data)
def _data(self, revision, name):
"""Returns the content of the file named `name` in the directory named
`revision`
:param revision: the package revision
:type revision: str
:param name: file name
:type name: str
:returns: File content of the supplied path
:rtype: str
"""
path = os.path.join(revision, name)
full_path = os.path.join(self.path, path)
return util.read_file(full_path)
def package_revisions(self):
"""Returns all of the available package revisions, most recent first.
:returns: Available revisions of this package
:rtype: [str]
"""
vs = sorted((f for f in os.listdir(self.path)
if not f.startswith('.')), key=int, reverse=True)
return vs
def package_revisions_map(self):
"""Returns an ordered mapping from the package revision to the package
version, sorted by package revision.
:returns: Map from package revision to package version
:rtype: OrderedDict
"""
package_version_map = collections.OrderedDict()
for rev in self.package_revisions():
pkg_json = self.package_json(rev)
package_version_map[rev] = pkg_json['version']
return package_version_map
def latest_package_revision(self, package_version=None):
"""Returns the most recent package revision, for a
given package version if specified.
:param package_version: a given package version
:type package_version: str
:returns: package revision
:rtype: str | None
"""
if package_version:
pkg_rev_map = self.package_revisions_map()
# depends on package_revisions() returning an OrderedDict
if package_version in pkg_rev_map.values():
return next(pkg_rev for pkg_rev in reversed(pkg_rev_map)
if pkg_rev_map[pkg_rev] == package_version)
else:
return None
else:
pkg_revisions = self.package_revisions()
revision = pkg_revisions[0]
return revision
def __repr__(self):
rev = self.latest_package_revision()
pkg_json = self.package_json(rev)
return json.dumps(pkg_json)
class IndexEntries():
"""A collection of package index entries from a single source.
Each entry is a dict as described by the JSON schema for the package index:
https://github.com/mesosphere/universe/blob/master/repo/meta/schema/index-schema.json
:param source: The source of these index entries
:type source: Source
:param packages: The index entries
:type packages: [dict]
"""
def __init__(self, source, packages):
self._source = source
self._packages = packages
@property
def source(self):
"""Returns the source of these index entries.
:rtype: Source
"""
return self._source
@property
def packages(self):
"""Returns the package index entries.
:rtype: list of dict
"""
return self._packages
def as_dict(self):
"""
:rtype: dict
"""
return {'source': self.source.url, 'packages': self.packages}
def get_apps_for_framework(framework_name, client):
""" Return all apps running the given framework.
:param framework_name: framework name
:type framework_name: str
:param client: marathon client
:type client: marathon.Client
:rtype: [dict]
"""
return [app for app in client.get_apps()
if app.get('labels', {}).get(
PACKAGE_FRAMEWORK_NAME_KEY) == framework_name]
| apache-2.0 | 2,685,393,595,888,874,500 | 28.770577 | 89 | 0.585358 | false |
lgiommi/root | documentation/doxygen/converttonotebook.py | 1 | 35395 | #!/usr/bin/env python
# Author: Pau Miquel i Mir <[email protected]> <[email protected]>>
# Date: July, 2016
#
# DISCLAIMER: This script is a prototype and a work in progress. Indeed, it is possible that
# it may not work for certain tutorials, and that it, or the tutorial, might need to be
# tweaked slightly to ensure full functionality. Please do not hesistate to email the author
# with any questions or with examples that do not work.
#
# HELP IT DOESN'T WORK: Two possible solutions:
# 1. Check that all the types returned by the tutorial are in the gTypesList. If they aren't,
# simply add them.
# 2. If the tutorial takes a long time to execute (more than 90 seconds), add the name of the
# tutorial to the list of long tutorials listLongTutorials, in the fucntion findTimeout.
#
# REQUIREMENTS: This script needs jupyter to be properly installed, as it uses the python
# package nbformat and calls the shell commands `jupyter nbconvert` and `jupyter trust`. The
# rest of the packages used should be included in a standard installation of python. The script
# is intended to be run on a UNIX based system.
#
#
# FUNCTIONING:
# -----------
# The converttonotebook script creates Jupyter notebooks from raw C++ or python files.
# Particulary, it is indicated to convert the ROOT tutorials found in the ROOT
# repository.
#
# The script should be called from bash with the following format:
# python /path/to/script/converttonotebook.py /path/to/<macro>.C /path/to/outdir
#
# Indeed the script takes two arguments, the path to the macro and the path to the directory
# where the notebooks will be created
#
# The script's general functioning is as follows. The macro to be converted is imported as a string.
# A series of modifications are made to this string, for instance delimiting where markdown and
# code cells begin and end. Then, this string is converted into ipynb format using a function
# in the nbconvert package. Finally, the notebook is executed and output.
#
# For converting python tutorials it is fairly straightforward. It extracts the decription and
# author information from the header and then removes it. It also converts any comment at the
# beginning of a line into a Markdown cell.
#
# For C++ files the process is slightly more complex. The script separates the functions from the
# main code. The main function is identified as it has the smae name as the macro file. The other
# functions are considered functions. The main function is "extracted" and presented as main code.
# The helper functions are placed in their own code cell with the %%cpp -d magic to enable function
# defintion. Finally, as with Python macros, relevant information is extracted from the header, and
# newline comments are converted into Markdown cells (unless they are in helper functions).
#
# The script creates an .ipynb version of the macro, with the full output included.
# The files are named:
# <macro>.<C or py>.nbconvert.ipynb
#
# It is called by filter.cxx, which in turn is called by doxygen when processing any file
# in the ROOT repository. filter.cxx only calls convertonotebook.py when the string \notebook
# is found in the header of the turorial, but this script checks for its presence as well.
import re
import os
import sys
import json
import time
import doctest
import textwrap
import subprocess
from nbformat import v3, v4
from datetime import datetime, date
# List of types that will be considered when looking for a C++ function. If a macro returns a
# type not included on the list, the regular expression will not match it, and thus the function
# will not be properly defined. Thus, any other type returned by function must be added to this list
# for the script to work correctly.
gTypesList = ["void", "int", "Int_t", "TF1", "string", "bool", "double", "float", "char",
"TCanvas", "TTree", "TString", "TSeqCollection", "Double_t", "TFile", "Long64_t", "Bool_t", "TH1",
"RooDataSet", "RooWorkspace" , "HypoTestInverterResult" , "TVectorD" , "TArrayF", "UInt_t"]
# -------------------------------------
# -------- Fuction definitions---------
# -------------------------------------
def unindenter(string, spaces = 3):
"""
Returns string with each line unindented by 3 spaces. If line isn't indented, it stays the same.
>>> unindenter(" foobar")
'foobar\\n'
>>> unindenter("foobar")
'foobar\\n'
>>> unindenter('''foobar
... foobar
... foobar''')
'foobar\\nfoobar\\nfoobar\\n'
"""
newstring = ''
lines = string.splitlines()
for line in lines:
if line.startswith(spaces*' '):
newstring += (line[spaces:] + "\n")
else:
newstring += (line + "\n")
return newstring
def readHeaderPython(text):
"""
Extract author and description from header, eliminate header from text. Also returns
notebook boolean, which is True if the string \notebook is present in the header
Also determine options (-js, -nodraw, -header) passed in \notebook command, and
return their booleans
>>> readHeaderPython('''## \\file
... ## \\ingroup tutorials
... ## \\\\notebook
... ## This is the description of the tutorial
... ##
... ## \\macro_image
... ## \\macro_code
... ##
... ## \\\\author John Brown
... def tutorialfuncion()''')
('def tutorialfuncion()\\n', 'This is the description of the tutorial\\n\\n\\n', 'John Brown', True, False, False, False)
>>> readHeaderPython('''## \\file
... ## \\ingroup tutorials
... ## \\\\notebook -js
... ## This is the description of the tutorial
... ##
... ## \\macro_image
... ## \\macro_code
... ##
... ## \\\\author John Brown
... def tutorialfuncion()''')
('def tutorialfuncion()\\n', 'This is the description of the tutorial\\n\\n\\n', 'John Brown', True, True, False, False)
>>> readHeaderPython('''## \\file
... ## \\ingroup tutorials
... ## \\\\notebook -nodraw
... ## This is the description of the tutorial
... ##
... ## \\macro_image
... ## \\macro_code
... ##
... ## \\\\author John Brown
... def tutorialfuncion()''')
('def tutorialfuncion()\\n', 'This is the description of the tutorial\\n\\n\\n', 'John Brown', True, False, True, False)
"""
lines = text.splitlines()
description = ''
author = ''
isNotebook = False
isJsroot = False
nodraw = False
needsHeaderFile = False
for i, line in enumerate(lines):
if line.startswith("## \\aut"):
author = line[11:]
elif line.startswith("## \\note"):
isNotebook = True
if "-js" in line:
isJsroot = True
if "-nodraw" in line:
nodraw = True
if "-header" in line:
needsHeaderFile = True
elif line.startswith("##"):
if not line.startswith("## \\") and isNotebook:
description += (line[3:] + '\n')
else:
break
newtext = ''
for line in lines[i:]:
newtext += (line + "\n")
return newtext, description, author, isNotebook, isJsroot, nodraw, needsHeaderFile
def pythonComments(text):
"""
Converts comments delimited by # or ## and on a new line into a markdown cell.
For python files only
>>> pythonComments('''## This is a
... ## multiline comment
... def function()''')
'# <markdowncell>\\n## This is a\\n## multiline comment\\n# <codecell>\\ndef function()\\n'
>>> pythonComments('''def function():
... variable = 5 # Comment not in cell
... # Comment also not in cell''')
'def function():\\n variable = 5 # Comment not in cell\\n # Comment also not in cell\\n'
"""
text = text.splitlines()
newtext = ''
inComment = False
for i, line in enumerate(text):
if line.startswith("#") and not inComment: # True if first line of comment
inComment = True
newtext += "# <markdowncell>\n"
newtext += (line + "\n")
elif inComment and not line.startswith("#"): # True if first line after comment
inComment = False
newtext += "# <codecell>\n"
newtext += (line+"\n")
else:
newtext += (line+"\n")
return newtext
def pythonMainFunction(text):
lines = text.splitlines()
functionContentRe = re.compile('def %s\\(.*\\):' % tutName , flags = re.DOTALL | re.MULTILINE)
newtext = ''
inMainFunction = False
hasMainFunction = False
for line in lines:
if hasMainFunction:
if line.startswith("""if __name__ == "__main__":""") or line.startswith("""if __name__ == '__main__':"""):
break
match = functionContentRe.search(line)
if inMainFunction and not line.startswith(" ") and line != "":
inMainFunction = False
if match:
inMainFunction = True
hasMainFunction = True
else:
if inMainFunction:
newtext += (line[4:] + '\n')
else:
newtext += (line + '\n')
return newtext
def readHeaderCpp(text):
"""
Extract author and description from header, eliminate header from text. Also returns
notebook boolean, which is True if the string \notebook is present in the header
Also determine options (-js, -nodraw, -header) passed in \notebook command, and
return their booleans
>>> readHeaderCpp('''/// \\file
... /// \\ingroup tutorials
... /// \\\\notebook
... /// This is the description of the tutorial
... ///
... /// \\macro_image
... /// \\macro_code
... ///
... /// \\\\author John Brown
... void tutorialfuncion(){}''')
('void tutorialfuncion(){}\\n', '# This is the description of the tutorial\\n# \\n# \\n', 'John Brown', True, False, False, False)
>>> readHeaderCpp('''/// \\file
... /// \\ingroup tutorials
... /// \\\\notebook -js
... /// This is the description of the tutorial
... ///
... /// \\macro_image
... /// \\macro_code
... ///
... /// \\\\author John Brown
... void tutorialfuncion(){}''')
('void tutorialfuncion(){}\\n', '# This is the description of the tutorial\\n# \\n# \\n', 'John Brown', True, True, False, False)
>>> readHeaderCpp('''/// \\file
... /// \\ingroup tutorials
... /// \\\\notebook -nodraw
... /// This is the description of the tutorial
... ///
... /// \\macro_image
... /// \\macro_code
... ///
... /// \\\\author John Brown
... void tutorialfuncion(){}''')
('void tutorialfuncion(){}\\n', '# This is the description of the tutorial\\n# \\n# \\n', 'John Brown', True, False, True, False)
"""
lines = text.splitlines()
description = ''
author = ''
isNotebook = False
isJsroot = False
nodraw = False
needsHeaderFile = False
for i, line in enumerate(lines):
if line.startswith("/// \\aut"):
author = line[12:]
if line.startswith("/// \\note"):
isNotebook = True
if "-js" in line:
isJsroot = True
if "-nodraw" in line:
nodraw = True
if "-header" in line:
needsHeaderFile = True
if line.startswith("///"):
if not line.startswith("/// \\") and isNotebook:
description += ('# ' + line[4:] + '\n')
else:
break
newtext = ''
for line in lines[i:]:
newtext += (line + "\n")
description = description.replace("\\f$", "$")
description = description.replace("\\f[", "$$")
description = description.replace("\\f]", "$$")
return newtext, description, author, isNotebook, isJsroot, nodraw, needsHeaderFile
def cppFunction(text):
"""
Extracts main function for the function enclosure by means of regular expression
>>> cppFunction('''void mainfunction(arguments = values){
... content of function
... which spans
... several lines
... }''')
'\\n content of function\\n which spans\\n several lines\\n'
>>> cppFunction('''void mainfunction(arguments = values)
... {
... content of function
... which spans
... several lines
... }''')
'\\n content of function\\n which spans\\n several lines\\n'
>>> cppFunction('''void mainfunction(arguments = values
... morearguments = morevalues)
... {
... content of function
... which spans
... several lines
... }''')
'\\n content of function\\n which spans\\n several lines\\n'
"""
functionContentRe = re.compile(r'(?<=\{).*(?=^\})', flags = re.DOTALL | re.MULTILINE)
match = functionContentRe.search(text)
if match:
return match.group()
else:
return text
def cppComments(text):
"""
Converts comments delimited by // and on a new line into a markdown cell. For C++ files only.
>>> cppComments('''// This is a
... // multiline comment
... void function(){}''')
'# <markdowncell>\\n# This is a\\n# multiline comment\\n# <codecell>\\nvoid function(){}\\n'
>>> cppComments('''void function(){
... int variable = 5 // Comment not in cell
... // Comment also not in cell
... }''')
'void function(){\\n int variable = 5 // Comment not in cell\\n // Comment also not in cell\\n}\\n'
"""
text = text.splitlines()
newtext = ''
inComment = False
for line in text:
if line.startswith("//") and not inComment: # True if first line of comment
inComment = True
newtext += "# <markdowncell>\n"
if line[2:].lstrip().startswith("#"): # Don't use .capitalize() if line starts with hash, ie it is a header
newtext += ("# " + line[2:]+"\n")
else:
newtext += ("# " + line[2:].lstrip().capitalize()+"\n")
elif inComment and not line.startswith("//"): # True if first line after comment
inComment = False
newtext += "# <codecell>\n"
newtext += (line+"\n")
elif inComment and line.startswith("//"): # True if in the middle of a comment block
newtext += ("# " + line[2:] + "\n")
else:
newtext += (line+"\n")
return newtext
def split(text):
"""
Splits the text string into main, helpers, and rest. main is the main function,
i.e. the function tha thas the same name as the macro file. Helpers is a list of
strings, each a helper function, i.e. any other function that is not the main function.
Finally, rest is a string containing any top-level code outside of any function.
Comments immediately prior to a helper cell are converted into markdown cell,
added to the helper, and removed from rest.
Intended for C++ files only.
>>> split('''void tutorial(){
... content of tutorial
... }''')
('void tutorial(){\\n content of tutorial\\n}', [], '')
>>> split('''void tutorial(){
... content of tutorial
... }
... void helper(arguments = values){
... helper function
... content spans lines
... }''')
('void tutorial(){\\n content of tutorial\\n}', ['\\n# <markdowncell>\\n A helper function is created: \\n# <codecell>\\n%%cpp -d\\nvoid helper(arguments = values){\\n helper function\\n content spans lines\\n}'], '')
>>> split('''#include <header.h>
... using namespace NAMESPACE
... void tutorial(){
... content of tutorial
... }
... void helper(arguments = values){
... helper function
... content spans lines
... }''')
('void tutorial(){\\n content of tutorial\\n}', ['\\n# <markdowncell>\\n A helper function is created: \\n# <codecell>\\n%%cpp -d\\nvoid helper(arguments = values){\\n helper function\\n content spans lines\\n}'], '#include <header.h>\\nusing namespace NAMESPACE')
>>> split('''void tutorial(){
... content of tutorial
... }
... // This is a multiline
... // description of the
... // helper function
... void helper(arguments = values){
... helper function
... content spans lines
... }''')
('void tutorial(){\\n content of tutorial\\n}', ['\\n# <markdowncell>\\n This is a multiline\\n description of the\\n helper function\\n \\n# <codecell>\\n%%cpp -d\\nvoid helper(arguments = values){\\n helper function\\n content spans lines\\n}'], '')
"""
functionReString="("
for cpptype in gTypesList:
functionReString += ("^%s|") % cpptype
functionReString = functionReString[:-1] + r")\s?\*?&?\s?[\w:]*?\s?\([^\)]*\)\s*\{.*?^\}"
functionRe = re.compile(functionReString, flags = re.DOTALL | re.MULTILINE)
#functionre = re.compile(r'(^void|^int|^Int_t|^TF1|^string|^bool|^double|^float|^char|^TCanvas|^TTree|^TString|^TSeqCollection|^Double_t|^TFile|^Long64_t|^Bool_t)\s?\*?\s?[\w:]*?\s?\([^\)]*\)\s*\{.*?^\}', flags = re.DOTALL | re.MULTILINE)
functionMatches = functionRe.finditer(text)
helpers = []
main = ""
for matchString in [match.group() for match in functionMatches]:
if tutName == findFunctionName(matchString): # if the name of the function is that of the macro
main = matchString
else:
helpers.append(matchString)
# Create rest by replacing the main and helper functions with blank strings
rest = text.replace(main, "")
for helper in helpers:
rest = rest.replace(helper, "")
newHelpers = []
lines = text.splitlines()
for helper in helpers: # For each helper function
for i, line in enumerate(lines): # Look through the lines until the
if line.startswith(helper[:helper.find("\n")]): # first line of the helper is found
j = 1
commentList = []
while lines[i-j].startswith("//"): # Add comment lines immediately prior to list
commentList.append(lines[i-j])
j += 1
if commentList: # Convert list to string
commentList.reverse()
helperDescription = ''
for comment in commentList:
if comment in ("//", "// "):
helperDescription += "\n\n" # Two newlines to create hard break in Markdown
else:
helperDescription += (comment[2:] + "\n")
rest = rest.replace(comment, "")
break
else: # If no comments are found create generic description
helperDescription = "A helper function is created:"
break
if findFunctionName(helper) != "main": # remove void main function
newHelpers.append("\n# <markdowncell>\n " + helperDescription + " \n# <codecell>\n%%cpp -d\n" + helper)
rest = rest.rstrip("\n /") # remove newlines and empty comments at the end of string
return main, newHelpers, rest
def findFunctionName(text):
"""
Takes a string representation of a C++ function as an input,
finds and returns the name of the function
>>> findFunctionName('void functionName(arguments = values){}')
'functionName'
>>> findFunctionName('void functionName (arguments = values){}')
'functionName'
>>> findFunctionName('void *functionName(arguments = values){}')
'functionName'
>>> findFunctionName('void* functionName(arguments = values){}')
'functionName'
>>> findFunctionName('void * functionName(arguments = values){}')
'functionName'
>>> findFunctionName('void class::functionName(arguments = values){}')
'class::functionName'
"""
functionNameReString="(?<="
for cpptype in gTypesList:
functionNameReString += ("(?<=%s)|") % cpptype
functionNameReString = functionNameReString[:-1] + r")\s?\*?\s?[^\s]*?(?=\s?\()"
functionNameRe = re.compile(functionNameReString, flags = re.DOTALL | re.MULTILINE)
#functionnamere = re.compile(r'(?<=(?<=int)|(?<=void)|(?<=TF1)|(?<=Int_t)|(?<=string)|(?<=double)|(?<=Double_t)|(?<=float)|(?<=char)|(?<=TString)|(?<=bool)|(?<=TSeqCollection)|(?<=TCanvas)|(?<=TTree)|(?<=TFile)|(?<=Long64_t)|(?<=Bool_t))\s?\*?\s?[^\s]*?(?=\s?\()', flags = re.DOTALL | re.MULTILINE)
match = functionNameRe.search(text)
functionname = match.group().strip(" *\n")
return functionname
def processmain(text):
"""
Evaluates whether the main function returns a TCanvas or requires input. If it
does then the keepfunction flag is True, meaning the function wont be extracted
by cppFunction. If the initial condition is true then an extra cell is added
before at the end that calls the main function is returned, and added later.
>>> processmain('''void function(){
... content of function
... spanning several
... lines
... }''')
('void function(){\\n content of function\\n spanning several\\n lines\\n}', '')
>>> processmain('''void function(arguments = values){
... content of function
... spanning several
... lines
... }''')
('void function(arguments = values){\\n content of function\\n spanning several\\n lines\\n}', '# <markdowncell> \\n Arguments are defined. \\n# <codecell>\\narguments = values;\\n# <codecell>\\n')
>>> processmain('''void function(argument1 = value1, //comment 1
... argument2 = value2 /*comment 2*/ ,
... argument3 = value3,
... argument4 = value4)
... {
... content of function
... spanning several
... lines
... }''')
('void function(argument1 = value1, //comment 1\\n argument2 = value2 /*comment 2*/ ,\\n argument3 = value3, \\n argument4 = value4)\\n{\\n content of function\\n spanning several\\n lines\\n}', '# <markdowncell> \\n Arguments are defined. \\n# <codecell>\\nargument1 = value1;\\nargument2 = value2;\\nargument3 = value3;\\nargument4 = value4;\\n# <codecell>\\n')
>>> processmain('''TCanvas function(){
... content of function
... spanning several
... lines
... return c1
... }''')
('TCanvas function(){\\n content of function\\n spanning several \\n lines\\n return c1\\n}', '')
"""
argumentsCell = ''
if text:
argumentsre = re.compile(r'(?<=\().*?(?=\))', flags = re.DOTALL | re.MULTILINE)
arguments = argumentsre.search(text)
if len(arguments.group()) > 3:
argumentsCell = "# <markdowncell> \n Arguments are defined. \n# <codecell>\n"
individualArgumentre = re.compile(r'[^/\n,]*?=[^/\n,]*') #, flags = re.DOTALL) #| re.MULTILINE)
argumentList=individualArgumentre.findall(arguments.group())
for argument in argumentList:
argumentsCell += argument.strip("\n ") + ";\n"
argumentsCell += "# <codecell>\n"
return text, argumentsCell
# now define text transformers
def removePaletteEditor(code):
code = code.replace("img->StartPaletteEditor();", "")
code = code.replace("Open the color editor", "")
return code
def runEventExe(code):
if "copytree" in tutName:
return "# <codecell> \n.! $ROOTSYS/test/eventexe 1000 1 1 1 \n" + code
return code
def getLibMathMore(code):
if "quasirandom" == tutName:
return "# <codecell> \ngSystem->Load(\"libMathMore\"); \n# <codecell> \n" + code
return code
def roofitRemoveSpacesComments(code):
def changeString(matchObject):
matchString = matchObject.group()
matchString = matchString[0] + " " + matchString[1:]
matchString = matchString.replace(" " , "THISISASPACE")
matchString = matchString.replace(" " , "")
matchString = matchString.replace("THISISASPACE" , " ")
return matchString
newcode = re.sub("#\s\s?\w\s[\w-]\s\w.*", changeString , code)
return newcode
def declareNamespace(code):
if "using namespace RooFit;\nusing namespace RooStats;" in code:
code = code.replace("using namespace RooFit;\nusing namespace RooStats;", "# <codecell>\n%%cpp -d\n// This is a workaround to make sure the namespace is used inside functions\nusing namespace RooFit;\nusing namespace RooStats;\n# <codecell>\n")
else:
code = code.replace("using namespace RooFit;", "# <codecell>\n%%cpp -d\n// This is a workaround to make sure the namespace is used inside functions\nusing namespace RooFit;\n# <codecell>\n")
code = code.replace("using namespace RooStats;", "# <codecell>\n%%cpp -d\n// This is a workaround to make sure the namespace is used inside functions\nusing namespace RooStats;\n# <codecell>\n")
code = code.replace("using namespace ROOT::Math;", "# <codecell>\n%%cpp -d\n// This is a workaround to make sure the namespace is used inside functions\nusing namespace ROOT::Math;\n# <codecell>\n")
return code
def rs401dGetFiles(code):
if tutName == "rs401d_FeldmanCousins":
code = code.replace(
"""#if !defined(__CINT__) || defined(__MAKECINT__)\n#include "../tutorials/roostats/NuMuToNuE_Oscillation.h"\n#include "../tutorials/roostats/NuMuToNuE_Oscillation.cxx" // so that it can be executed directly\n#else\n#include "../tutorials/roostats/NuMuToNuE_Oscillation.cxx+" // so that it can be executed directly\n#endif""" , """std::string tutDir = gROOT->GetTutorialsDir();\nTString headerDir = TString::Format("#include \\\"%s/roostats/NuMuToNuE_Oscillation.h\\\"", tutDir.c_str());\nTString impDir = TString::Format("#include \\\"%s/roostats/NuMuToNuE_Oscillation.cxx\\\"", tutDir.c_str());\ngROOT->ProcessLine(headerDir);\ngROOT->ProcessLine(impDir);""")
return code
def declareIncludes(code):
if tutName != "fitcont":
code = re.sub(r"# <codecell>\s*#include", "# <codecell>\n%%cpp -d\n#include" , code)
return code
def tree4GetFiles(code):
if tutName == "tree4":
code = code.replace(
"""#include \"../test/Event.h\"""" , """# <codecell>\nTString dir = "$ROOTSYS/test/Event.h";\ngSystem->ExpandPathName(dir);\nTString includeCommand = TString::Format("#include \\\"%s\\\"" , dir.Data());\ngROOT->ProcessLine(includeCommand);""")
return code
def disableDrawProgressBar(code):
code = code.replace(":DrawProgressBar",":!DrawProgressBar")
return code
def fixes(code):
codeTransformers=[removePaletteEditor, runEventExe, getLibMathMore,
roofitRemoveSpacesComments, declareNamespace, rs401dGetFiles ,
declareIncludes, tree4GetFiles, disableDrawProgressBar]
for transformer in codeTransformers:
code = transformer(code)
return code
def changeMarkdown(code):
code = code.replace("~~~" , "```")
code = code.replace("{.cpp}", "cpp")
code = code.replace("{.bash}", "bash")
return code
def isCpp():
"""
Return True if extension is a C++ file
"""
return extension in ("C", "c", "cpp", "C++", "cxx")
def findTimeout():
listLongTutorials = ["OneSidedFrequentistUpperLimitWithBands", "StandardBayesianNumericalDemo",
"TwoSidedFrequentistUpperLimitWithBands" , "HybridStandardForm", "rs401d_FeldmanCousins",
"TMVAMultipleBackgroundExample", "TMVARegression", "TMVAClassification", "StandardHypoTestDemo"]
if tutName in listLongTutorials:
return 300
else:
return 90
# -------------------------------------
# ------------ Main Program------------
# -------------------------------------
def mainfunction(text):
"""
Main function. Calls all other functions, depending on whether the macro input is in python or c++.
It adds the header information. Also, it adds a cell that draws all canvases. The working text is
then converted to a version 3 jupyter notebook, subsequently updated to a version 4. Then, metadata
associated with the language the macro is written in is attatched to he notebook. Finally the
notebook is executed and output as a Jupyter notebook.
"""
# Modify text from macros to suit a notebook
if isCpp():
main, helpers, rest = split(text)
main, argumentsCell = processmain(main)
main = cppComments(unindenter(cppFunction(main))) # Remove function, Unindent, and convert comments to Markdown cells
if argumentsCell:
main = argumentsCell + main
rest = cppComments(rest) # Convert top level code comments to Markdown cells
# Construct text by starting with top level code, then the helper functions, and finally the main function.
# Also add cells for headerfile, or keepfunction
if needsHeaderFile:
text = "# <markdowncell>\n# The header file must be copied to the current directory\n# <codecell>\n.!cp %s%s.h .\n# <codecell>\n" % (tutRelativePath, tutName)
text += rest
else:
text = "# <codecell>\n" + rest
for helper in helpers:
text += helper
text += ("\n# <codecell>\n" + main)
if extension == "py":
text = pythonMainFunction(text)
text = pythonComments(text) # Convert comments into Markdown cells
# Perform last minute fixes to the notebook, used for specific fixes needed by some tutorials
text = fixes(text)
# Change to standard Markdown
newDescription = changeMarkdown(description)
# Add the title and header of the notebook
text = "# <markdowncell> \n# # %s\n%s# \n# \n# **Author:** %s \n# <i><small>This notebook tutorial was automatically generated " \
"with <a href= \"https://github.com/root-mirror/root/blob/master/documentation/doxygen/converttonotebook.py\">ROOTBOOK-izer (Beta)</a> " \
"from the macro found in the ROOT repository on %s.</small></i>\n# <codecell>\n%s" % (tutTitle, newDescription, author, date, text)
# Add cell at the end of the notebook that draws all the canveses. Add a Markdown cell before explaining it.
if isJsroot and not nodraw:
if isCpp():
text += "\n# <markdowncell> \n# Draw all canvases \n# <codecell>\n%jsroot on\ngROOT->GetListOfCanvases()->Draw()"
if extension == "py":
text += "\n# <markdowncell> \n# Draw all canvases \n# <codecell>\n%jsroot on\nfrom ROOT import gROOT \ngROOT.GetListOfCanvases().Draw()"
elif not nodraw:
if isCpp():
text += "\n# <markdowncell> \n# Draw all canvases \n# <codecell>\ngROOT->GetListOfCanvases()->Draw()"
if extension == "py":
text += "\n# <markdowncell> \n# Draw all canvases \n# <codecell>\nfrom ROOT import gROOT \ngROOT.GetListOfCanvases().Draw()"
# Create a notebook from the working text
nbook = v3.reads_py(text)
nbook = v4.upgrade(nbook) # Upgrade v3 to v4
# Load notebook string into json format, essentially creating a dictionary
json_data = json.loads(v4.writes(nbook))
# add the corresponding metadata
if extension == "py":
json_data[u'metadata'] = {
"kernelspec": {
"display_name": "Python 2",
"language": "python",
"name": "python2"
},
"language_info": {
"codemirror_mode": {
"name": "ipython",
"version": 2
},
"file_extension": ".py",
"mimetype": "text/x-python",
"name": "python",
"nbconvert_exporter": "python",
"pygments_lexer": "ipython2",
"version": "2.7.10"
}
}
elif isCpp():
json_data[u'metadata'] = {
"kernelspec": {
"display_name": "ROOT C++",
"language": "c++",
"name": "root"
},
"language_info": {
"codemirror_mode": "text/x-c++src",
"file_extension": ".C",
"mimetype": " text/x-c++src",
"name": "c++"
}
}
# write the json file with the metadata
with open(outPathName, 'w') as fout:
json.dump(json_data, fout, indent=1, sort_keys=True)
print(time.time() - starttime)
timeout = findTimeout()
# Call commmand that executes the notebook and creates a new notebook with the output
r = subprocess.call(["jupyter", "nbconvert", "--ExecutePreprocessor.timeout=%d" % timeout, "--to=notebook", "--execute", outPathName])
if r != 0:
sys.stderr.write("NOTEBOOK_CONVERSION_WARNING: Nbconvert failed for notebook %s with return code %s\n" %(outname,r))
if isJsroot:
subprocess.call(["jupyter", "trust", os.path.join(outdir, outnameconverted)])
if r == 0: # Only remove notebook without output if nbconvert succeedes
os.remove(outPathName)
if __name__ == "__main__":
if str(sys.argv[1]) == "-test":
tutName = "tutorial"
doctest.testmod(verbose=True)
else:
# -------------------------------------
# ----- Preliminary definitions--------
# -------------------------------------
# Extract and define the name of the file as well as its derived names
tutPathName = str(sys.argv[1])
tutPath = os.path.dirname(tutPathName)
if tutPath.split("/")[-2] == "tutorials":
tutRelativePath = "$ROOTSYS/tutorials/%s/" % tutPath.split("/")[-1]
tutFileName = os.path.basename(tutPathName)
tutName, extension = tutFileName.split(".")
tutTitle = re.sub( r"([A-Z\d])", r" \1", tutName).title()
outname = tutFileName + ".ipynb"
outnameconverted = tutFileName + ".nbconvert.ipynb"
# Extract output directory
try:
outdir = str(sys.argv[2])
        except IndexError:
outdir = tutPath
outPathName = os.path.join(outdir, outname)
# Find and define the time and date this script is run
date = datetime.now().strftime("%A, %B %d, %Y at %I:%M %p")
# -------------------------------------
# -------------------------------------
# -------------------------------------
        # Set DYLD_LIBRARY_PATH. When run without root access or as a different user, especially on Mac systems,
        # the environment may omit this definition for security reasons, so it is defined manually here.
os.environ["DYLD_LIBRARY_PATH"] = os.environ["ROOTSYS"] + "/lib"
# Open the file to be converted
with open(tutPathName) as fin:
text = fin.read()
# Extract information from header and remove header from text
if extension == "py":
text, description, author, isNotebook, isJsroot, nodraw, needsHeaderFile = readHeaderPython(text)
elif isCpp():
text, description, author, isNotebook, isJsroot, nodraw, needsHeaderFile = readHeaderCpp(text)
if isNotebook:
starttime = time.time()
mainfunction(text)
print(time.time() - starttime)
else:
pass
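# A minimal invocation sketch (the paths below are hypothetical, not taken from
# this repository):
#   python converttonotebook.py $ROOTSYS/tutorials/hist/ratioplot.py /tmp/notebooks
# The first argument is the tutorial macro to convert; the optional second
# argument is the output directory, defaulting to the macro's own directory.
# Passing "-test" as the first argument runs the module's doctests instead.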
| lgpl-2.1 | 7,401,523,947,888,513,000 | 41.389222 | 670 | 0.592965 | false |
zqfan/leetcode | algorithms/306. Additive Number/solution.py | 1 | 1232 | class Solution(object):
def isAdditiveNumber(self, num):
"""
:type num: str
:rtype: bool
"""
def str_add(s1, s2):
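            # Schoolbook addition of two decimal digit strings, walking from the
            # least significant digit and propagating the carry, so arbitrarily
            # long numbers can be summed without integer conversion.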
res = []
carry = 0; i1 = len(s1)-1; i2 = len(s2)-1
while i1 >= 0 or i2 >=0:
if i1 >= 0:
carry += ord(s1[i1]) - ord('0')
if i2 >= 0:
carry += ord(s2[i2]) - ord('0')
res.append(str(carry % 10))
carry /= 10; i1 -= 1; i2 -= 1
if carry:
res.append('1')
return ''.join(reversed(res))
l = len(num)
for i in xrange(l/2):
if i > 0 and num[0] == '0':
break
s1 = num[:i+1]
for j in xrange(i+1, 2*l/3):
if j - i > 1 and num[i+1] == '0':
break
s2 = num[i+1:j+1]
s = str_add(s1, s2)
k = j + 1
while k < l:
if s != num[k:k+len(s)]:
break
k += len(s)
s, s2 = str_add(s2, s), s
if k == l:
return True
return False
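    # Hand-checked examples for the solver above (sequences worked out by hand):
    #   Solution().isAdditiveNumber("112358")    -> True   (1, 1, 2, 3, 5, 8)
    #   Solution().isAdditiveNumber("199100199") -> True   (1, 99, 100, 199)
    #   Solution().isAdditiveNumber("1234")      -> False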
| gpl-3.0 | 5,049,437,436,025,781,000 | 30.589744 | 53 | 0.323052 | false |
apeng2012/TimeSwitch4ModemRouter | SetTimer/switchTime.py | 1 | 5093 | import sys
import time
import serial
try:
import xml.etree.cElementTree as ET
except ImportError:
import xml.etree.ElementTree as ET
saveFileName = "SwitchTime.xml"
def list2xml(stList):
if len(stList) != 7:
print "DayOfWeek num error!"
return
weekname = "sunday", "monday", "tuesday", "wednesday", "thursday", "friday", "saturday"
root = ET.Element("switchTime")
i = 0
for itList in stList:
dayofweek = ET.SubElement(root, weekname[i])
print weekname[i]
i = i+1
for it in itList:
item = ET.SubElement(dayofweek, "item")
item.text = it
print "\t"+it
# wrap it in an ElementTree instance, and save as XML
tree = ET.ElementTree(root)
tree.write(saveFileName)
def xml2list(stList):
tree = ET.ElementTree(file=saveFileName)
root = tree.getroot();
i=0
for day in root:
print day.tag
for elem in day:
print elem.text
stList[i].append(elem.text)
i = i+1
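# For reference, the XML written by list2xml and read back by xml2list has this
# shape (the item text is an illustrative placeholder, not real data):
#   <switchTime>
#     <sunday><item>hh:mm-hh:mm</item>...</sunday>
#     <monday>...</monday>
#     ... one element per weekday ...
#   </switchTime>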
#!/usr/bin/env python
#encoding: utf-8
import ctypes
STD_INPUT_HANDLE = -10
STD_OUTPUT_HANDLE= -11
STD_ERROR_HANDLE = -12
FOREGROUND_BLACK = 0x0
FOREGROUND_BLUE = 0x01 # text color contains blue.
FOREGROUND_GREEN= 0x02 # text color contains green.
FOREGROUND_RED = 0x04 # text color contains red.
FOREGROUND_INTENSITY = 0x08 # text color is intensified.
BACKGROUND_BLUE = 0x10 # background color contains blue.
BACKGROUND_GREEN= 0x20 # background color contains green.
BACKGROUND_RED = 0x40 # background color contains red.
BACKGROUND_INTENSITY = 0x80 # background color is intensified.
class Color:
''' See http://msdn.microsoft.com/library/default.asp?url=/library/en-us/winprog/winprog/windows_api_reference.asp
for information on Windows APIs.'''
std_out_handle = ctypes.windll.kernel32.GetStdHandle(STD_OUTPUT_HANDLE)
def set_cmd_color(self, color, handle=std_out_handle):
"""(color) -> bit
Example: set_cmd_color(FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE | FOREGROUND_INTENSITY)
"""
bool = ctypes.windll.kernel32.SetConsoleTextAttribute(handle, color)
return bool
def reset_color(self):
self.set_cmd_color(FOREGROUND_RED | FOREGROUND_GREEN | FOREGROUND_BLUE)
def print_red_text(self, print_text):
self.set_cmd_color(FOREGROUND_RED | FOREGROUND_INTENSITY)
print print_text
self.reset_color()
def print_green_text(self, print_text):
self.set_cmd_color(FOREGROUND_GREEN | FOREGROUND_INTENSITY)
print print_text
self.reset_color()
def print_blue_text(self, print_text):
self.set_cmd_color(FOREGROUND_BLUE | FOREGROUND_INTENSITY)
print print_text
self.reset_color()
def print_red_text_with_blue_bg(self, print_text):
self.set_cmd_color(FOREGROUND_RED | FOREGROUND_INTENSITY| BACKGROUND_BLUE | BACKGROUND_INTENSITY)
print print_text
self.reset_color()
def main():
ser = serial.Serial(sys.argv[1], 9600, timeout=1)
#ser = serial.Serial('COM10', 9600, timeout=1)
if ser.isOpen() != True:
print "open serial port error!"
return
clr = Color()
#Hi
cmd = "Hi"
ser.write(cmd); print cmd
    res = ser.readline().strip(); clr.print_green_text(res)
if res.find("Hello") == -1:
print "please reset board or check serial port."
return
#SetDateTime 2014-01-27 13:21:25 1
dt = time.localtime()
cmd = time.strftime("SetDateTime %Y-%m-%d %H:%M:%S %w\r\n", dt)
ser.write(cmd); print cmd
    res = ser.readline().strip(); clr.print_green_text(res)
#GetDateTime
cmd = "GetDateTime"
ser.write(cmd); print cmd
    res = ser.readline().strip(); clr.print_green_text(res)
reList = [[] for i in range(7)]
#ReadAlarm
cmd = "ReadAlarm"
ser.write(cmd); print cmd
    res = ser.readline().strip(); clr.print_green_text(res) # "ReadAlarm x
for i in range(7):
while True:
            res = ser.readline().strip(); clr.print_green_text(res)
if res.find("no alarm") != -1:
continue
if res.find("weekday") != -1:
break
reList[i].append(res[0:12])
list2xml(reList)
print "Please set switch time in " + saveFileName
raw_input("Press Enter to continue...")
reList = [[] for i in range(7)]
xml2list(reList)
# WriteAlarmX 1>hh:mm-hh:mm 2>...
for i in range(7):
cmd = "WriteAlarm" + str(i) + " "
j = 1
for t in reList[i]:
            t = t.strip()
            cmd = cmd + str(j) + ">" + t + " "
j = j + 1
ser.write(cmd); print cmd
        res = ser.readline().strip(); clr.print_green_text(res)
# ProgramAlarm
cmd = "ProgramAlarm"
ser.write(cmd); print cmd
    res = ser.readline().strip(); clr.print_green_text(res)
print "Config Over. reset board to start"
ser.close()
if __name__=='__main__':
main() | gpl-2.0 | -8,980,545,949,764,376,000 | 28.275862 | 118 | 0.611427 | false |
tiagovaz/saskatoon | saskatoon/harvest/migrations/0002_auto_20180507_1941.py | 1 | 4140 | # -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2018-05-07 23:41
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('harvest', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='historicalproperty',
name='approximative_maturity_date',
field=models.DateField(blank=True, null=True, verbose_name='Approximative maturity date'),
),
migrations.AddField(
model_name='historicalproperty',
name='fruits_height',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Height of lowest fruits'),
),
migrations.AddField(
model_name='historicalproperty',
name='harvest_every_year',
field=models.BooleanField(default=False, verbose_name='Produces fruits every year'),
),
migrations.AddField(
model_name='historicalproperty',
name='ladder_available',
field=models.BooleanField(default=False, verbose_name='There is a ladder available in the property'),
),
migrations.AddField(
model_name='historicalproperty',
name='ladder_available_for_outside_picks',
field=models.BooleanField(default=False, verbose_name='A ladder is available in the property and can be used for nearby picks'),
),
migrations.AddField(
model_name='historicalproperty',
name='number_of_trees',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Total number of trees in this property'),
),
migrations.AddField(
model_name='historicalproperty',
name='trees_accessibility',
field=models.CharField(blank=True, max_length=200, null=True, verbose_name='Trees accessibility'),
),
migrations.AddField(
model_name='historicalproperty',
name='validated',
            field=models.BooleanField(default=b'True', help_text='This property data has been reviewed and validated by a collective member', verbose_name='Validated'),
),
migrations.AddField(
model_name='property',
name='approximative_maturity_date',
field=models.DateField(blank=True, null=True, verbose_name='Approximative maturity date'),
),
migrations.AddField(
model_name='property',
name='fruits_height',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Height of lowest fruits'),
),
migrations.AddField(
model_name='property',
name='harvest_every_year',
field=models.BooleanField(default=False, verbose_name='Produces fruits every year'),
),
migrations.AddField(
model_name='property',
name='ladder_available',
field=models.BooleanField(default=False, verbose_name='There is a ladder available in the property'),
),
migrations.AddField(
model_name='property',
name='ladder_available_for_outside_picks',
field=models.BooleanField(default=False, verbose_name='A ladder is available in the property and can be used for nearby picks'),
),
migrations.AddField(
model_name='property',
name='number_of_trees',
field=models.PositiveIntegerField(blank=True, null=True, verbose_name='Total number of trees in this property'),
),
migrations.AddField(
model_name='property',
name='trees_accessibility',
field=models.CharField(blank=True, max_length=200, null=True, verbose_name='Trees accessibility'),
),
migrations.AddField(
model_name='property',
name='validated',
            field=models.BooleanField(default=b'True', help_text='This property data has been reviewed and validated by a collective member', verbose_name='Validated'),
),
]
| agpl-3.0 | -6,729,985,459,804,566,000 | 42.578947 | 167 | 0.618599 | false |
colour-science/colour | colour/examples/plotting/examples_volume_plots.py | 1 | 1465 | # -*- coding: utf-8 -*-
"""
Showcases colour models volume and gamut plotting examples.
"""
import numpy as np
from colour.plotting import (plot_RGB_colourspaces_gamuts, plot_RGB_scatter,
colour_style)
from colour.utilities import message_box
message_box('Colour Models Volume and Gamut Plots')
colour_style()
message_box(('Plotting "ITU-R BT.709" RGB colourspace volume in "CIE xyY" '
'colourspace.'))
plot_RGB_colourspaces_gamuts(
('ITU-R BT.709', ), reference_colourspace='CIE xyY')
print('\n')
message_box(('Comparing "ITU-R BT.709" and "ACEScg" RGB colourspaces volume '
'in "CIE L*a*b*" colourspace.'))
plot_RGB_colourspaces_gamuts(
('ITU-R BT.709', 'ACEScg'),
reference_colourspace='CIE Lab',
face_colours=(None, (0.25, 0.25, 0.25)),
edge_colours=(None, (0.25, 0.25, 0.25)),
edge_alpha=(1.0, 0.1),
face_alpha=(1.0, 0.0))
print('\n')
message_box(('Plotting "ACEScg" colourspaces values in "CIE L*a*b*" '
'colourspace.'))
RGB = np.random.random((32, 32, 3))
plot_RGB_scatter(
RGB,
'ACEScg',
reference_colourspace='CIE Lab',
colourspaces=('ACEScg', 'ITU-R BT.709'),
face_colours=((0.25, 0.25, 0.25), None),
edge_colours=((0.25, 0.25, 0.25), None),
edge_alpha=(0.1, 0.5),
face_alpha=(0.1, 0.5),
grid_face_colours=(0.1, 0.1, 0.1),
grid_edge_colours=(0.1, 0.1, 0.1),
grid_edge_alpha=0.5,
grid_face_alpha=0.1)
| bsd-3-clause | 7,481,060,059,926,544,000 | 27.173077 | 77 | 0.612287 | false |
nsubiron/SublimeSuricate | suricate/command_parser.py | 1 | 3785 | # Sublime Suricate, Copyright (C) 2013 N. Subiron
#
# This program comes with ABSOLUTELY NO WARRANTY. This is free software, and you
# are welcome to redistribute it and/or modify it under the terms of the GNU
# General Public License as published by the Free Software Foundation, either
# version 3 of the License, or (at your option) any later version.
import collections
import sublime
from . import _suricate as suricate
from . import flags
_DEFAULT_DEFAULTS = \
{
'caption': 'No description provided',
'mnemonic': None,
'group': None,
'args': {},
'flags': None,
'keys': [],
'selector': None,
'context': None,
'context_menu': False
}
_TAG_LIST = ['call'] + [x for x in _DEFAULT_DEFAULTS.keys()]
_PLATFORM = sublime.platform()
Command = collections.namedtuple('Command', _TAG_LIST)
class _CommandParser(object):
def __init__(self, settings):
self.ignore_default_keybindings = settings.get('ignore_default_keybindings', False)
self.override_ctrl_o = settings.get('override_default_opening_key', False)
key_map = settings.get('key_map', {})
self.os_key_map = dict(key_map.get('*', {}))
self.os_key_map.update(dict(key_map.get(_PLATFORM, {})))
self.commands = {}
def parse(self, profile):
data = self._get_commands(profile, 'commands')
if self.ignore_default_keybindings:
self._remove_key_bindings(data)
user_data = self._get_commands(profile, 'user_commands')
self._rupdate(data, user_data)
defaults = self._merge_platform_specific_tags(profile.get('defaults', _DEFAULT_DEFAULTS))
for key, item in data.items():
try:
args = dict(defaults)
args.update(item)
args['flags'] = flags.from_string(str(args['flags']))
if flags.check_platform(args['flags']):
if args['keys']:
args['keys'] = self._map_keybinding(args['keys'])
self.commands[key] = Command(**args)
except Exception as exception:
suricate.log('WARNING: Command %r not added: %s', key, exception)
@staticmethod
def _rupdate(lhs, rhs):
for key, irhs in rhs.items():
ilhs = lhs.get(key, {})
ilhs.update(irhs)
lhs[key] = ilhs
@staticmethod
def _remove_key_bindings(data):
for item in data.values():
if 'keys' in item:
item.pop('keys')
def _get_commands(self, profile, key):
data = profile.get(key, {})
return dict((k, self._merge_platform_specific_tags(v)) for k, v in data.items())
@staticmethod
def _merge_platform_specific_tags(raw_data):
data = {}
for tag in _TAG_LIST:
os_tag = tag + '.' + _PLATFORM
if os_tag in raw_data:
data[tag] = raw_data[os_tag]
elif tag in raw_data:
data[tag] = raw_data[tag]
return data
def _map_keybinding(self, keybinding):
# Override <c>+o.
if self.override_ctrl_o and keybinding[0] == '<c>+o':
keybinding = [self.override_ctrl_o] + keybinding[1:]
# Map keys by platform.
for key, value in self.os_key_map.items():
keybinding = [x.replace(key, value) for x in keybinding]
return keybinding
def parse_profiles(profiles):
assert suricate.api_is_ready()
parser = _CommandParser(suricate.load_settings())
for profile_file_name in profiles:
profile = sublime.load_settings(profile_file_name)
parser.parse(profile)
return parser.commands
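# Sketch of the settings shape this parser consumes (keys inferred from the
# reads above; the values shown are purely illustrative):
#   {
#     "ignore_default_keybindings": false,
#     "override_default_opening_key": "<c>+u",
#     "key_map": {"*": {"<c>": "ctrl"}, "osx": {"<c>": "super"}}
#   }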
| gpl-3.0 | -5,906,764,279,225,393,000 | 33.099099 | 97 | 0.575429 | false |
arongdari/sparse-graph-prior | sgp/GGPrnd.py | 1 | 3729 | import numpy as np
from scipy.special import gamma, gammaln
def W(t, x, alpha, sigma, tau):
if tau > 0:
logout = np.log(alpha) + np.log(1. - np.exp(-tau * (x - t))) + (-1 - sigma) * np.log(t) + (-t * tau) - np.log(
tau) - gammaln(1. - sigma)
else:
logout = np.log(alpha) - gammaln(1. - sigma) - np.log(sigma) + np.log(t ** (-sigma) - x ** (-sigma))
return np.exp(logout)
def inv_W(t, x, alpha, sigma, tau):
if tau > 0:
out = t - 1. / tau * np.log(1. - gamma(1. - sigma) * x * tau / (alpha * t ** (-1. - sigma) * np.exp(-t * tau)))
else:
logout = -1. / sigma * np.log(t ** (-sigma) - sigma * gamma(1. - sigma) / alpha * x)
out = np.exp(logout)
return out
def GGPrnd(alpha, sigma, tau, T=0):
"""
GGPrnd samples points of a generalised gamma process
Samples the points of the GGP with Levy measure
alpha/Gamma(1-sigma) * w^(-1-sigma) * exp(-tau*w)
For sigma>=0, it samples points above the threshold T>0 using the adaptive
thinning strategy described in Favaro and Teh (2013).
    Converted from the same function in the BNPGraph Matlab package by Francois Caron
http://www.stats.ox.ac.uk/~caron/code/bnpgraph/index.html
Reference:
S. Favaro and Y.W. Teh. MCMC for normalized random measure mixture
models. Statistical Science, vol.28(3), pp.335-359, 2013.
:param alpha: positive scalar
:param sigma: real in (-Inf, 1)
:param tau: positive scalar
:param T: truncation threshold; positive scalar
:return:
N: weights from the GGP
T: threshold
"""
# finite activity GGP, don't need to truncate
if sigma < 0:
rate = np.exp(np.log(alpha) - np.log(-sigma) + sigma * np.log(tau))
K = np.random.poisson(rate)
N = np.random.gamma(-sigma, scale=1. / tau, size=K)
T = 0
return N, T
# infinite activity GGP
if T == 0:
# set the threshold automatically
# Number of jumps of order alpha/sigma/Gamma(1-sigma) * T^{-sigma} for sigma > 0
# and alpha*log(T) for sigma = 0
if sigma > .1:
Njumps = 20000
T = np.exp(1. / sigma * (np.log(alpha) - np.log(sigma) - gammaln(1. - sigma) - np.log(Njumps)))
else:
T = 1e-10
if sigma > 0:
Njumps = np.floor(alpha / sigma / gamma(1. - sigma) * T ** (-sigma))
else:
Njumps = np.floor(-alpha * np.log(T))
else:
if T <= 0:
raise ValueError("Threshold T must be strictly positive")
if sigma > 1e-3:
Njumps = np.floor(alpha / sigma / gamma(1. - sigma) * T ** (-sigma))
else:
Njumps = np.floor(-alpha * np.log(T))
if Njumps > 1e7:
raise Warning("Expected number of jumps = %d" % Njumps)
# Adaptive thinning strategy
N = np.zeros(int(np.ceil(Njumps + 3 * np.sqrt(Njumps))))
k = 0
t = T
count = 0
while True:
e = -np.log(np.random.random()) # Sample exponential random variable of unit rate
if e > W(t, np.inf, alpha, sigma, tau):
N = N[0:k]
return N, T
else:
t_new = inv_W(t, e, alpha, sigma, tau)
if tau == 0 or np.log(np.random.random()) < (-1. - sigma) * np.log(t_new / t):
# if tau>0, adaptive thinning - otherwise accept always
                if k >= len(N):
N = np.append(N, np.zeros(Njumps))
N[k] = t_new
k += 1
t = t_new
count += 1
if count > 10e8:
# If too many computation, we lower the threshold T and rerun
T /= 10.
N, T = GGPrnd(alpha, sigma, tau, T)
return N, T
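# Minimal usage sketch; the parameter values below are illustrative only.
if __name__ == '__main__':
    N_demo, T_demo = GGPrnd(alpha=20.0, sigma=0.5, tau=1.0)
    print("sampled %d jumps above threshold %g" % (len(N_demo), T_demo))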
| mit | 3,136,497,266,407,477,000 | 33.211009 | 119 | 0.53875 | false |
BrandonNowlin/RebirthItemTracker | src/view_controls/overlay.py | 1 | 2633 | """
This module deals with everything related to the overlay text generated,
as well as formating how to display stats
"""
from game_objects.item import ItemInfo
class Overlay(object):
"""The main class to handle output to overlay text files"""
def __init__(self, prefix, tracker_state):
self.state = tracker_state
self.prefix = prefix
@staticmethod
def format_value(value):
"""Format a float value for displaying"""
        # NOTE: this is also used outside this class
# Round to 2 decimal places then ignore trailing zeros and trailing periods
# Doing just 'rstrip("0.")' breaks on "0.00"
display = format(value, ".2f").rstrip("0").rstrip(".")
# For example, set "0.6" to ".6"
if abs(value) < 1 and value != 0:
display = display.lstrip("0")
if value > -0.00001:
display = "+" + display
return display
@staticmethod
def format_transform(transform_set):
"""Format a transform_set for displaying"""
        # NOTE: this is also used outside this class
if len(transform_set) >= 3:
return "yes"
else:
return str(len(transform_set))
def update_stats(self, stat_list=None, transform_list=None):
"""
        Update file content for a subset (or all) of the player's stats.
        stat_list provides the subset of stats to update; if None, everything is updated.
"""
if stat_list is None:
stat_list = ItemInfo.stat_list
for stat in stat_list:
display = Overlay.format_value(self.state.player_stats[stat])
with open(self.prefix + "overlay text/" + stat + ".txt", "w+") as sfile:
sfile.write(display)
if transform_list is None:
transform_list = ItemInfo.transform_list
for transform in transform_list:
display = Overlay.format_transform(self.state.player_transforms[transform])
with open(self.prefix + "overlay text/" + transform + ".txt", "w+") as sfile:
sfile.write(display)
def update_last_item_description(self):
"""Update the overlay file for item pickup description"""
item = self.state.last_item
desc = item.info.name
desc += ": " + item.generate_item_description()
with open(self.prefix + "overlay text/itemInfo.txt", "w+") as sfile:
sfile.write(desc)
def update_seed(self):
"""Update the overlay file the seed"""
with open(self.prefix + "overlay text/seed.txt", "w+") as sfile:
sfile.write(self.state.seed)
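    # Behaviour of format_value above, worked out by hand:
    #   Overlay.format_value(2.0)  -> "+2"
    #   Overlay.format_value(0.6)  -> "+.6"
    #   Overlay.format_value(-0.5) -> "-0.5"
    #   Overlay.format_value(0)    -> "+0"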
| bsd-2-clause | 2,913,179,788,774,310,000 | 37.15942 | 90 | 0.602735 | false |
icgc-dcc/SONG | song-python-sdk/overture_song/entities.py | 1 | 9524 | import json
from typing import Any, Type
from dataclasses import dataclass
from overture_song.validation import Validatable
from overture_song.utils import Builder, default_value
from typing import List
from dataclasses import is_dataclass, asdict
from overture_song.utils import check_type, check_state
class Entity(object):
def to_json(self):
return json.dumps(self.to_dict(), indent=4)
def to_dict(self):
if is_dataclass(self):
return asdict(self)
else:
            raise NotImplementedError("not implemented for non-dataclass object")
def __str__(self):
return self.to_json()
@dataclass(frozen=False)
class Metadata(Entity):
info: dict = None
def __post_init__(self):
self.info = {}
def set_info(self, key: str, value: Any):
self.info[key] = value
def add_info(self, data: dict):
if data is None:
return
self.info.update(data)
@dataclass(frozen=False)
class Study(Metadata, Validatable):
studyId: str = None
name: str = None
organization: str = None
description: str = None
def validate(self):
        raise NotImplementedError("not implemented")
@classmethod
def create(cls, studyId, name=None, description=None, organization=None):
s = Study()
s.studyId = studyId
s.name = name
s.description = description
s.organization = organization
return s
@classmethod
def create_from_raw(cls, study_obj):
return Study.create(
study_obj.studyId,
name=study_obj.name,
description=study_obj.description,
organization=study_obj.organization)
@dataclass(frozen=False)
class File(Metadata, Validatable):
objectId: str = None
analysisId: str = None
fileName: str = None
studyId: str = None
fileSize: int = -1
fileType: str = None
fileMd5sum: str = None
fileAccess: str = None
def validate(self):
        raise NotImplementedError("not implemented")
@classmethod
def create(cls, fileName, fileSize, fileType, fileMd5sum,
fileAccess, studyId=None, analysisId=None, objectId=None, info=None):
f = File()
f.objectId = objectId
f.analysisId = analysisId
f.studyId = studyId
f.fileType = fileType
f.fileSize = fileSize
f.info = default_value(info, {})
f.fileMd5sum = fileMd5sum
f.fileAccess = fileAccess
f.fileName = fileName
return f
@dataclass(frozen=False)
class Sample(Metadata, Validatable):
sampleId: str = None
specimenId: str = None
sampleSubmitterId: str = None
sampleType: str = None
def validate(self):
        raise NotImplementedError("not implemented")
@classmethod
def create(cls, specimenId, sampleSubmitterId,
sampleType, sampleId=None, info=None):
s = Sample()
s.info = default_value(info, {})
s.specimenId = specimenId
s.sampleType = sampleType
s.sampleSubmitterId = sampleSubmitterId
s.sampleId = sampleId
return s
@dataclass(frozen=False)
class Specimen(Metadata, Validatable):
specimenId: str = None
donorId: str = None
specimenSubmitterId: str = None
specimenClass: str = None
specimenType: str = None
def validate(self):
        raise NotImplementedError("not implemented")
@classmethod
def create(cls, donorId, specimenSubmitterId, specimenClass, specimenType,
specimenId=None, info=None):
s = Specimen()
s.info = default_value(info, {})
s.specimenId = specimenId
s.donorId = donorId
s.specimenType = specimenType
s.specimenClass = specimenClass
s.specimenSubmitterId = specimenSubmitterId
return s
@dataclass(frozen=False)
class Donor(Metadata, Validatable):
donorId: str = None
donorSubmitterId: str = None
studyId: str = None
donorGender: str = None
def validate(self):
        raise NotImplementedError("not implemented")
@classmethod
def create(cls, donorSubmitterId, studyId, donorGender, donorId=None, info=None):
d = Donor()
d.donorId = donorId
d.info = default_value(info, {})
d.studyId = studyId
d.donorSubmitterId = donorSubmitterId
d.donorGender = donorGender
return d
@dataclass(frozen=False)
class CompositeEntity(Sample):
specimen: Type[Specimen] = None
donor: Type[Donor] = None
def validate(self):
        raise NotImplementedError("not implemented")
@classmethod
def base_on_sample(cls, sample):
s = CompositeEntity()
s.sampleId = sample.sampleId
s.sampleSubmitterId = sample.sampleSubmitterId
s.sampleType = sample.sampleType
s.info = sample.info
s.specimenId = sample.specimenId
return s
@classmethod
def create(cls, donor, specimen, sample):
c = CompositeEntity.base_on_sample(sample)
check_type(donor, Donor)
check_type(specimen, Specimen)
c.donor = donor
c.specimen = specimen
return c
@dataclass(frozen=False)
class Experiment(Metadata):
pass
@dataclass(frozen=False)
class VariantCall(Experiment, Validatable):
analysisId: str = None
variantCallingTool: str = None
matchedNormalSampleSubmitterId: str = None
def validate(self):
        raise NotImplementedError("not implemented")
@classmethod
def create(cls, variantCallingTool, matchedNormalSampleSubmitterId, analysisId=None):
s = VariantCall()
s.analysisId = analysisId
s.variantCallingTool = variantCallingTool
s.matchedNormalSampleSubmitterId = matchedNormalSampleSubmitterId
return s
@dataclass(frozen=False)
class SequencingRead(Experiment, Validatable):
analysisId: str = None
aligned: bool = None
alignmentTool: str = None
insertSize: int = None
libraryStrategy: str = None
pairedEnd: bool = None
referenceGenome: str = None
@classmethod
def builder(cls):
return Builder(Analysis)
def validate(self):
        raise NotImplementedError("not implemented")
@classmethod
def create(cls, aligned, alignmentTool, insertSize,
libraryStrategy, pairedEnd, referenceGenome, analysisId=None):
s = SequencingRead()
s.alignmentTool = alignmentTool
s.aligned = aligned
s.analysisId = analysisId
s.libraryStrategy = libraryStrategy
s.insertSize = insertSize
s.pairedEnd = pairedEnd
s.referenceGenome = referenceGenome
return s
@dataclass(frozen=False)
class Analysis(Entity):
analysisId: str = None
study: str = None
analysisState: str = "UNPUBLISHED"
# TODO: add typing to this. should be a list of type Sample
sample: List[CompositeEntity] = None
# TODO: add typing to this. should be a list of type File
file: List[File] = None
def __post_init__(self):
self.sample = []
self.file = []
@classmethod
def builder(cls):
return Builder(Analysis)
@classmethod
def from_json(cls, json_string):
pass
@dataclass(frozen=False)
class SequencingReadAnalysis(Analysis, Validatable):
analysisType: str = "sequencingRead"
# TODO: add typing to this. should be a list of type File
experiment: Type[SequencingRead] = None
@classmethod
def create(cls, experiment, sample, file, analysisId=None, study=None, analysisState="UNPUBLISHED", info=None):
check_type(experiment, SequencingRead)
        check_state(sample is not None and isinstance(sample, list) and len(sample) > 0,
                    "At least one sample must be defined")
        check_state(file is not None and isinstance(file, list) and len(file) > 0,
                    "At least one file must be defined")
for s in sample:
check_type(s, CompositeEntity)
for f in file:
check_type(f, File)
s = SequencingReadAnalysis()
s.sample = sample
s.file = file
s.info = default_value(info, {})
s.experiment = experiment
s.analysisId = analysisId
s.study = study
s.analysisState = analysisState
return s
def validate(self):
        raise NotImplementedError("not implemented")
@dataclass(frozen=False)
class VariantCallAnalysis(Analysis, Validatable):
analysisType: str = 'variantCall'
# TODO: add typing to this. should be a list of type File
experiment: Type[VariantCall] = None
@classmethod
def create(cls, experiment, sample, file, analysisId=None, study=None, analysisState="UNPUBLISHED", info=None):
check_type(experiment, VariantCall)
        check_state(sample is not None and isinstance(sample, list) and len(sample) > 0,
                    "At least one sample must be defined")
        check_state(file is not None and isinstance(file, list) and len(file) > 0,
                    "At least one file must be defined")
for s in sample:
check_type(s, CompositeEntity)
for f in file:
check_type(f, File)
s = VariantCallAnalysis()
s.experiment = experiment
s.analysisId = analysisId
s.study = study
s.analysisState = analysisState
s.sample = sample
s.file = file
s.info = default_value(info, {})
return s
def validate(self):
        raise NotImplementedError("not implemented")
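# Construction sketch chaining the factories above; every identifier and value
# below is hypothetical:
# donor = Donor.create("subA", "studyA", "male")
# specimen = Specimen.create("DO1", "subA-sp1", "Normal", "Normal - other")
# sample = Sample.create("SP1", "subA-sa1", "DNA")
# composite = CompositeEntity.create(donor, specimen, sample)
# experiment = VariantCall.create("a-caller", "subA-matched")
# analysis = VariantCallAnalysis.create(
#     experiment, [composite],
#     [File.create("a.vcf.gz", 1024, "VCF", "<md5>", "controlled")])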
| gpl-3.0 | 3,801,246,344,196,846,000 | 27.011765 | 115 | 0.640487 | false |
richardclegg/multiuservideostream | streamsim/src/streamOptions.py | 1 | 11399 | #!/usr/bin/env python
import json
import sys
import demandmodel.demandModel as demandModel
import networkmodel.networkModel as networkModel
import qoemodel.qoeModel as qoeModel
import routemodel.routeModel as routeModel
import servermodel.serverModel as serverModel
import sessionmodel.sessionModel as sessionModel
import outputmodel.outputModel as outputModel
class streamOptions(object):
def __init__(self):
"""Initialise the streamOptions class which contains
the information read from the configuration file for
the simulation"""
self.outputFile= None
self.routeMod= None
self.sessionMod= None
self.netMod= None
self.demMod= None
self.qoeMod= None
self.serverMod= None
self.outputs= []
self.simDays= 0
def readJson(self,fName):
"""Read a JSON file containing the options for the
file and read any subsidiary files included in that
file"""
try:
f= open(fName)
        except IOError:
print >> sys.stderr,'Cannot open',fName
return False
try:
js=json.load(f)
except ValueError as e:
print >> sys.stderr, 'JSON parse error in',fName
print >> sys.stderr, e
sys.exit()
f.close()
try:
outPart= js.pop('output')
except:
outPart= None
try:
sessModel= js.pop('session_model')
dModel= js.pop('demand_model')
nModel= js.pop('network_model')
rModel= js.pop('route_model')
qModel= js.pop('qoe_model')
svrModel= js.pop('server_model')
outputs= js.pop('output_models')
self.simDays= js.pop('simulation_days')
        except KeyError as e:
            print >> sys.stderr, 'JSON file',fName, \
                'must contain session_model, demand_model, network_model, ' \
                'route_model, qoe_model, server_model, output_models ' \
                'and simulation_days'
return False
if type(self.simDays) != int:
print >> sys.stderr, 'JSON file',fName, \
'must specify simulation_days as integer'
return False
if not self.checkJSEmpty(js,fName):
return False
try:
if outPart != None:
self.parseOutPart(outPart,fName)
self.parseSessionModel(sessModel,fName)
self.parseDemandModel(dModel,fName)
self.parseNetworkModel(nModel,fName)
self.parseRouteModel(rModel,fName)
self.parseQoeModel(qModel,fName)
self.parseServerModel(svrModel,fName)
for o in outputs:
self.outputs.append(self.parseOutputModel(o,fName))
except ValueError as e:
return False
return True
def checkJSEmpty(self,js,fName):
if (len(js) != 0):
print >> sys.stderr, 'JSON file',fName, \
'contains unrecognised keys',js.keys()
return False
return True
def parseOutPart(self,js,fName):
""" Parse the output part of the stream model JSON config"""
try:
self.outputFile= js.pop('file')
except:
pass
if not self.checkJSEmpty(js,fName):
print >> sys.stderr,"JSON contains unused tokens", js, \
"in file",fName
raise ValueError
def strToClassInstance(self,classStr,classType):
"""Convert string to a class instance"""
try:
(modName,_,className)= classStr.rpartition('.')
newmodule= __import__(modName, fromlist=[''])
objClass= getattr(newmodule,className)
except AttributeError as e:
print >> sys.stderr, "Making ",classStr,"into class",classType, \
"Attribute Error", e
            raise ValueError("%s must be a fully qualified class name." % classStr)
except ImportError as e:
print >> sys.stderr, "Making ",classStr,"into class",classType, \
"Attribute Error", e
raise ValueError("Cannot find class %s module name %s to import"
% (className,modName))
obj= objClass()
if isinstance(obj,classType):
return obj
        raise ValueError("%s is not a valid class of type %s." % (classStr, classType))
def parseSessionModel(self,js,fName):
""" Parse the session Model part of the stream model
JSON config"""
try:
modelStr= js.pop('type')
        except KeyError:
            print >> sys.stderr,'JSON file',fName, \
                'must contain type in session_model'
            raise ValueError
try:
self.sessionMod= self.strToClassInstance(modelStr,
sessionModel.sessionModel)
self.sessionMod.parseJSON(js,fName)
except ValueError as e:
print >> sys.stderr,'JSON file',fName, \
'has error with type in session_model'
print >> sys.stderr, e
raise e
if not self.checkJSEmpty(js,fName):
print >> sys.stderr,"JSON contains unused tokens", js, \
"in file",fName
raise ValueError
def parseDemandModel(self,js,fName):
""" Parse the demand Model part of the stream model
JSON config"""
try:
demModType= js.pop('type')
        except KeyError as e:
            print >> sys.stderr, "demand_model in JSON must contain " \
                "type in JSON ",fName
raise e
try:
self.demMod= self.strToClassInstance(demModType,
demandModel.demandModel)
except ValueError as e:
print >> sys.stderr,"JSON in demand_model has error with " \
"type in",fName
print >> sys.stderr, e
raise e
try:
self.demMod.parseJSON(js,fName)
except ValueError as e:
print >> sys.stderr, "Parsing error with JSON in",\
"demand_model in",fName
print >> sys.stderr, "Error given:",e
raise e
if not self.checkJSEmpty(js,fName):
print >> sys.stderr,"JSON contains unused tokens", js, \
"in file",fName
raise ValueError
def parseNetworkModel(self, js, fName):
"""Parse the network model from the JSON"""
try:
netModType= js.pop('type')
except Exception as e:
            print >> sys.stderr, "network_model in JSON must contain " \
"type in JSON ",fName
raise e
try:
self.netMod= self.strToClassInstance(netModType,
networkModel.networkModel)
except Exception as e:
            print >> sys.stderr,"JSON in network_model has error with " \
"type in",fName
print >> sys.stderr, e
raise e
try:
self.netMod.parseJSON(js,fName)
except ValueError as e:
print >> sys.stderr, "Parsing error with JSON in ",\
"network_model in",fName
print >> sys.stderr, e
raise e
if not self.checkJSEmpty(js,fName):
print >> sys.stderr,"JSON contains unused tokens", js, \
"in file",fName
raise ValueError
def parseRouteModel(self, js, fName):
"""Parse the route model from the JSON"""
try:
routeModType= js.pop('type')
except Exception as e:
            print >> sys.stderr, "route_model in JSON must contain " \
"type in JSON ",fName
raise e
try:
self.routeMod= self.strToClassInstance(routeModType,
routeModel.routeModel)
except Exception as e:
            print >> sys.stderr,"JSON in route_model has error with " \
"type in",fName
print >> sys.stderr, e
raise e
try:
self.routeMod.parseJSON(js,fName)
except ValueError as e:
print >> sys.stderr, "Parsing error with JSON in ",\
"route_model in",fName
print >> sys.stderr, e
raise e
if not self.checkJSEmpty(js,fName):
print >> sys.stderr,"JSON contains unused tokens", js, \
"in file",fName
raise ValueError
def parseQoeModel(self, js, fName):
""" Parse the model for user quality of experience from the
JSON config input"""
try:
qoeModType= js.pop('type')
except Exception as e:
            print >> sys.stderr, "qoe_model in JSON must contain " \
"type in JSON ",fName
raise e
try:
self.qoeMod= self.strToClassInstance(qoeModType,
qoeModel.qoeModel)
except ValueError as e:
print >> sys.stderr,"JSON in qoe_model has error with", \
"type in",fName
print >> sys.stderr, e
raise e
try:
self.qoeMod.parseJSON(js,fName)
except ValueError as e:
print >> sys.stderr, "Parsing error with JSON in ",\
"qoe_model in",fName
print >> sys.stderr, e
raise e
if not self.checkJSEmpty(js,fName):
print >> sys.stderr,"JSON contains unused tokens", js, \
"in file",fName
raise ValueError
def parseServerModel(self,js, fName):
"""Parse the model which learns about and assigns servers"""
try:
serverModType= js.pop('type')
except Exception as e:
            print >> sys.stderr, "server_model in JSON must contain " \
"type in JSON ",fName
raise e
try:
self.serverMod= self.strToClassInstance(serverModType,
serverModel.serverModel)
except ValueError as e:
print >> sys.stderr,"JSON in server_model has error with", \
"type in",fName
print >> sys.stderr, e
raise e
try:
self.serverMod.parseJSON(js,fName)
except ValueError as e:
print >> sys.stderr, "Parsing error with JSON in ",\
"server_model in",fName
print >> sys.stderr, e
raise e
if not self.checkJSEmpty(js,fName):
print >> sys.stderr,"JSON contains unused tokens", js, \
"in server_model in file",fName
raise ValueError
def parseOutputModel(self, js, fName):
"""Parse one of the models which gives output"""
try:
omType= js.pop('type')
except Exception as e:
            print >> sys.stderr, "Every instance of output_models in JSON must contain " \
"type ",fName
raise e
try:
outputmod= self.strToClassInstance(omType,
outputModel.outputModel)
except ValueError as e:
            print >> sys.stderr,"JSON in output_models has error with", \
"type in",fName
print >> sys.stderr, e
raise e
try:
outputmod.parseJSON(js,fName)
except ValueError as e:
print >> sys.stderr, "Parsing error with JSON in",\
"output_models in",fName
print >> sys.stderr, "Error given:",e
raise e
return outputmod
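# Skeleton of the JSON configuration this class parses (each model's "type" is
# a fully qualified class name; the names and values below are placeholders):
# {
#   "output": {"file": "results.txt"},
#   "session_model": {"type": "some.module.SomeSessionModel"},
#   "demand_model": {"type": "..."},
#   "network_model": {"type": "..."},
#   "route_model": {"type": "..."},
#   "qoe_model": {"type": "..."},
#   "server_model": {"type": "..."},
#   "output_models": [{"type": "..."}],
#   "simulation_days": 7
# }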
| mpl-2.0 | 8,610,475,788,309,808,000 | 35.072785 | 89 | 0.547855 | false |
jarus/git-wcount-diff | git_wcount_diff.py | 1 | 1753 | import sys
import subprocess
import re
files_re = re.compile(r"diff --git a/(.+?) b/.+?")
additions_re = re.compile(r"{\+(.*?)\+}")
deletions_re = re.compile(r"\[-(.*?)-\]")
word_re = re.compile(r"\S*")
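# For reference, `git diff --word-diff` wraps insertions as {+word+} and
# deletions as [-word-]; a changed line may read: the {+quick+} [-slow-] fox.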
def count(regex, source):
words = 0
characters = 0
for match in regex.findall(source):
for word in word_re.findall(match.decode('utf8')):
if len(word) == 0:
continue
words += 1
characters += len(word)
return words, characters
def analyse_file(filename, rev_1, rev_2):
git_diff = subprocess.Popen(
'git diff --word-diff %s %s -- "%s"' % (rev_1, rev_2, filename),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True
)
stdout, stderr = git_diff.communicate()
if git_diff.returncode > 0:
print stderr
sys.exit(2)
return count(additions_re, stdout), count(deletions_re, stdout)
def main():
if len(sys.argv) < 3:
print "Usage: git-wcount-diff <commit> <commit>"
sys.exit(1)
git_diff = subprocess.Popen(
"git diff %s %s --name-only" % (sys.argv[1], sys.argv[2]),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
shell=True
)
stdout, stderr = git_diff.communicate()
if git_diff.returncode > 0:
print stderr
sys.exit(2)
files = {}
for git_file in stdout.splitlines():
files[git_file] = analyse_file(git_file, sys.argv[1], sys.argv[2])
for filename, (additions, deletions) in files.items():
print "File: %s" % filename
print " - Additions: %s Words %s Characters" % additions
print " - Deletions: %s Words %s Characters" % deletions
if __name__ == '__main__':
main()
| bsd-3-clause | -7,602,571,264,804,953,000 | 26.825397 | 74 | 0.572162 | false |
Royal-Society-of-New-Zealand/NZ-ORCID-Hub | orcid_api_v3/models/research_resource_group_v30_rc2.py | 1 | 5775 | # coding: utf-8
"""
ORCID Member
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen) # noqa: E501
OpenAPI spec version: Latest
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from orcid_api_v3.models.external_i_ds_v30_rc2 import ExternalIDsV30Rc2 # noqa: F401,E501
from orcid_api_v3.models.last_modified_date_v30_rc2 import LastModifiedDateV30Rc2 # noqa: F401,E501
from orcid_api_v3.models.research_resource_summary_v30_rc2 import ResearchResourceSummaryV30Rc2 # noqa: F401,E501
class ResearchResourceGroupV30Rc2(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'last_modified_date': 'LastModifiedDateV30Rc2',
'external_ids': 'ExternalIDsV30Rc2',
'research_resource_summary': 'list[ResearchResourceSummaryV30Rc2]'
}
attribute_map = {
'last_modified_date': 'last-modified-date',
'external_ids': 'external-ids',
'research_resource_summary': 'research-resource-summary'
}
def __init__(self, last_modified_date=None, external_ids=None, research_resource_summary=None): # noqa: E501
"""ResearchResourceGroupV30Rc2 - a model defined in Swagger""" # noqa: E501
self._last_modified_date = None
self._external_ids = None
self._research_resource_summary = None
self.discriminator = None
if last_modified_date is not None:
self.last_modified_date = last_modified_date
if external_ids is not None:
self.external_ids = external_ids
if research_resource_summary is not None:
self.research_resource_summary = research_resource_summary
@property
def last_modified_date(self):
"""Gets the last_modified_date of this ResearchResourceGroupV30Rc2. # noqa: E501
:return: The last_modified_date of this ResearchResourceGroupV30Rc2. # noqa: E501
:rtype: LastModifiedDateV30Rc2
"""
return self._last_modified_date
@last_modified_date.setter
def last_modified_date(self, last_modified_date):
"""Sets the last_modified_date of this ResearchResourceGroupV30Rc2.
:param last_modified_date: The last_modified_date of this ResearchResourceGroupV30Rc2. # noqa: E501
:type: LastModifiedDateV30Rc2
"""
self._last_modified_date = last_modified_date
@property
def external_ids(self):
"""Gets the external_ids of this ResearchResourceGroupV30Rc2. # noqa: E501
:return: The external_ids of this ResearchResourceGroupV30Rc2. # noqa: E501
:rtype: ExternalIDsV30Rc2
"""
return self._external_ids
@external_ids.setter
def external_ids(self, external_ids):
"""Sets the external_ids of this ResearchResourceGroupV30Rc2.
:param external_ids: The external_ids of this ResearchResourceGroupV30Rc2. # noqa: E501
:type: ExternalIDsV30Rc2
"""
self._external_ids = external_ids
@property
def research_resource_summary(self):
"""Gets the research_resource_summary of this ResearchResourceGroupV30Rc2. # noqa: E501
:return: The research_resource_summary of this ResearchResourceGroupV30Rc2. # noqa: E501
:rtype: list[ResearchResourceSummaryV30Rc2]
"""
return self._research_resource_summary
@research_resource_summary.setter
def research_resource_summary(self, research_resource_summary):
"""Sets the research_resource_summary of this ResearchResourceGroupV30Rc2.
:param research_resource_summary: The research_resource_summary of this ResearchResourceGroupV30Rc2. # noqa: E501
:type: list[ResearchResourceSummaryV30Rc2]
"""
self._research_resource_summary = research_resource_summary
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ResearchResourceGroupV30Rc2, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ResearchResourceGroupV30Rc2):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
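# Round-trip sketch for the generated model above (all fields left unset):
# g = ResearchResourceGroupV30Rc2()
# g.to_dict()  # -> {'last_modified_date': None, 'external_ids': None,
#              #     'research_resource_summary': None}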
| mit | 6,240,323,661,696,474,000 | 33.789157 | 122 | 0.629264 | false |
gvpavlov/Insaniquarium | insaniquarium/gui/aquarium.py | 1 | 5488 | import os
import sys
from PyQt5.QtWidgets import QApplication, QWidget
from PyQt5 import QtCore
from PyQt5 import QtGui
current_directory = os.path.dirname(os.path.abspath(__file__))
parent_directory = os.path.abspath(os.path.join(current_directory, os.pardir))
resource_directory = os.path.join(current_directory, "resources")
sys.path.append(os.path.join(parent_directory, "core"))
from game import Game
from unit import Directions
class Aquarium(QWidget):
score_changed = QtCore.pyqtSignal()
def __init__(self, parent):
super(Aquarium, self).__init__()
self.game = Game((self.size().width(), self.size().height()))
self.score = self.game.score
self.score_changed.emit()
self.paused = False
self.load_pictures()
self.timer = QtCore.QTimer(self)
self.timer.timeout.connect(self.action)
self.timer.start(40)
def resizeEvent(self, event):
self.game.constrain((event.size().width(), event.size().height()))
def paintEvent(self, event):
canvas = QtGui.QPainter()
canvas.begin(self)
canvas.setPen(QtCore.Qt.NoPen)
canvas.drawPixmap(0, 0, self.background.scaled(self.size().width(),
self.size().height()))
for alien in self.game.aliens:
self.draw_alien(canvas, alien)
for fish in self.game.fishes:
self.draw_fish(canvas, fish)
for coin in self.game.coins:
self.draw_coin(canvas, coin)
for food in self.game.food:
self.draw_food(canvas, food)
@QtCore.pyqtSlot()
def spawn_fish(self):
self.game.spawn_fish()
@QtCore.pyqtSlot()
def upgrade_weapon(self):
self.game.upgrade_weapon()
@QtCore.pyqtSlot()
def upgrade_food(self):
self.game.upgrade_food()
@QtCore.pyqtSlot()
def pause(self):
self.paused = True
@QtCore.pyqtSlot()
def unpause(self):
self.paused = False
def mousePressEvent(self, event):
if event.button() == QtCore.Qt.LeftButton:
self.game.mouse_press(event.x(), event.y())
def load_pictures(self):
# Alien
self.alien_images = {
'lion': QtGui.QPixmap(
os.path.join(resource_directory, "alien.png")),
'blue': QtGui.QPixmap(
os.path.join(resource_directory, "alien2.png"))}
self.alien_images_mirrored = {}.fromkeys(self.alien_images)
self.fill_mirrored(self.alien_images, self.alien_images_mirrored)
# Fish
self.fish_images = {
'swim': QtGui.QPixmap(
os.path.join(resource_directory, "fish_swim.png")),
'eat': QtGui.QPixmap(
os.path.join(resource_directory, "fish_eat.png")),
'turn': QtGui.QPixmap(
os.path.join(resource_directory, "fish_turn.png")),
'hungry_die': QtGui.QPixmap(
os.path.join(resource_directory, "fish_die.png")),
'hungry_swim': QtGui.QPixmap(
os.path.join(resource_directory, "hungry_swim.png")),
'hungry_eat': QtGui.QPixmap(
os.path.join(resource_directory, "hungry_eat.png")),
'hungry_turn': QtGui.QPixmap(
os.path.join(resource_directory, "hungry_turn.png"))}
self.fish_images_mirrored = {}.fromkeys(self.fish_images)
self.fill_mirrored(self.fish_images, self.fish_images_mirrored)
# Food
self.food_image = QtGui.QPixmap(
os.path.join(resource_directory, "food.png"))
# Coin
self.coin_image = QtGui.QPixmap(
os.path.join(resource_directory, "coin.png"))
# Background
self.background = QtGui.QPixmap(
os.path.join(resource_directory, "background.png"))
def fill_mirrored(self, normal_images, mirrored):
for key, value in normal_images.items():
mirror = value.toImage()
mirror = mirror.mirrored(True, False)
mirrored[key] = QtGui.QPixmap().fromImage(mirror)
def action(self):
""" Incorporates all objects' actions and calls the repaint event."""
if not self.paused:
self.game.actions()
if self.score != self.game.score:
self.score = self.game.score
self.score_changed.emit()
self.repaint()
def draw_alien(self, canvas, alien):
if alien.mirrored:
image = self.alien_images_mirrored[alien.kind]
else:
image = self.alien_images[alien.kind]
if alien.state == 'swim':
state = 0
else:
state = 160
canvas.drawPixmap(alien.x, alien.y, image,
alien.frame, state, 160, 160)
def draw_fish(self, canvas, fish):
state = fish.state
if fish.hungry:
state = 'hungry_' + state
if fish.mirrored:
image = self.fish_images_mirrored[state]
else:
image = self.fish_images[state]
canvas.drawPixmap(fish.x, fish.y, image,
fish.frame, fish.size * 80, 80, 80)
def draw_coin(self, canvas, coin):
canvas.drawPixmap(coin.x, coin.y, self.coin_image,
coin.frame, coin.worth * 72, 72, 72)
def draw_food(self, canvas, food):
canvas.drawPixmap(food.x, food.y, self.food_image,
food.frame, 0, 40, 40)
| gpl-2.0 | -6,743,611,437,689,086,000 | 33.515723 | 78 | 0.579993 | false |
OSGeoLabBp/tutorials | english/img_processing/code/image_equalize.py | 1 | 1689 | #!/usr/bin/env python3
# -*- coding: UTF-8 -*-
"""
Equalize images to the histogram of a reference image
Based on https://www.pyimagesearch.com/2021/02/08/histogram-matching-with-opencv-scikit-image-and-python/
"""
import argparse
import os
import sys
from skimage import exposure
import cv2
# command line parameters
parser = argparse.ArgumentParser()
parser.add_argument('names', metavar='file_names', type=str, nargs='*',
help='pathes to image files to process')
parser.add_argument("-r", "--reference", required=True,
help="path to the input reference image")
parser.add_argument('--nowrite', action="store_true",
help='do not write equalized images to disk')
parser.add_argument('--debug', action="store_true",
help='show images on screen')
args = parser.parse_args()
if not args.names:
print("No input images given")
parser.print_help()
sys.exit(0)
# load the reference images
if args.debug:
print("Loading reference image...")
ref = cv2.imread(args.reference)
# determine if we are performing multichannel histogram matching
multi = ref.shape[-1] > 1
for fn in args.names:
if args.debug:
print("Performing histogram matching for {}...".format(fn))
src = cv2.imread(fn)
matched = exposure.match_histograms(src, ref, multichannel=multi)
if not args.nowrite:
spl = os.path.splitext(fn)
mn = spl[0] + "_matched" + spl[1]
if args.debug:
print("Writing matched image...")
cv2.imwrite(mn, matched)
if args.debug:
# show the output images
cv2.imshow("Source", src)
cv2.imshow("Reference", ref)
cv2.imshow("Matched", matched)
cv2.waitKey(0)
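# Example invocation (file names are hypothetical):
#   python3 image_equalize.py --reference ref.jpg --debug photo1.jpg photo2.jpg
# Each input is matched to ref.jpg's histogram and, unless --nowrite is given,
# written next to the source as <name>_matched.<ext>.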
| cc0-1.0 | -2,569,869,299,229,796,400 | 32.78 | 109 | 0.671995 | false |
miaoski/bsideslv-plc-home | dummy.py | 1 | 1293 | # -*- coding: utf8 -*-
# Run ipython -i dummy.py if you don't want to run it on Raspberry Pi
from pymodbus.server.async import StartTcpServer
from pymodbus.datastore import ModbusSequentialDataBlock
from pymodbus.datastore import ModbusSlaveContext, ModbusServerContext
from identity import identity
import logging
logging.basicConfig(level=logging.INFO, format='%(asctime)s %(message)s')
log = logging.getLogger()
def dump_store(a):
context = a[0]
address = 0x00
print "DI values:", context[0].store['d'].values[:20]
print "CO values:", context[0].store['c'].values[:20]
print "HR values:", context[0].store['h'].values[:10]
print "IR values:", context[0].store['i'].values[:10]
# Initialize ModBus Context
store = ModbusSlaveContext(
di = ModbusSequentialDataBlock(0, [0,0,0,1,1,1,1,1,1,0,1,0,1,1,1,0,0,1,1,1,1,1]),
co = ModbusSequentialDataBlock(0, [0,0,0,0,0,0,0,0,0,1,0,1,0,0,0,0,0,0,0,0,0,0]),
hr = ModbusSequentialDataBlock(0, [0,0,37,0,35,0,0] + [0] * 10),
ir = ModbusSequentialDataBlock(0, [0,0,85,0,0,0,0] + [0] * 10))
context = ModbusServerContext(slaves=store, single=True)
# Start loop
def run(ip='192.168.42.1', port=502):
StartTcpServer(context, identity=identity(), address=(ip, port))
print 'Type run() to StartTcpServer'
| gpl-2.0 | 5,917,105,770,169,918,000 | 35.942857 | 85 | 0.688322 | false |
IQSS/geoconnect | scripts/unmapped_row_test.py | 1 | 1633 | # ------------------------------
# Quick script to add institutions and
# affiliate them with dataverse installations
#
# Only deletes redundant institutions to refresh their affiliation
# ------------------------------
import os, sys
from os.path import abspath, isdir, realpath, isfile
proj_paths = [abspath('../'), abspath('../geoconnect')]
sys.path += proj_paths
# ------------------------------
# This is so Django knows where to find stuff.
# ------------------------------
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "geoconnect.settings.local")
from gc_apps.geo_utils.msg_util import *
from gc_apps.gis_tabular.models import WorldMapJoinLayerInfo
from gc_apps.gis_tabular.unmapped_row_util import UnmatchedRowHelper
def check_unmatched(layer_info_md5):
msgt('check_unmatched')
wm_info = WorldMapJoinLayerInfo.objects.get(md5=layer_info_md5)
kwargs = dict(show_all_failed_rows=True)
unmatched_row_helper = UnmatchedRowHelper(wm_info, **kwargs)
if unmatched_row_helper.has_error:
msg('ERROR: %s' % unmatched_row_helper.error_message)
return
msgt('bad rows as list')
row_list = unmatched_row_helper.get_failed_rows_as_list()
if unmatched_row_helper.has_error:
msg('ERROR: %s' % unmatched_row_helper.error_message)
return
msg(row_list)
row_list_csv = unmatched_row_helper.get_failed_rows_as_csv()
if unmatched_row_helper.has_error:
msg('ERROR: %s' % unmatched_row_helper.error_message)
return
msg(row_list_csv)
if __name__ == '__main__':
tab_md5 = '1a77cebad8a249820f2c577392dae20a'
check_unmatched(tab_md5)
| apache-2.0 | 2,044,353,086,420,126,000 | 29.811321 | 76 | 0.650949 | false |
joferkington/mplstereonet | docs/conf.py | 1 | 9530 | # -*- coding: utf-8 -*-
#
# mplstereonet documentation build configuration file, created by
# sphinx-quickstart on Sun Jun 23 13:39:02 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys, os
import runpy
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Generate pages for examples
path = os.path.join(os.path.dirname(__file__), 'generate_example_rst.py')
runpy.run_path(path, run_name='__main__')
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode',
'sphinx.ext.autosummary', 'numpydoc']
numpydoc_show_class_members = False
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'mplstereonet'
copyright = u'2013, Free Software Foundation'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.6'
# The full version, including alpha/beta/rc tags.
release = '0.6-dev'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build', 'setup.py']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'mplstereonetdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'mplstereonet.tex', u'mplstereonet Documentation',
u'Joe Kington', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'mplstereonet', u'mplstereonet Documentation',
[u'Joe Kington'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'mplstereonet', u'mplstereonet Documentation',
u'Joe Kington', 'mplstereonet', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# -- Options for Epub output ---------------------------------------------------
# Bibliographic Dublin Core info.
epub_title = u'mplstereonet'
epub_author = u'Joe Kington'
epub_publisher = u'Joe Kington'
epub_copyright = u'2013, Joe Kington'
# The language of the text. It defaults to the language option
# or en if the language is not set.
#epub_language = ''
# The scheme of the identifier. Typical schemes are ISBN or URL.
#epub_scheme = ''
# The unique identifier of the text. This can be a ISBN number
# or the project homepage.
#epub_identifier = ''
# A unique identification for the text.
#epub_uid = ''
# A tuple containing the cover image and cover page html template filenames.
#epub_cover = ()
# HTML files that should be inserted before the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_pre_files = []
# HTML files shat should be inserted after the pages created by sphinx.
# The format is a list of tuples containing the path and title.
#epub_post_files = []
# A list of files that should not be packed into the epub file.
#epub_exclude_files = []
# The depth of the table of contents in toc.ncx.
#epub_tocdepth = 3
# Allow duplicate toc entries.
#epub_tocdup = True
# Show __init__ docs
def skip(app, what, name, obj, skip, options):
if name == '__init__':
return False
return skip
def setup(app):
app.connect('autodoc-skip-member', skip)
| mit | 6,082,613,948,636,367,000 | 30.452145 | 80 | 0.701889 | false |
JonasT/miraclecrafter | src/miraclecrafterserver/onlinegame.py | 1 | 5858 |
"""
This file is part of the Miracle Crafter Server.
Miracle Crafter Server (C) 2014 The Miracle Crafter Team (see AUTHORS)
Miracle Crafter Server is free software: you can redistribute it and/or
modify it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or (at your
option) any later version.
Miracle Crafter Server is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Miracle Crafter Server.
If not, see <http://www.gnu.org/licenses/>.
In addition, the Miracle Crafter Server developers grant this exception:
Game code and content created with the Miracle Crafter Client as part
of your game shall be excepted from the GPL licensing of Miracle Crafter
Server. However, this exception doesn't cover any modifications to any of
the GPL-licensed files shipping with Miracle Crafter Server, or adding
new files to any of the folders containing GPL-licensed files of Miracle
Crafter Server, or game code attempting to modify Miracle Crafter Server's
behavior at runtime beyond using the regular game code interfaces for Lua
code which your game is supposed to use.
"""
import logging
from miraclecrafterserver.server import SimpleServer
from miraclecrafterserver.game import Game
from miraclecrafterserver.protocolmodule import get_module_instances, \
OnlineGameBaseModule
from miraclecrafterserver.accounts import login
class OnlineGame(Game):
def __init__(self, game_path, port, version_str):
super(OnlineGame, self).__init__(game_path)
self.connections = []
self.port = port
self.version_str = version_str
self.required_client_version = 1
module_instances = get_module_instances()
logging.info("Starting game with " + str(len(module_instances))\
+ " module instances available")
for module in module_instances.values():
if isinstance(module, OnlineGameBaseModule):
module.set_online_game_instance(self)
else:
module.set_game_instance(self)
def split_args(self, args_line):
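        # (Inferred) wire format: arguments are space-separated; a ':' introduces
        # a single trailing argument that may itself contain spaces (IRC-style).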
if len(args_line) == 0:
return []
splitted_list = args_line.split(":", 1)
last_arg = ""
if len(splitted_list) == 2:
last_arg = splitted_list[1].strip()
splittable_args = splitted_list[0]
while splittable_args.find(" ") >= 0:
splittable_args = splittable_args.replace(" ", " ")
args = splittable_args.split(" ")
if len(args[0]) == 0:
del args[0]
if len(last_arg) > 0:
args.append(last_arg)
return args
def connection_has_data(self, connection, data):
data = data.strip()
if len(data) == 0:
return
module = data.split(" ", 1)[0]
data = data[len(module)+1:]
cmd = data.split(" ", 1)[0]
args = self.split_args(data[len(cmd)+1:])
if len(cmd) == 0:
connection.send("core error-empty-cmd")
return
if not module == "core":
module_instances = get_module_instances()
# forward to appropriate module:
if module in module_instances:
if not connection.connected_as_client:
if not isinstance(module_instances[module], \
OnlineGameBaseModule):
connection.send("core error-module-unavailable " +\
module)
return
self.process_cmd(connection, module, cmd, args)
return
self.process_cmd(connection.client, module, cmd, args)
return
# module not found:
connection.send("core error-invalid-module :" + module)
return
else:
if cmd == "quit":
connection.send("core confirm-quit :Bye!")
connection.close()
self.connections.remove(connection)
elif cmd == "ping":
if len(args) == 0:
connection.send("core error-missing-arguments core ping")
return
if len(args[0]) > 64:
connection.send("core error-invalid-argument core " +\
"ping 1 :excessive length")
return
connection.send("core pong :" + args[0])
else:
connection.send("core error-unknown-cmd core :" + cmd)
def server_reports_new_connection(self, connection):
connection.connected_as_client = False
connection.set_read_callback(lambda data:
self.connection_has_data(connection, data))
self.connections.append(connection)
connection.send("core hello-msg :Hello client! This is Miracle "\
"Crafter Server Version " + self.version_str)
connection.send("core version-info " + self.version_str + " :" + \
"unmodified")
connection.send("core available-server-protocol-extensions :" + \
"core")
connection.send("core required-client-protocol-extensions :" + \
"core")
connection.send("core required-client-version " +\
str(self.required_client_version))
def run(self):
self.server = SimpleServer(self.port, \
self.server_reports_new_connection)
        frame_time = 1 / 10.0  # float division, so this stays 0.1s even under Python 2
while 1:
self.server.tick(frame_time)
| gpl-3.0 | 9,010,634,835,741,574,000 | 38.581081 | 79 | 0.602253 | false |
geoscixyz/em_examples | em_examples/FDEMpipe.py | 1 | 6739 | import numpy as np
import matplotlib.pyplot as plt
import scipy.io
import warnings
warnings.filterwarnings('ignore')
from ipywidgets import interactive, IntSlider, widget, FloatText, FloatSlider, Checkbox
def fempipeWidget(alpha, pipedepth):
respEW, respNS, X, Y = fempipe(alpha, pipedepth)
fig = plt.figure(figsize = (12, 9))
ax0 = plt.subplot2grid((2,2), (0,0))
ax1 = plt.subplot2grid((2,2), (0,1))
ax2 = plt.subplot2grid((2,2), (1,0), colspan=2)
dat0 = ax0.imshow(respEW.real*100, extent=[X.min(),X.max(),Y.min(),Y.max()])
dat1 = ax1.imshow(respNS.real*100, extent=[X.min(),X.max(),Y.min(),Y.max()])
cb0 = plt.colorbar(dat0, ax = ax0)
cb1 = plt.colorbar(dat1, ax = ax1)
ax0.set_title("In-phase EW boom (%)", fontsize = 12)
ax1.set_title("In-phase NS boom (%)", fontsize = 12)
ax0.set_xlabel("Easting (m)", fontsize = 12)
ax1.set_xlabel("Easting (m)", fontsize = 12)
ax0.set_ylabel("Northing (m)", fontsize = 12)
ax1.set_ylabel("Northing (m)", fontsize = 12)
ax0.plot(np.r_[0., 0.], np.r_[-10., 10.], 'k--', lw=2)
ax1.plot(np.r_[0., 0.], np.r_[-10., 10.], 'k--', lw=2)
ax2.plot(Y[:,20],respEW[:, 20].real, 'k.-')
ax2.plot(Y[:,20],respEW[:, 20].imag, 'k--')
ax2.plot(Y[:,20],respNS[:, 20].real, 'r.-')
ax2.plot(Y[:,20],respNS[:, 20].imag, 'r--')
ax2.legend(('In-phase EW boom', 'Out-of-phase EW boom', 'In-phase NS boom', 'Out-of-phase NS boom'),loc=4)
ax2.grid(True)
ax2.set_ylabel('Hs/Hp (%)', fontsize = 16)
ax2.set_xlabel('Northing (m)', fontsize = 16)
ax2.set_title('Northing profile line at Easting 0 m', fontsize = 16)
plt.tight_layout()
plt.show()
def fempipe(a, pipedepth):
"""
EOSC350 forward modeling of EM-31 responses with pipeline model
Only two adjustable parameters: alpha and depth of pipe below surface
Pipeline oriented W-E (many small loops lined up)
    forward model EW and NS boom configurations
Plot in-phase maps of EW and NS boom
Plot NS profile
"""
freq = 9800
L = 0.1
s = 3.6
R = 2*np.pi*freq*L/a
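    # R is back-computed from the induction number a = 2*pi*f*L/R; fa and tau
    # below are derived response quantities that are not used further here.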
fa = (1j*a)/(1+1j*a)
tau = L/R
boomheight = 1.
Npipe = 20
xmax = 10.
npts = 100
pipeloc = np.c_[ np.linspace(-10,10,Npipe), np.zeros(Npipe), np.zeros(Npipe)-pipedepth]
pipeloc = np.vstack((pipeloc, pipeloc))
pipeangle1 = np.c_[np.zeros(Npipe)+90, np.zeros(Npipe)+0]
    pipeangle2 = np.c_[np.zeros(Npipe)+90, np.zeros(Npipe)+90]  # unused alternative orientation; kept for reference
pipeangle3 = np.c_[np.zeros(Npipe)+0, np.zeros(Npipe)+0]
pipeangle = np.vstack((pipeangle1, pipeangle3))
x = np.linspace(-xmax, xmax, num=npts)
y = x.copy()
X, Y = np.meshgrid(x, y)
XY = np.c_[X.flatten(), Y.flatten()]
loop1loc_NS = np.c_[XY[:,0], XY[:,1]-s/2, boomheight*np.ones(XY.shape[0])]
loop3loc_NS = np.c_[XY[:,0], XY[:,1]+s/2, boomheight*np.ones(XY.shape[0])]
loop1angle = np.c_[np.ones(XY.shape[0])*0., np.ones(XY.shape[0])*0.]
loop3angle = np.c_[np.ones(XY.shape[0])*0., np.ones(XY.shape[0])*0.]
loop1loc_EW = np.c_[XY[:,0]-s/2, XY[:,1], boomheight*np.ones(XY.shape[0])]
loop3loc_EW = np.c_[XY[:,0]+s/2, XY[:,1], boomheight*np.ones(XY.shape[0])]
respEW = 0j
respNS = 0j
for q in range(pipeloc.shape[0]):
loop2loc = np.c_[np.ones(XY.shape[0])*pipeloc[q,0], np.ones(XY.shape[0])*pipeloc[q,1], np.ones(XY.shape[0])*pipeloc[q,2]]
loop2angle = np.c_[np.ones(XY.shape[0])*pipeangle[q,0], np.ones(XY.shape[0])*pipeangle[q,1]]
respEW += HsHp(loop1loc_EW,loop1angle,loop2loc,loop2angle,loop3loc_EW,loop3angle,freq,L,R)
respNS += HsHp(loop1loc_NS,loop1angle,loop2loc,loop2angle,loop3loc_NS,loop3angle,freq,L,R)
return respEW.reshape((npts, npts)), respNS.reshape((npts, npts)), X, Y
def Lij(loopiloc,loopiangle,loopjloc,loopjangle):
"""
    Calculate the mutual inductance of two loops (simplified to magnetic dipoles).
    SEG EM Volume II (Page 14): ... Lij as the amount of magnetic flux that
    cuts circuit i due to a unit current in loop j.
    Since we use a magnetic dipole model here, the magnetic flux is the
    magnetic intensity B obtained from the Biot-Savart law.
    Angles are in degrees.
    Inductance is in T*m^2/A; here the current and loop area are both unity.
"""
xi = loopiloc[:,0]
yi = loopiloc[:,1]
zi = loopiloc[:,2]
xj = loopjloc[:,0]
yj = loopjloc[:,1]
zj = loopjloc[:,2]
thetai = loopiangle[:,0]
alphai = loopiangle[:,1]
thetaj = loopjangle[:,0]
alphaj = loopjangle[:,1]
thetai = thetai/180 * np.pi # degtorad(thetai);
alphai = alphai/180 * np.pi # degtorad(alphai);
thetaj = thetaj/180 * np.pi # degtorad(thetaj);
alphaj = alphaj/180 * np.pi # degtorad(alphaj);
# http://en.wikipedia.org/wiki/Magnetic_moment#Magnetic_flux_density_due_to_an_arbitrary_oriented_dipole_moment_at_the_origin
# assume the dipole at origin, the observation is now at
x = xi - xj
y = yi - yj
z = zi - zj
# orthogonal decomposition of dipole moment
p = np.cos(thetaj); # vertical
n = np.sin(thetaj) * np.cos(alphaj) # y
m = np.sin(thetaj) * np.sin(alphaj) # x
Hx = ( 3.*(m*x+n*y+p*z)*x/((x**2+y**2+z**2)**(5./2)) - m/((x**2+y**2+z**2)**(3./2)) )/4./np.pi
Hy = ( 3.*(m*x+n*y+p*z)*y/((x**2+y**2+z**2)**(5./2)) - n/((x**2+y**2+z**2)**(3./2)) )/4./np.pi
Hz = ( 3.*(m*x+n*y+p*z)*z/((x**2+y**2+z**2)**(5./2)) - p/((x**2+y**2+z**2)**(3./2)) )/4./np.pi
H = np.c_[Hx, Hy, Hz]
# project B field to normal direction of loop i
L = H*np.c_[ np.sin(thetai)*np.sin(alphai), np.sin(thetai)*np.cos(alphai), np.cos(thetai)]
return L.sum(axis=1)
def HsHp(loop1loc,loop1angle,loop2loc,loop2angle,loop3loc,loop3angle, freq,L,R):
"""
EM response of 3-loop model
response = Hs/Hp = - (L12*L23/L22/L13) * (i*a/(1+i*a))
"""
a = 2. * np.pi * freq * L / R
L12 = L * Lij(loop1loc,loop1angle,loop2loc,loop2angle)
L23 = L * Lij(loop2loc,loop2angle,loop3loc,loop3angle)
L13 = Lij(loop1loc,loop1angle,loop3loc,loop3angle)
response = - (L12*L23/L13/L) * ( (1j*a)/(1+1j*a) )
return response
def interact_femPipe():
Q = interactive(fempipeWidget,
alpha = FloatSlider(min=0.1,max=5.,step=0.1,value=1., continuous_update=False),
pipedepth = FloatSlider(min=0.5,max=4.0,step=0.1,value=1.0, continuous_update=False))
return Q
if __name__ == '__main__':
a = 1.
pipedepth = 1.
respEW, respNS, X, Y = fempipe(a, pipedepth)
# print resp.shape
import matplotlib.pyplot as plt
fig, ax = plt.subplots(1,2, figsize = (12, 5))
    ax[0].contourf(X, Y, respEW.real, 40)  # pcolor() accepts no level count; contourf with 40 levels matches the intent
    ax[1].contourf(X, Y, respNS.real, 40)
plt.show()
| mit | -2,975,767,107,371,324,000 | 36.859551 | 129 | 0.601425 | false |
myles/mylesbraithwaite.org | source/_uploads/2016/05/20/itunes-playing/playing.py | 1 | 1806 | #!/usr/bin/env python3
import subprocess
def osascript(script):
"""
This is a bridge between Python and AppleScript using the `osascript`
    command-line tool.
"""
process = subprocess.run(['osascript', '-e', script],
stdout=subprocess.PIPE)
# Because the `subprocess.CompletedProcess` class returns a byte (followed
# by a new line), I have to clean it a little.
return process.stdout.decode('utf-8').strip()
def itunes(args):
"""This is so I don't have to repeat a bunch of code."""
script = 'tell application "iTunes" to {0} as string'
return osascript(script.format(args))
def is_running():
"""
    Here we are checking to see if iTunes is currently running.
I'm doing this because if iTunes is closed I don't want to open it.
"""
output = osascript('application "iTunes" is running')
if output == 'true':
return True
else:
return False
def is_playing():
"""This function is to check if iTunes is currently playing music."""
output = itunes('player state')
if output == 'playing':
return True
else:
return False
def get_track():
"""This is the main function that get the currently playing track."""
track = {}
track['name'] = itunes('name of current track')
track['artist'] = itunes('artist of current track')
track['album'] = itunes('album of current track')
return track
def main():
if not is_running():
return None
    # `is_running()` and `is_playing()` must be checked separately: querying the
    # player state while iTunes is closed would launch it.
if is_playing():
print('iTunes is currently playing:')
print("{name} / {artist} / {album}".format(**get_track()))
if __name__ == "__main__":
main()
| cc0-1.0 | -5,790,147,332,706,279,000 | 23.405405 | 78 | 0.616279 | false |
jeffmacinnes/pyneal | utils/mkDummyMask.py | 1 | 2226 | """
Tool to quickly make a dummy mask with user-supplied dimensions
The resulting mask will be a rectangle (.25*xDim X .25*yDim) positioned in the
middle of the middle slice of the given volume dimensions
"""
import os
from os.path import join
import sys
import argparse
import nibabel as nib
import numpy as np
def mkDummyMask(dims, outputDir):
""" Make a dummy mask of given dims
Parameters
----------
dims : int list (length = 3)
[x, y, z] dimensions of the output mask
outputDir : string
full path to where the output mask will be saved
"""
    assert len(dims) == 3, 'Expected exactly 3 dimensions: [x, y, z]'
# volume dims
x,y,z = dims
print('mask dimensions: [{}, {}, {}]'.format(x,y,z))
# confirm output path is a real path
if not os.path.exists(outputDir):
print('Output path does not exist: {}'.format(outputDir))
sys.exit()
# make array of zeros
maskArray = np.zeros(shape=[x,y,z])
# make a square in the middle slice of 1s. this will be the mask
mask_sizeX = np.floor(x/4)
mask_sizeY = np.floor(y/4)
maskStartX = int(np.floor(x/2) - mask_sizeX/2)
maskEndX = int(maskStartX + mask_sizeX)
maskStartY = int(np.floor(y/2) - mask_sizeY/2)
maskEndY = int(maskStartY + mask_sizeY)
maskArray[maskStartX:maskEndX, maskStartY:maskEndY, int(np.floor(z/2))] = 1
# save as nib object
maskImage = nib.Nifti1Image(maskArray, affine=np.eye(4))
outputName = 'dummyMask_{}-{}-{}.nii.gz'.format(x,y,z)
outputPath = join(outputDir, outputName)
nib.save(maskImage, outputPath)
print('dummy mask saved as: {}'.format(outputPath))
if __name__ == '__main__':
# parse arguments
parser = argparse.ArgumentParser()
parser.add_argument('maskDims',
metavar='dim',
type=int,
nargs=3,
help='volume dims: x y z')
parser.add_argument('-o', '--outputDir',
default='.',
type=str,
help='output dir path for saving mask')
args = parser.parse_args()
print(args)
mkDummyMask(args.maskDims, args.outputDir)
| mit | -8,057,642,820,469,725,000 | 28.289474 | 79 | 0.602426 | false |
GreatLakesEnergy/sesh-dash-beta | seshdash/migrations/0006_auto_20170117_1613.py | 1 | 2776 | # -*- coding: utf-8 -*-
# Generated by Django 1.9.1 on 2017-01-17 14:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('seshdash', '0005_remove_report_sent_report_date'),
]
operations = [
migrations.CreateModel(
name='Sensor_Node',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('node_id', models.IntegerField(choices=[(19, 19), (20, 20), (21, 21), (22, 22), (23, 23), (24, 24), (25, 25), (26, 26), (27, 27), (28, 28), (29, 29)], default=0)),
('sensor_type', models.CharField(choices=[(b'Temperature Humidity', b'th'), (b'Power Voltage', b'tx'), (b'Ph Ethenoal', b'pe')], max_length=40)),
('index1', models.CharField(default=b'ac_power1', max_length=40)),
('index2', models.CharField(default=b'pv_production', max_length=40)),
('index3', models.CharField(default=b'consumption', max_length=40)),
('index4', models.CharField(default=b'grid_in', max_length=40)),
('index5', models.CharField(default=b'AC_Voltage_out', max_length=40)),
('index6', models.CharField(blank=True, max_length=40, null=True)),
('index7', models.CharField(blank=True, max_length=40, null=True)),
('index8', models.CharField(blank=True, max_length=40, null=True)),
('index9', models.CharField(blank=True, max_length=40, null=True)),
('index10', models.CharField(blank=True, max_length=40, null=True)),
('index11', models.CharField(blank=True, max_length=40, null=True)),
('index12', models.CharField(blank=True, max_length=40, null=True)),
('site', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='seshdash.Sesh_Site')),
],
),
migrations.RemoveField(
model_name='sensor_bmv',
name='site',
),
migrations.RemoveField(
model_name='sensor_emonpi',
name='site',
),
migrations.RemoveField(
model_name='sensor_emonth',
name='site',
),
migrations.RemoveField(
model_name='sensor_emontx',
name='site',
),
migrations.DeleteModel(
name='Sensor_BMV',
),
migrations.DeleteModel(
name='Sensor_EmonPi',
),
migrations.DeleteModel(
name='Sensor_EmonTh',
),
migrations.DeleteModel(
name='Sensor_EmonTx',
),
]
| mit | -3,634,531,187,939,472,000 | 41.707692 | 180 | 0.552954 | false |
renner/spacewalk | client/tools/rhnpush/connection.py | 1 | 10533 | #
# Copyright (c) 2008--2017 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import socket
import base64
import sys
# pylint: disable=F0401,E0611,W0632
from rhn import connections, rpclib
from spacewalk.common.usix import ListType, TupleType, IntType
from spacewalk.common.rhn_pkg import InvalidPackageError, package_from_filename
from spacewalk.common.usix import raise_with_tb
from rhnpush.utils import tupleify_urlparse
if sys.version_info[0] == 3:
from urllib.parse import splitport
from urllib.parse import urlparse
else:
from urlparse import urlparse
from urllib import splitport # pylint: disable=C0412
# pylint: disable=W0622
class ConnectionError(Exception):
pass
# pylint: disable=R0902
class BaseConnection:
def __init__(self, uri, proxy=None):
self._scheme, (self._host, self._port), self._path = parse_url(uri)[:3]
if proxy:
arr = rpclib.get_proxy_info(proxy)
self._proxy_host = arr[0]
self._proxy_port = arr[1]
self._proxy_username = arr[2]
self._proxy_password = arr[3]
else:
self._proxy_host = None
self._trusted_certs = None
self._connection = None
self._timeout = None
def set_timeout(self, timeout):
self._timeout = timeout
def get_connection(self):
if self._scheme not in ['http', 'https']:
raise ValueError("Unsupported scheme", self._scheme)
params = {}
if self._timeout is not None:
params['timeout'] = self._timeout
if self._proxy_host:
params.update({
'host': self._host,
'port': self._port,
'proxy': "%s:%s" % (self._proxy_host, self._proxy_port),
'username': self._proxy_username,
'password': self._proxy_password,
})
if self._scheme == 'http':
return connections.HTTPProxyConnection(**params)
params['trusted_certs'] = self._trusted_certs
return connections.HTTPSProxyConnection(**params)
else:
if self._scheme == 'http':
return connections.HTTPConnection(self._host, self._port, **params)
params['trusted_certs'] = self._trusted_certs
return connections.HTTPSConnection(self._host, self._port, **params)
def connect(self):
self._connection = self.get_connection()
self._connection.connect()
def putrequest(self, method, url=None, skip_host=0):
if url is None:
url = self._path
return self._connection.putrequest(method, url=url,
skip_host=skip_host)
def __getattr__(self, name):
return getattr(self._connection, name)
class PackageUpload:
header_prefix = "X-RHN-Upload"
user_agent = "rhn-package-upload"
def __init__(self, url, proxy=None):
self.connection = BaseConnection(url, proxy)
self.headers = {}
self.package_name = None
self.package_epoch = None
self.package_version = None
self.package_release = None
self.package_arch = None
self.checksum = None
self.checksum_type = None
self.nvra = None
self._resp_headers = None
self.packaging = None
self._response = None
def set_header(self, name, value):
if name not in self.headers:
vlist = self.headers[name] = []
else:
vlist = self.headers[name]
if not isinstance(vlist, (ListType, TupleType)):
vlist = [vlist]
vlist.append(value)
def send_http_headers(self, method, content_length=None):
try:
self.connection.connect()
except socket.error:
e = sys.exc_info()[1]
raise_with_tb(ConnectionError("Error connecting", str(e)), sys.exc_info()[2])
# Add content_length
if 'Content-Length' not in self.headers and \
content_length is not None:
self.set_header('Content-Length', content_length)
self.connection.putrequest(method)
# Additional headers
for hname, hval in self.headers.items():
if not isinstance(hval, (ListType, TupleType)):
hval = [hval]
for v in hval:
self.connection.putheader(str(hname), str(v))
self.connection.endheaders()
def send_http_body(self, stream_body):
if stream_body is None:
return
stream_body.seek(0, 0)
buffer_size = 16384
while 1:
buf = stream_body.read(buffer_size)
if not buf:
break
try:
self.connection.send(buf)
except IOError:
e = sys.exc_info()[1]
raise_with_tb(ConnectionError("Error sending body", str(e)), sys.exc_info()[2])
def send_http(self, method, stream_body=None):
if stream_body is None:
content_length = 0
else:
stream_body.seek(0, 2)
content_length = stream_body.tell()
self.send_http_headers(method, content_length=content_length)
self.send_http_body(stream_body)
self._response = self.connection.getresponse()
self._resp_headers = self._response.msg
return self._response
def upload(self, filename, fileChecksumType, fileChecksum):
"""
Uploads a file.
Returns (http_error_code, error_message)
Sets:
self.package_name
self.package_epoch
self.package_version
self.package_release
self.package_arch
"""
try:
a_pkg = package_from_filename(filename)
a_pkg.read_header()
except InvalidPackageError:
return -1, "Not an RPM: %s" % filename
# Set some package data members
self.package_name = a_pkg.header['name']
self.package_epoch = a_pkg.header['epoch']
self.package_version = a_pkg.header['version']
self.package_release = a_pkg.header['release']
if a_pkg.header.is_source:
if 1051 in a_pkg.header.keys():
self.package_arch = 'nosrc'
else:
self.package_arch = 'src'
else:
self.package_arch = a_pkg.header['arch']
self.packaging = a_pkg.header.packaging
nvra = [self.package_name, self.package_version, self.package_release,
self.package_arch]
if isinstance(nvra[3], IntType):
# Old rpm format
return -1, "Deprecated RPM format: %s" % filename
self.nvra = nvra
# use the precomputed passed checksum
self.checksum_type = fileChecksumType
self.checksum = fileChecksum
# Set headers
self.set_header("Content-Type", "application/x-rpm")
self.set_header("User-Agent", self.user_agent)
# Custom RHN headers
prefix = self.header_prefix
self.set_header("%s-%s" % (prefix, "Package-Name"), nvra[0])
self.set_header("%s-%s" % (prefix, "Package-Version"), nvra[1])
self.set_header("%s-%s" % (prefix, "Package-Release"), nvra[2])
self.set_header("%s-%s" % (prefix, "Package-Arch"), nvra[3])
self.set_header("%s-%s" % (prefix, "Packaging"), self.packaging)
if self.checksum_type == 'md5':
self.set_header("%s-%s" % (prefix, "File-MD5sum"), self.checksum)
else:
self.set_header("%s-%s" % (prefix, "File-Checksum-Type"), self.checksum_type)
self.set_header("%s-%s" % (prefix, "File-Checksum"), self.checksum)
a_pkg.input_stream.seek(0, 0)
self._response = self.send_http('POST', stream_body=a_pkg.input_stream)
a_pkg.input_stream.close()
retval = self.process_response()
self.connection.close()
return retval
def process_response(self):
status = self._response.status
reason = self._response.reason
if status == 200:
# OK
return status, "OK"
if status == 201:
# Created
return (status, "%s %s: %s-%s-%s.%s.rpm already uploaded" % (
self.checksum_type, self.checksum,
self.nvra[0], self.nvra[1], self.nvra[2], self.nvra[3]))
if status in (404, 409):
# Conflict
errstring = self.get_error_message(self._resp_headers)
return status, errstring
data = self._response.read()
if status == 403:
# In this case Authentication is no longer valid on server
# client needs to re-authenticate itself.
errstring = self.get_error_message(self._resp_headers)
return status, errstring
if status == 500:
print("Internal server error", status, reason)
errstring = self.get_error_message(self._resp_headers)
return status, data + errstring
return status, data
def get_error_message(self, headers):
prefix = self.header_prefix + '-Error'
text = [x[1] for x in headers.getaddrlist(prefix + '-String')]
# text is a list now, convert it to a string
text = '\n'.join(text)
# pylint: disable=W1505
text = base64.decodestring(text)
return text
def parse_url(url, scheme="http", path='/'):
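    # Splits a URL into (scheme, (host, port), path, params, query, fragment);
    # when no scheme is present, the default scheme is prepended and the URL
    # is re-parsed.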
_scheme, netloc, _path, params, query, fragment = tupleify_urlparse(
urlparse(url))
if not netloc:
# No scheme - trying to patch it up ourselves?
url = scheme + "://" + url
_scheme, netloc, _path, params, query, fragment = tupleify_urlparse(
urlparse(url))
if not netloc:
# XXX
raise Exception()
(host, port) = splitport(netloc)
if not _path:
_path = path
return (_scheme, (host, port), _path, params, query, fragment)
| gpl-2.0 | 2,458,659,510,662,937,600 | 33.762376 | 95 | 0.58236 | false |
dmytro-ignatenko/kuzenko | python-client/client.py | 1 | 2905 | import httplib2
import json
import urllib
from optparse import OptionParser
#from AptUrl.Parser import parse
h = httplib2.Http()
host = 'http://localhost:8080/kuzenko-ws/api/'
def setHost(hostName) :
global host
host = hostName
def setDatabaseName(name) :
resp, content = h.request(host + 'database/' + name, "POST", '')
#print resp
print content
def listTables() :
resp, content = h.request(host + "table", "GET")
#print resp
print content
def makeTable(data):
name,rest = data[0], ','.join(data[1:])
resp, content = h.request(host + "table/" + name + '?' + urllib.urlencode({"columnTypes" : rest}), "POST", '')
#print resp
print content
def removeTable(name) :
resp, content = h.request(host + "table/" + name , "DELETE", '')
#print resp
print content
def addRow(data) :
name,rest = data[0], ','.join(data[1:])
resp, content = h.request(host + "table/" + name + '/data' + '?' + urllib.urlencode({"columnData" : rest}) , "POST", '')
#print resp
print content
def removeRow(data) :
name, data = data[0], ','.join(data[1:])
resp, content = h.request(host + "table/" + name + '/data' + '?' + urllib.urlencode({"columnData" : {'1':'3'}}), "DELETE", '')
#print resp
print content
def dropDatabase() :
resp, content = h.request(host + "/database", "DELETE", '')
#print resp
print content
def showTable(name) :
resp, content = h.request(host + "table/" + name + '/data', "GET")
#print resp
print content
def descartTables(data) :
name1,name2 = data[0],data[1]
resp, content = h.request(host + "table/" + name1 + '/descart/' + name2, "GET")
#print resp
print content
methods = {
"lstbl" : listTables,
"mktbl" : makeTable,
"rmtbl" : removeTable,
"addrw" : addRow,
"rmvrw" : removeRow,
"drpdb" : dropDatabase,
"swtbl" : showTable,
"dctbl" : descartTables,
}
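# Example command line (assumed): -d mydb -c mktbl -p name=users;types=int,str
# Values are taken from the text after each '='; parameters are ';'-separated.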
parser = OptionParser()
parser.add_option('-d',"--directory", action="store", type="string", dest="directory")
parser.add_option('-c','--command',action='store',type='string',dest='command')
parser.add_option('-p','--parameters',action='store',type='string',dest='parameters')
print "Python client started..."
line = raw_input()
while line != 'exit' :
(option,_) = parser.parse_args(line.split(' '))
if option.directory is None or option.command is None :
print "Wrong command format"
line = raw_input()
continue
setDatabaseName(option.directory)
method = methods[option.command]
if option.parameters is None : method()
else :
l = option.parameters.split(';')
if len(l) == 1 :
method(l[0].split('=')[1])
else :
method([x.split('=')[1] for x in l])
line = raw_input()
| mit | 8,205,013,659,229,442,000 | 28.948454 | 132 | 0.582788 | false |
adrianschroeter/kiwi | test/unit/package_manager_apt_test.py | 1 | 7203 | from mock import patch
from mock import call
import mock
from .test_helper import raises
from kiwi.package_manager.apt import PackageManagerApt
from kiwi.exceptions import (
KiwiDebootstrapError,
KiwiRequestError
)
class TestPackageManagerApt(object):
def setup(self):
repository = mock.Mock()
repository.root_dir = 'root-dir'
repository.signing_keys = ['key-file.asc']
repository.unauthenticated = 'false'
root_bind = mock.Mock()
root_bind.move_to_root = mock.Mock(
return_value=['root-moved-arguments']
)
repository.root_bind = root_bind
repository.runtime_config = mock.Mock(
return_value={
'apt_get_args': ['-c', 'apt.conf', '-y'],
'command_env': ['env'],
'distribution': 'xenial',
'distribution_path': 'xenial_path'
}
)
self.manager = PackageManagerApt(repository)
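        # Command.run/Command.call are patched in the tests below, so no real
        # apt-get or chroot commands are executed against 'root-dir'.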
def test_request_package(self):
self.manager.request_package('name')
assert self.manager.package_requests == ['name']
@patch('kiwi.logger.log.warning')
def test_request_collection(self, mock_log_warn):
self.manager.request_collection('name')
assert self.manager.collection_requests == []
assert mock_log_warn.called
@patch('kiwi.logger.log.warning')
def test_request_product(self, mock_log_warn):
self.manager.request_product('name')
assert self.manager.product_requests == []
assert mock_log_warn.called
@patch('kiwi.logger.log.warning')
def test_request_package_exclusion(self, mock_log_warn):
self.manager.request_package_exclusion('name')
assert self.manager.exclude_requests == []
assert mock_log_warn.called
@raises(KiwiDebootstrapError)
def test_process_install_requests_bootstrap_no_dist(self):
self.manager.distribution = None
self.manager.process_install_requests_bootstrap()
@patch('os.path.exists')
@raises(KiwiDebootstrapError)
def test_process_install_requests_bootstrap_no_debootstrap_script(
self, mock_exists
):
mock_exists.return_value = False
self.manager.process_install_requests_bootstrap()
@patch('kiwi.command.Command.run')
@patch('os.path.exists')
@patch('kiwi.package_manager.apt.Path.wipe')
@raises(KiwiDebootstrapError)
def test_process_install_requests_bootstrap_failed_debootstrap(
self, mock_wipe, mock_exists, mock_run
):
self.manager.request_package('apt-get')
mock_run.side_effect = Exception
mock_exists.return_value = True
self.manager.process_install_requests_bootstrap()
@patch('kiwi.logger.log.warning')
@patch('kiwi.command.Command.call')
@patch('kiwi.command.Command.run')
@patch('os.path.exists')
@patch('kiwi.package_manager.apt.DataSync')
def test_process_install_requests_bootstrap(
self, mock_sync, mock_exists, mock_run, mock_call, mock_warn
):
self.manager.request_package('apt-get')
self.manager.request_package('vim')
data = mock.Mock()
mock_sync.return_value = data
mock_exists.return_value = True
self.manager.process_install_requests_bootstrap()
mock_sync.assert_called_once_with(
'root-dir.debootstrap/', 'root-dir'
)
data.sync_data.assert_called_once_with(
options=['-a', '-H', '-X', '-A']
)
assert mock_run.call_args_list == [
call(command=['mountpoint', 'root-dir/dev'], raise_on_error=False),
call([
'debootstrap', '--no-check-gpg', '--variant=minbase',
'xenial', 'root-dir.debootstrap', 'xenial_path'],
['env']),
call([
'chroot', 'root-dir', 'apt-key', 'add', 'key-file.asc'
], ['env']),
call(['rm', '-r', '-f', 'root-dir.debootstrap']),
call([
'chroot', 'root-dir', 'apt-get',
'root-moved-arguments', 'update'
], ['env'])
]
mock_call.assert_called_once_with([
'chroot', 'root-dir', 'apt-get',
'root-moved-arguments', 'install', 'vim'],
['env']
)
assert mock_warn.called
@patch('kiwi.command.Command.call')
@patch('kiwi.command.Command.run')
def test_process_install_requests(self, mock_run, mock_call):
self.manager.request_package('vim')
self.manager.process_install_requests()
self.manager.root_bind.move_to_root(
self.manager.apt_get_args
)
mock_call.assert_called_once_with([
'chroot', 'root-dir', 'apt-get',
'root-moved-arguments', 'install', 'vim'],
['env']
)
@patch('kiwi.command.Command.call')
@patch('kiwi.command.Command.run')
def test_process_delete_requests(self, mock_run, mock_call):
self.manager.request_package('vim')
self.manager.process_delete_requests()
mock_call.assert_called_once_with(
[
'chroot', 'root-dir', 'apt-get', 'root-moved-arguments',
'--auto-remove', 'remove', 'vim'
], ['env']
)
@patch('kiwi.command.Command.call')
@patch('kiwi.command.Command.run')
def test_process_delete_requests_force(self, mock_run, mock_call):
self.manager.request_package('vim')
self.manager.process_delete_requests(True)
mock_call.assert_called_once_with(
['chroot', 'root-dir', 'dpkg', '--force-all', '-r', 'vim'],
['env']
)
@patch('kiwi.command.Command.run')
@raises(KiwiRequestError)
def test_process_delete_requests_package_missing(self, mock_run):
mock_run.side_effect = Exception
self.manager.request_package('vim')
self.manager.process_delete_requests()
@patch('kiwi.command.Command.call')
def test_update(self, mock_call):
self.manager.update()
self.manager.root_bind.move_to_root(
self.manager.apt_get_args
)
mock_call.assert_called_once_with([
'chroot', 'root-dir', 'apt-get',
'root-moved-arguments', 'upgrade'],
['env']
)
def test_process_only_required(self):
self.manager.process_only_required()
assert self.manager.custom_args == ['--no-install-recommends']
def test_process_plus_recommended(self):
self.manager.process_only_required()
assert self.manager.custom_args == ['--no-install-recommends']
self.manager.process_plus_recommended()
assert '--no-install-recommends' not in self.manager.custom_args
def test_match_package_installed(self):
assert self.manager.match_package_installed('foo', 'Unpacking foo')
def test_match_package_deleted(self):
assert self.manager.match_package_deleted('foo', 'Removing foo')
def test_database_consistent(self):
self.manager.database_consistent()
def test_dump_reload_package_database(self):
self.manager.dump_reload_package_database()
| gpl-3.0 | -3,432,555,506,134,510,600 | 34.835821 | 79 | 0.602666 | false |
rosspalmer/PerformanceTrack | tracker.py | 1 | 3755 | import csv
import datetime
import pickle
import psutil
import os
import sys
# Locations for storage and export files
DATA_STORE_FILE = 'data.pik' # Add folder path for data pickle
SYSTEM_DATA_EXTRACT_FILE = 'system_log.csv' # Add folder path for system data CSV extract
PROCESS_DATA_EXTRACT_FILE = 'process_log.csv' # Add folder path for process data CSV extract
# Process specific stats name filter
PROCESS_FILTER = ['java', 'python']
# Time for CPU Usage calculations (in seconds)
CPU_CALC_TIME = 1.0
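# Usage: `python tracker.py log <windows|linux>` records one sample;
#        `python tracker.py extract` dumps the CSV extracts.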
# Log performance for system and process level metrics and store
def log_current_state(os_type):
time = datetime.datetime.now()
new_system_data = system_performance_metrics(time)
new_processes_data = process_performance_metrics(time, os_type)
store(new_system_data, new_processes_data)
# Analyze performance of system level metrics
def system_performance_metrics(time):
# Setup entry dictionary and log time
entry = {}
entry['datetime'] = time
# Log CPU statistics
entry['cpu_usage'] = psutil.cpu_percent(CPU_CALC_TIME)
# Log memory statistics
mem = psutil.virtual_memory()
entry['mem_total'] = mem.total
entry['mem_available'] = mem.available
entry['mem_used'] = mem.used
entry['mem_percent_used'] = entry['mem_used'] / entry['mem_total']
return entry
# Analyze performance of filtered processes
def process_performance_metrics(time, os_type):
filtered_processes = []
memory_label = None
if os_type == 'windows':
memory_label = 'memory_info'
elif os_type == 'linux':
memory_label = 'memory_full_info'
# Loop through process data
for process in psutil.process_iter(attrs=['pid', 'name', 'cpu_percent', memory_label]):
for process_filter_string in PROCESS_FILTER:
if process_filter_string in process.info['name']:
entry = {}
entry['datetime'] = time
entry['filter'] = process_filter_string
entry['name'] = process.info['name']
entry['pid'] = process.info['pid']
entry['cpu_usage'] = process.cpu_percent(CPU_CALC_TIME)
entry['rss_memory'] = process.info[memory_label].rss
entry['vms_memory'] = process.info[memory_label].vms
filtered_processes.append(entry)
return filtered_processes
# Store new metrics in data pickle
def store(new_system_data, new_processes_data):
if not os.path.isfile(DATA_STORE_FILE):
data = {'system': [], 'processes': []}
else:
data = pickle.load(open(DATA_STORE_FILE, 'rb'))
data['system'].append(new_system_data)
data['processes'].extend(new_processes_data)
pickle.dump(data, open(DATA_STORE_FILE, 'wb'))
# Generate CSV files from data pickle
def generate_extract():
data = pickle.load(open(DATA_STORE_FILE, 'rb'))
system_data = data['system']
process_data = data['processes']
system_data_headers = ['cpu_usage', 'datetime', 'mem_available', 'mem_percent_used', 'mem_total', 'mem_used']
write_csv(system_data, system_data_headers, SYSTEM_DATA_EXTRACT_FILE)
process_data_headers = ['cpu_usage', 'datetime', 'filter', 'name', 'pid', 'rss_memory', 'vms_memory']
write_csv(process_data, process_data_headers, PROCESS_DATA_EXTRACT_FILE)
# Write CSV file from a list of dictionaries
def write_csv(data, headers, file_location):
csv_file = open(file_location, 'w+', newline='')
writer = csv.DictWriter(csv_file, headers)
writer.writeheader()
writer.writerows(data)
if __name__ == '__main__':
    if sys.argv[1] == 'log':
        os_type = sys.argv[2]  # only the 'log' command needs the OS type
        log_current_state(os_type)
    elif sys.argv[1] == 'extract':
        generate_extract()
| apache-2.0 | -6,550,446,624,911,820,000 | 29.04 | 113 | 0.65273 | false |
sdolemelipone/django-crypsis | crypsis_tests/migrations/0008_auto_20180117_1130.py | 1 | 1097 | # Generated by Django 2.0.1 on 2018-01-17 11:30
import crypsis.models
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('crypsis_tests', '0007_auto_20171222_1155'),
]
operations = [
migrations.AlterModelOptions(
name='orderitem',
options={'ordering': ['id'], 'verbose_name': 'Order line', 'verbose_name_plural': 'Order lines'},
),
migrations.AddField(
model_name='item',
name='desc',
field=models.CharField(default='', max_length=100),
),
migrations.AlterField(
model_name='contact',
name='xero_id',
field=models.CharField(blank=True, default='', help_text="Here you can enter the long id of the xero object to force the item it is sync'd with.", max_length=200),
),
migrations.AlterField(
model_name='order',
name='date',
field=crypsis.models.DateField(default=django.utils.timezone.now),
),
]
| gpl-3.0 | 2,773,212,821,536,557,600 | 31.264706 | 175 | 0.58979 | false |
dasbruns/netzob | src/netzob/Inference/Grammar/AutomataFactories/OneStateAutomataFactory.py | 1 | 4587 | # -*- coding: utf-8 -*-
#+---------------------------------------------------------------------------+
#| 01001110 01100101 01110100 01111010 01101111 01100010 |
#| |
#| Netzob : Inferring communication protocols |
#+---------------------------------------------------------------------------+
#| Copyright (C) 2011-2014 Georges Bossert and Frédéric Guihéry |
#| This program is free software: you can redistribute it and/or modify |
#| it under the terms of the GNU General Public License as published by |
#| the Free Software Foundation, either version 3 of the License, or |
#| (at your option) any later version. |
#| |
#| This program is distributed in the hope that it will be useful, |
#| but WITHOUT ANY WARRANTY; without even the implied warranty of |
#| MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the |
#| GNU General Public License for more details. |
#| |
#| You should have received a copy of the GNU General Public License |
#| along with this program. If not, see <http://www.gnu.org/licenses/>. |
#+---------------------------------------------------------------------------+
#| @url : http://www.netzob.org |
#| @contact : [email protected] |
#| @sponsors : Amossys, http://www.amossys.fr |
#| Supélec, http://www.rennes.supelec.fr/ren/rd/cidre/ |
#+---------------------------------------------------------------------------+
#+----------------------------------------------
#| Standard library imports
#+----------------------------------------------
#+----------------------------------------------
#| Related third party imports
#+----------------------------------------------
#+----------------------------------------------
#| Local application imports
#+----------------------------------------------
from netzob.Common.Utils.Decorators import typeCheck, NetzobLogger
from netzob.Common.Models.Grammar.States.State import State
from netzob.Common.Models.Grammar.Transitions.Transition import Transition
from netzob.Common.Models.Grammar.Transitions.OpenChannelTransition import OpenChannelTransition
from netzob.Common.Models.Grammar.Transitions.CloseChannelTransition import CloseChannelTransition
@NetzobLogger
class OneStateAutomataFactory(object):
@staticmethod
@typeCheck(list, list)
def generate(abstractSession, symbolList):
"""Generate an automata that, according to an abstract
session, contains a main state where each request-response
couples are permitted.
"""
if len(abstractSession) < 1:
return
(client, server, symbol) = abstractSession[0] # We expect that the first message/symbol is emitted by the client.
# So we consider it as the initiator of the session.
sStart = State(name="Start state")
sMain = State(name="Main state")
sEnd = State(name="End state")
openTransition = OpenChannelTransition(startState=sStart, endState=sMain, name="Open")
it = iter(abstractSession)
inputSymbol = None
outputSymbols = None
while True:
try:
(source, destination, symbol) = it.next()
if source == client:
if symbol is not None:
inputSymbol = symbol
outputSymbols = None
else:
if symbol is not None:
outputSymbols = [symbol]
if inputSymbol is not None and outputSymbols is not None:
mainTransition = Transition(startState=sMain, endState=sMain, inputSymbol=inputSymbol, outputSymbols=outputSymbols, name="Transition")
inputSymbol = None
outputSymbols = None
except StopIteration:
break
closeTransition = CloseChannelTransition(startState=sMain, endState=sEnd, name="Close")
from netzob.Common.Models.Grammar.Automata import Automata
return Automata(sStart, symbolList)
| gpl-3.0 | 978,344,859,303,798,100 | 51.079545 | 154 | 0.491381 | false |
paolotozzo/SDIPy | sdipy/sender.py | 1 | 7318 | """
sender.py
by Charles Fracchia, Copyright (c) 2013
Sender class module
This class defines data and methods for the sender in a packet
"""
import re, warnings
allowedAttributes = ["name","brand","model","modelNum"] #In future, this could be loaded dynamically from a reference JSON
class Sender(object):
"""docstring for Packet"""
def __init__(self, address, timeFormat, startTime="", **kwargs):
super(Sender, self).__init__()
        addressType = self._validateAddress(address) #Validate submitted address (once)
        if addressType:
            self.addressType = addressType
            self.address = address
if startTime != "":
if self._validateTimeFormat(timeFormat,startTime):
self.timeFormat = timeFormat
self.startTime = startTime
else:
raise ValueError("The specified time format or start time in the sender object is incorrect.")
else: #If a Start Time object was not passed
if self._validateTimeFormat(timeFormat):
self.timeFormat = timeFormat
else:
raise ValueError("The specified time format in the sender object is incorrect.")
#For each extra attribute add it to the object to expose it
for arg in kwargs:
if arg not in allowedAttributes: #If it's not an allowed attribute according to SDIP
allowedList = "" #Used for nicely formatted warning
for attribute in allowedAttributes: #For each of the attributes in the list
if allowedList != "": allowedList = allowedList + ", " + attribute #Nicely formatted :)
else: allowedList += attribute #Nicely formatted :)
warnings.warn("Invalid sender attribute passed. Attribute will not be set. Allowed attributes are: %s" % allowedList) #Warn the user
else:
setattr(self, arg, kwargs[arg]) #This sets the attribute with dynamic name
def __str__(self):
return "*********************\nSDIP Sender Object\nAddress: %s (%s)\n*********************" % (self.address,self.addressType)
def _validateAddress(self, address):
"""
Check that the [address] is a valid address and return its type
        Return the matched address type ('mac', 'beemac' or 'ip') if the address
        is valid; raise ValueError otherwise.
        Acceptable:
            Regular MAC address formatted like AA:BB:CC:DD:EE:FF
            XBee MAC address formatted like AA:BB:CC:DD:EE:FF:GG:HH (8 octets)
            IP address formatted like 000.000.255.255, each block has to be 0 <= n < 256
        """
addressType = [] #Used for storing regex matches
mac = '^[a-fA-F0-9][aceACE02468][:|\-]?([a-fA-F0-9]{2}[:|\-]?){4}[a-fA-F0-9]{2}$' #For regular mac addresses
beemac = '^[a-fA-F0-9][aceACE02468][:|\-]?([a-fA-F0-9]{2}[:|\-]?){6}[a-fA-F0-9]{2}$' #For XBee mac addresses
ip = '(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)' #For IP addresses
regexes = {"mac":mac,"beemac":beemac,"ip":ip}
for regex in regexes:
regexFound = re.compile(regexes[regex]).search(address) #Do the regex search
if regexFound != None: #If it finds a match
addressType.append("%s" % regex) #append the type to an array, this way we can detect addresses that match multiple regexes
        if len(addressType) != 1: #If we matched zero or more than one pattern
raise ValueError("The provided address is not correctly formatted. The address can be an IP, regular MAC address or ZigBee MAC address")
return False
else: #We correctly matched just 1 type
return addressType[0] #Return the address type matched
def _validateTimeFormat(self, timeformat, startTime=""):
"""
This validates the time format
Takes the timeformat as a string
Returns True if the timeformat is valid, False if not
"""
allowedTimeFormats = ["sec","microsec","millisec"]
allowedTimeTypes = ["epoch","rel"]
splitTime = timeformat.split("-")
#print splitTime #DEBUG
if (splitTime[0] in allowedTimeFormats and splitTime[1] in allowedTimeTypes): #Check that the timeformat is correctly formatted
if splitTime[1] == "rel": #Time is relative, need to look at the start time
if startTime != "": #StartTime was passed along so we're good
if self._validateStartTime(startTime): #Time to validate the StartTime object
return True #StartTime is good
else:
raise ValueError("You indicated a relative time format but the start time object is malformed")
return False #StartTime is malformed
else: #StartTime was not passed along but time is relative grrr...
raise KeyError("You indicated a relative time format but failed to pass a start time object")
return False
elif splitTime[1] == "epoch": #Time is absolute and uses unix epoch as reference
if startTime != "":
warnings.warn("You've passed a start time dictionnary but are using absolute timing (epoch in this case). Make sure you \
understand the different types of time units we support, cause it looks like you don't :)",UserWarning)
return True
else:
raise ValueError("Your time format string is unsupported. We currently only support relative (with start time) and epoch data timestamps")
return False #Currently no other formats supported
else:
raise ValueError("Your time format string is malformed")
return False #Malformed string
def _validateStartTime(self, startTime):
"""
Validates the startTime dictionnary
Takes in a dictionnary of the following form: {"format": "sec-epoch", "time": 1383842840}
Returns True if startTime is correctly formed or False if not
"""
allowedTimeFormats = ["sec","microsec","millisec"]
allowedTimeTypes = ["epoch","rel"]
try:
splitStartTime = startTime['format'].split("-")
#print splitStartTime #DEBUG
except KeyError:
raise KeyError("The start time dictionnary is malformed. It needs to be in the following form: {'format': 'sec-epoch', 'time': 1383842840}")
if (splitStartTime[0] in allowedTimeFormats and splitStartTime[1] in allowedTimeTypes): #Check that the starttime is correctly formatted
try:
                if isinstance(startTime['time'], int):
return True
else:
return False
except KeyError:
raise KeyError("The start time dictionnary is malformed. It needs to be in the following form: {'format': 'sec-epoch', 'time': 1383842840}")
else:
return False #the startTimeFormat is not correctly formatted
| mit | 6,683,428,774,023,159,000 | 52.423358 | 148 | 0.595655 | false |
tebeka/pythonwise | macapp/humblecalc.py | 1 | 1479 | #!/usr/bin/env python
# Very humble calculator, written as "Web Desktop Application"
__author__ = "Miki Tebeka <[email protected]>"
from __future__ import division
from math import *
from operator import isNumberType
from BaseHTTPServer import HTTPServer
from SimpleHTTPServer import SimpleHTTPRequestHandler
from urlparse import urlparse
from cgi import parse_qs
import httplib
class RequestHandler(SimpleHTTPRequestHandler):
def do_GET(self):
o = urlparse(self.path)
if o.path == "/eval":
self.eval(o.query)
elif o.path == "/quit":
self.end_headers()
self.wfile.write("Bye")
self.wfile.flush()
# self.server.shutdown() hangs, so we do it the brutal way
import os; os._exit(0)
else:
SimpleHTTPRequestHandler.do_GET(self)
def eval(self, query):
q = parse_qs(query)
expr = q.get("expr", [""])[0]
try:
            # FIXME: Never do this in production, this is a huge security risk
result = str(eval(expr))
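            # A less dangerous sketch (still not fully safe) would be:
            #   import math; eval(expr, {'__builtins__': None}, vars(math))
            # which at least hides the builtins from the expression.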
except Exception, e:
result = "ERROR"
self.send_response(httplib.OK)
self.send_header("Content-type", "text/plain")
self.end_headers()
self.wfile.write(result)
if __name__ == "__main__":
import webbrowser
port = 8822
server = HTTPServer(("", port), RequestHandler)
webbrowser.open("http://localhost:%s" % port)
server.serve_forever()
| bsd-3-clause | -1,853,744,245,115,174,100 | 29.183673 | 78 | 0.615281 | false |
AlexStarov/Shop | applications/product/templatetags/block_product.py | 1 | 1391 | # -*- coding: utf-8 -*-
from django_jinja import library
from django.template.loader import render_to_string
__author__ = 'AlexStarov'
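# django_jinja's @library.global_function() makes these helpers callable as
# globals from Jinja2 templates.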
@library.global_function()
def block_products(products, request, ):
from django.middleware.csrf import get_token
request_csrf_token = get_token(request, )
# request_csrf_token = request.META.get(u"CSRF_COOKIE", None, )
# request_csrf_token = request.COOKIES.get(u'csrftoken', None, )
# from proj.settings import MEDIA_URL
return render_to_string(template_name=u'product/templatetags/block_products.jinja2.html',
dictionary={'products': products,
'request': request,
'csrf_token': request_csrf_token, }, )
@library.global_function()
def block_product(product, choice, cycle, last_loop, ):
if last_loop:
margin_bottom = '0px'
else:
margin_bottom = '10px'
if cycle == 1:
margin_left = '0px'
else:
margin_left = '10px'
return render_to_string(template_name=u'product/templatetags/block_product.jinja2.html',
dictionary={'product': product,
'choice': choice,
'margin_bottom': margin_bottom,
'margin_left': margin_left, }, )
| apache-2.0 | -2,443,777,753,299,930,600 | 38.742857 | 93 | 0.561467 | false |
DayGitH/Python-Challenges | DailyProgrammer/DP20160818B.py | 1 | 2604 | """
[2016-08-18] Challenge #279 [Intermediate] Text Reflow
https://www.reddit.com/r/dailyprogrammer/comments/4ybbcz/20160818_challenge_279_intermediate_text_reflow/
#Description:
Text reflow means to break up lines of text so that they fit within a certain width. It is useful in e.g. mobile
browsers. When you zoom in on a web page the lines will become too long to fit the width of the screen, unless the text
is broken up into shorter lines.
#Input:
You will be given a text with a maximum line width of 80 characters.
#Output:
Produce the same text with a maximum line width of 40 characters
#Challenge Input:
In the beginning God created the heavens and the earth. Now the earth was
formless and empty, darkness was over the surface of the deep, and the Spirit of
God was hovering over the waters.
And God said, "Let there be light," and there was light. God saw that the light
was good, and he separated the light from the darkness. God called the light
"day," and the darkness he called "night." And there was evening, and there was
morning - the first day.
#Challenge Output:
In the beginning God created the heavens
and the earth. Now the earth was
formless and empty, darkness was over
the surface of the deep, and the Spirit
of God was hovering over the waters.
And God said, "Let there be light," and
there was light. God saw that the light
was good, and he separated the light
from the darkness. God called the light
"day," and the darkness he called
"night." And there was evening, and
there was morning - the first day.
#Bonus:
Let's get rid of the jagged right margin of the text and make the output prettier. Output the text with full
justification; Adjusting the word spacing so that the text is flush against both the left and the right margin.
#Bonus Output:
In the beginning God created the heavens
and the earth. Now the earth was
formless and empty, darkness was over
the surface of the deep, and the Spirit
of God was hovering over the waters.
And God said, "Let there be light," and
there was light. God saw that the light
was good, and he separated the light
from the darkness. God called the light
"day," and the darkness he called
"night." And there was evening, and
there was morning - the first day.
#Finally
This challenge is posted by /u/slampropp
Also have a good challenge idea?
Consider submitting it to /r/dailyprogrammer_ideas
"""
def main():
    # Exercise the sketch above on the opening of the challenge input.
    sample = ("In the beginning God created the heavens and the earth. "
              "Now the earth was formless and empty, darkness was over the "
              "surface of the deep, and the Spirit of God was hovering over "
              "the waters.")
    print("\n".join(justify(sample)))
if __name__ == "__main__":
main()
| mit | -2,616,586,805,095,262,000 | 39.6875 | 119 | 0.715438 | false |
maya70/GraphMirrors | scripts/database/setup.py | 1 | 4731 | #!/usr/bin/python
# Reads from mysql database into a local sqlite database.
import mysql.connector
import sqlite3
import re
# Create Tables.
target = sqlite3.connect('data.db')
tc = target.cursor()
tc.execute('CREATE TABLE components (entity_id, component_id, component_type)')
tc.execute('CREATE TABLE reactions (reaction_id INTEGER PRIMARY KEY, reaction_type TEXT, name TEXT, pathway_id, local_id TEXT)')
tc.execute('CREATE TABLE reaction_entities (reaction_id INTEGER, entity_id INTEGER, direction TEXT, PRIMARY KEY(reaction_id, entity_id))')
# The two tables below are filled by the INSERTs later in this script but
# were never created; their schema is inferred from those statements.
tc.execute('CREATE TABLE entities (entity_id INTEGER PRIMARY KEY, entity_type TEXT, name TEXT, location TEXT, reactome_id INTEGER, uniprot_id TEXT)')
tc.execute('CREATE TABLE entity_pathways (entity_id INTEGER, pathway_id INTEGER, local_id TEXT)')
source = mysql.connector.connect(user = 'garba1', host = 'localhost', database = 'reactome')
sc = source.cursor()
sc.execute('SHOW TABLES')
tables = []
for (tablename,) in sc:
tables.append(tablename)
# Limit to 30 tables for testing purposes.
#tables = tables[:30]
last_completion = 0
table_count = 0
print('Components:')
# Do complex and converted after we have the source components defined.
for tablename in tables:
table_count = table_count + 1
completion = int(20 * table_count / len(tables))
if completion > last_completion:
last_completion = completion
print(' ', completion * 5, '%')
    m = re.search(r'^(\d+)_(\w+)$', tablename)
pathway_id = int(m.group(1))
tabletype = m.group(2)
if tabletype == '6complex' or tabletype == '8convertedEntity':
component_type = None
if '6complex' == tabletype:
component_type = 'complex'
elif '8convertedEntity' == tabletype:
component_type = 'converted'
sc.execute('SELECT * FROM %s' % (tablename,))
for (local_id, name, location, reactome_id, component_local_id) in sc:
reactome_id = int(reactome_id[16:])
m = re.search('^([a-zA-Z_]+)', local_id)
tc.execute('INSERT INTO entities(entity_type, name, location, reactome_id, uniprot_id) '
'SELECT ?, ?, ?, ?, ? '
'WHERE NOT EXISTS(SELECT 1 FROM entities WHERE reactome_id=?)',
(m.group(1), name, location, reactome_id, None, reactome_id))
tc.execute('INSERT INTO entity_pathways '
'SELECT last_insert_rowid(), ?, ? '
'WHERE NOT EXISTS('
' SELECT 1 FROM entity_pathways WHERE entity_id=last_insert_rowid() AND pathway_id=?)',
(pathway_id, local_id, pathway_id))
tc.execute('INSERT INTO components '
'SELECT ?, entity_id, ? FROM entity_pathways '
'WHERE pathway_id=? AND local_id=?',
(reactome_id, component_type, pathway_id, component_local_id))
last_completion = 0
table_count = 0
print('Reactions:')
# Do reactions after all components are defined.
for tablename in tables:
table_count = table_count + 1
completion = int(20 * table_count / len(tables))
if completion > last_completion:
last_completion = completion
print(' ', completion * 5, '%')
    m = re.search(r'^(\d+)_(\w+)$', tablename)
pathway_id = int(m.group(1))
tabletype = m.group(2)
if tabletype == '4reaction':
sc.execute('SELECT * FROM %s' % (tablename,))
for (local_id, name, local_input_id, local_output_id) in sc:
m = re.search('^([a-zA-Z_]+)', local_id)
tc.execute('INSERT INTO reactions(reaction_type, name, pathway_id, local_id) '
'SELECT ?, ?, ?, ? '
'WHERE NOT EXISTS(SELECT 1 FROM reactions WHERE pathway_id=? AND local_id=?)',
('standard', name, pathway_id, local_id, pathway_id, local_id))
tc.execute('SELECT reaction_id FROM reactions WHERE pathway_id=? and local_id=?',
(pathway_id, local_id))
reaction_id = tc.fetchone()[0]
# Each input/output pair has its own row, so we only need to grab one per loop.
tc.execute('SELECT entity_id FROM entity_pathways WHERE pathway_id=? AND local_id=?',
(pathway_id, local_input_id))
input_id = tc.fetchone()
if input_id:
input_id = input_id[0]
tc.execute('INSERT INTO reaction_entities '
'SELECT ?, ?, ? '
'WHERE NOT EXISTS(SELECT 1 FROM reaction_entities WHERE reaction_id=? AND entity_id=?)',
(reaction_id, input_id, 'input', reaction_id, input_id))
tc.execute('SELECT entity_id FROM entity_pathways WHERE pathway_id=? AND local_id=?',
(pathway_id, local_output_id))
output_id = tc.fetchone()
if output_id:
output_id = output_id[0]
tc.execute('INSERT INTO reaction_entities '
'SELECT ?, ?, ? '
'WHERE NOT EXISTS(SELECT 1 FROM reaction_entities WHERE reaction_id=? AND entity_id=?)',
(reaction_id, output_id, 'output', reaction_id, output_id))
target.commit()
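
# Optional sanity check (a sketch): report how many rows each table received,
# so an empty import is easy to spot.
for tablename in ('entities', 'entity_pathways', 'components',
                  'reactions', 'reaction_entities'):
    tc.execute('SELECT COUNT(*) FROM %s' % (tablename,))
    print(tablename, ':', tc.fetchone()[0])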
| bsd-3-clause | -8,242,140,934,111,858,000 | 38.425 | 138 | 0.617628 | false |
houseurmusic/my-swift | swift/common/client.py | 1 | 33831 | # Copyright (c) 2010-2011 OpenStack, LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Cloud Files client library used internally
"""
import socket
from cStringIO import StringIO
from re import compile, DOTALL
from tokenize import generate_tokens, STRING, NAME, OP
from urllib import quote as _quote, unquote
from urlparse import urlparse, urlunparse
try:
from eventlet.green.httplib import HTTPException, HTTPSConnection
except ImportError:
from httplib import HTTPException, HTTPSConnection
try:
from eventlet import sleep
except ImportError:
from time import sleep
try:
from swift.common.bufferedhttp \
import BufferedHTTPConnection as HTTPConnection
except ImportError:
try:
from eventlet.green.httplib import HTTPConnection
except ImportError:
from httplib import HTTPConnection
def quote(value, safe='/'):
"""
Patched version of urllib.quote that encodes utf8 strings before quoting
"""
if isinstance(value, unicode):
value = value.encode('utf8')
return _quote(value, safe)
# look for a real json parser first
try:
# simplejson is popular and pretty good
from simplejson import loads as json_loads
except ImportError:
try:
# 2.6 will have a json module in the stdlib
from json import loads as json_loads
except ImportError:
# fall back on local parser otherwise
comments = compile(r'/\*.*\*/|//[^\r\n]*', DOTALL)
def json_loads(string):
'''
Fairly competent json parser exploiting the python tokenizer and
eval(). -- From python-cloudfiles
_loads(serialized_json) -> object
'''
try:
res = []
consts = {'true': True, 'false': False, 'null': None}
string = '(' + comments.sub('', string) + ')'
for type, val, _junk, _junk, _junk in \
generate_tokens(StringIO(string).readline):
if (type == OP and val not in '[]{}:,()-') or \
(type == NAME and val not in consts):
raise AttributeError()
elif type == STRING:
res.append('u')
res.append(val.replace('\\/', '/'))
else:
res.append(val)
return eval(''.join(res), {}, consts)
except Exception:
raise AttributeError()
class ClientException(Exception):
def __init__(self, msg, http_scheme='', http_host='', http_port='',
http_path='', http_query='', http_status=0, http_reason='',
http_device=''):
Exception.__init__(self, msg)
self.msg = msg
self.http_scheme = http_scheme
self.http_host = http_host
self.http_port = http_port
self.http_path = http_path
self.http_query = http_query
self.http_status = http_status
self.http_reason = http_reason
self.http_device = http_device
def __str__(self):
a = self.msg
b = ''
if self.http_scheme:
b += '%s://' % self.http_scheme
if self.http_host:
b += self.http_host
if self.http_port:
b += ':%s' % self.http_port
if self.http_path:
b += self.http_path
if self.http_query:
b += '?%s' % self.http_query
if self.http_status:
if b:
b = '%s %s' % (b, self.http_status)
else:
b = str(self.http_status)
if self.http_reason:
if b:
b = '%s %s' % (b, self.http_reason)
else:
b = '- %s' % self.http_reason
if self.http_device:
if b:
b = '%s: device %s' % (b, self.http_device)
else:
b = 'device %s' % self.http_device
return b and '%s: %s' % (a, b) or a
def http_connection(url, proxy=None):
"""
Make an HTTPConnection or HTTPSConnection
:param url: url to connect to
:param proxy: proxy to connect through, if any; None by default; str of the
format 'http://127.0.0.1:8888' to set one
:returns: tuple of (parsed url, connection object)
:raises ClientException: Unable to handle protocol scheme
"""
parsed = urlparse(url)
proxy_parsed = urlparse(proxy) if proxy else None
if parsed.scheme == 'http':
conn = HTTPConnection((proxy_parsed if proxy else parsed).netloc)
elif parsed.scheme == 'https':
conn = HTTPSConnection((proxy_parsed if proxy else parsed).netloc)
else:
raise ClientException('Cannot handle protocol scheme %s for url %s' %
(parsed.scheme, repr(url)))
if proxy:
conn._set_tunnel(parsed.hostname, parsed.port)
return parsed, conn
def get_auth(url, user, key, snet=False):
"""
Get authentication/authorization credentials.
The snet parameter is used for Rackspace's ServiceNet internal network
implementation. In this function, it simply adds *snet-* to the beginning
of the host name for the returned storage URL. With Rackspace Cloud Files,
use of this network path causes no bandwidth charges but requires the
client to be running on Rackspace's ServiceNet network.
:param url: authentication/authorization URL
:param user: user to authenticate as
:param key: key or password for authorization
:param snet: use SERVICENET internal network (see above), default is False
:returns: tuple of (storage URL, auth token)
:raises ClientException: HTTP GET request to auth URL failed
"""
parsed, conn = http_connection(url)
conn.request('GET', parsed.path, '',
{'X-Auth-User': user, 'X-Auth-Key': key})
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Auth GET failed', http_scheme=parsed.scheme,
http_host=conn.host, http_port=conn.port,
http_path=parsed.path, http_status=resp.status,
http_reason=resp.reason)
url = resp.getheader('x-storage-url')
if snet:
parsed = list(urlparse(url))
# Second item in the list is the netloc
parsed[1] = 'snet-' + parsed[1]
url = urlunparse(parsed)
return url, resp.getheader('x-storage-token',
resp.getheader('x-auth-token'))
def get_account(url, token, marker=None, limit=None, prefix=None,
http_conn=None, full_listing=False):
"""
Get a listing of containers for the account.
:param url: storage URL
:param token: auth token
:param marker: marker query
:param limit: limit query
:param prefix: prefix query
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:param full_listing: if True, return a full listing, else returns a max
of 10000 listings
:returns: a tuple of (response headers, a list of containers) The response
headers will be a dict and all header names will be lowercase.
:raises ClientException: HTTP GET request failed
"""
if not http_conn:
http_conn = http_connection(url)
if full_listing:
rv = get_account(url, token, marker, limit, prefix, http_conn)
listing = rv[1]
while listing:
marker = listing[-1]['name']
listing = \
get_account(url, token, marker, limit, prefix, http_conn)[1]
if listing:
rv[1].extend(listing)
return rv
parsed, conn = http_conn
qs = 'format=json'
if marker:
qs += '&marker=%s' % quote(marker)
if limit:
qs += '&limit=%d' % limit
if prefix:
qs += '&prefix=%s' % quote(prefix)
conn.request('GET', '%s?%s' % (parsed.path, qs), '',
{'X-Auth-Token': token})
resp = conn.getresponse()
resp_headers = {}
for header, value in resp.getheaders():
resp_headers[header.lower()] = value
if resp.status < 200 or resp.status >= 300:
resp.read()
raise ClientException('Account GET failed', http_scheme=parsed.scheme,
http_host=conn.host, http_port=conn.port,
http_path=parsed.path, http_query=qs, http_status=resp.status,
http_reason=resp.reason)
if resp.status == 204:
resp.read()
return resp_headers, []
return resp_headers, json_loads(resp.read())
def head_account(url, token, http_conn=None):
"""
Get account stats.
:param url: storage URL
:param token: auth token
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:returns: a dict containing the response's headers (all header names will
be lowercase)
:raises ClientException: HTTP HEAD request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
conn.request('HEAD', parsed.path, '', {'X-Auth-Token': token})
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Account HEAD failed', http_scheme=parsed.scheme,
http_host=conn.host, http_port=conn.port,
http_path=parsed.path, http_status=resp.status,
http_reason=resp.reason)
resp_headers = {}
for header, value in resp.getheaders():
resp_headers[header.lower()] = value
return resp_headers
def post_account(url, token, headers, http_conn=None):
"""
Update an account's metadata.
:param url: storage URL
:param token: auth token
:param headers: additional headers to include in the request
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:raises ClientException: HTTP POST request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
headers['X-Auth-Token'] = token
conn.request('POST', parsed.path, '', headers)
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
        raise ClientException('Account POST failed',
            http_scheme=parsed.scheme, http_host=conn.host,
            http_port=conn.port, http_path=parsed.path, http_status=resp.status,
            http_reason=resp.reason)
def get_container(url, token, container, marker=None, limit=None,
prefix=None, delimiter=None, http_conn=None,
full_listing=False):
"""
Get a listing of objects for the container.
:param url: storage URL
:param token: auth token
:param container: container name to get a listing for
:param marker: marker query
:param limit: limit query
:param prefix: prefix query
    :param delimiter: string to delimit the queries on
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:param full_listing: if True, return a full listing, else returns a max
of 10000 listings
:returns: a tuple of (response headers, a list of objects) The response
headers will be a dict and all header names will be lowercase.
:raises ClientException: HTTP GET request failed
"""
if not http_conn:
http_conn = http_connection(url)
if full_listing:
rv = get_container(url, token, container, marker, limit, prefix,
delimiter, http_conn)
listing = rv[1]
while listing:
if not delimiter:
marker = listing[-1]['name']
else:
marker = listing[-1].get('name', listing[-1].get('subdir'))
listing = get_container(url, token, container, marker, limit,
prefix, delimiter, http_conn)[1]
if listing:
rv[1].extend(listing)
return rv
parsed, conn = http_conn
path = '%s/%s' % (parsed.path, quote(container))
qs = 'format=json'
if marker:
qs += '&marker=%s' % quote(marker)
if limit:
qs += '&limit=%d' % limit
if prefix:
qs += '&prefix=%s' % quote(prefix)
if delimiter:
qs += '&delimiter=%s' % quote(delimiter)
conn.request('GET', '%s?%s' % (path, qs), '', {'X-Auth-Token': token})
resp = conn.getresponse()
if resp.status < 200 or resp.status >= 300:
resp.read()
raise ClientException('Container GET failed',
http_scheme=parsed.scheme, http_host=conn.host,
http_port=conn.port, http_path=path, http_query=qs,
http_status=resp.status, http_reason=resp.reason)
resp_headers = {}
for header, value in resp.getheaders():
resp_headers[header.lower()] = value
if resp.status == 204:
resp.read()
return resp_headers, []
return resp_headers, json_loads(resp.read())
def head_container(url, token, container, http_conn=None):
"""
Get container stats.
:param url: storage URL
:param token: auth token
:param container: container name to get stats for
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:returns: a dict containing the response's headers (all header names will
be lowercase)
:raises ClientException: HTTP HEAD request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s' % (parsed.path, quote(container))
conn.request('HEAD', path, '', {'X-Auth-Token': token})
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Container HEAD failed',
http_scheme=parsed.scheme, http_host=conn.host,
http_port=conn.port, http_path=path, http_status=resp.status,
http_reason=resp.reason)
resp_headers = {}
for header, value in resp.getheaders():
resp_headers[header.lower()] = value
return resp_headers
def put_container(url, token, container, headers=None, http_conn=None):
"""
Create a container
:param url: storage URL
:param token: auth token
:param container: container name to create
:param headers: additional headers to include in the request
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:raises ClientException: HTTP PUT request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s' % (parsed.path, quote(container))
if not headers:
headers = {}
headers['X-Auth-Token'] = token
conn.request('PUT', path, '', headers)
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Container PUT failed',
http_scheme=parsed.scheme, http_host=conn.host,
http_port=conn.port, http_path=path, http_status=resp.status,
http_reason=resp.reason)
def post_container(url, token, container, headers, http_conn=None):
"""
Update a container's metadata.
:param url: storage URL
:param token: auth token
:param container: container name to update
:param headers: additional headers to include in the request
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:raises ClientException: HTTP POST request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s' % (parsed.path, quote(container))
headers['X-Auth-Token'] = token
conn.request('POST', path, '', headers)
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Container POST failed',
http_scheme=parsed.scheme, http_host=conn.host,
http_port=conn.port, http_path=path, http_status=resp.status,
http_reason=resp.reason)
def delete_container(url, token, container, http_conn=None):
"""
Delete a container
:param url: storage URL
:param token: auth token
:param container: container name to delete
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:raises ClientException: HTTP DELETE request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s' % (parsed.path, quote(container))
conn.request('DELETE', path, '', {'X-Auth-Token': token})
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Container DELETE failed',
http_scheme=parsed.scheme, http_host=conn.host,
http_port=conn.port, http_path=path, http_status=resp.status,
http_reason=resp.reason)
def get_object(url, token, container, name, http_conn=None,
resp_chunk_size=None):
"""
Get an object
:param url: storage URL
:param token: auth token
:param container: container name that the object is in
:param name: object name to get
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:param resp_chunk_size: if defined, chunk size of data to read. NOTE: If
you specify a resp_chunk_size you must fully read
the object's contents before making another
request.
:returns: a tuple of (response headers, the object's contents) The response
headers will be a dict and all header names will be lowercase.
:raises ClientException: HTTP GET request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s/%s' % (parsed.path, quote(container), quote(name))
conn.request('GET', path, '', {'X-Auth-Token': token})
resp = conn.getresponse()
if resp.status < 200 or resp.status >= 300:
resp.read()
raise ClientException('Object GET failed', http_scheme=parsed.scheme,
http_host=conn.host, http_port=conn.port, http_path=path,
http_status=resp.status, http_reason=resp.reason)
if resp_chunk_size:
def _object_body():
buf = resp.read(resp_chunk_size)
while buf:
yield buf
buf = resp.read(resp_chunk_size)
object_body = _object_body()
else:
object_body = resp.read()
resp_headers = {}
for header, value in resp.getheaders():
resp_headers[header.lower()] = value
return resp_headers, object_body
def head_object(url, token, container, name, http_conn=None):
"""
Get object info
:param url: storage URL
:param token: auth token
:param container: container name that the object is in
:param name: object name to get info for
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:returns: a dict containing the response's headers (all header names will
be lowercase)
:raises ClientException: HTTP HEAD request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s/%s' % (parsed.path, quote(container), quote(name))
conn.request('HEAD', path, '', {'X-Auth-Token': token})
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Object HEAD failed', http_scheme=parsed.scheme,
http_host=conn.host, http_port=conn.port, http_path=path,
http_status=resp.status, http_reason=resp.reason)
resp_headers = {}
for header, value in resp.getheaders():
resp_headers[header.lower()] = value
return resp_headers
def put_object(url, token=None, container=None, name=None, contents=None,
content_length=None, etag=None, chunk_size=65536,
content_type=None, headers=None, http_conn=None, proxy=None):
"""
Put an object
:param url: storage URL
:param token: auth token; if None, no token will be sent
:param container: container name that the object is in; if None, the
container name is expected to be part of the url
:param name: object name to put; if None, the object name is expected to be
part of the url
:param contents: a string or a file like object to read object data from;
if None, a zero-byte put will be done
:param content_length: value to send as content-length header; also limits
the amount read from contents; if None, it will be
computed via the contents or chunked transfer
encoding will be used
:param etag: etag of contents; if None, no etag will be sent
:param chunk_size: chunk size of data to write; default 65536
:param content_type: value to send as content-type header; if None, no
content-type will be set (remote end will likely try
to auto-detect it)
:param headers: additional headers to include in the request, if any
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:param proxy: proxy to connect through, if any; None by default; str of the
format 'http://127.0.0.1:8888' to set one
:returns: etag from server response
:raises ClientException: HTTP PUT request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url, proxy=proxy)
path = parsed.path
if container:
path = '%s/%s' % (path.rstrip('/'), quote(container))
if name:
path = '%s/%s' % (path.rstrip('/'), quote(name))
if headers:
headers = dict(headers)
else:
headers = {}
if token:
headers['X-Auth-Token'] = token
if etag:
headers['ETag'] = etag.strip('"')
if content_length is not None:
headers['Content-Length'] = str(content_length)
else:
for n, v in headers.iteritems():
if n.lower() == 'content-length':
content_length = int(v)
if content_type is not None:
headers['Content-Type'] = content_type
if not contents:
headers['Content-Length'] = '0'
if hasattr(contents, 'read'):
conn.putrequest('PUT', path)
for header, value in headers.iteritems():
conn.putheader(header, value)
        if content_length is None:
            # stream the body with chunked transfer encoding: each chunk is
            # "<hex size>\r\n<data>\r\n" and a zero-size chunk ends the body
            conn.putheader('Transfer-Encoding', 'chunked')
            conn.endheaders()
            chunk = contents.read(chunk_size)
            while chunk:
                conn.send('%x\r\n%s\r\n' % (len(chunk), chunk))
                chunk = contents.read(chunk_size)
            conn.send('0\r\n\r\n')
        else:
            # content length known up front: send exactly that many bytes,
            # chunk_size at a time
            conn.endheaders()
            left = content_length
while left > 0:
size = chunk_size
if size > left:
size = left
chunk = contents.read(size)
conn.send(chunk)
left -= len(chunk)
else:
conn.request('PUT', path, contents, headers)
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Object PUT failed', http_scheme=parsed.scheme,
http_host=conn.host, http_port=conn.port, http_path=path,
http_status=resp.status, http_reason=resp.reason)
return resp.getheader('etag', '').strip('"')
def post_object(url, token, container, name, headers, http_conn=None):
"""
Update object metadata
:param url: storage URL
:param token: auth token
:param container: container name that the object is in
:param name: name of the object to update
:param headers: additional headers to include in the request
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:raises ClientException: HTTP POST request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url)
path = '%s/%s/%s' % (parsed.path, quote(container), quote(name))
headers['X-Auth-Token'] = token
conn.request('POST', path, '', headers)
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Object POST failed', http_scheme=parsed.scheme,
http_host=conn.host, http_port=conn.port, http_path=path,
http_status=resp.status, http_reason=resp.reason)
def delete_object(url, token=None, container=None, name=None, http_conn=None,
headers=None, proxy=None):
"""
Delete object
:param url: storage URL
:param token: auth token; if None, no token will be sent
:param container: container name that the object is in; if None, the
container name is expected to be part of the url
:param name: object name to delete; if None, the object name is expected to
be part of the url
:param http_conn: HTTP connection object (If None, it will create the
conn object)
:param headers: additional headers to include in the request
:param proxy: proxy to connect through, if any; None by default; str of the
format 'http://127.0.0.1:8888' to set one
:raises ClientException: HTTP DELETE request failed
"""
if http_conn:
parsed, conn = http_conn
else:
parsed, conn = http_connection(url, proxy=proxy)
path = parsed.path
if container:
path = '%s/%s' % (path.rstrip('/'), quote(container))
if name:
path = '%s/%s' % (path.rstrip('/'), quote(name))
if headers:
headers = dict(headers)
else:
headers = {}
if token:
headers['X-Auth-Token'] = token
conn.request('DELETE', path, '', headers)
resp = conn.getresponse()
resp.read()
if resp.status < 200 or resp.status >= 300:
raise ClientException('Object DELETE failed',
http_scheme=parsed.scheme, http_host=conn.host,
http_port=conn.port, http_path=path, http_status=resp.status,
http_reason=resp.reason)
class Connection(object):
"""Convenience class to make requests that will also retry the request"""
def __init__(self, authurl, user, key, retries=5, preauthurl=None,
preauthtoken=None, snet=False, starting_backoff=1):
"""
        :param authurl: authentication URL
:param user: user name to authenticate as
:param key: key/password to authenticate with
:param retries: Number of times to retry the request before failing
:param preauthurl: storage URL (if you have already authenticated)
:param preauthtoken: authentication token (if you have already
authenticated)
        :param snet: use SERVICENET internal network, default is False
"""
self.authurl = authurl
self.user = user
self.key = key
self.retries = retries
self.http_conn = None
self.url = preauthurl
self.token = preauthtoken
self.attempts = 0
self.snet = snet
self.starting_backoff = starting_backoff
def get_auth(self):
return get_auth(self.authurl, self.user, self.key, snet=self.snet)
def http_connection(self):
return http_connection(self.url)
def _retry(self, reset_func, func, *args, **kwargs):
self.attempts = 0
backoff = self.starting_backoff
while self.attempts <= self.retries:
self.attempts += 1
try:
if not self.url or not self.token:
self.url, self.token = self.get_auth()
self.http_conn = None
if not self.http_conn:
self.http_conn = self.http_connection()
kwargs['http_conn'] = self.http_conn
rv = func(self.url, self.token, *args, **kwargs)
return rv
except (socket.error, HTTPException):
if self.attempts > self.retries:
raise
self.http_conn = None
except ClientException, err:
if self.attempts > self.retries:
raise
if err.http_status == 401:
self.url = self.token = None
if self.attempts > 1:
raise
elif err.http_status == 408:
self.http_conn = None
elif 500 <= err.http_status <= 599:
pass
else:
raise
sleep(backoff)
backoff *= 2
if reset_func:
reset_func(func, *args, **kwargs)
def head_account(self):
"""Wrapper for :func:`head_account`"""
return self._retry(None, head_account)
def get_account(self, marker=None, limit=None, prefix=None,
full_listing=False):
"""Wrapper for :func:`get_account`"""
# TODO(unknown): With full_listing=True this will restart the entire
# listing with each retry. Need to make a better version that just
# retries where it left off.
return self._retry(None, get_account, marker=marker, limit=limit,
prefix=prefix, full_listing=full_listing)
def post_account(self, headers):
"""Wrapper for :func:`post_account`"""
return self._retry(None, post_account, headers)
def head_container(self, container):
"""Wrapper for :func:`head_container`"""
return self._retry(None, head_container, container)
def get_container(self, container, marker=None, limit=None, prefix=None,
delimiter=None, full_listing=False):
"""Wrapper for :func:`get_container`"""
# TODO(unknown): With full_listing=True this will restart the entire
# listing with each retry. Need to make a better version that just
# retries where it left off.
return self._retry(None, get_container, container, marker=marker,
limit=limit, prefix=prefix, delimiter=delimiter,
full_listing=full_listing)
def put_container(self, container, headers=None):
"""Wrapper for :func:`put_container`"""
return self._retry(None, put_container, container, headers=headers)
def post_container(self, container, headers):
"""Wrapper for :func:`post_container`"""
return self._retry(None, post_container, container, headers)
def delete_container(self, container):
"""Wrapper for :func:`delete_container`"""
return self._retry(None, delete_container, container)
def head_object(self, container, obj):
"""Wrapper for :func:`head_object`"""
return self._retry(None, head_object, container, obj)
def get_object(self, container, obj, resp_chunk_size=None):
"""Wrapper for :func:`get_object`"""
return self._retry(None, get_object, container, obj,
resp_chunk_size=resp_chunk_size)
def put_object(self, container, obj, contents, content_length=None,
etag=None, chunk_size=65536, content_type=None,
headers=None):
"""Wrapper for :func:`put_object`"""
def _default_reset(*args, **kwargs):
raise ClientException('put_object(%r, %r, ...) failure and no '
'ability to reset contents for reupload.' % (container, obj))
reset_func = _default_reset
tell = getattr(contents, 'tell', None)
seek = getattr(contents, 'seek', None)
if tell and seek:
orig_pos = tell()
reset_func = lambda *a, **k: seek(orig_pos)
elif not contents:
reset_func = lambda *a, **k: None
return self._retry(reset_func, put_object, container, obj, contents,
content_length=content_length, etag=etag, chunk_size=chunk_size,
content_type=content_type, headers=headers)
def post_object(self, container, obj, headers):
"""Wrapper for :func:`post_object`"""
return self._retry(None, post_object, container, obj, headers)
def delete_object(self, container, obj):
"""Wrapper for :func:`delete_object`"""
return self._retry(None, delete_object, container, obj)
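

if __name__ == '__main__':
    # Minimal smoke test of the retrying client (a sketch: the auth URL and
    # credentials are placeholders; point it at a real endpoint before
    # running).
    conn = Connection('https://auth.example.com/v1.0', 'account:user', 'secret')
    conn.put_container('sandbox')
    conn.put_object('sandbox', 'hello.txt', 'hello world')
    headers, body = conn.get_object('sandbox', 'hello.txt')
    assert body == 'hello world', headers
    conn.delete_object('sandbox', 'hello.txt')
    conn.delete_container('sandbox')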
| apache-2.0 | -1,593,031,386,800,875,800 | 37.313703 | 79 | 0.593627 | false |
opennode/waldur-mastermind | src/waldur_mastermind/analytics/views.py | 1 | 3899 | import collections
from datetime import timedelta
from django.contrib.contenttypes.models import ContentType
from django.db.models.expressions import OuterRef, Subquery
from rest_framework import status, viewsets
from rest_framework.response import Response
from waldur_core.quotas.models import Quota
from waldur_core.structure.models import Project
from waldur_mastermind.billing.models import PriceEstimate
from waldur_mastermind.invoices.models import InvoiceItem
from waldur_mastermind.invoices.utils import get_current_month, get_current_year
from . import models, serializers
class DailyQuotaHistoryViewSet(viewsets.GenericViewSet):
# Fix for schema generation
queryset = []
def list(self, request):
serializer = serializers.DailyHistoryQuotaSerializer(
data=request.query_params, context={'request': request},
)
serializer.is_valid(raise_exception=True)
result = self.get_result(serializer.validated_data)
return Response(result)
def get_result(self, query):
scope = query['scope']
quota_names = query['quota_names']
start = query['start']
end = query['end']
quotas = models.DailyQuotaHistory.objects.filter(
scope=scope, name__in=quota_names, date__gte=start, date__lte=end,
).only('name', 'date', 'usage',)
        # index the snapshots by quota name and date
        charts = collections.defaultdict(dict)
        for quota in quotas:
            charts[quota.name][quota.date] = quota.usage
        # emit one value per day in [start, end]; days without a snapshot
        # inherit the previous day's usage (forward fill)
        values = collections.defaultdict(list)
        day = timedelta(days=1)
        days = (end - start).days
        for name in quota_names:
            usage = 0
            for i in range(days + 1):
                date = start + i * day
                usage = charts[name].get(date, usage)
                values[name].append(usage)
        return values
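
# The inner loop of get_result() above forward-fills gaps: a date with no
# snapshot inherits the most recent usage. The same idea as a standalone
# sketch (hypothetical helper, not used by the viewsets):
def _forward_fill(points, days, initial=0):
    series, usage = [], initial
    for day in days:
        usage = points.get(day, usage)
        series.append(usage)
    return series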
class ProjectQuotasViewSet(viewsets.GenericViewSet):
# Fix for schema generation
queryset = []
def list(self, request):
quota_name = request.query_params.get('quota_name')
if not quota_name:
return Response(status=status.HTTP_400_BAD_REQUEST)
content_type = ContentType.objects.get_for_model(Project)
if quota_name == 'estimated_price':
projects = self.annotate_estimated_price(content_type)
elif quota_name == 'current_price':
projects = self.annotate_current_price(content_type)
else:
projects = self.annotate_quotas(quota_name, content_type)
return Response(
[
{
'project_name': project.name,
'customer_name': project.customer.name,
'customer_abbreviation': project.customer.abbreviation,
'value': project.value,
}
for project in projects
]
)
def annotate_quotas(self, quota_name, content_type):
quotas = Quota.objects.filter(
object_id=OuterRef('pk'), content_type=content_type, name=quota_name,
)
subquery = Subquery(quotas.values('usage')[:1])
return Project.objects.annotate(value=subquery)
def annotate_estimated_price(self, content_type):
estimates = PriceEstimate.objects.filter(
object_id=OuterRef('pk'), content_type=content_type,
)
subquery = Subquery(estimates.values('total')[:1])
return Project.objects.annotate(value=subquery)
def annotate_current_price(self, content_type):
projects = Project.objects.all()
year, month = get_current_year(), get_current_month()
for project in projects:
items = InvoiceItem.objects.filter(
invoice__year=year, invoice__month=month, project_id=project.id
)
project.value = sum(item.price_current for item in items)
return projects
| mit | -5,358,902,503,370,012,000 | 35.783019 | 81 | 0.631957 | false |
Rubisk/mcedit2 | src/mcedit2/rendering/chunkmeshes/entitymesh.py | 1 | 4131 | """
${NAME}
"""
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
import numpy
from mcedit2.rendering import renderstates, scenegraph
from mcedit2.rendering.blockmeshes import standardCubeTemplates
from mcedit2.rendering.blockmeshes import ChunkMeshBase
from mcedit2.rendering.layers import Layer
from mcedit2.rendering.slices import _XYZ
from mcedit2.rendering.vertexarraybuffer import QuadVertexArrayBuffer
log = logging.getLogger(__name__)
class EntityMeshBase(ChunkMeshBase):
renderstate = renderstates.RenderstateEntityNode
detailLevels = (0, 1, 2)
    def _computeVertices(self, positions, colors, offset=False, chunkPosition=(0, 0)):
        cx, cz = chunkPosition
        # chunk origin in world coordinates
        x = cx << 4
        z = cz << 4
        bounds = self.chunkUpdate.updateTask.worldScene.bounds
        if bounds:
            positions = [p for p in positions if p in bounds]
        # six quads (one per cube face) for every entity position
        vertexBuffer = QuadVertexArrayBuffer(len(positions) * 6, lights=False, textures=False)
        vertexBuffer.buffer.shape = (len(positions), 6) + vertexBuffer.buffer.shape[-2:]
        if len(positions):
            positions = numpy.array(positions, dtype=float)
            # shift x/z into chunk-relative coordinates
            positions[:, (0, 2)] -= (x, z)
            if offset:
                positions -= 0.5
            vertexBuffer.rgba[:] = colors
            # broadcast each center to all 6 faces x 4 corners, then add the
            # per-face corner offsets from the cube template
            vertexBuffer.vertex[:] = positions[:, numpy.newaxis, numpy.newaxis, :]
            vertexBuffer.vertex[:] += standardCubeTemplates[_XYZ]
            vertexBuffer.buffer.shape = (len(positions) * 6, ) + vertexBuffer.buffer.shape[-2:]
        return vertexBuffer
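
# For intuition, the broadcast used above as a standalone sketch
# (hypothetical helper, not used by the renderer; assumes the cube template
# stores one quad of four corners per face, i.e. shape (6, 4, 3)):
def _expand_centers_demo(centers):
    cube = standardCubeTemplates[_XYZ]
    centers = numpy.asarray(centers, dtype=float)
    # (N, 1, 1, 3) + (6, 4, 3) -> (N, 6, 4, 3)
    return centers[:, numpy.newaxis, numpy.newaxis, :] + cube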
class TileEntityMesh(EntityMeshBase):
layer = Layer.TileEntities
def makeChunkVertices(self, chunk, limitBox):
tilePositions = []
for i, ref in enumerate(chunk.TileEntities):
if i % 10 == 0:
yield
if limitBox and ref.Position not in limitBox:
continue
tilePositions.append(ref.Position)
tiles = self._computeVertices(tilePositions, (0xff, 0xff, 0x33, 0x44), chunkPosition=chunk.chunkPosition)
yield
self.sceneNode = scenegraph.VertexNode(tiles)
class MonsterRenderer(EntityMeshBase):
layer = Layer.Entities # xxx Monsters
notMonsters = {"Item", "XPOrb", "Painting"}
def makeChunkVertices(self, chunk, limitBox):
monsterPositions = []
for i, entityRef in enumerate(chunk.Entities):
if i % 10 == 0:
yield
ID = entityRef.id
if ID in self.notMonsters:
continue
pos = entityRef.Position
if limitBox and pos not in limitBox:
continue
monsterPositions.append(pos)
monsters = self._computeVertices(monsterPositions,
(0xff, 0x22, 0x22, 0x44),
offset=True,
chunkPosition=chunk.chunkPosition)
yield
self.sceneNode = scenegraph.VertexNode(monsters)
class ItemRenderer(EntityMeshBase):
layer = Layer.Items
def makeChunkVertices(self, chunk, limitBox):
entityPositions = []
entityColors = []
colorMap = {
"Item": (0x22, 0xff, 0x22, 0x5f),
"XPOrb": (0x88, 0xff, 0x88, 0x5f),
"Painting": (134, 96, 67, 0x5f),
}
for i, entityRef in enumerate(chunk.Entities):
if i % 10 == 0:
yield
color = colorMap.get(entityRef.id)
if color is None:
continue
pos = entityRef.Position
if limitBox and pos not in limitBox:
continue
entityPositions.append(pos)
entityColors.append(color)
items = self._computeVertices(entityPositions,
numpy.array(entityColors, dtype='uint8')[:, numpy.newaxis, numpy.newaxis],
offset=True, chunkPosition=chunk.chunkPosition)
yield
self.sceneNode = scenegraph.VertexNode(items)
| bsd-3-clause | 7,563,492,981,460,571,000 | 33.140496 | 115 | 0.595013 | false |
CingHu/neutron-ustack | neutron/plugins/mlnx/rpc_callbacks.py | 1 | 4229 | # Copyright 2013 Mellanox Technologies, Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from oslo.config import cfg
from neutron.common import constants as q_const
from neutron.common import rpc as n_rpc
from neutron.db import api as db_api
from neutron.openstack.common import log as logging
from neutron.plugins.mlnx.db import mlnx_db_v2 as db
LOG = logging.getLogger(__name__)
class MlnxRpcCallbacks(n_rpc.RpcCallback):
# History
# 1.1 Support Security Group RPC
# 1.2 Support get_devices_details_list
RPC_API_VERSION = '1.2'
def get_device_details(self, rpc_context, **kwargs):
"""Agent requests device details."""
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
LOG.debug(_("Device %(device)s details requested from %(agent_id)s"),
{'device': device, 'agent_id': agent_id})
port = self.get_port_from_device(device)
if port:
binding = db.get_network_binding(db_api.get_session(),
port['network_id'])
entry = {'device': device,
'physical_network': binding.physical_network,
'network_type': binding.network_type,
'segmentation_id': binding.segmentation_id,
'network_id': port['network_id'],
'port_mac': port['mac_address'],
'port_id': port['id'],
'admin_state_up': port['admin_state_up']}
if cfg.CONF.AGENT.rpc_support_old_agents:
entry['vlan_id'] = binding.segmentation_id
new_status = (q_const.PORT_STATUS_ACTIVE if port['admin_state_up']
else q_const.PORT_STATUS_DOWN)
if port['status'] != new_status:
db.set_port_status(port['id'], new_status)
else:
entry = {'device': device}
LOG.debug(_("%s can not be found in database"), device)
return entry
def get_devices_details_list(self, rpc_context, **kwargs):
return [
self.get_device_details(
rpc_context,
device=device,
**kwargs
)
for device in kwargs.pop('devices', [])
]
def update_device_down(self, rpc_context, **kwargs):
"""Device no longer exists on agent."""
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
LOG.debug(_("Device %(device)s no longer exists on %(agent_id)s"),
{'device': device, 'agent_id': agent_id})
port = self.get_port_from_device(device)
if port:
entry = {'device': device,
'exists': True}
if port['status'] != q_const.PORT_STATUS_DOWN:
# Set port status to DOWN
db.set_port_status(port['id'], q_const.PORT_STATUS_DOWN)
else:
entry = {'device': device,
'exists': False}
LOG.debug(_("%s can not be found in database"), device)
return entry
def update_device_up(self, rpc_context, **kwargs):
"""Device is up on agent."""
agent_id = kwargs.get('agent_id')
device = kwargs.get('device')
LOG.debug(_("Device %(device)s up %(agent_id)s"),
{'device': device, 'agent_id': agent_id})
port = self.get_port_from_device(device)
if port:
if port['status'] != q_const.PORT_STATUS_ACTIVE:
# Set port status to ACTIVE
db.set_port_status(port['id'], q_const.PORT_STATUS_ACTIVE)
else:
LOG.debug(_("%s can not be found in database"), device)
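
# The three callbacks above reduce to a single status rule; as a standalone
# sketch (hypothetical helper, not part of the plugin API):
def _next_port_status(event, admin_state_up):
    """'up' activates a port, 'down' deactivates it, and a device-details
    request follows the port's admin state."""
    if event == 'up':
        return q_const.PORT_STATUS_ACTIVE
    if event == 'down':
        return q_const.PORT_STATUS_DOWN
    return (q_const.PORT_STATUS_ACTIVE if admin_state_up
            else q_const.PORT_STATUS_DOWN)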
| apache-2.0 | 123,379,875,097,202,480 | 40.460784 | 78 | 0.569402 | false |
datapythonista/pandas | pandas/core/internals/construction.py | 1 | 31013 | """
Functions for preparing various inputs passed to the DataFrame or Series
constructors before passing them to a BlockManager.
"""
from __future__ import annotations
from collections import abc
from typing import (
TYPE_CHECKING,
Any,
Hashable,
Sequence,
)
import warnings
import numpy as np
import numpy.ma as ma
from pandas._libs import lib
from pandas._typing import (
ArrayLike,
DtypeObj,
Manager,
)
from pandas.errors import IntCastingNaNError
from pandas.core.dtypes.cast import (
construct_1d_arraylike_from_scalar,
construct_1d_ndarray_preserving_na,
dict_compat,
maybe_cast_to_datetime,
maybe_convert_platform,
maybe_infer_to_datetimelike,
maybe_upcast,
)
from pandas.core.dtypes.common import (
is_1d_only_ea_dtype,
is_datetime64tz_dtype,
is_datetime_or_timedelta_dtype,
is_dtype_equal,
is_extension_array_dtype,
is_integer_dtype,
is_list_like,
is_named_tuple,
is_object_dtype,
)
from pandas.core.dtypes.dtypes import ExtensionDtype
from pandas.core.dtypes.generic import (
ABCDataFrame,
ABCDatetimeIndex,
ABCIndex,
ABCSeries,
ABCTimedeltaIndex,
)
from pandas.core import (
algorithms,
common as com,
)
from pandas.core.arrays import (
Categorical,
DatetimeArray,
ExtensionArray,
TimedeltaArray,
)
from pandas.core.construction import (
ensure_wrapped_if_datetimelike,
extract_array,
range_to_ndarray,
sanitize_array,
)
from pandas.core.indexes import base as ibase
from pandas.core.indexes.api import (
Index,
ensure_index,
get_objs_combined_axis,
union_indexes,
)
from pandas.core.internals.array_manager import (
ArrayManager,
SingleArrayManager,
)
from pandas.core.internals.blocks import (
ensure_block_shape,
new_block,
)
from pandas.core.internals.managers import (
BlockManager,
SingleBlockManager,
create_block_manager_from_arrays,
create_block_manager_from_blocks,
)
if TYPE_CHECKING:
from numpy.ma.mrecords import MaskedRecords
# ---------------------------------------------------------------------
# BlockManager Interface
def arrays_to_mgr(
arrays,
arr_names,
index,
columns,
*,
dtype: DtypeObj | None = None,
verify_integrity: bool = True,
typ: str | None = None,
consolidate: bool = True,
) -> Manager:
"""
Segregate Series based on type and coerce into matrices.
Needs to handle a lot of exceptional cases.
"""
arr_names = ensure_index(arr_names)
if verify_integrity:
# figure out the index, if necessary
if index is None:
index = _extract_index(arrays)
else:
index = ensure_index(index)
# don't force copy because getting jammed in an ndarray anyway
arrays = _homogenize(arrays, index, dtype)
else:
index = ensure_index(index)
columns = ensure_index(columns)
# from BlockManager perspective
axes = [columns, index]
if typ == "block":
return create_block_manager_from_arrays(
arrays, arr_names, axes, consolidate=consolidate
)
elif typ == "array":
if len(columns) != len(arrays):
assert len(arrays) == 0
arrays = [np.array([], dtype=object) for _ in range(len(columns))]
return ArrayManager(arrays, [index, columns])
else:
raise ValueError(f"'typ' needs to be one of {{'block', 'array'}}, got '{typ}'")
def rec_array_to_mgr(
data: MaskedRecords | np.recarray | np.ndarray,
index,
columns,
dtype: DtypeObj | None,
copy: bool,
typ: str,
):
"""
Extract from a masked rec array and create the manager.
"""
# essentially process a record array then fill it
fdata = ma.getdata(data)
if index is None:
index = _get_names_from_index(fdata)
else:
index = ensure_index(index)
if columns is not None:
columns = ensure_index(columns)
arrays, arr_columns = to_arrays(fdata, columns)
# fill if needed
if isinstance(data, np.ma.MaskedArray):
new_arrays = fill_masked_arrays(data, arr_columns)
else:
# error: Incompatible types in assignment (expression has type
# "List[ExtensionArray]", variable has type "List[ndarray]")
new_arrays = arrays # type: ignore[assignment]
# create the manager
# error: Argument 1 to "reorder_arrays" has incompatible type "List[ndarray]";
# expected "List[ExtensionArray]"
arrays, arr_columns = reorder_arrays(
new_arrays, arr_columns, columns # type: ignore[arg-type]
)
if columns is None:
columns = arr_columns
mgr = arrays_to_mgr(arrays, arr_columns, index, columns, dtype=dtype, typ=typ)
if copy:
mgr = mgr.copy()
return mgr
def fill_masked_arrays(data: MaskedRecords, arr_columns: Index) -> list[np.ndarray]:
"""
Convert numpy MaskedRecords to ensure mask is softened.
"""
new_arrays = []
for col in arr_columns:
arr = data[col]
fv = arr.fill_value
mask = ma.getmaskarray(arr)
if mask.any():
arr, fv = maybe_upcast(arr, fill_value=fv, copy=True)
arr[mask] = fv
new_arrays.append(arr)
return new_arrays
def mgr_to_mgr(mgr, typ: str, copy: bool = True):
"""
Convert to specific type of Manager. Does not copy if the type is already
correct. Does not guarantee a copy otherwise. `copy` keyword only controls
whether conversion from Block->ArrayManager copies the 1D arrays.
"""
new_mgr: Manager
if typ == "block":
if isinstance(mgr, BlockManager):
new_mgr = mgr
else:
if mgr.ndim == 2:
new_mgr = arrays_to_mgr(
mgr.arrays, mgr.axes[0], mgr.axes[1], mgr.axes[0], typ="block"
)
else:
new_mgr = SingleBlockManager.from_array(mgr.arrays[0], mgr.index)
elif typ == "array":
if isinstance(mgr, ArrayManager):
new_mgr = mgr
else:
if mgr.ndim == 2:
arrays = [mgr.iget_values(i) for i in range(len(mgr.axes[0]))]
if copy:
arrays = [arr.copy() for arr in arrays]
new_mgr = ArrayManager(arrays, [mgr.axes[1], mgr.axes[0]])
else:
array = mgr.internal_values()
if copy:
array = array.copy()
new_mgr = SingleArrayManager([array], [mgr.index])
else:
raise ValueError(f"'typ' needs to be one of {{'block', 'array'}}, got '{typ}'")
return new_mgr
# ---------------------------------------------------------------------
# DataFrame Constructor Interface
def ndarray_to_mgr(
values, index, columns, dtype: DtypeObj | None, copy: bool, typ: str
) -> Manager:
# used in DataFrame.__init__
# input must be a ndarray, list, Series, Index, ExtensionArray
if isinstance(values, ABCSeries):
if columns is None:
if values.name is not None:
columns = Index([values.name])
if index is None:
index = values.index
else:
values = values.reindex(index)
# zero len case (GH #2234)
if not len(values) and columns is not None and len(columns):
values = np.empty((0, 1), dtype=object)
vdtype = getattr(values, "dtype", None)
if is_1d_only_ea_dtype(vdtype) or isinstance(dtype, ExtensionDtype):
# GH#19157
if isinstance(values, np.ndarray) and values.ndim > 1:
# GH#12513 a EA dtype passed with a 2D array, split into
# multiple EAs that view the values
values = [values[:, n] for n in range(values.shape[1])]
else:
values = [values]
if columns is None:
columns = Index(range(len(values)))
return arrays_to_mgr(values, columns, index, columns, dtype=dtype, typ=typ)
if is_extension_array_dtype(vdtype) and not is_1d_only_ea_dtype(vdtype):
# i.e. Datetime64TZ
values = extract_array(values, extract_numpy=True)
if copy:
values = values.copy()
if values.ndim == 1:
values = values.reshape(-1, 1)
else:
# by definition an array here
# the dtypes will be coerced to a single dtype
values = _prep_ndarray(values, copy=copy)
if dtype is not None and not is_dtype_equal(values.dtype, dtype):
shape = values.shape
flat = values.ravel()
if not is_integer_dtype(dtype):
# TODO: skipping integer_dtype is needed to keep the tests passing,
# not clear it is correct
# Note: we really only need _try_cast, but keeping to exposed funcs
values = sanitize_array(
flat, None, dtype=dtype, copy=copy, raise_cast_failure=True
)
else:
try:
values = construct_1d_ndarray_preserving_na(
flat, dtype=dtype, copy=False
)
except IntCastingNaNError:
# following Series, we ignore the dtype and retain floating
# values instead of casting nans to meaningless ints
pass
values = values.reshape(shape)
# _prep_ndarray ensures that values.ndim == 2 at this point
index, columns = _get_axes(
values.shape[0], values.shape[1], index=index, columns=columns
)
_check_values_indices_shape_match(values, index, columns)
if typ == "array":
if issubclass(values.dtype.type, str):
values = np.array(values, dtype=object)
if dtype is None and is_object_dtype(values.dtype):
arrays = [
ensure_wrapped_if_datetimelike(
maybe_infer_to_datetimelike(values[:, i].copy())
)
for i in range(values.shape[1])
]
else:
if is_datetime_or_timedelta_dtype(values.dtype):
values = ensure_wrapped_if_datetimelike(values)
arrays = [values[:, i].copy() for i in range(values.shape[1])]
return ArrayManager(arrays, [index, columns], verify_integrity=False)
values = values.T
# if we don't have a dtype specified, then try to convert objects
# on the entire block; this is to convert if we have datetimelike's
# embedded in an object type
if dtype is None and is_object_dtype(values.dtype):
if values.ndim == 2 and values.shape[0] != 1:
# transpose and separate blocks
dtlike_vals = [maybe_infer_to_datetimelike(row) for row in values]
dvals_list = [ensure_block_shape(dval, 2) for dval in dtlike_vals]
# TODO: What about re-joining object columns?
block_values = [
new_block(dvals_list[n], placement=n, ndim=2)
for n in range(len(dvals_list))
]
else:
datelike_vals = maybe_infer_to_datetimelike(values)
nb = new_block(datelike_vals, placement=slice(len(columns)), ndim=2)
block_values = [nb]
else:
nb = new_block(values, placement=slice(len(columns)), ndim=2)
block_values = [nb]
if len(columns) == 0:
block_values = []
return create_block_manager_from_blocks(block_values, [columns, index])
def _check_values_indices_shape_match(
values: np.ndarray, index: Index, columns: Index
) -> None:
"""
Check that the shape implied by our axes matches the actual shape of the
data.
"""
if values.shape[1] != len(columns) or values.shape[0] != len(index):
# Could let this raise in Block constructor, but we get a more
# helpful exception message this way.
if values.shape[0] == 0:
raise ValueError("Empty data passed with indices specified.")
passed = values.shape
implied = (len(index), len(columns))
raise ValueError(f"Shape of passed values is {passed}, indices imply {implied}")
def dict_to_mgr(
data: dict,
index,
columns,
*,
dtype: DtypeObj | None = None,
typ: str = "block",
copy: bool = True,
) -> Manager:
"""
Segregate Series based on type and coerce into matrices.
Needs to handle a lot of exceptional cases.
Used in DataFrame.__init__
"""
arrays: Sequence[Any] | Series
if columns is not None:
from pandas.core.series import Series
arrays = Series(data, index=columns, dtype=object)
data_names = arrays.index
missing = arrays.isna()
if index is None:
# GH10856
# raise ValueError if only scalars in dict
index = _extract_index(arrays[~missing])
else:
index = ensure_index(index)
# no obvious "empty" int column
if missing.any() and not is_integer_dtype(dtype):
nan_dtype: DtypeObj
if dtype is None or (
isinstance(dtype, np.dtype) and np.issubdtype(dtype, np.flexible)
):
# GH#1783
nan_dtype = np.dtype("object")
else:
nan_dtype = dtype
val = construct_1d_arraylike_from_scalar(np.nan, len(index), nan_dtype)
arrays.loc[missing] = [val] * missing.sum()
arrays = list(arrays)
else:
keys = list(data.keys())
columns = data_names = Index(keys)
arrays = [com.maybe_iterable_to_list(data[k]) for k in keys]
# GH#24096 need copy to be deep for datetime64tz case
# TODO: See if we can avoid these copies
arrays = [arr if not isinstance(arr, ABCIndex) else arr._data for arr in arrays]
arrays = [
arr if not is_datetime64tz_dtype(arr) else arr.copy() for arr in arrays
]
if copy:
# arrays_to_mgr (via form_blocks) won't make copies for EAs
# dtype attr check to exclude EADtype-castable strs
arrays = [
x
if not hasattr(x, "dtype") or not isinstance(x.dtype, ExtensionDtype)
else x.copy()
for x in arrays
]
# TODO: can we get rid of the dt64tz special case above?
return arrays_to_mgr(
arrays, data_names, index, columns, dtype=dtype, typ=typ, consolidate=copy
)
def nested_data_to_arrays(
data: Sequence,
columns: Index | None,
index: Index | None,
dtype: DtypeObj | None,
):
"""
Convert a single sequence of arrays to multiple arrays.
"""
# By the time we get here we have already checked treat_as_nested(data)
if is_named_tuple(data[0]) and columns is None:
columns = ensure_index(data[0]._fields)
arrays, columns = to_arrays(data, columns, dtype=dtype)
columns = ensure_index(columns)
if index is None:
if isinstance(data[0], ABCSeries):
index = _get_names_from_index(data)
elif isinstance(data[0], Categorical):
# GH#38845 hit in test_constructor_categorical
index = ibase.default_index(len(data[0]))
else:
index = ibase.default_index(len(data))
return arrays, columns, index
def treat_as_nested(data) -> bool:
"""
Check if we should use nested_data_to_arrays.
"""
return (
len(data) > 0
and is_list_like(data[0])
and getattr(data[0], "ndim", 1) == 1
and not (isinstance(data, ExtensionArray) and data.ndim == 2)
)
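

# Quick illustration of the heuristic above (a sketch; the block only runs
# when this module is executed directly, which pandas itself never does):
if __name__ == "__main__":
    assert treat_as_nested([[1, 2], [3, 4]])  # sequence of rows -> nested
    assert not treat_as_nested([1, 2, 3])     # flat scalars
    assert not treat_as_nested([])            # empty input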
# ---------------------------------------------------------------------
def _prep_ndarray(values, copy: bool = True) -> np.ndarray:
if isinstance(values, TimedeltaArray) or (
isinstance(values, DatetimeArray) and values.tz is None
):
# On older numpy, np.asarray below apparently does not call __array__,
# so nanoseconds get dropped.
values = values._ndarray
if not isinstance(values, (np.ndarray, ABCSeries, Index)):
if len(values) == 0:
return np.empty((0, 0), dtype=object)
elif isinstance(values, range):
arr = range_to_ndarray(values)
return arr[..., np.newaxis]
def convert(v):
if not is_list_like(v) or isinstance(v, ABCDataFrame):
return v
elif not hasattr(v, "dtype") and not isinstance(v, (list, tuple, range)):
# TODO: should we cast these to list?
return v
v = extract_array(v, extract_numpy=True)
res = maybe_convert_platform(v)
return res
# we could have a 1-dim or 2-dim list here
# this is equiv of np.asarray, but does object conversion
# and platform dtype preservation
if is_list_like(values[0]):
values = np.array([convert(v) for v in values])
elif isinstance(values[0], np.ndarray) and values[0].ndim == 0:
# GH#21861
values = np.array([convert(v) for v in values])
else:
values = convert(values)
else:
# drop subclass info
values = np.array(values, copy=copy)
if values.ndim == 1:
values = values.reshape((values.shape[0], 1))
elif values.ndim != 2:
raise ValueError(f"Must pass 2-d input. shape={values.shape}")
return values
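# Shape sketch (assumed behavior of the reshape above):
#   _prep_ndarray(np.array([1, 2, 3])).shape  # -> (3, 1), a single column
# while any input with ndim > 2 raises the ValueError above.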
def _homogenize(data, index: Index, dtype: DtypeObj | None):
oindex = None
homogenized = []
for val in data:
if isinstance(val, ABCSeries):
if dtype is not None:
val = val.astype(dtype)
if val.index is not index:
# Forces alignment. No need to copy data since we
# are putting it into an ndarray later
val = val.reindex(index, copy=False)
# TODO extract_array should be preferred, but that gives failures for
# `extension/test_numpy.py` (extract_array will convert numpy arrays
# to PandasArray), see https://github.com/pandas-dev/pandas/issues/40021
# val = extract_array(val, extract_numpy=True)
val = val._values
else:
if isinstance(val, dict):
if oindex is None:
oindex = index.astype("O")
if isinstance(index, (ABCDatetimeIndex, ABCTimedeltaIndex)):
val = dict_compat(val)
else:
val = dict(val)
val = lib.fast_multiget(val, oindex._values, default=np.nan)
val = sanitize_array(
val, index, dtype=dtype, copy=False, raise_cast_failure=False
)
homogenized.append(val)
return homogenized
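# Alignment sketch (hypothetical inputs): a Series with a different index is
# reindexed to `index` (missing labels become NaN), while a dict is looked up
# key-by-key via fast_multiget with np.nan as the default value.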
def _extract_index(data) -> Index:
"""
Try to infer an Index from the passed data, raise ValueError on failure.
"""
index = None
if len(data) == 0:
index = Index([])
elif len(data) > 0:
raw_lengths = []
indexes: list[list[Hashable] | Index] = []
have_raw_arrays = False
have_series = False
have_dicts = False
for val in data:
if isinstance(val, ABCSeries):
have_series = True
indexes.append(val.index)
elif isinstance(val, dict):
have_dicts = True
indexes.append(list(val.keys()))
elif is_list_like(val) and getattr(val, "ndim", 1) == 1:
have_raw_arrays = True
raw_lengths.append(len(val))
if not indexes and not raw_lengths:
raise ValueError("If using all scalar values, you must pass an index")
if have_series:
index = union_indexes(indexes)
elif have_dicts:
index = union_indexes(indexes, sort=False)
if have_raw_arrays:
lengths = list(set(raw_lengths))
if len(lengths) > 1:
raise ValueError("All arrays must be of the same length")
if have_dicts:
raise ValueError(
"Mixing dicts with non-Series may lead to ambiguous ordering."
)
if have_series:
assert index is not None # for mypy
if lengths[0] != len(index):
msg = (
f"array length {lengths[0]} does not match index "
f"length {len(index)}"
)
raise ValueError(msg)
else:
index = ibase.default_index(lengths[0])
# error: Argument 1 to "ensure_index" has incompatible type "Optional[Index]";
# expected "Union[Union[Union[ExtensionArray, ndarray], Index, Series],
# Sequence[Any]]"
return ensure_index(index) # type: ignore[arg-type]
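# Inference sketch (hypothetical inputs): a dict contributes its keys and a
# Series contributes its index, so
#   _extract_index([{"a": 1, "b": 2}])  # -> Index(["a", "b"])
#   _extract_index([1, 2])              # -> ValueError: scalars need an index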
def reorder_arrays(
arrays: list[ArrayLike], arr_columns: Index, columns: Index | None
) -> tuple[list[ArrayLike], Index]:
# reorder according to the columns
if columns is not None and len(columns) and len(arr_columns):
indexer = ensure_index(arr_columns).get_indexer(columns)
arr_columns = ensure_index([arr_columns[i] for i in indexer])
arrays = [arrays[i] for i in indexer]
return arrays, arr_columns
def _get_names_from_index(data) -> Index:
has_some_name = any(getattr(s, "name", None) is not None for s in data)
if not has_some_name:
return ibase.default_index(len(data))
index: list[Hashable] = list(range(len(data)))
count = 0
for i, s in enumerate(data):
n = getattr(s, "name", None)
if n is not None:
index[i] = n
else:
index[i] = f"Unnamed {count}"
count += 1
return Index(index)
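# Naming sketch (hypothetical Series): [Series(..., name="x"), Series(...)]
# yields Index(["x", "Unnamed 0"]); unnamed entries keep their positional
# slot but receive an "Unnamed <count>" label.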
def _get_axes(
N: int, K: int, index: Index | None, columns: Index | None
) -> tuple[Index, Index]:
# helper to create the axes as indexes
# return axes or defaults
if index is None:
index = ibase.default_index(N)
else:
index = ensure_index(index)
if columns is None:
columns = ibase.default_index(K)
else:
columns = ensure_index(columns)
return index, columns
def dataclasses_to_dicts(data):
"""
Converts a list of dataclass instances to a list of dictionaries.
Parameters
----------
data : List[Type[dataclass]]
    Returns
    -------
    list_dict : List[dict]
    Examples
    --------
    >>> from dataclasses import dataclass
    >>> @dataclass
    ... class Point:
    ...     x: int
    ...     y: int
    >>> dataclasses_to_dicts([Point(1, 2), Point(2, 3)])
    [{'x': 1, 'y': 2}, {'x': 2, 'y': 3}]
"""
from dataclasses import asdict
return list(map(asdict, data))
# ---------------------------------------------------------------------
# Conversion of Inputs to Arrays
def to_arrays(
data, columns: Index | None, dtype: DtypeObj | None = None
) -> tuple[list[ArrayLike], Index]:
"""
Return list of arrays, columns.
"""
if isinstance(data, ABCDataFrame):
if columns is not None:
arrays = [
data._ixs(i, axis=1).values
for i, col in enumerate(data.columns)
if col in columns
]
else:
columns = data.columns
arrays = [data._ixs(i, axis=1).values for i in range(len(columns))]
return arrays, columns
if not len(data):
if isinstance(data, np.ndarray):
if data.dtype.names is not None:
# i.e. numpy structured array
columns = ensure_index(data.dtype.names)
arrays = [data[name] for name in columns]
return arrays, columns
return [], ensure_index([])
elif isinstance(data[0], Categorical):
# GH#38845 deprecate special case
warnings.warn(
"The behavior of DataFrame([categorical, ...]) is deprecated and "
"in a future version will be changed to match the behavior of "
"DataFrame([any_listlike, ...]). "
"To retain the old behavior, pass as a dictionary "
"DataFrame({col: categorical, ..})",
FutureWarning,
stacklevel=4,
)
if columns is None:
columns = ibase.default_index(len(data))
return data, columns
elif isinstance(data, np.ndarray) and data.dtype.names is not None:
# e.g. recarray
columns = Index(list(data.dtype.names))
arrays = [data[k] for k in columns]
return arrays, columns
if isinstance(data[0], (list, tuple)):
arr = _list_to_arrays(data)
elif isinstance(data[0], abc.Mapping):
arr, columns = _list_of_dict_to_arrays(data, columns)
elif isinstance(data[0], ABCSeries):
arr, columns = _list_of_series_to_arrays(data, columns)
else:
# last ditch effort
data = [tuple(x) for x in data]
arr = _list_to_arrays(data)
content, columns = _finalize_columns_and_data(arr, columns, dtype)
return content, columns
def _list_to_arrays(data: list[tuple | list]) -> np.ndarray:
# Returned np.ndarray has ndim = 2
    # Note: we already check len(data) > 0 before getting here
if isinstance(data[0], tuple):
content = lib.to_object_array_tuples(data)
else:
# list of lists
content = lib.to_object_array(data)
return content
def _list_of_series_to_arrays(
data: list,
columns: Index | None,
) -> tuple[np.ndarray, Index]:
# returned np.ndarray has ndim == 2
if columns is None:
# We know pass_data is non-empty because data[0] is a Series
pass_data = [x for x in data if isinstance(x, (ABCSeries, ABCDataFrame))]
columns = get_objs_combined_axis(pass_data, sort=False)
indexer_cache: dict[int, np.ndarray] = {}
aligned_values = []
for s in data:
index = getattr(s, "index", None)
if index is None:
index = ibase.default_index(len(s))
if id(index) in indexer_cache:
indexer = indexer_cache[id(index)]
else:
indexer = indexer_cache[id(index)] = index.get_indexer(columns)
values = extract_array(s, extract_numpy=True)
aligned_values.append(algorithms.take_nd(values, indexer))
# error: Argument 1 to "vstack" has incompatible type "List[ExtensionArray]";
# expected "Sequence[Union[Union[int, float, complex, str, bytes, generic],
# Sequence[Union[int, float, complex, str, bytes, generic]],
# Sequence[Sequence[Any]], _SupportsArray]]"
content = np.vstack(aligned_values) # type: ignore[arg-type]
return content, columns
def _list_of_dict_to_arrays(
data: list[dict],
columns: Index | None,
) -> tuple[np.ndarray, Index]:
"""
Convert list of dicts to numpy arrays
if `columns` is not passed, column names are inferred from the records
- for OrderedDict and dicts, the column names match
the key insertion-order from the first record to the last.
- For other kinds of dict-likes, the keys are lexically sorted.
Parameters
----------
data : iterable
collection of records (OrderedDict, dict)
columns: iterables or None
Returns
-------
content : np.ndarray[object, ndim=2]
columns : Index
"""
if columns is None:
gen = (list(x.keys()) for x in data)
sort = not any(isinstance(d, dict) for d in data)
pre_cols = lib.fast_unique_multiple_list_gen(gen, sort=sort)
columns = ensure_index(pre_cols)
# assure that they are of the base dict class and not of derived
# classes
data = [(type(d) is dict) and d or dict(d) for d in data]
content = lib.dicts_to_array(data, list(columns))
return content, columns
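# Column-inference sketch (hypothetical records): for plain dicts the keys
# keep insertion order across records, so
#   _list_of_dict_to_arrays([{"b": 1}, {"a": 2}], None)
# infers columns ["b", "a"], while other dict-likes are lexically sorted.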
def _finalize_columns_and_data(
content: np.ndarray, # ndim == 2
columns: Index | None,
dtype: DtypeObj | None,
) -> tuple[list[ArrayLike], Index]:
"""
Ensure we have valid columns, cast object dtypes if possible.
"""
contents = list(content.T)
try:
columns = _validate_or_indexify_columns(contents, columns)
except AssertionError as err:
# GH#26429 do not raise user-facing AssertionError
raise ValueError(err) from err
if len(contents) and contents[0].dtype == np.object_:
contents = _convert_object_array(contents, dtype=dtype)
return contents, columns
def _validate_or_indexify_columns(
content: list[np.ndarray], columns: Index | None
) -> Index:
"""
If columns is None, make numbers as column names; Otherwise, validate that
columns have valid length.
Parameters
----------
content : list of np.ndarrays
columns : Index or None
Returns
-------
Index
If columns is None, assign positional column index value as columns.
Raises
------
1. AssertionError when content is not composed of list of lists, and if
length of columns is not equal to length of content.
2. ValueError when content is list of lists, but length of each sub-list
is not equal
3. ValueError when content is list of lists, but length of sub-list is
not equal to length of content
"""
if columns is None:
columns = ibase.default_index(len(content))
else:
# Add mask for data which is composed of list of lists
is_mi_list = isinstance(columns, list) and all(
isinstance(col, list) for col in columns
)
if not is_mi_list and len(columns) != len(content): # pragma: no cover
# caller's responsibility to check for this...
raise AssertionError(
f"{len(columns)} columns passed, passed data had "
f"{len(content)} columns"
)
elif is_mi_list:
# check if nested list column, length of each sub-list should be equal
if len({len(col) for col in columns}) > 1:
raise ValueError(
"Length of columns passed for MultiIndex columns is different"
)
# if columns is not empty and length of sublist is not equal to content
elif columns and len(columns[0]) != len(content):
raise ValueError(
f"{len(columns[0])} columns passed, passed data had "
f"{len(content)} columns"
)
return columns
def _convert_object_array(
content: list[np.ndarray], dtype: DtypeObj | None
) -> list[ArrayLike]:
"""
Internal function to convert object array.
Parameters
----------
content: List[np.ndarray]
dtype: np.dtype or ExtensionDtype
Returns
-------
List[ArrayLike]
"""
# provide soft conversion of object dtypes
def convert(arr):
if dtype != np.dtype("O"):
arr = lib.maybe_convert_objects(arr)
arr = maybe_cast_to_datetime(arr, dtype)
return arr
arrays = [convert(arr) for arr in content]
return arrays
| bsd-3-clause | -271,554,817,022,136,300 | 30.013 | 88 | 0.585077 | false |
christophercrouzet/hienoi | hienoi/gui.py | 1 | 20220 | """Graphical user interface."""
import collections
import ctypes
import sdl2
import hienoi.renderer
from hienoi._common import GLProfile, GraphicsAPI, ParticleDisplay, UserData
from hienoi._vectors import Vector2i, Vector2f, Vector4f
class NavigationAction(object):
"""Enumerator for the current nagivation action.
Attributes
----------
NONE
MOVE
ZOOM
"""
NONE = 0
MOVE = 1
ZOOM = 2
_Handles = collections.namedtuple(
'_Handles', (
'window',
'renderer',
))
_GLHandles = collections.namedtuple(
'_GLHandles', (
'context',
))
_RGBMasks = collections.namedtuple(
'_RGBMasks', (
'red',
'green',
'blue',
))
_FIT_VIEW_REL_PADDING = 2.0
if sdl2.SDL_BYTEORDER == sdl2.SDL_LIL_ENDIAN:
_RGB_MASKS = _RGBMasks(red=0x000000FF, green=0x0000FF00, blue=0x00FF0000)
else:
_RGB_MASKS = _RGBMasks(red=0x00FF0000, green=0x0000FF00, blue=0x000000FF)
class GUI(object):
"""GUI.
Parameters
----------
window_title : str
Title for the window.
window_position : hienoi.Vector2i
Initial window position.
window_size : hienoi.Vector2i
Initial window size.
window_flags : int
SDL2 window flags.
view_aperture_x : float
Initial length in world units to be shown on the X axis.
view_zoom_range : hienoi.Vector2f
Zoom value range for the view.
mouse_wheel_step : float
Coefficient value for each mouse wheel step.
grid_density : float
See :attr:`GUI.grid_density`.
grid_adaptive_threshold : float
See :attr:`GUI.grid_adaptive_threshold`.
show_grid : bool
See :attr:`GUI.show_grid`.
background_color : hienoi.Vector4f
See :attr:`GUI.background_color`.
grid_color : hienoi.Vector4f
See :attr:`GUI.grid_color`.
grid_origin_color : hienoi.Vector4f
See :attr:`GUI.grid_origin_color`.
particle_display : int
See :attr:`GUI.particle_display`.
point_size : int
See :attr:`GUI.point_size`.
edge_feather : float
See :attr:`GUI.edge_feather`.
stroke_width : float
See :attr:`GUI.stroke_width`.
initialize_callback : function
Callback function to initialize any GUI state.
It takes a single argument ``gui``, an instance of this class.
on_event_callback : function
Callback function ran during the event polling.
It takes 3 arguments: ``gui``, an instance of this class,
``data``, some data to pass back and forth between the caller and this
callback function, and ``event``, the event fired.
renderer : dict
Keyword arguments for the configuration of the renderer. See the
parameters for the class :class:`hienoi.renderer.Renderer`.
Attributes
----------
view_position : hienoi.Vector2f
Position of the view (camera).
view_zoom : float
Current zoom value for the view.
grid_density : float
Density of the grid.
A density of 10.0 means that there are around 10 grid divisions
displayed on the X axis. A grid division unit represents a fixed length
in world units, meaning that the actual grid density changes depending
on the view's zoom.
show_grid : bool
True to show the grid.
background_color : hienoi.Vector4f
Color for the background.
grid_color : hienoi.Vector4f
Color for the grid.
grid_origin_color : hienoi.Vector4f
Color for the origin axis of the grid.
particle_display : int
Display mode for the particles. Available values are enumerated in the
:class:`~hienoi.ParticleDisplay` class.
point_size : int
Size of the particles in pixels when the display mode is set to
:attr:`~hienoi.ParticleDisplay.POINT`.
edge_feather : float
Feather fall-off in pixels to apply to objects drawn with displays such
as :attr:`~hienoi.ParticleDisplay.CIRCLE` or
:attr:`~hienoi.ParticleDisplay.DISC`.
stroke_width : float
Width of the stroke in pixels to apply to objects drawn with displays
such as :attr:`~hienoi.ParticleDisplay.CIRCLE`.
quit : bool
``True`` to signal to the application that it should quit.
has_view_changed : bool
``True`` if the view state has just been changed following an event. It
is reset to ``False`` whenever :meth:`poll_events` is called.
user_data : object
Attribute reserved for any user data.
"""
def __init__(self,
window_title='hienoi',
window_position=Vector2i(sdl2.SDL_WINDOWPOS_CENTERED,
sdl2.SDL_WINDOWPOS_CENTERED),
window_size=Vector2i(800, 600),
window_flags=sdl2.SDL_WINDOW_RESIZABLE,
view_aperture_x=100.0,
view_zoom_range=Vector2f(1e-6, 1e+6),
mouse_wheel_step=0.01,
grid_density=10.0,
grid_adaptive_threshold=3.0,
show_grid=True,
background_color=Vector4f(0.15, 0.15, 0.15, 1.0),
grid_color=Vector4f(0.85, 0.85, 0.85, 0.05),
grid_origin_color=Vector4f(0.85, 0.25, 0.25, 0.25),
particle_display=ParticleDisplay.DISC,
point_size=4,
edge_feather=2.0,
stroke_width=0.0,
initialize_callback=None,
on_event_callback=None,
renderer=None):
renderer = {} if renderer is None else renderer
if sdl2.SDL_Init(sdl2.SDL_INIT_VIDEO) != 0:
raise RuntimeError(sdl2.SDL_GetError().decode())
renderer_info = hienoi.renderer.get_info()
if renderer_info.api == GraphicsAPI.OPENGL:
sdl2.SDL_GL_SetAttribute(sdl2.SDL_GL_CONTEXT_MAJOR_VERSION,
renderer_info.major_version)
sdl2.SDL_GL_SetAttribute(sdl2.SDL_GL_CONTEXT_MINOR_VERSION,
renderer_info.minor_version)
if renderer_info.profile == GLProfile.CORE:
sdl2.SDL_GL_SetAttribute(sdl2.SDL_GL_CONTEXT_PROFILE_MASK,
sdl2.SDL_GL_CONTEXT_PROFILE_CORE)
self._handles = _create_handles(window_title, window_position,
window_size, window_flags,
renderer_info)
self._renderer = hienoi.renderer.Renderer(**renderer)
self._initial_view_aperture_x = view_aperture_x
self._view_zoom_range = view_zoom_range
self._mouse_wheel_step = mouse_wheel_step
self._grid_adaptive_threshold = grid_adaptive_threshold
self._on_event_callback = on_event_callback
self._listen_for_navigation = False
self._is_view_manipulated = False
self.view_position = Vector2f(0.0, 0.0)
self._view_zoom = 1.0
self.grid_density = grid_density
self.show_grid = show_grid
self.background_color = background_color
self.grid_color = grid_color
self.grid_origin_color = grid_origin_color
self.particle_display = particle_display
self.point_size = point_size
self.edge_feather = edge_feather
self.stroke_width = stroke_width
self._navigation_action = NavigationAction.NONE
self.quit = False
self.user_data = UserData()
if initialize_callback:
initialize_callback(self)
@property
def view_zoom(self):
return self._view_zoom
@view_zoom.setter
def view_zoom(self, value):
self._view_zoom = max(self._view_zoom_range[0],
min(self._view_zoom_range[1], value))
@property
def navigation_action(self):
return self._navigation_action
@property
def has_view_changed(self):
return self._has_view_changed
def poll_events(self, scene_state, data=None):
"""Process each event in the queue.
Parameters
----------
scene_state : hienoi.renderer.SceneState
Scene state.
data : object
Data to pass back and forth between the caller and the function set
for the 'on event' callback.
"""
self._has_view_changed = False
event = sdl2.SDL_Event()
while sdl2.SDL_PollEvent(ctypes.byref(event)) != 0:
event_type = event.type
if event_type == sdl2.SDL_QUIT:
self._on_quit_event(event.quit)
elif event_type == sdl2.SDL_WINDOWEVENT:
self._on_window_event(event.window)
elif event_type == sdl2.SDL_KEYDOWN:
self._on_key_down_event(event.key, scene_state)
elif event_type == sdl2.SDL_KEYUP:
self._on_key_up_event(event.key)
elif event_type == sdl2.SDL_MOUSEBUTTONDOWN:
self._on_mouse_button_down_event(event.button)
elif event_type == sdl2.SDL_MOUSEBUTTONUP:
self._on_mouse_button_up_event(event.button)
elif event_type == sdl2.SDL_MOUSEWHEEL:
self._on_mouse_wheel_event(event.wheel)
elif event_type == sdl2.SDL_MOUSEMOTION:
self._on_mouse_motion_event(event.motion)
if self._on_event_callback:
self._on_event_callback(self, data, event)
if self.quit:
break
def render(self, scene_state):
"""Render a new frame.
Parameters
----------
scene_state : hienoi.renderer.SceneState
Scene state.
"""
renderer_state = hienoi.renderer.State(
window_size=self.get_window_size(),
view_position=self.view_position,
view_zoom=self._view_zoom,
origin=self.world_to_screen(Vector2f(0.0, 0.0)),
initial_view_aperture_x=self._initial_view_aperture_x,
view_aperture=self.get_view_aperture(),
grid_density=self.grid_density,
grid_adaptive_threshold=self._grid_adaptive_threshold,
background_color=self.background_color,
grid_color=self.grid_color,
grid_origin_color=self.grid_origin_color,
show_grid=self.show_grid,
particle_display=self.particle_display,
point_size=self.point_size,
edge_feather=self.edge_feather,
stroke_width=self.stroke_width,
)
self._renderer.render(renderer_state, scene_state)
if hienoi.renderer.get_info().api == GraphicsAPI.OPENGL:
sdl2.SDL_GL_SwapWindow(self._handles.window)
def terminate(self):
"""Cleanup the GUI resources."""
self._renderer.cleanup()
if hienoi.renderer.get_info().api == GraphicsAPI.OPENGL:
sdl2.SDL_GL_DeleteContext(self._handles.renderer.context)
sdl2.SDL_DestroyWindow(self._handles.window)
sdl2.SDL_Quit()
def get_window_size(self):
"""Retrieve the window size.
Returns
-------
hienoi.Vector2i
The window size.
"""
window_size_x = ctypes.c_int()
window_size_y = ctypes.c_int()
sdl2.SDL_GetWindowSize(self._handles.window,
ctypes.byref(window_size_x),
ctypes.byref(window_size_y))
return Vector2i(window_size_x.value, window_size_y.value)
def get_view_aperture(self):
"""Retrieve the view aperture.
It represents the area in world units covered by the view.
Returns
-------
hienoi.Vector2f
The view aperture.
"""
window_size = self.get_window_size()
aperture_x = self._initial_view_aperture_x / self._view_zoom
return Vector2f(aperture_x, aperture_x * window_size.y / window_size.x)
def get_mouse_position(self):
"""Retrieve the mouse position in screen space.
Returns
-------
hienoi.Vector2i
The mouse position.
"""
position_x = ctypes.c_int()
position_y = ctypes.c_int()
sdl2.SDL_GetMouseState(ctypes.byref(position_x),
ctypes.byref(position_y))
return Vector2i(position_x.value, position_y.value)
def get_screen_to_world_ratio(self):
"""Retrieve the ratio to convert a sreen unit into a world unit.
Returns
-------
float
The screen to world ratio.
"""
window_size = self.get_window_size()
aperture_x = self._initial_view_aperture_x / self._view_zoom
return aperture_x / window_size.x
def screen_to_world(self, point):
"""Convert a point from screen space to world space coordinates.
Parameters
----------
point : hienoi.Vector2i
Point in screen space coordinates.
Returns
-------
hienoi.Vector2f
The point in world space coordinates.
"""
window_size = self.get_window_size()
view_aperture = self.get_view_aperture()
return Vector2f(
(self.view_position.x
+ (point.x - window_size.x / 2.0)
* view_aperture.x / window_size.x),
(self.view_position.y
- (point.y - window_size.y / 2.0)
* view_aperture.y / window_size.y))
def world_to_screen(self, point):
"""Convert a point from world space to screen space coordinates.
Parameters
----------
point : hienoi.Vector2f
Point in world space coordinates.
Returns
-------
hienoi.Vector2i
The point in screen space coordinates.
"""
window_size = self.get_window_size()
view_aperture = self.get_view_aperture()
return Vector2i(
int(round(
(window_size.x / view_aperture.x)
* (-self.view_position.x + point.x + view_aperture.x / 2.0))),
int(round(
(window_size.y / view_aperture.y)
* (self.view_position.y - point.y + view_aperture.y / 2.0))))
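    # Round-trip sketch (assumes the Vector types from hienoi._vectors): the
    # world origin maps to the screen centre when the view is not panned,
    #   p = gui.world_to_screen(Vector2f(0.0, 0.0))
    #   gui.screen_to_world(p)  # ~ Vector2f(0.0, 0.0), up to pixel rounding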
def write_snapshot(self, filename):
"""Take a snapshot of the view and write it as a BMP image.
Parameters
----------
filename : str
Destination filename.
"""
pixel_size = 4
pixels = self._renderer.read_pixels()
surface = sdl2.SDL_CreateRGBSurfaceFrom(
pixels.data, pixels.width, pixels.height,
8 * pixel_size, pixels.width * pixel_size,
_RGB_MASKS.red, _RGB_MASKS.green, _RGB_MASKS.blue, 0)
sdl2.SDL_SaveBMP(surface, filename)
sdl2.SDL_FreeSurface(surface)
def _reset_view(self):
"""Reset the view position and zoom."""
self.view_position = Vector2f(0.0, 0.0)
self.view_zoom = 1.0
self._has_view_changed = True
def _fit_view(self, scene_state):
"""Fit the view to the scene."""
if len(scene_state.particles) > 1:
window_size = self.get_window_size()
initial_size = Vector2f(
self._initial_view_aperture_x,
self._initial_view_aperture_x * window_size.y / window_size.x)
lower_bounds = scene_state.lower_bounds
upper_bounds = scene_state.upper_bounds
required_size = (upper_bounds - lower_bounds).iscale(
_FIT_VIEW_REL_PADDING)
required_size = Vector2f(
max(required_size.x,
initial_size.x * self._view_zoom_range[0]),
max(required_size.y,
initial_size.y * self._view_zoom_range[0]))
self.view_position = (lower_bounds + upper_bounds).iscale(0.5)
self.view_zoom = min(initial_size.x / required_size.x,
initial_size.y / required_size.y)
elif len(scene_state.particles) == 1:
self.view_position = Vector2f(
*scene_state.particles['position'][0])
self.view_zoom = 1.0
else:
self._reset_view()
self._has_view_changed = True
def _on_quit_event(self, event):
"""Event 'on quit'."""
self.quit = True
def _on_window_event(self, event):
"""Event 'on window'."""
if event.event == sdl2.SDL_WINDOWEVENT_SIZE_CHANGED:
self._renderer.resize(event.data1, event.data2)
def _on_key_down_event(self, event, scene_state):
"""Event 'on key down'."""
code = event.keysym.sym
modifier = event.keysym.mod
if modifier == sdl2.KMOD_NONE:
if code == sdl2.SDLK_SPACE:
self._listen_for_navigation = True
elif code == sdl2.SDLK_d:
self.particle_display = (
(self.particle_display + 1) % (ParticleDisplay._LAST + 1))
elif code == sdl2.SDLK_f:
self._fit_view(scene_state)
elif code == sdl2.SDLK_g:
self.show_grid = not self.show_grid
elif code == sdl2.SDLK_r:
self._reset_view()
def _on_key_up_event(self, event):
"""Event 'on key up'."""
code = event.keysym.sym
if code == sdl2.SDLK_SPACE:
self._listen_for_navigation = False
def _on_mouse_button_down_event(self, event):
"""Event 'on mouse button down'."""
if self._listen_for_navigation:
if event.button == sdl2.SDL_BUTTON_LEFT:
self._navigation_action = NavigationAction.MOVE
elif event.button == sdl2.SDL_BUTTON_RIGHT:
self._navigation_action = NavigationAction.ZOOM
def _on_mouse_button_up_event(self, event):
"""Event 'on mouse button up'."""
if (event.button == sdl2.SDL_BUTTON_LEFT
or event.button == sdl2.SDL_BUTTON_RIGHT):
self._navigation_action = NavigationAction.NONE
def _on_mouse_wheel_event(self, event):
"""Event 'on mouse wheel'."""
scale = 1.0 + self._mouse_wheel_step * event.y
self.view_zoom *= scale
self._has_view_changed = True
def _on_mouse_motion_event(self, event):
"""Event 'on mouse motion'."""
window_size = self.get_window_size()
view_aperture = self.get_view_aperture()
if self._navigation_action == NavigationAction.MOVE:
self.view_position.set(
(self.view_position.x
- event.xrel * view_aperture.x / window_size.x),
(self.view_position.y
+ event.yrel * view_aperture.y / window_size.y))
self._has_view_changed = True
elif self._navigation_action == NavigationAction.ZOOM:
scale = (1.0
+ float(event.xrel) / window_size.x
- float(event.yrel) / window_size.y)
self.view_zoom *= scale
self._has_view_changed = True
def _create_handles(window_title, window_position, window_size, window_flags,
renderer_info):
"""Create the SDL2 handles."""
window_flags = sdl2.SDL_WINDOW_SHOWN | window_flags
if renderer_info.api == GraphicsAPI.OPENGL:
window_flags |= sdl2.SDL_WINDOW_OPENGL
window = sdl2.SDL_CreateWindow(
window_title.encode(),
window_position.x, window_position.y,
window_size.x, window_size.y,
window_flags)
if not window:
raise RuntimeError(sdl2.SDL_GetError().decode())
context = sdl2.SDL_GL_CreateContext(window)
if not context:
raise RuntimeError(sdl2.SDL_GetError().decode())
# Try to disable the vertical synchronization. It applies to the active
# context and thus needs to be called after `SDL_GL_CreateContext`.
sdl2.SDL_GL_SetSwapInterval(0)
return _Handles(
window=window,
renderer=_GLHandles(context=context))
| mit | 9,104,025,055,639,628,000 | 34.851064 | 79 | 0.574233 | false |
arthurmensch/modl | modl/input_data/tests/test_image.py | 1 | 1754 | import numpy as np
from modl.input_data.image import scale_patches
from modl.input_data.image_fast import clean_mask, fill
from numpy.testing import assert_array_almost_equal, assert_array_equal
from sklearn.feature_extraction.image import extract_patches
from sklearn.utils import check_random_state
def test_scale_patches():
patch_size = (8, 8, 3)
n = 100
shape = (n, ) + patch_size
rs = check_random_state(0)
X = rs.randn(*shape)
Y = scale_patches(X, with_mean=True, with_std=True, channel_wise=True)
assert_array_almost_equal(Y.mean(axis=(1, 2)), 0)
assert_array_almost_equal(np.sum(Y ** 2, axis=(1, 2)), 1 / 3)
scale_patches(X, with_mean=True, with_std=True, channel_wise=True,
copy=False)
assert_array_equal(X, Y)
X = rs.randn(*shape)
Y = scale_patches(X, with_mean=False, with_std=True, channel_wise=True)
assert_array_almost_equal(np.sum(Y ** 2, axis=(1, 2)), 1 / 3)
Y = scale_patches(X, with_mean=True, with_std=False, channel_wise=True)
assert_array_almost_equal(Y.mean(axis=(1, 2)), 0)
Y = scale_patches(X, with_mean=True, with_std=True, channel_wise=False)
assert_array_almost_equal(Y.mean(axis=(1, 2, 3)), 0)
assert_array_almost_equal(np.sum(Y ** 2, axis=(1, 2, 3)), 1)
def test_clean():
A = np.ones((64, 64, 3))
A[:2, :, :] = -1
A[-2:, :, :] = -1
A[:, :2, :] = -1
A[:, -2:, :] = -1
patches = extract_patches(A, (8, 8, 3))
idx = clean_mask(patches, A)
mask = np.zeros((64, 64, 3))
mask[2:55, 2:55, 0] = 1
true_idx = np.c_[np.where(mask)]
assert_array_almost_equal(idx, true_idx)
def test_fill():
p, q, r = 10, 10, 10
assert_array_equal(np.c_[np.where(np.ones((p, q, r)))], fill(p, q, r))
| bsd-2-clause | -5,648,527,415,653,365,000 | 32.730769 | 75 | 0.611745 | false |
gerasim13/flask-mongoengine-1 | tests/test_basic_app.py | 1 | 2414 | import datetime
import flask
from flask.ext.mongoengine import MongoEngine
from tests import FlaskMongoEngineTestCase
class BasicAppTestCase(FlaskMongoEngineTestCase):
def setUp(self):
super(BasicAppTestCase, self).setUp()
db = MongoEngine()
class Todo(db.Document):
title = db.StringField(max_length=60)
text = db.StringField()
done = db.BooleanField(default=False)
pub_date = db.DateTimeField(default=datetime.datetime.now)
db.init_app(self.app)
Todo.drop_collection()
self.Todo = Todo
@self.app.route('/')
def index():
return '\n'.join(x.title for x in self.Todo.objects)
@self.app.route('/add', methods=['POST'])
def add():
form = flask.request.form
todo = self.Todo(title=form['title'],
text=form['text'])
todo.save()
return 'added'
@self.app.route('/show/<id>/')
def show(id):
todo = self.Todo.objects.get_or_404(id=id)
return '\n'.join([todo.title, todo.text])
self.db = db
def test_connection_default(self):
self.app.config['MONGODB_SETTINGS'] = {}
self.app.config['TESTING'] = True
db = MongoEngine()
db.init_app(self.app)
self.app.config['TESTING'] = True
db = MongoEngine()
db.init_app(self.app)
def test_with_id(self):
c = self.app.test_client()
resp = c.get('/show/38783728378090/')
self.assertEqual(resp.status_code, 404)
c.post('/add', data={'title': 'First Item', 'text': 'The text'})
resp = c.get('/show/%s/' % self.Todo.objects.first_or_404().id)
self.assertEqual(resp.status_code, 200)
self.assertEquals(resp.data.decode('utf-8'), 'First Item\nThe text')
def test_basic_insert(self):
c = self.app.test_client()
c.post('/add', data={'title': 'First Item', 'text': 'The text'})
c.post('/add', data={'title': '2nd Item', 'text': 'The text'})
rv = c.get('/')
self.assertEquals(rv.data.decode('utf-8'), 'First Item\n2nd Item')
def test_request_context(self):
with self.app.test_request_context():
todo = self.Todo(title='Test', text='test')
todo.save()
self.assertEqual(self.Todo.objects.count(), 1)
| bsd-3-clause | 9,086,295,325,273,280,000 | 30.350649 | 76 | 0.564623 | false |
charre2017idv/MathDinamita | Math_Lib/Math_Lib/Math_Lib.py | 1 | 28504 | import math
import os
n=0
print ("MATHLIB (TM)2017")
print ("-"*50)
print ("Las funciones que puede realizar la libreria son las siguientes:")
print ("")
'''This part builds the main menu'''
print ("FUNCIONES BASICAS:")
print ("")
print ("-(1) Suma ")
print ("-(2) Multiplicacion ")
print ("-(3) Division")
print ("-(4) Modulo")
print ("-(5) Potencia")
print ("-(6) Raiz")
print ("-(7) Verificacion de numeros primos")
print ("-(8) Rango de numeros primos")
print ("")
print ("FUNCIONES DE CONVERSION:")
print ("")
print ("-(9) Binario -> Hexadecimal")
print ("-(10) Binario -> Decimal")
print ("-(11) Decimal -> Hexadecimal")
print ("-(12) Decimal -> Binario")
print ("-(13) Hexadecimal -> Binario")
print ("-(14) Hexadecimal -> Decimal")
print ("-(15) Metros -> Yardas")
print ("-(16) Yardas -> Metros")
print ("-(17) Metros -> Pulgadas")
print ("-(18) Pulgadas -> Metros")
print ("")
print ("FUNCIONES ADICIONALES: ")
print ("")
print ("-(19) Indice de Masa Corporal [IMC]")
print ("")
print ("-"*50)
'''Here we start defining what happens depending
on the number the user enters'''
while (n<1 or n>19):
n=int(input("Escriba el numero de la funcion a realizar: "))
if (n<1 or n>19):
print ("Ese numero es invalido. Por favor, ingrese una opcion permitida")
print ("")
print ("-"*50)
os.system("cls")
"""Aqui definimos algunas de las funciones que vamos a ocupar en el repositorio"""
def suma (a):
return a
def multiplicacion (a):
return a
def division (a,b):
return float(a/b)
def modulo (a,b):
return a%b
def potencia (a,b): #TODO: support decimal input
    return float(a**b)
def raiz (a,b):
    #a-th root of b (math.sqrt only handles square roots and takes one argument)
    return b ** (1.0 / a)
def BaH (a):
return a
def DaH (a):
return a
def DaB (a):
return a
def HaB (a): # Used by menu option 13 (hexadecimal to binary)
    if (a == "0"): # First hex digit value to convert
        cambio = "0000" # Its 4-bit binary form
    elif ( a == "1"): # Second hex digit value to convert
        cambio = "0001" # Its 4-bit binary form
elif (a == "2"):
cambio = "0010"
elif (a == "3"):
cambio = "0011"
elif (a == "4"):
cambio = "0100"
elif (a == "5"):
cambio = "0101"
elif (a == "6"):
cambio = "0110"
elif (a == "7"):
cambio = "0111"
elif (a == "8"):
cambio = "1000"
elif (a == "9"):
cambio = "1001"
elif (a == "A" or a == "a"):
cambio = "1010"
elif (a == "B" or a == "b"):
cambio = "1011"
elif (a == "C" or a == "c"):
cambio = "1100"
elif (a == "D" or a == "d"):
cambio = "1101"
elif (a == "E" or a == "e"):
cambio = "1110"
elif (a == "F" or a == "f"):
cambio = "1111"
else:
cambio = "Ese valor no es valido."
    return cambio
def HaD (a):
return a
def BaD (a):
return a
def primos (a,b):
return (a,b)
def primosrang (a,b):
return (a,b)
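# A compact primality sketch kept for reference (assumption: the menu code
# below keeps its own divisor-counting loops and never calls this helper).
# n > 1 is prime exactly when no i in 2..sqrt(n) divides it.
def es_primo_sketch(n):
    return n > 1 and all(n % i for i in range(2, int(n ** 0.5) + 1))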
'''ADDITION
This function adds however many user-supplied numbers are requested and returns the new total.
'''
if (n==1) : #Suma
print ("---SUMA---")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("")
if (hlp=="h" or hlp=="H"):
print ("---Ayuda de Suma---")
print ("(1) Ingresar la cantidad de numeros a sumar")
print ("(2) Ingresar las cifras individualmente y presionar 'Enter' para registrarlas")
print ("(3) Una vez ingresados todos los datos su respuesta se imprimira presionando 'Enter'")
print ("")
    #Ask the user how many numbers will be added
    a=(input("Escriba cantidad de numeros a sumar: "))
    #Avoid crashing on empty or non-numeric input
    while (not a.isdigit()):
        print("Porfavor ingrese una cantidad valida")
        a=(input("Numero: "))
    a=int(a)
    i=0
    r=0
    while(i<a):
        #Read each number to add
        b=int(input("Numero: "))
        #Accumulate the total
        r=r+b
        i=i+1
    #Print the result
    print ("El resultado de la suma es: " +str(suma(r)))
    print ("")
"""MULTIPLICACION
En esta funcion se pueden multiplicar dos variables dadas por el usuario y asi obtener un nuevo valor numerico """
elif (n==2): #Multiplicacion
print ("---MULTIPLICACION---")
print ("")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("")
if (hlp=="h" or hlp=="H"):
print ("---Ayuda de Multiplicacion---")
print ("(1) Ingresar la cantidad de numeros a multiplicar")
print ("(2) Ingresar las cifras individualmente y presionar 'Enter' para registrarlas")
print ("(3) Una vez ingresados todos los datos su respuesta se imprimira presionando 'Enter'")
print ("")
    #Ask the user how many numbers will be multiplied
    a=(input("Escriba cantidad de numeros a multiplicar: "))
    i=0
    r=1
    #Avoid crashing on empty or non-numeric input
    while(not a.isdigit()):
        print("Porfavor ingrese una cantidad valida")
        a=(input("Escriba cantidad de numeros a multiplicar: "))
    a=int(a)
    while(i<a):
        #Read each factor
        b=int(input("Numero: "))
        #Accumulate the product
        r=r*b
        i=i+1
    #Print the result
    print ("El resultado de la multiplicacion es: " +str(multiplicacion(r)))
"""DIVISION
En esta funcion se va a poder dividir dos valores para asi poder obtener un resultado numerico nuevo
Se utiliza la funcion de numero flotante para que de decimales"""
elif (n==3): #Division
print ("---DIVISION---")
print ("")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("")
if (hlp=="h" or hlp=="H"):
print ("---Ayuda de Division---")
print ("* El programa solo imprime el resultado de la division")
print ("(1) Ingresar el dividendo [El numero a dividir]")
print ("(2) Ingresar el divisor [El numero que dividirá al dividendo]")
print ("(3) Una vez ingresados todos los datos su respuesta se imprimira presionando 'Enter'")
print ("")
    #Ask for the dividend
    a=(input("Escriba el dividendo: "))
    while (not a.isdigit()):
        #Re-prompt when the input is not numeric
        print ("Solo se aceptan numeros.")
        print ("")
        a=(input("Escriba el dividendo: "))
    a=float(a)
    #Ask for the divisor
    b=(input("Escriba el divisor: "))
    while (not b.isdigit()) or float(b) == 0:
        #Re-prompt on non-numeric input or a zero divisor
        print ("Solo se aceptan numeros distintos de cero.")
        print ("")
        b=(input("Escriba el divisor: "))
    b=float(b)
    #Print the result
    print ("Su resultado es: " +str(division(a,b)))
"""Aqui implementamos la funcion modulo que es una division que solo nos muestra resultados de enteros """
elif (n==4): #Modulo
print ("---MODULO---")
print ("")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("")
if (hlp=="h" or hlp=="H"):
print ("---Ayuda de Modulo---")
print ("* El programa solo imprime el residuo de la division")
print ("(1) Ingresar el dividendo [El numero a dividir]")
print ("(2) Ingresar el divisor [El numero que dividirá al dividendo]")
print ("(3) Una vez ingresados todos los datos su respuesta se imprimira presionando 'Enter'")
print ("")
    #Ask for the dividend and the divisor
    a=(input("Escriba el dividendo : "))
    while (not a.isdigit()):
        #Re-prompt when the input is not numeric
        print ("Solo se aceptan numeros.")
        print ("")
        a=(input("Escriba el dividendo: "))
    a=int(a)
    b=(input("Escriba el divisor: "))
    while (not b.isdigit()) or int(b) == 0:
        #Re-prompt on non-numeric input or a zero divisor
        print ("Solo se aceptan numeros distintos de cero.")
        print ("")
        b=(input("Escriba el divisor: "))
    b=int(b)
    #Print the remainder
    print ("Su resultado es: " +str(modulo(a,b)))
"""POTENCIA
La función calculara un numero elevado a cierta potencia.
El usuario puede ingresar el numero base y el exponente que guste para hacer la funcion"""
elif (n==5): #Potencia
print ("---POTENCIA---")
print ("")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("")
if (hlp=="h" or hlp=="H"):
print ("---Ayuda de Potencia---")
print ("(1) Ingresar el numero base [El numero a potenciar]")
print ("(2) Ingresar el exponente [El numero de veces que la base se multiplicara a si misma]")
print ("(3) Una vez ingresados todos los datos su respuesta se imprimira presionando 'Enter'")
print ("")
    #Ask the user for the base number
    a=float(input("Escriba el numero base: "))
    #Ask the user for the exponent
    b=float(input("Escriba el exponente: "))
    #Print the result
    print ("Su resultado es: " +str(potencia(a,b)))
"""RAIZ
La función calculara la raiz de un numero cualquiera ingresado por el usuario.
El usuario puede poner como parametro el indice y numero que gusten"""
elif (n==6): #Raiz
print ("---RAIZ---")
print ("")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("")
if (hlp=="h" or hlp=="H"):
print ("---Ayuda de Raiz---")
print ("(1) Ingresar el radicando [El numero del cual se obtendrá la raiz]")
print ("(2) Ingresar el indice [La raiz de la cual se obtendrá el resultado]")
print ("(3) Una vez ingresados todos los datos su respuesta se imprimira presionando 'Enter'")
print ("")
    #The index defaults to 2 (square root)
    a=2
    #Ask the user for the data
    #From here on, a may change if the user wants a different root
    b=int(input("Escriba numero del radicando: "))
    a=int(input("Escriba a que numero de indice: "))
    if (a<=2):
        print ("Si el valor es menor que 2, el indice se toma al cuadrado por defecto")
        a=2
    #Print the a-th root of the radicand
    print ("Su resultado es: " +str(raiz(a,b)))
"""VERIFICACION DE NUMEROS PRIMOS
La función demostrara si el numero que ha ingresado el usuario es numero primo o no.
El programa verificara si el numero ingresado el multiplo de sus anteriores.
Caso 1: En caso de que encuentre un multiplo, la función imprimira que no es primo
Caso 2: Si el numero demuestra que no es multiplo de nunguno, la función imprimira que si es primo"""
elif (n==7): #Verificacion de numeros primos
print ("---VERIFICACION DE NUMEROS PRIMOS---")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("")
if (hlp=="h" or hlp=="H"):
print ("---Ayuda de Numeros Primos por Verificacion---")
print ("(1) Ingrese una cifra o numero entero cualquiera")
print ("(2) Una vez ingresado el numero el programa evaluara el numero")
print ("(3) Como resultado, el programa le dira si su numero es primo o no")
print ("")
    #a counts the divisors found
    a=0
    #Ask for the number to check
    n=int(input("Ingrese numero para verificar si es primo: "))
    #Count the divisors of n
    for i in range(1,n+1):
        #When the remainder is 0, i divides n, so add 1 to a
        if(n % i==0):
            a=a+1
    #A prime has exactly two divisors; anything else is not prime
    if(a!=2):
print ("El numero "+(str(n)+" no es primo"))
print ("")
else:
print ("El numero "+(str(n)+" si es primo"))
print ("")
"""NUMERO PRIMO POR RANGO
La función demostrara la lista de numeros primos, tomando como limite el numero ingresado por el usuario
El programa verificara si cada numero dentro del rango es multiplo de sus anteriores.
Caso 1: En caso de que encuentre un multiplo, el numero sera desechado por la funcion.
Caso 2: Si el numero demuestra que no es multiplo de nunguno, sera imprimido en pantalla"""
elif (n==8): #Numero Primo por Rango
    #a is a counter
    a=0
print ("---NUMEROS PRIMOS POR RANGO---")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("")
if (hlp=="h" or hlp=="H"):
print ("---Ayuda de Numeros Primos por Rango---")
print ("(1) Ingresar una cifra para ponerlo como limite de la lista")
print ("(2) Una vez ingresado el numero el programa evaluara los numeros primos dentro del rango")
print ("(3) Como resultado, se generara una lista de numeros primos hasta el numero limite")
print ("")
    #Ask for the upper bound of the list
lim=(input("Ingrese el limite de la lista de numeros primos: "))
while (not lim.isdigit()):
print ("Solo se aceptan numeros.")
print ("")
lim=(input("Ingrese el limite de la lista de numeros primos: "))
lim=int(lim)
print ("")
print ("La lista de numeros primos hasta el numero "+str(lim)+" es:")
print ("")
    #Start the scan
    #Candidates run from 2 up to (but not including) the limit
for x in range (2,lim):
prnt=False
verf=0
for i in range(1,x+1):
if(x % i==0):
verf=verf+1
if (prnt==False and verf<3):
            #Print the prime
print (str(x))
prnt=True
print ("")
''' BINARY TO HEXADECIMAL
This function converts a number written in binary into hexadecimal.
It uses standard library helpers such as hex().
int(numbinario, 2) parses the digits as a base-2 integer.
isdigit() keeps letters out so that only digits are accepted.
'''
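# Worked example (hypothetical input): "1010" -> int("1010", 2) == 10,
# and hex(10) == "0xa".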
elif (n==9): #Binario a Hexadecimal
print ("---BINARIO -> HEXADECIMAL---")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("")
if (hlp=="h" or hlp=="H"):
print ("---Ayuda de Binario -> Hexadecimal---")
print ("(1) Ingresar una cifra en binario. [Recuerde que este sistema usa 1 y 0] y despues presione 'Enter'")
print ("(2) El programa convertirá la cifra a hexadecimal.")
print ("(3) Como resultado, se imprimira en pantalla la cifra convertida a sistema hexadecimal.")
print ("")
print ("-"*50)
numbinario = input("Ingrese un numero binario: ")
print ("-"*50)
    while(numbinario == "" or not set(numbinario) <= {"0", "1"}):
        print ("Solo se aceptan numeros en binario.")
        numbinario = input("Ingrese un numero binario: ")
        print ("-"*50)
    bina = int(numbinario, 2)
print ("Su numero",numbinario," en hexadecimal es:", hex(bina))
elif (n==10): #Binary to Decimal -- TODO: finish
print ("BINARIO -> DECIMAL")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("")
if (hlp=="h" or hlp=="H"):
print ("---Ayuda de Binario a Decimal---")
print ("(1) Ingresar un numero en binario (Recuerde que estos solo llevan 0 y 1) y luego presionar 'Enter'")
print ("(2) Al recibir su numero en forma decimal aparecera una pregunta")
print ("(3) Presione '1' seguido de un 'Enter' para poder introducir otro numero o presione '0' seguido de un 'Enter' para terminar el programa")
print ("")
    respuesta=1
    while(respuesta==1):
        #Ask the user for a binary number
        binario=input("Introduzca un numero en binario: ")
        #A valid binary string is non-empty and holds only 0s and 1s
        if binario and set(binario) <= {"0", "1"}:
            decimal=int(binario, 2)
            print ("\nSu numero en decimal es: " + str(decimal))
        else:
            print ("Los NUMEROS binarios solo llevan 0 y 1")
        respuesta=int(input("Va a introducir otro numero:<Si[1] No[0]>"))
'''The DaH flow starts by asking the user for an integer;
if the value entered is not a number, an error is shown.
.isdigit() checks that the string contains only digits, so
letter characters are never accepted.'''
"""Here we implement the decimal-to-hexadecimal function, which converts
decimal numbers into the hexadecimal system """
elif (n==11): #Decimal a Hexadecimal
print ("DECIMAL -> HEXADECIMAL")
print ("")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("")
if (hlp=="h" or hlp=="H"):
print ("---Ayuda de Decimal -> Hexadecimal---")
print ("(1) Ingresar una cifra para converir el numero a hexadecimal")
print ("(2) Una vez ingresado el numero, se trasladara a decimal pero contando A, B, C, D, E y F como numeros también.")
print ("(3) Como resultado, el programa le dira el numero que ingreso, pero usando los dieciseis numeros")
print ("")
    #Ask for a number
    a=(input("Ingrese una cifra en decimal: "))
    while (not a.isdigit()):
        #Tell the user the input was not a number and ask again
        print ("Solo se aceptan numeros.")
        a=(input("Ingrese una cifra en decimal: "))
    #Convert the value to an integer
    a=int(a)
    #Print the result
print ("Su resultado es: " + format(a, '02x'))
print ("")
"""Aqui hacemos la funcion decimal a binario que se encarga de recibir numeros decimales i tranformarlos a codigo binario"""
elif (n==12): #Decimal a Binario
print ("---DECIMAL -> BINARIO---")
print ("")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("")
if (hlp=="h" or hlp=="H"):
print ("---Ayuda de Decimal -> Binario---")
print ("(1) Ingresar una cifra para converir el numero en binario")
print ("(2) Una vez ingresado el numero, se trasladara a 1s y 0s")
print ("(3) Como resultado, el programa le dira el numero que ingreso, pero en binario")
print ("")
respuesta=1
while(respuesta==1):
numero=input("Ingrese un numero: ")
r=[]
        #Letters and decimal fractions cannot be converted to binary
        try:
            numero = int(numero)
            if(numero==0):
                print("0 no se puede convertir a binario")
                input()
            elif(numero<0):
                print("Los numeros menores a 0 no se pueden convertir a binario")
                input()
            elif(numero>0):
                while(numero>0):
                    #If the remainder of dividing the number by 2 is 0
                    #append 0 to the list, otherwise append 1
                    if(numero%2==0):
                        r.append(0)
                    else:
                        r.append(1)
                    #Integer (floor) division; a plain / yields floats and never reaches 0
                    numero=numero//2
                #Reverse the list to obtain the true binary number
                r.reverse()
                print (r)
        except:
            print("Las letras y los numeros decimales no se pueden convertir a binario")
        #The number has to be greater than 0 because smaller values
        #cannot be represented in this scheme
respuesta=int(input("¿Quieres ingresar otro numero? (Si[1] No[0])"))
print ("")
elif (n==13): #Hexadecimal to Binary (TODO: error-proof the input)
print ("HEXADECIMAL -> BINARIO")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("")
if (hlp=="h" or hlp=="H"):
print ("---Ayuda de Hexadecimal a Binario---")
print ("(1) Escriba una cifra en sistema hexadecimal [Numeros y A,B,C,D,E,F son admitidos]. Luego presionar 'Enter'")
print ("(2) El programá realizará los calculos necesarios para convertir la cifra en binario")
print ("(3) Como resultado se imprimirá el resultado en 1 y 0; Digitos del sistema binario")
print ("")
numero = input("Ingresa un valor hexadecimal :")
print ("-"*50)
a=len(numero)
binnario=0
letras=""
while (binnario < a):
letras=letras+HaB(numero[binnario])
binnario+=1
print ("Tu valor en binario es: ", letras)
""" En esta funcion se puede calcular un numero de hexadecimal a un numero decimal
comenzando por iniciar un ciclo que niegue letras fuera del patron del hexadecima
en este caso solo se permiten de la A a la F."""
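# Worked example (hypothetical input): "ff" -> int("ff", 16) == 255.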
elif (n==14): #Hexadecimal a Decimal
print ("HEXADECIMAL -> DECIMAL")
print ("")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("---Ayuda de Hexadecimal -> Decimal---")
print ("")
if (hlp=="h" or hlp=="H"):
print ("(3) Como resultado, se mostrara el numero en sistema decimal")
print ("(2) Presione 'Enter' para que el programa lo convierta a decimal")
print ("(1) Escriba una cifra en sistema hexadecimal [Numeros y A,B,C,D,E,F son admitidos]")
print ("")
    # Validate every character; a lexical comparison would let strings
    # such as "ZZ" through and crash int(hexdec, 16)
    print ("Que numero hexadecimal quiere convertir: ")
    hexdec=(input("Ingresa el numero en hexadecimal: "))
    while (hexdec == "" or not all(c in "0123456789abcdefABCDEF" for c in hexdec)):
        print ("No es una letra valida")
        hexdec=(input("Ingresa el numero en hexadecimal: "))
    dec = int(hexdec, 16)
print (hexdec + " en Decimal es: " + str(dec) +"\n")
"""METROS A YARDAS
Con esta conversion podras facilmente convertir la medida de metro a yardas
Se solicita la cantidad de metros que el usuario quiere transformar luego
multiplica esa cantidad por metro(1)/yarda(.914) y muestra el resultado """
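# Worked example (hypothetical input): 10 m * (1 yd / 0.914 m) is roughly 10.94 yd.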
elif (n==15): #Metros a Yardas (Falta completar)
print ("METROS -> YARDAS")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("---Ayuda de Metros -> Yardas---")
print ("")
if (hlp=="h" or hlp=="H"):
print ("(1) Escriba la cantidad de metros que desea convertir")
print ("(2) Presione 'Enter' para que el programa lo convierta a Yardas")
print ("(3) Como resultado, se mostrara la conversi[on de metros(m) a yardas(yd)")
print ("")
    #Ask how many meters to convert
    metros=input("¿Cuantos metros quieres convertir a yardas? ")
    while (not metros.isdigit()):
        #Re-prompt until the user enters a number
        print ("")
        print ("Porfavor ingrese un numero valido")
        metros=input("¿Cuantos metros quieres convertir a yardas? ")
    #One meter is roughly 1/0.914 yards
    conversion= int(metros)*(1/0.914)
    #Print the result
    print ("Sus metros en yardas son: "+ str(conversion)+"yd")
"""YARDAS A METROS
Con esta funcion podras transformar Yardas(yd) a Metros(m) en base a una operacion
basada en regla de 3; multiplicando el numero de yardas por el el equivalente de
un metro pero en medias de yardas y dividiendoloe entre 1 para asi mostrar la conversion"""
elif (n==16): #yardas a metros
print ("YARDAS -> METROS")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("---Ayuda de Yardas -> Metros---")
print ("")
if (hlp=="h" or hlp=="H"):
print ("(1) Escriba la cantidad de yardas que desea convertir")
print ("(2) Presione 'Enter' para que el programa lo convierta a Metros")
print ("(3) Como resultado, se mostrara la conversion de Yardas(yd) a Metros(m)")
print ("")
    #Ask how many yards to convert
    yardas=input("Ingrese el numero de Yardas que quiere transformar a metros: ")
    while (not yardas.isdigit()):
        #Re-prompt until the user enters a number
        print("Porfavor ingrese un numero valido")
        yardas=input("Podria ingresar otra vez el numero?: ")
    #One yard equals 0.9144 meters
    Conversion= int(yardas)*0.9144
    #Print the result
    print ("Sus yardas transformadas a metros son: "+str(Conversion)+"m")
"""CALCULADORA DE IMC
El proposito de esta funcion es el de calcular el indice de masa corporal del usuario.
Los datos del usuario (Peso y Altura) se utilizan como variables para obtener el dato.
El peso se divide entre la altura en metros al cuadrado."""
elif (n==17): #Metros a Pulgadas
print ("Bienvenido al convertidor Metros a Pulgada")
m=(input("Cuantos metros quiere convertir? "))
pulgada=39.3700787402
if (m== " "):
print("Porfavor no deje el espacio en blanco")
m=int(input("Cuantos metros quiere convertir? "))
elif (m<0):
print("no puedes tomar valores negativos")
m=int(input("Cuantos metros quiere convertir? "))
else:
operacion=pulgada*int(m)
print (operacion)
elif(n==18):#Inches to Meters
    print ("Bienvenido al convertidor Pulgadas a Metros")
    p=int(input("Cuantas pulgadas quiere convertir? "))
    me=0.0254
    operacion=me*int(p)
    #Print the converted length
    print ("Su resultado es: " + str(operacion) + " metros")
"""BMI CALCULATOR
The purpose of this function is to compute the user's body mass index.
The user's weight and height are used as the inputs:
the weight is divided by the height in meters squared."""
elif (n==19): #BMI calculator
print ("CALCULADORA DE INDICE DE MASA CORPORAL")
print ("")
hlp=str(input("Para iniciar la funcion presione 'Enter', pero si no sabe como funciona o necesita ayuda, presione 'h': "))
print ("")
if (hlp=="h" or hlp=="H"):
print ("---Ayuda de Calculadora de IMC---")
print ("(1) Ingrese su peso en kg. [Kilogramo: 1kg = 1000gr.]")
print ("(2) Ingrese su altura en mt. [Metro: 1mt. = 100cm.]")
print ("(3) Como resultado, el programa le dira su indice de masa corporal.")
print ("")
    #Ask the user for their weight
    pes=(input("Ingrese su peso en Kg (Kilogramos): "))
    while (not pes.isdigit()):
        #Re-prompt when the input is not numeric
        print ("Solo se aceptan numeros.")
        print ("")
        pes=(input("Ingrese su peso en Kg (Kilogramos): "))
    #Convert the weight to an integer
    pes=int(pes)
    #Ask the user for their height in centimeters
    alt=(input("Ingrese su altura en Cm (Centimetros): "))
    while (not alt.isdigit()):
        #Re-prompt when the input is not numeric
        print ("Solo se aceptan numeros.")
        print ("")
        alt=(input("Ingrese su altura en Cm (Centimetros): "))
    #Convert the height to a float
    alt=float(alt)
    #Compute the BMI: weight / (height in meters) squared
    imc=(pes/((alt/100)**2))
    #Print the result
print ("Su IMC es de: "+str(imc))
print ("")
else:
print ("No existe ese valor")
#Example commit
ric2b/Vivaldi-browser | chromium/tools/perf/page_sets/rendering/tough_path_rendering_cases.py | 1 | 2198 | # Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from page_sets.rendering import rendering_story
from page_sets.rendering import story_tags
class ToughPathRenderingPage(rendering_story.RenderingStory):
ABSTRACT_STORY = True
TAGS = [story_tags.TOUGH_PATH_RENDERING]
def RunPageInteractions(self, action_runner):
with action_runner.CreateInteraction('ClickStart'):
action_runner.Wait(10)
class GUIMarkVectorChartPage(ToughPathRenderingPage):
BASE_NAME = 'guimark_vector_chart'
URL = 'http://www.craftymind.com/factory/guimark2/HTML5ChartingTest.html'
class MotionMarkCanvasFillShapesPage(ToughPathRenderingPage):
BASE_NAME = 'motion_mark_canvas_fill_shapes'
# pylint: disable=line-too-long
URL = 'http://rawgit.com/WebKit/webkit/master/PerformanceTests/MotionMark/developer.html?test-name=Fillshapes&test-interval=20&display=minimal&tiles=big&controller=fixed&frame-rate=50&kalman-process-error=1&kalman-measurement-error=4&time-measurement=performance&suite-name=Canvassuite&complexity=1000'
TAGS = ToughPathRenderingPage.TAGS + [story_tags.REPRESENTATIVE_MOBILE]
class MotionMarkCanvasStrokeShapesPage(ToughPathRenderingPage):
BASE_NAME = 'motion_mark_canvas_stroke_shapes'
# pylint: disable=line-too-long
URL = 'http://rawgit.com/WebKit/webkit/master/PerformanceTests/MotionMark/developer.html?test-name=Strokeshapes&test-interval=20&display=minimal&tiles=big&controller=fixed&frame-rate=50&kalman-process-error=1&kalman-measurement-error=4&time-measurement=performance&suite-name=Canvassuite&complexity=1000'
class ChalkboardPage(rendering_story.RenderingStory):
BASE_NAME = 'ie_chalkboard'
URL = 'https://testdrive-archive.azurewebsites.net/performance/chalkboard/'
TAGS = [
story_tags.TOUGH_PATH_RENDERING,
story_tags.REPRESENTATIVE_MOBILE,
story_tags.REPRESENTATIVE_MAC_DESKTOP
]
def RunPageInteractions(self, action_runner):
with action_runner.CreateInteraction('ClickStart'):
action_runner.EvaluateJavaScript(
'document.getElementById("StartButton").click()')
action_runner.Wait(20)
| bsd-3-clause | -4,094,181,774,901,659,600 | 43.857143 | 306 | 0.787534 | false |
palfrey/coherence | coherence/upnp/devices/binary_light_client.py | 1 | 2116 | # Licensed under the MIT license
# http://opensource.org/licenses/mit-license.php
# Copyright 2008, Frank Scholz <[email protected]>
from coherence.upnp.services.clients.switch_power_client import SwitchPowerClient
from coherence import log
import coherence.extern.louie as louie
class BinaryLightClient(log.Loggable):
logCategory = 'binarylight_client'
def __init__(self, device):
self.device = device
self.device_type,self.version = device.get_device_type().split(':')[3:5]
self.icons = device.icons
self.switch_power = None
self.detection_completed = False
louie.connect(self.service_notified, signal='Coherence.UPnP.DeviceClient.Service.notified', sender=self.device)
for service in self.device.get_services():
if service.get_type() in ["urn:schemas-upnp-org:service:SwitchPower:1"]:
self.switch_power = SwitchPowerClient(service)
self.info("BinaryLight %s" % (self.device.get_friendly_name()))
if self.switch_power:
self.info("SwitchPower service available")
else:
self.warning("SwitchPower service not available, device not implemented properly according to the UPnP specification")
def remove(self):
self.info("removal of BinaryLightClient started")
if self.switch_power != None:
self.switch_power.remove()
def service_notified(self, service):
self.info("Service %r sent notification" % service);
if self.detection_completed == True:
return
if self.switch_power != None:
if not hasattr(self.switch_power.service, 'last_time_updated'):
return
if self.switch_power.service.last_time_updated == None:
return
self.detection_completed = True
louie.send('Coherence.UPnP.DeviceClient.detection_completed', None,
client=self,udn=self.device.udn)
def state_variable_change( self, variable):
self.info(variable.name, 'changed from', variable.old_value, 'to', variable.value)
| mit | 4,234,525,556,472,552,400 | 38.185185 | 130 | 0.657845 | false |
nilbody/h2o-3 | h2o-py/h2o/model/model_base.py | 1 | 25473 | """
This module implements the base model class. All model classes inherit from this class.
"""
from __future__ import print_function
from builtins import zip
from builtins import str
from builtins import range
from builtins import object
import h2o
import imp, traceback
from ..utils.shared_utils import can_use_pandas
class ModelBase(object):
def __init__(self):
self._id = None
self._model_json = None
self._metrics_class = None
self._is_xvalidated = False
self._xval_keys = None
self._parms = {} # internal, for object recycle
self.parms = {} # external
self._estimator_type = "unsupervised"
self._future = False # used by __repr__/show to query job state
self._job = None # used when _future is True
@property
def model_id(self):
"""
:return: Retrieve this model's identifier.
"""
return self._id
@model_id.setter
def model_id(self, value):
oldname = self.model_id
self._id = value
h2o.rapids("(rename \"{}\" \"{}\")".format(oldname, value))
@property
def params(self):
"""
Get the parameters and the actual/default values only.
:return: A dictionary of parameters used to build this model.
"""
params = {}
for p in self.parms:
params[p] = {"default":self.parms[p]["default_value"], "actual":self.parms[p]["actual_value"]}
return params
@property
def full_parameters(self):
"""
Get the full specification of all parameters.
:return: a dictionary of parameters used to build this model.
"""
return self.parms
@property
def type(self):
"""Get the type of model built as a string.
Returns
-------
"classifier" or "regressor" or "unsupervised"
"""
return self._estimator_type
def __repr__(self):
# PUBDEV-2278: using <method>? from IPython caused everything to dump
stk = traceback.extract_stack()
if not ("IPython" in stk[-2][0] and "info" == stk[-2][2]):
self.show()
return ""
def predict(self, test_data):
"""
Predict on a dataset.
Parameters
----------
test_data: H2OFrame
Data on which to make predictions.
Returns
-------
A new H2OFrame of predictions.
"""
if not isinstance(test_data, h2o.H2OFrame): raise ValueError("test_data must be an instance of H2OFrame")
j = h2o.H2OConnection.post_json("Predictions/models/" + self.model_id + "/frames/" + test_data.frame_id)
# prediction_frame_id = j["predictions_frame"] #j["model_metrics"][0]["predictions"]["frame_id"]["name"]
return h2o.get_frame(j["predictions_frame"]["name"])
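  # Illustrative usage (``model`` and ``test_frame`` are hypothetical):
  #   preds = model.predict(test_frame)  # H2OFrame with one row per input row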
def is_cross_validated(self):
"""
:return: True if the model was cross-validated.
"""
return self._is_xvalidated
def xval_keys(self):
"""
:return: The model keys for the cross-validated model.
"""
return self._xval_keys
def get_xval_models(self,key=None):
"""
Return a Model object.
:param key: If None, return all cross-validated models; otherwise return the model that key points to.
:return: A model or list of models.
"""
return h2o.get_model(key) if key is not None else [h2o.get_model(k) for k in self._xval_keys]
@property
def xvals(self):
"""
Return a list of the cross-validated models.
:return: A list of models
"""
return self.get_xval_models()
def deepfeatures(self, test_data, layer):
"""
Return hidden layer details
:param test_data: Data to create a feature space on
    :param layer: 0-indexed hidden layer
"""
if test_data is None: raise ValueError("Must specify test data")
j = h2o.H2OConnection.post_json("Predictions/models/" + self._id + "/frames/" + test_data.frame_id, deep_features_hidden_layer=layer)
return h2o.get_frame(j["predictions_frame"]["name"])
def weights(self, matrix_id=0):
"""
Return the frame for the respective weight matrix
    :param matrix_id: an integer, ranging from 0 to the number of layers, that specifies the weight matrix to return.
:return: an H2OFrame which represents the weight matrix identified by matrix_id
"""
num_weight_matrices = len(self._model_json['output']['weights'])
if matrix_id not in list(range(num_weight_matrices)):
raise ValueError("Weight matrix does not exist. Model has {0} weight matrices (0-based indexing), but matrix {1} "
"was requested.".format(num_weight_matrices, matrix_id))
return h2o.get_frame(self._model_json['output']['weights'][matrix_id]['URL'].split('/')[3])
def biases(self, vector_id=0):
"""
Return the frame for the respective bias vector
    :param vector_id: an integer, ranging from 0 to the number of layers, that specifies the bias vector to return.
:return: an H2OFrame which represents the bias vector identified by vector_id
"""
num_bias_vectors = len(self._model_json['output']['biases'])
if vector_id not in list(range(num_bias_vectors)):
raise ValueError("Bias vector does not exist. Model has {0} bias vectors (0-based indexing), but vector {1} "
"was requested.".format(num_bias_vectors, vector_id))
return h2o.get_frame(self._model_json['output']['biases'][vector_id]['URL'].split('/')[3])
def normmul(self):
"""
Normalization/Standardization multipliers for numeric predictors
"""
return self._model_json['output']['normmul']
def normsub(self):
"""
Normalization/Standardization offsets for numeric predictors
"""
return self._model_json['output']['normsub']
def respmul(self):
"""
Normalization/Standardization multipliers for numeric response
"""
return self._model_json['output']['normrespmul']
def respsub(self):
"""
Normalization/Standardization offsets for numeric response
"""
return self._model_json['output']['normrespsub']
def catoffsets(self):
"""
Categorical offsets for one-hot encoding
"""
return self._model_json['output']['catoffsets']
def model_performance(self, test_data=None, train=False, valid=False):
"""
Generate model metrics for this model on test_data.
Parameters
----------
test_data: H2OFrame, optional
Data set for which model metrics shall be computed against. Both train and valid arguments are ignored if test_data is not None.
train: boolean, optional
Report the training metrics for the model. If the test_data is the training data, the training metrics are returned.
valid: boolean, optional
Report the validation metrics for the model. If train and valid are True, then it defaults to True.
Returns
-------
An object of class H2OModelMetrics.
"""
if test_data is None:
if not train and not valid: train = True # default to train
if train: return self._model_json["output"]["training_metrics"]
if valid: return self._model_json["output"]["validation_metrics"]
else: # cases dealing with test_data not None
if not isinstance(test_data, h2o.H2OFrame):
raise ValueError("`test_data` must be of type H2OFrame. Got: " + type(test_data))
res = h2o.H2OConnection.post_json("ModelMetrics/models/" + self.model_id + "/frames/" + test_data.frame_id)
# FIXME need to do the client-side filtering... PUBDEV-874: https://0xdata.atlassian.net/browse/PUBDEV-874
raw_metrics = None
for mm in res["model_metrics"]:
if not mm["frame"] == None and mm["frame"]["name"] == test_data.frame_id:
raw_metrics = mm
break
return self._metrics_class(raw_metrics,algo=self._model_json["algo"])
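  # Illustrative usage (``model`` and ``test`` are hypothetical):
  #   perf = model.model_performance(test)         # score against new data
  #   perf = model.model_performance(valid=True)   # stored validation metrics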
def score_history(self):
"""
Retrieve Model Score History
Returns
-------
The score history as an H2OTwoDimTable.
"""
model = self._model_json["output"]
if 'scoring_history' in list(model.keys()) and model["scoring_history"] != None:
s = model["scoring_history"]
if can_use_pandas():
import pandas
pandas.options.display.max_rows = 20
return pandas.DataFrame(s.cell_values,columns=s.col_header)
return s
else: print("No score history for this model")
def summary(self):
"""
Print a detailed summary of the model.
"""
model = self._model_json["output"]
if model["model_summary"]:
model["model_summary"].show() # H2OTwoDimTable object
def show(self):
"""
Print innards of model, without regards to type
"""
if self._future:
self._job.poll_once()
return
if self._model_json is None:
print("No model trained yet")
return
if self.model_id is None:
print("This H2OEstimator has been removed.")
return
model = self._model_json["output"]
print("Model Details")
print("=============")
print(self.__class__.__name__, ": ", self._model_json["algo_full_name"])
print("Model Key: ", self._id)
self.summary()
print()
# training metrics
tm = model["training_metrics"]
if tm: tm.show()
vm = model["validation_metrics"]
if vm: vm.show()
xm = model["cross_validation_metrics"]
if xm: xm.show()
if "scoring_history" in list(model.keys()) and model["scoring_history"]: model["scoring_history"].show()
if "variable_importances" in list(model.keys()) and model["variable_importances"]: model["variable_importances"].show()
def varimp(self, use_pandas=False):
"""
Pretty print the variable importances, or return them in a list
Parameters
----------
use_pandas: boolean, optional
If True, then the variable importances will be returned as a pandas data frame.
Returns
-------
A list or Pandas DataFrame.
"""
model = self._model_json["output"]
if "variable_importances" in list(model.keys()) and model["variable_importances"]:
vals = model["variable_importances"].cell_values
header=model["variable_importances"].col_header
if use_pandas and can_use_pandas():
import pandas
return pandas.DataFrame(vals, columns=header)
else:
return vals
else:
print("Warning: This model doesn't have variable importances")
def residual_deviance(self,train=False,valid=False,xval=False):
"""
    Retrieve the residual deviance if this model has the attribute, or None otherwise.
:param train: Get the residual deviance for the training set. If both train and valid are False, then train is selected by default.
:param valid: Get the residual deviance for the validation set. If both train and valid are True, then train is selected by default.
:return: Return the residual deviance, or None if it is not present.
"""
if xval: raise ValueError("Cross-validation metrics are not available.")
if not train and not valid: train = True
if train and valid: train = True
return self._model_json["output"]["training_metrics"].residual_deviance() if train else self._model_json["output"]["validation_metrics"].residual_deviance()
def residual_degrees_of_freedom(self,train=False,valid=False,xval=False):
"""
    Retrieve the residual degrees of freedom if this model has the attribute, or None otherwise.
:param train: Get the residual dof for the training set. If both train and valid are False, then train is selected by default.
:param valid: Get the residual dof for the validation set. If both train and valid are True, then train is selected by default.
:return: Return the residual dof, or None if it is not present.
"""
if xval: raise ValueError("Cross-validation metrics are not available.")
if not train and not valid: train = True
if train and valid: train = True
return self._model_json["output"]["training_metrics"].residual_degrees_of_freedom() if train else self._model_json["output"]["validation_metrics"].residual_degrees_of_freedom()
def null_deviance(self,train=False,valid=False,xval=False):
"""
    Retrieve the null deviance if this model has the attribute, or None otherwise.
    :param train: Get the null deviance for the training set. If both train and valid are False, then train is selected by default.
    :param valid: Get the null deviance for the validation set. If both train and valid are True, then train is selected by default.
:return: Return the null deviance, or None if it is not present.
"""
if xval: raise ValueError("Cross-validation metrics are not available.")
if not train and not valid: train = True
if train and valid: train = True
return self._model_json["output"]["training_metrics"].null_deviance() if train else self._model_json["output"]["validation_metrics"].null_deviance()
def null_degrees_of_freedom(self,train=False,valid=False,xval=False):
"""
    Retrieve the null degrees of freedom if this model has the attribute, or None otherwise.
:param train: Get the null dof for the training set. If both train and valid are False, then train is selected by default.
:param valid: Get the null dof for the validation set. If both train and valid are True, then train is selected by default.
:return: Return the null dof, or None if it is not present.
"""
if xval: raise ValueError("Cross-validation metrics are not available.")
if not train and not valid: train = True
if train and valid: train = True
return self._model_json["output"]["training_metrics"].null_degrees_of_freedom() if train else self._model_json["output"]["validation_metrics"].null_degrees_of_freedom()
def pprint_coef(self):
"""
    Pretty print the coefficients table (includes normalized coefficients)
"""
print(self._model_json["output"]["coefficients_table"]) # will return None if no coefs!
def coef(self):
"""
:return: Return the coefficients for this model.
"""
tbl = self._model_json["output"]["coefficients_table"]
if tbl is None: return None
tbl = tbl.cell_values
return {a[0]:a[1] for a in tbl}
def coef_norm(self):
"""
:return: Return the normalized coefficients
"""
tbl = self._model_json["output"]["coefficients_table"]
if tbl is None: return None
tbl = tbl.cell_values
return {a[0]:a[2] for a in tbl}
def r2(self, train=False, valid=False, xval=False):
"""
Return the R^2 for this regression model.
The R^2 value is defined to be 1 - MSE/var,
where var is computed as sigma*sigma.
If all are False (default), then return the training metric value.
    If more than one option is set to True, then return a dictionary of metrics where the keys are "train", "valid",
and "xval"
:param train: If train is True, then return the R^2 value for the training data.
:param valid: If valid is True, then return the R^2 value for the validation data.
:param xval: If xval is True, then return the R^2 value for the cross validation data.
:return: The R^2 for this regression model.
"""
tm = ModelBase._get_metrics(self, train, valid, xval)
m = {}
for k,v in zip(list(tm.keys()),list(tm.values())): m[k] = None if v is None else v.r2()
return list(m.values())[0] if len(m) == 1 else m
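  # Illustrative return shapes for the metric accessors (values are made up):
  #   model.r2(train=True)              -> 0.83
  #   model.r2(train=True, valid=True)  -> {'train': 0.83, 'valid': 0.80}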
def mse(self, train=False, valid=False, xval=False):
"""
Get the MSE(s).
If all are False (default), then return the training metric value.
    If more than one option is set to True, then return a dictionary of metrics where the keys are "train", "valid",
and "xval"
Parameters
----------
    train : bool, default=False
      If train is True, then return the MSE value for the training data.
    valid : bool, default=False
      If valid is True, then return the MSE value for the validation data.
    xval : bool, default=False
      If xval is True, then return the MSE value for the cross validation data.
Returns
-------
The MSE for this regression model.
"""
tm = ModelBase._get_metrics(self, train, valid, xval)
m = {}
for k,v in zip(list(tm.keys()),list(tm.values())): m[k] = None if v is None else v.mse()
return list(m.values())[0] if len(m) == 1 else m
def logloss(self, train=False, valid=False, xval=False):
"""
Get the Log Loss(s).
If all are False (default), then return the training metric value.
    If more than one option is set to True, then return a dictionary of metrics where the keys are "train", "valid",
and "xval"
:param train: If train is True, then return the Log Loss value for the training data.
:param valid: If valid is True, then return the Log Loss value for the validation data.
:param xval: If xval is True, then return the Log Loss value for the cross validation data.
:return: The Log Loss for this binomial model.
"""
tm = ModelBase._get_metrics(self, train, valid, xval)
m = {}
for k,v in zip(list(tm.keys()),list(tm.values())): m[k] = None if v is None else v.logloss()
return list(m.values())[0] if len(m) == 1 else m
def mean_residual_deviance(self, train=False, valid=False, xval=False):
"""
Get the Mean Residual Deviances(s).
If all are False (default), then return the training metric value.
    If more than one option is set to True, then return a dictionary of metrics where the keys are "train", "valid",
and "xval"
:param train: If train is True, then return the Mean Residual Deviance value for the training data.
:param valid: If valid is True, then return the Mean Residual Deviance value for the validation data.
:param xval: If xval is True, then return the Mean Residual Deviance value for the cross validation data.
:return: The Mean Residual Deviance for this regression model.
"""
tm = ModelBase._get_metrics(self, train, valid, xval)
m = {}
for k,v in zip(list(tm.keys()),list(tm.values())): m[k] = None if v is None else v.mean_residual_deviance()
return list(m.values())[0] if len(m) == 1 else m
def auc(self, train=False, valid=False, xval=False):
"""
Get the AUC(s).
If all are False (default), then return the training metric value.
    If more than one option is set to True, then return a dictionary of metrics where the keys are "train", "valid",
and "xval"
:param train: If train is True, then return the AUC value for the training data.
:param valid: If valid is True, then return the AUC value for the validation data.
    :param xval: If xval is True, then return the AUC value for the cross validation data.
:return: The AUC.
"""
tm = ModelBase._get_metrics(self, train, valid, xval)
m = {}
for k,v in zip(list(tm.keys()),list(tm.values())): m[k] = None if v is None else v.auc()
return list(m.values())[0] if len(m) == 1 else m
def aic(self, train=False, valid=False, xval=False):
"""
Get the AIC(s).
If all are False (default), then return the training metric value.
    If more than one option is set to True, then return a dictionary of metrics where the keys are "train", "valid",
and "xval"
:param train: If train is True, then return the AIC value for the training data.
:param valid: If valid is True, then return the AIC value for the validation data.
    :param xval: If xval is True, then return the AIC value for the cross validation data.
:return: The AIC.
"""
tm = ModelBase._get_metrics(self, train, valid, xval)
m = {}
for k,v in zip(list(tm.keys()),list(tm.values())): m[k] = None if v is None else v.aic()
return list(m.values())[0] if len(m) == 1 else m
def giniCoef(self, train=False, valid=False, xval=False):
"""
Get the Gini Coefficient(s).
If all are False (default), then return the training metric value.
    If more than one option is set to True, then return a dictionary of metrics where the keys are "train", "valid",
and "xval"
:param train: If train is True, then return the Gini Coefficient value for the training data.
:param valid: If valid is True, then return the Gini Coefficient value for the validation data.
:param xval: If xval is True, then return the Gini Coefficient value for the cross validation data.
:return: The Gini Coefficient for this binomial model.
"""
tm = ModelBase._get_metrics(self, train, valid, xval)
m = {}
for k,v in zip(list(tm.keys()),list(tm.values())): m[k] = None if v is None else v.giniCoef()
return list(m.values())[0] if len(m) == 1 else m
def download_pojo(self,path=""):
"""
Download the POJO for this model to the directory specified by path (no trailing slash!).
If path is "", then dump to screen.
:param model: Retrieve this model's scoring POJO.
:param path: An absolute path to the directory where POJO should be saved.
:return: None
"""
h2o.download_pojo(self,path) # call the "package" function
@staticmethod
def _get_metrics(o, train, valid, xval):
metrics = {}
if train: metrics["train"] = o._model_json["output"]["training_metrics"]
if valid: metrics["valid"] = o._model_json["output"]["validation_metrics"]
if xval : metrics["xval"] = o._model_json["output"]["cross_validation_metrics"]
if len(metrics) == 0: metrics["train"] = o._model_json["output"]["training_metrics"]
return metrics
# Delete from cluster as model goes out of scope
# def __del__(self):
# h2o.remove(self._id)
def _plot(self, timestep, metric, **kwargs):
# check for matplotlib. exit if absent
try:
imp.find_module('matplotlib')
import matplotlib
if 'server' in list(kwargs.keys()) and kwargs['server']: matplotlib.use('Agg', warn=False)
import matplotlib.pyplot as plt
except ImportError:
print("matplotlib is required for this function!")
return
scoring_history = self.score_history()
# Separate functionality for GLM since its output is different from other algos
if self._model_json["algo"] == "glm":
# GLM has only one timestep option, which is `iteration`
timestep = "iteration"
if metric == "AUTO": metric = "log_likelihood"
elif metric not in ("log_likelihood", "objective"):
raise ValueError("for GLM, metric must be one of: log_likelihood, objective")
plt.xlabel(timestep)
plt.ylabel(metric)
plt.title("Validation Scoring History")
plt.plot(scoring_history[timestep], scoring_history[metric])
elif self._model_json["algo"] in ("deeplearning", "drf", "gbm"):
# Set timestep
if self._model_json["algo"] in ("gbm", "drf"):
if timestep == "AUTO": timestep = "number_of_trees"
elif timestep not in ("duration","number_of_trees"):
raise ValueError("timestep for gbm or drf must be one of: duration, number_of_trees")
else: #self._model_json["algo"] == "deeplearning":
# Delete first row of DL scoring history since it contains NAs & NaNs
if scoring_history["samples"][0] == 0:
scoring_history = scoring_history[1:]
if timestep == "AUTO": timestep = "epochs"
elif timestep not in ("epochs","samples","duration"):
raise ValueError("timestep for deeplearning must be one of: epochs, samples, duration")
training_metric = "training_{}".format(metric)
validation_metric = "validation_{}".format(metric)
if timestep == "duration":
dur_colname = "duration_{}".format(scoring_history["duration"][1].split()[1])
scoring_history[dur_colname] = [str(x).split()[0] for x in scoring_history["duration"]]
timestep = dur_colname
if can_use_pandas():
valid = validation_metric in list(scoring_history)
ylim = (scoring_history[[training_metric, validation_metric]].min().min(), scoring_history[[training_metric, validation_metric]].max().max()) if valid \
else (scoring_history[training_metric].min(), scoring_history[training_metric].max())
else:
valid = validation_metric in scoring_history.col_header
ylim = (min(min(scoring_history[[training_metric, validation_metric]])), max(max(scoring_history[[training_metric, validation_metric]]))) if valid \
else (min(scoring_history[training_metric]), max(scoring_history[training_metric]))
if ylim[0] == ylim[1]: ylim = (0,1)
if valid: # Training and validation scoring history
plt.xlabel(timestep)
plt.ylabel(metric)
plt.title("Scoring History")
plt.ylim(ylim)
plt.plot(scoring_history[timestep], scoring_history[training_metric], label="Training")
plt.plot(scoring_history[timestep], scoring_history[validation_metric], color="orange", label="Validation")
plt.legend()
else: # Training scoring history only
plt.xlabel(timestep)
plt.ylabel(training_metric)
plt.title("Training Scoring History")
plt.ylim(ylim)
plt.plot(scoring_history[timestep], scoring_history[training_metric])
else: # algo is not glm, deeplearning, drf, gbm
raise ValueError("Plotting not implemented for this type of model")
if "server" not in list(kwargs.keys()) or not kwargs["server"]: plt.show()
@staticmethod
def _check_targets(y_actual, y_predicted):
"""Check that y_actual and y_predicted have the same length.
:param y_actual: An H2OFrame
:param y_predicted: An H2OFrame
:return: None
"""
if len(y_actual) != len(y_predicted):
raise ValueError("Row mismatch: [{},{}]".format(len(y_actual),len(y_predicted)))
| apache-2.0 | 5,914,568,412,930,820,000 | 39.562102 | 180 | 0.660228 | false |
kasmith/cbmm-project-christmas | python-trials/batchMakeTrials.py | 1 | 6661 | from __future__ import division, print_function
from physicsTable import *
from physicsTable.constants import *
import threading
import pygame as pg
import random, os, sys
import numpy as np
import json
defVel = 300
# modified trial folder:
#trialfolder = os.path.join('..','public_html','trials')
trialfolder = os.path.join('..','psiturk-rg','templates', 'trials')
#random.seed(10001)
def makeRect(ul, lr):
return pg.Rect(ul, (lr[0]-ul[0],lr[1]-ul[1]))
def checkOverlap(trial):
walls = [makeRect(w[0],w[1]) for w in trial.normwalls]
goals = [makeRect(g[0],g[1]) for g in trial.goals]
objs = walls + goals
b = trial.ball
if b is not None:
br = makeRect((b[0][0]-b[2],b[1][0]-b[2]),(b[2]*2,b[2]*2))
objs.append(br)
for i in range(len(objs) - 1):
o = objs[i]
cls = o.collidelist(objs[(i+1):])
if cls != -1: return True
return False
def checkCoverage(trial, minsteps = 20, FPS = 40.):
tb = trial.makeTable()
notcovered = True
covered = False
ncovs = 0
while tb.step(1/FPS) is None:
if tb.fullyOcc():
notcovered = False
ncovs += 1
if ncovs >= minsteps: covered = True
else: ncovs = 0
return [notcovered, covered]
def checkSmallVel(v):
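    # A velocity is "small-angled" when its direction lies within pi/40 (4.5
    # degrees) of the horizontal or vertical axis; such near-axis-aligned
    # trajectories are rejected when placing the ball.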
x = abs(v[0])
y = abs(v[1])
atan = np.arctan(y/x)
return (atan < np.pi/40) or (atan > 19*np.pi/40)
def MakeRandTrial(name, blocks, occs, covered = False, blockdims = (50,300), occdims = (150, 400), res = (1000, 620), maxfails = 10000):
retry_flag = True
while retry_flag:
fails = 0
chk = False
tr = RedGreenTrial(name, res, def_ball_vel = defVel)
blocksize = (random.randint(blockdims[0],blockdims[1]),random.randint(blockdims[0],blockdims[1]))
pos = (random.randint(0,res[0]-blocksize[0]),random.randint(0,res[1]-blocksize[1]))
lr = (pos[0]+blocksize[0],pos[1]+blocksize[1])
tr.addGoal(pos,lr,REDGOAL,RED)
chk = False
while not chk:
blocksize = (random.randint(blockdims[0],blockdims[1]),random.randint(blockdims[0],blockdims[1]))
pos = (random.randint(0,res[0]-blocksize[0]),random.randint(0,res[1]-blocksize[1]))
lr = (pos[0]+blocksize[0],pos[1]+blocksize[1])
tr.addGoal(pos,lr,GREENGOAL,GREEN)
if checkOverlap(tr):
fails += 1
tr.goals = [tr.goals[0]]
else: chk = True
if fails > maxfails:
print("Resetting trial")
#return MakeRandTrial(name,blocks,occs,covered,blockdims,occdims,res,maxfails)
continue
for i in range(blocks):
chk = False
while not chk:
blocksize = (random.randint(blockdims[0],blockdims[1]),random.randint(blockdims[0],blockdims[1]))
pos = (random.randint(0,res[0]-blocksize[0]),random.randint(0,res[1]-blocksize[1]))
lr = (pos[0]+blocksize[0],pos[1]+blocksize[1])
tr.addWall(pos,lr)
if checkOverlap(tr):
fails += 1
tr.normwalls = tr.normwalls[:-1]
else: chk = True
if fails > maxfails:
print("Resetting trial")
#return MakeRandTrial(name,blocks,occs,covered,blockdims,occdims,res,maxfails)
continue
for i in range(occs):
chk = False
while not chk:
blocksize = (random.randint(blockdims[0],blockdims[1]),random.randint(blockdims[0],blockdims[1]))
pos = (random.randint(0,res[0]-blocksize[0]),random.randint(0,res[1]-blocksize[1]))
lr = (pos[0]+blocksize[0],pos[1]+blocksize[1])
noc = pg.Rect(pos,blocksize)
if noc.collidelist([makeRect(o[0],o[1]) for o in tr.occs]) == -1:
tr.addOcc(pos,lr)
chk = True
else:
fails += 1
bsize = tr.dbr
chk = False
while not chk:
bpos = (random.randint(bsize, res[0]-bsize), random.randint(bsize,res[1]-bsize))
vchk = False
while not vchk:
bvel = (random.random(), random.random())
if not checkSmallVel(bvel): vchk = True
tr.addBall(bpos, bvel)
if checkOverlap(tr):
fails += 1
tr.ball = None
else: chk = True
if fails > maxfails:
print("Resetting trial")
#return MakeRandTrial(name,blocks,occs,covered,blockdims,occdims,res,maxfails)
continue
tr.normalizeVel()
if not tr.checkConsistency(maxsteps=10000):
#return MakeRandTrial(name,blocks,occs,covered,blockdims,occdims,res,maxfails)
continue
if tr.checkConsistency(maxsteps=3000):
print("Too short")
#return MakeRandTrial(name,blocks,occs,covered,blockdims,occdims,res,maxfails)
continue
coverage = checkCoverage(tr)
if covered:
if not coverage[1]:
#return MakeRandTrial(name,blocks,occs,covered,blockdims,occdims,res,maxfails)
continue
else:
if not coverage[0]:
#return MakeRandTrial(name,blocks,occs,covered,blockdims,occdims,res,maxfails)
continue
retry_flag = False
return tr
def threadMakeTrial(nTrials, b):
for i in range(nTrials):
nm = "RTr_Bl" + str(b) + "_" + str(i)
output_path = os.path.join(output_dir, nm + '.ptr')
if not os.path.exists(output_path):
print('Thread ' + str(b) + ': Trial ' + nm, file=sys.stderr)
t = MakeRandTrial(nm, b, 0)
t.save(output_path, askoverwrite=False)
if __name__ == '__main__':
# First arg is number of trials, since there will be
# 5 block variations for each trial, expect an effective
# total of 5*nTrials.
if len(sys.argv) > 1:
nTrials = int(sys.argv[1])
else:
nTrials = 20
# Create directory for output files
output_dir = 'trials'
if not os.path.exists(output_dir):
os.makedirs(output_dir)
threads = []
# Make random trials
for b in range(1,6):
thr = threading.Thread(target=threadMakeTrial, args=(nTrials, b))
thr.start()
threads.append(thr)
for thread in threads:
thread.join()
| mit | 3,735,357,943,701,721,600 | 33.692708 | 136 | 0.545714 | false |
was4444/chromium.src | third_party/WebKit/Tools/Scripts/webkitpy/layout_tests/port/browser_test_driver.py | 1 | 4527 | # Copyright (C) 2014 Google Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the Google name nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from webkitpy.layout_tests.port import driver
import time
import shutil
class BrowserTestDriver(driver.Driver):
"""Object for running print preview test(s) using browser_tests."""
def __init__(self, port, worker_number, pixel_tests, no_timeout=False):
"""Invokes the constructor of driver.Driver."""
super(BrowserTestDriver, self).__init__(port, worker_number, pixel_tests, no_timeout)
def start(self, pixel_tests, per_test_args, deadline):
"""Same as Driver.start() however, it has an extra step. It waits for
a path to a file to be used for stdin to be printed by the browser test.
If a path is found by the deadline test test will open the file and
assign it to the stdin of the process that is owned by this driver's
server process.
"""
# FIXME(ivandavid): Need to handle case where the layout test doesn't
# get a file name.
new_cmd_line = self.cmd_line(pixel_tests, per_test_args)
if not self._server_process or new_cmd_line != self._current_cmd_line:
self._start(pixel_tests, per_test_args)
self._run_post_start_tasks()
self._open_stdin_path(deadline)
# Gets the path of the directory that the file for stdin communication is
# in. Since the browser test cannot clean it up, the layout test framework
# will. Everything the browser test uses is stored in the same directory as
# the stdin file, so deleting that directory recursively will remove all the
# other temp data, like the printed pdf. This function assumes the correct
# file path is sent. It won't delete files with only one component to avoid
# accidentally deleting files like /tmp.
def _open_stdin_path(self, deadline, test=False):
# FIXME(ivandavid): Come up with a way to test & see what happens when
# the file can't be opened.
path, found = self._read_stdin_path(deadline)
if found:
        if not test:
self._server_process._proc.stdin = open(path, 'wb', 0)
def _read_stdin_path(self, deadline):
# return (stdin_path, bool)
block = self._read_block(deadline)
if block.stdin_path:
return (block.stdin_path, True)
return (None, False)
def cmd_line(self, pixel_tests, per_test_args):
"""Command line arguments to run the browser test."""
cmd = self._command_wrapper(self._port.get_option('wrapper'))
cmd.append(self._port._path_to_driver())
cmd.append('--gtest_filter=PrintPreviewPdfGeneratedBrowserTest.MANUAL_LayoutTestDriver')
cmd.append('--run-manual')
cmd.append('--single_process')
cmd.extend(per_test_args)
cmd.extend(self._port.get_option('additional_driver_flag', []))
return cmd
def stop(self):
if self._server_process:
self._server_process.write('QUIT')
super(BrowserTestDriver, self).stop(self._port.driver_stop_timeout())
| bsd-3-clause | 3,738,755,799,461,432,000 | 48.206522 | 96 | 0.696709 | false |
xuru/pyvisdk | pyvisdk/do/host_admin_enable_event.py | 1 | 1169 |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def HostAdminEnableEvent(vim, *args, **kwargs):
'''This event records that the administrator permission has been restored.'''
obj = vim.client.factory.create('ns0:HostAdminEnableEvent')
# do some validation checking...
if (len(args) + len(kwargs)) < 4:
        raise IndexError('Expected at least 4 arguments got: %d' % len(args))
required = [ 'chainId', 'createdTime', 'key', 'userName' ]
optional = [ 'changeTag', 'computeResource', 'datacenter', 'ds', 'dvs',
'fullFormattedMessage', 'host', 'net', 'vm', 'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
| mit | 2,315,399,473,027,123,700 | 33.411765 | 124 | 0.602224 | false |
joakim-hove/ert | python/tests/share/test_synthesizer.py | 1 | 4102 | import sys
import os
from tests import ErtTest
try:
from synthesizer import OilSimulator
except ImportError as e:
share_lib_path = os.path.join(ErtTest.createSharePath("lib"))
sys.path.insert(0, share_lib_path)
synthesizer_module = __import__("synthesizer")
OilSimulator = synthesizer_module.OilSimulator
sys.path.pop(0)
class SynthesizerTest(ErtTest):
def test_oil_simulator(self):
sim = OilSimulator()
sim.addWell("OP1", seed=1)
sim.addBlock("6,6,6", seed=2)
expected_values = [
[0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 1.0, 0.0, 0.0, 0.0, 1.0, 0.0, 1.0],
[0.3359771423145687, 0.3359771423145687, 0.25672494192349865, 0.25672494192349865, 0.010039005455891323, 0.010039005455891323, 0.029013112597713192, 0.7641143089523995, 0.3359771423145687, 0.25672494192349865, 0.010039005455891323, 0.7641143089523995, 0.029013112597713192, 0.8462347957619747],
[0.7252470407619624, 1.0612241830765312, 0.5175173529594699, 0.7742422948829686, 0.017973831236885583, 0.028012836692776905, 0.02418370085402209, 0.7135738912023045, 0.7252470407619624, 0.5175173529594699, 0.017973831236885583, 0.7135738912023045, 0.02418370085402209, 0.6888364145396828],
[0.7723163496234255, 1.8335405326999568, 0.5742386073607806, 1.3484809022437492, 0.11041673583737899, 0.1384295725301559, 0.12508507685507134, 0.7435277106858791, 0.7723163496234255, 0.5742386073607806, 0.11041673583737899, 0.7435277106858791, 0.12508507685507134, 0.6403046565762696],
[0.6038799675664164, 2.437420500266373, 0.6888868738548185, 2.037367776098568, 0.267892132439122, 0.4063217049692779, 0.3072960610203287, 1.140767885762087, 0.6038799675664164, 0.6888868738548185, 0.267892132439122, 1.140767885762087, 0.3072960610203287, 0.5205364945011657],
[0.23016535126253962, 2.6675858515289126, 0.721655666522216, 2.7590234426207836, 0.35552466124555465, 0.7618463662148325, 0.6070184801736589, 3.135379250454838, 0.23016535126253962, 0.721655666522216, 0.35552466124555465, 3.135379250454838, 0.6070184801736589, 0.4800677649914682],
[0.026293782934652718, 2.693879634463565, 0.7131990780527108, 3.4722225206734945, 0.6392372725163122, 1.4010836387311447, 0.8647254356377257, 7.131990780527107, 0.026293782934652718, 0.7131990780527108, 0.6392372725163122, 7.131990780527107, 0.8647254356377257, 0.3872974839053025],
[0.0, 2.693879634463565, 0.8676997908824122, 4.339922311555907, 0.8580356376693129, 2.2591192764004577, 0.8956197493411856, 8.676997908824122, 0.0, 0.8676997908824122, 0.8580356376693129, 8.676997908824122, 0.8956197493411856, 0.22557165737149715],
[0.10560669451549878, 2.799486328979064, 0.869082212788759, 5.209004524344666, 0.8903674796589355, 3.1494867560593933, 0.8939664328113363, 8.229423492288294, 0.10560669451549878, 0.869082212788759, 0.8903674796589355, 8.229423492288294, 0.8939664328113363, 0.1340241573819292],
[0.08615885630000791, 2.885645185279072, 0.44074890315982446, 5.64975342750449, 0.9425699260811738, 4.0920566821405675, 0.9040831722665535, 4.407489031598244, 0.08615885630000791, 0.44074890315982446, 0.9425699260811738, 4.407489031598244, 0.9040831722665535, 0.13404047971467026]
]
for report_step in range(10):
sim.step(scale=1.0 / 10.0)
values = [sim.fopr(), sim.fopt(), sim.fgpr(), sim.fgpt(), sim.fwpr(), sim.fwpt(), sim.fwct(), sim.fgor(),
sim.opr("OP1"), sim.gpr("OP1"), sim.wpr("OP1"), sim.gor("OP1"), sim.wct("OP1"), sim.bpr("6,6,6")]
self.assertAlmostEqual(values[0], values[8]) # fopr = opr:op1
self.assertAlmostEqual(values[2], values[9]) # fgpr = gpr:op1
self.assertAlmostEqual(values[4], values[10]) # fwpr = wpr:op1
self.assertAlmostEqual(sim.foip(), sim.ooip - sim.fopt())
self.assertAlmostEqual(sim.fgip(), sim.goip - sim.fgpt())
self.assertAlmostEqual(sim.fwip(), sim.woip - sim.fwpt())
self.assertAlmostEqualList(values, expected_values[report_step])
| gpl-3.0 | -2,593,821,575,484,354,600 | 81.04 | 306 | 0.730132 | false |
ezbake/ezbake-frontend | ezReverseProxy/TSSLSocket/TGeventServer.py | 1 | 1758 | # Copyright (C) 2013-2014 Computer Sciences Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import gevent
from thrift.server.TServer import TServer
from thrift.transport import TSocket, TTransport
import gevent.socket
TSocket.socket = gevent.socket
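# Monkey-patch Thrift's TSocket module to use gevent's cooperative socket so
# blocking socket I/O yields to other greenlets instead of stalling the hub.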
class TGEventServer(TServer):
def __init__(self, logger, *args, **kwargs):
TServer.__init__(self, *args)
self._logger = logger
def handle(self, client):
itrans = self.inputTransportFactory.getTransport(client)
otrans = self.outputTransportFactory.getTransport(client)
iprot = self.inputProtocolFactory.getProtocol(itrans)
oprot = self.outputProtocolFactory.getProtocol(otrans)
try:
while True:
self.processor.process(iprot, oprot)
except TTransport.TTransportException, e:
pass
itrans.close()
otrans.close()
def serve(self):
self.serverTransport.listen()
while True:
try:
client = self.serverTransport.accept()
gevent.spawn(self.handle, client)
except KeyboardInterrupt:
raise
except Exception, e:
self._logger.exception(e)
| apache-2.0 | 8,150,297,926,331,764,000 | 34.16 | 76 | 0.663823 | false |
fcbond/OMW | omw/bin/load-pwn.py | 1 | 18238 | #!/usr/bin/python3
# This script loads PWN in the new OMW schema
# It requires Python3 and NLTK3 installed
import sqlite3, sys, nltk
from nltk.corpus import wordnet as wn
from collections import defaultdict as dd
### ToDo: add antonyms as synset links (?)
### ToDo: examples are being loaded as synset examples, change to sense (?)
# It takes two arguments: the name of the db and the ILI mapping file
if (len(sys.argv) != 3):
sys.stderr.write('usage: load-pwn.py DBFILE ILIMAP\n')
sys.exit(1)
else:
u = sys.argv[0]
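    # sys.argv[0] (the script path) is recorded as the 'u' (updater) value
    # in every DB insert below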
dbfile = sys.argv[1]
ilimapfile = sys.argv[2]
sys.stderr.write('Found ({}) as the new OMW database.\n'.format(dbfile))
# Verb Frames Names per Verb_id
vframe = dd(lambda: dd(str))
vframe['eng'][1] = "Something ----s"
vframe['eng'][2] = "Somebody ----s"
vframe['eng'][3] = "It is ----ing"
vframe['eng'][4] = "Something is ----ing PP"
vframe['eng'][5] = "Something ----s something Adjective/Noun"
vframe['eng'][6] = "Something ----s Adjective/Noun"
vframe['eng'][7] = "Somebody ----s Adjective"
vframe['eng'][8] = "Somebody ----s something"
vframe['eng'][9] = "Somebody ----s somebody"
vframe['eng'][10] = "Something ----s somebody"
vframe['eng'][11] = "Something ----s something"
vframe['eng'][12] = "Something ----s to somebody"
vframe['eng'][13] = "Somebody ----s on something"
vframe['eng'][14] = "Somebody ----s somebody something"
vframe['eng'][15] = "Somebody ----s something to somebody"
vframe['eng'][16] = "Somebody ----s something from somebody"
vframe['eng'][17] = "Somebody ----s somebody with something"
vframe['eng'][18] = "Somebody ----s somebody of something"
vframe['eng'][19] = "Somebody ----s something on somebody"
vframe['eng'][20] = "Somebody ----s somebody PP"
vframe['eng'][21] = "Somebody ----s something PP"
vframe['eng'][22] = "Somebody ----s PP"
vframe['eng'][23] = "Somebody's (body part) ----s"
vframe['eng'][24] = "Somebody ----s somebody to INFINITIVE"
vframe['eng'][25] = "Somebody ----s somebody INFINITIVE"
vframe['eng'][26] = "Somebody ----s that CLAUSE"
vframe['eng'][27] = "Somebody ----s to somebody"
vframe['eng'][28] = "Somebody ----s to INFINITIVE"
vframe['eng'][29] = "Somebody ----s whether INFINITIVE"
vframe['eng'][30] = "Somebody ----s somebody into V-ing something"
vframe['eng'][31] = "Somebody ----s something with something"
vframe['eng'][32] = "Somebody ----s INFINITIVE"
vframe['eng'][33] = "Somebody ----s VERB-ing"
vframe['eng'][34] = "It ----s that CLAUSE"
vframe['eng'][35] = "Something ----s INFINITIVE "
# Verb Frames Symbols per Verb_id
vframe['engsym'][1] = "☖ ~"
vframe['engsym'][2] = "☺ ~"
vframe['engsym'][3] = "It is ~ing"
vframe['engsym'][4] = "☖ is ~ing PP"
vframe['engsym'][5] = "☖ ~ ☖ Adj/N"
vframe['engsym'][6] = "☖ ~ Adj/N"
vframe['engsym'][7] = "☺ ~ Adj"
vframe['engsym'][8] = "☺ ~ ☖"
vframe['engsym'][9] = "☺ ~ ☺"
vframe['engsym'][10] = "☖ ~ ☺"
vframe['engsym'][11] = "☖ ~ ☖"
vframe['engsym'][12] = "☖ ~ to ☺"
vframe['engsym'][13] = "☺ ~ on ☖"
vframe['engsym'][14] = "☺ ~ ☺ ☖"
vframe['engsym'][15] = "☺ ~ ☖ to ☺"
vframe['engsym'][16] = "☺ ~ ☖ from ☺"
vframe['engsym'][17] = "☺ ~ ☺ with ☖"
vframe['engsym'][18] = "☺ ~ ☺ of ☖"
vframe['engsym'][19] = "☺ ~ ☖ on ☺"
vframe['engsym'][20] = "☺ ~ ☺ PP"
vframe['engsym'][21] = "☺ ~ ☖ PP"
vframe['engsym'][22] = "☺ ~ PP"
vframe['engsym'][23] = "☺'s (body part) ~"
vframe['engsym'][24] = "☺ ~ ☺ to INF"
vframe['engsym'][25] = "☺ ~ ☺ INF"
vframe['engsym'][26] = "☺ ~ that CLAUSE"
vframe['engsym'][27] = "☺ ~ to ☺"
vframe['engsym'][28] = "☺ ~ to INF"
vframe['engsym'][29] = "☺ ~ whether INF"
vframe['engsym'][30] = "☺ ~ ☺ into Ving ☖"
vframe['engsym'][31] = "☺ ~ ☖ with ☖"
vframe['engsym'][32] = "☺ ~ INF"
vframe['engsym'][33] = "☺ ~ V-ing"
vframe['engsym'][34] = "It ~ that CLAUSE"
vframe['engsym'][35] = "☖ ~ INF "
lexnames = """0 adj.all all adjective clusters
1 adj.pert relational adjectives (pertainyms)
2 adv.all all adverbs
3 noun.Tops unique beginner for nouns
4 noun.act nouns denoting acts or actions
5 noun.animal nouns denoting animals
6 noun.artifact nouns denoting man-made objects
7 noun.attribute nouns denoting attributes of people and objects
8 noun.body nouns denoting body parts
9 noun.cognition nouns denoting cognitive processes and contents
10 noun.communication nouns denoting communicative processes and contents
11 noun.event nouns denoting natural events
12 noun.feeling nouns denoting feelings and emotions
13 noun.food nouns denoting foods and drinks
14 noun.group nouns denoting groupings of people or objects
15 noun.location nouns denoting spatial position
16 noun.motive nouns denoting goals
17 noun.object nouns denoting natural objects (not man-made)
18 noun.person nouns denoting people
19 noun.phenomenon nouns denoting natural phenomena
20 noun.plant nouns denoting plants
21 noun.possession nouns denoting possession and transfer of possession
22 noun.process nouns denoting natural processes
23 noun.quantity nouns denoting quantities and units of measure
24 noun.relation nouns denoting relations between people or things or ideas
25 noun.shape nouns denoting two and three dimensional shapes
26 noun.state nouns denoting stable states of affairs
27 noun.substance nouns denoting substances
28 noun.time nouns denoting time and temporal relations
29 verb.body verbs of grooming, dressing and bodily care
30 verb.change verbs of size, temperature change, intensifying, etc.
31 verb.cognition verbs of thinking, judging, analyzing, doubting
32 verb.communication verbs of telling, asking, ordering, singing
33 verb.competition verbs of fighting, athletic activities
34 verb.consumption verbs of eating and drinking
35 verb.contact verbs of touching, hitting, tying, digging
36 verb.creation verbs of sewing, baking, painting, performing
37 verb.emotion verbs of feeling
38 verb.motion verbs of walking, flying, swimming
39 verb.perception verbs of seeing, hearing, feeling
40 verb.possession verbs of buying, selling, owning
41 verb.social verbs of political and social activities and events
42 verb.stative verbs of being, having, spatial relations
43 verb.weather verbs of raining, snowing, thawing, thundering
44 adj.ppl participial adjectives"""
# Short and Full Lexnames per Lexid
lexname = dd(lambda: dd(str))
for line in lexnames.split('\n'):
lexnlst = line.split('\t')
lexname['eng'][lexnlst[1]] = lexnlst[2]
lexname['id'][lexnlst[1]] = lexnlst[0]
################################################################
# OPEN omw.db
################################################################
con = sqlite3.connect(dbfile)
c = con.cursor()
################################################################
# GET PWN3.0-ILI ORIGINAL MAPPING
################################################################
f = open(ilimapfile, 'r')
ili_map = dict()
for line in f:
if line.strip() == "":
continue
else:
tab = line.split('\t')
pwn_ss = tab[1].strip()
ili_id = tab[0][1:].strip()
ili_map[pwn_ss] = ili_id
################################################################
# INSERT PROJECT / SRC / SRC_META DATA
################################################################
c.execute("""INSERT INTO proj (code, u)
VALUES (?,?)""", ['pwn',u])
c.execute("""SELECT MAX(id) FROM proj""")
proj_id = c.fetchone()[0]
sys.stderr.write('PWN was attributed ({}) as proj_id.\n'.format(proj_id))
c.execute("""INSERT INTO src (proj_id, version, u)
VALUES (?,?,?)""", [proj_id,'3.0', u])
c.execute("""SELECT MAX(id) FROM src""")
src_id = c.fetchone()[0]
sys.stderr.write('PWN30 was attributed (%s) as src_id.\n' % (src_id))
c.execute("""INSERT INTO src_meta (src_id, attr, val, u)
VALUES (?,?,?,?)""", [src_id, 'id', 'pwn', u])
c.execute("""INSERT INTO src_meta (src_id, attr, val, u)
VALUES (?,?,?,?)""", [src_id, 'version', '3.0', u])
c.execute("""INSERT INTO src_meta (src_id, attr, val, u)
VALUES (?,?,?,?)""", [src_id, 'label', 'Princeton Wordnet', u])
c.execute("""INSERT INTO src_meta (src_id, attr, val, u)
VALUES (?,?,?,?)""", [src_id, 'url', 'https://wordnet.princeton.edu', u])
c.execute("""INSERT INTO src_meta (src_id, attr, val, u)
VALUES (?,?,?,?)""", [src_id, 'description', 'WordNet is a large, open-source, lexical database of English. Nouns, verbs, adjectives and adverbs are grouped into sets of cognitive synonyms (synsets), each expressing a distinct concept. Synsets are interlinked by means of conceptual-semantic and lexical relations.', u])
c.execute("""INSERT INTO src_meta (src_id, attr, val, u)
VALUES (?,?,?,?)""", [src_id, 'license', 'wordnet', u])
c.execute("""INSERT INTO src_meta (src_id, attr, val, u)
VALUES (?,?,?,?)""", [src_id, 'language', 'en', u])
sys.stderr.write('PWN30 meta-data was added.\n')
################################################################
# INSERT (WN-EXTERNAL) RESOURCE DATA
################################################################
# FIXME!!! ADD SRC_META
c.execute("""INSERT INTO resource (code, u)
VALUES (?,?)""", ['pwn30-lexnames',u])
c.execute("""SELECT MAX(id) FROM resource""")
lexnames_resource_id = c.fetchone()[0]
c.execute("""INSERT INTO resource (code, u)
VALUES (?,?)""", ['pwn30-verbframes',u])
c.execute("""SELECT MAX(id) FROM resource""")
verbframes_resource_id = c.fetchone()[0]
################################################################
# INSERT LANG DATA (CODES AND NAMES)
################################################################
c.execute("""INSERT INTO lang (bcp47, iso639, u)
VALUES (?,?,?)""", ['en','eng',u])
c.execute("""INSERT INTO lang_name (lang_id, in_lang_id, name, u)
VALUES (1,1,'English',?)""", [u])
c.execute("""SELECT MAX(id) FROM lang""")
lang_id = c.fetchone()[0]
################################################################
# LOAD POS, SSREL, AND SREL DATA
################################################################
pos_id = dict()
c.execute("""SELECT id, tag FROM pos""")
rows = c.fetchall()
for r in rows:
pos_id[r[1]]=r[0]
ssrel_id = dict()
c.execute("""SELECT id, rel FROM ssrel""")
rows = c.fetchall()
for r in rows:
ssrel_id[r[1]]=r[0]
srel_id = dict()
c.execute("""SELECT id, rel FROM srel""")
rows = c.fetchall()
for r in rows:
srel_id[r[1]]=r[0]
################################################################
# ADD ENGLISH ENTRIES
################################################################
ssid = dict()
fid = dict()
wid=dict()
ss_lemma_sense_id = dict()
def ss2of(ss):
# FIXME!!!! 's' is getting through as the src_key on purpose!
return "%08d-%s" % (ss.offset(), ss.pos())
for ss in wn.all_synsets():
ili_id = int(ili_map[ss2of(ss)])
# (1) LOAD PWN CONCEPTS AS ILI CONCEPTS
if ss.instance_hypernyms():
kind = 2
c.execute("""INSERT INTO ili (id, kind_id, def, status_id,
origin_src_id, src_key, u)
VALUES (?,?,?,?,?,?,?)
""", (ili_id, kind, ss.definition(), 1,
src_id, ss2of(ss), u))
else:
kind = 1
c.execute("""INSERT INTO ili (id, kind_id, def, status_id,
origin_src_id, src_key, u)
VALUES (?,?,?,?,?,?,?)
""", (ili_id, kind, ss.definition(), 1,
src_id, ss2of(ss), u))
# (2) LOAD PWN CONCEPTS AS OMW CONCEPTS
pos = ss.pos()
pid = pos_id[pos.replace('s', 'a')]
# SYNSETS
c.execute("""INSERT INTO ss (ili_id, pos_id, u)
VALUES (?,?,?)
""", (ili_id, pid, u))
ss_id = c.lastrowid
c.execute("""INSERT INTO ss_src (ss_id, src_id, src_key, conf, u)
VALUES (?,?,?,?,?)
""", (ss_id, src_id, ss2of(ss), 1, u))
ssid[ss2of(ss)] = ss_id
c.execute("""INSERT INTO def (ss_id, lang_id, def, u)
VALUES (?,?,?,?)
""", (ss_id, lang_id, ss.definition(), u))
def_id = c.lastrowid
c.execute("""INSERT INTO def_src (def_id, src_id, conf, u)
VALUES (?,?,?,?)
""", (def_id, src_id, 1, u))
# EXAMPLES
exs = ss.examples()
for e in exs:
c.execute("""INSERT INTO ssexe (ss_id, lang_id, ssexe, u)
VALUES (?,?,?,?)
""", (ss_id, lang_id, e, u))
ex_id = c.lastrowid
c.execute("""INSERT INTO ssexe_src (ssexe_id, src_id, conf, u)
VALUES (?,?,?,?)
""", (ex_id, src_id, 1, u))
# INSERT FORMS, WORDS (SAME) and SENSES
for l in ss.lemmas():
# FORMS
form = l.name().replace('_', ' ')
if (pid, form) in fid:
form_id = fid[(pid, form)]
            word_id = wid[(pid, form)]
else:
c.execute("""INSERT INTO f (lang_id, pos_id, lemma, u)
VALUES (?,?,?,?)
""", (lang_id, pid, form, u))
form_id = c.lastrowid
fid[(pid, form)] = form_id
c.execute("""INSERT INTO f_src (f_id, src_id, conf, u)
VALUES (?,?,?,?)
""", (form_id, src_id, 1, u))
# WORDS Only add for new form/pos pairs
c.execute("""INSERT INTO w (canon, u)
VALUES (?,?) """, (form_id, u))
word_id = c.lastrowid
wid[(pid, form)] = word_id
c.execute("""INSERT INTO wf_link (w_id, f_id, src_id, conf, u)
VALUES (?,?,?,?,?)
""", (word_id, form_id, src_id, 1, u))
# SENSES
word_id = wid[(pid, form)]
c.execute("""INSERT INTO s (ss_id, w_id, u)
VALUES (?,?,?) """, (ss_id, word_id, u))
s_id = c.lastrowid
c.execute("""INSERT INTO s_src (s_id, src_id, conf, u)
VALUES (?,?,?,?) """, (s_id, src_id, 1, u))
ss_lemma_sense_id[(ss,l)] = s_id
################################################################
# SECOND ROUND: INSERT RELATIONS
################################################################
# This now includes all relations as named in NLTK3.0
nltk_synlink_names = """also also_sees
attribute attributes
causes causes
entails entailments
hypernym hypernyms
hyponym hyponyms
instance_hypernym instance_hypernyms
instance_hyponym instance_hyponyms
holo_part part_holonyms
mero_part part_meronyms
similar similar_tos
holo_substance substance_holonyms
mero_substance substance_meronyms
holo_member member_holonyms
mero_member member_meronyms
domain_topic topic_domains
domain_region region_domains
exemplifies usage_domains"""
synlinks = dict()
for line in nltk_synlink_names.splitlines():
(k, v) = line.split('\t')
synlinks[k] = v
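# e.g. synlinks['hypernym'] == 'hypernyms', so getattr(ss, synlinks[r])() below
# calls the matching NLTK accessor for each OMW relation name.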
# list with relations not present in NLTK3.0
# but that can be inserted by finding their reverse
linkrev = dict()
linkrev['domain_topic'] = 'has_domain_topic'
linkrev['exemplifies'] = 'is_exemplified_by'
linkrev['domain_region'] = 'has_domain_region'
nltk_senslink_names = """antonym antonyms
pertainym pertainyms
derivation derivationally_related_forms"""
senslinks = dict()
for line in nltk_senslink_names.splitlines():
(k, v) = line.split('\t')
senslinks[k] = v
for ss in wn.all_synsets():
pos = ss.pos()
pid = pos_id[pos.replace('s', 'a')]
# SSREL
for r in synlinks.keys():
for ss2 in getattr(ss, synlinks[r])():
c.execute("""INSERT INTO sslink (ss1_id, ssrel_id, ss2_id, u)
VALUES (?,?,?,?)""",
(ssid[ss2of(ss)], ssrel_id[r], ssid[ss2of(ss2)], u))
sslink_id = c.lastrowid
c.execute("""INSERT INTO sslink_src (sslink_id, src_id, conf, lang_id, u)
VALUES (?,?,?,?,?)""",
(sslink_id, src_id, 1, lang_id, u))
if r in linkrev.keys(): # insert the reverse relation
c.execute("""INSERT INTO sslink (ss1_id, ssrel_id, ss2_id, u)
VALUES (?,?,?,?)""",
(ssid[ss2of(ss2)], ssrel_id[linkrev[r]], ssid[ss2of(ss)], u))
sslink_id = c.lastrowid
c.execute("""INSERT INTO sslink_src (sslink_id, src_id, conf, lang_id, u)
VALUES (?,?,?,?,?)""",
(sslink_id, src_id, 1, lang_id, u))
# SS LEXNAMES
lxn = ss.lexname()
c.execute("""INSERT INTO ssxl (ss_id, resource_id, x1, x2, x3, u)
VALUES (?,?,?,?,?,?)
""", (ssid[ss2of(ss)], lexnames_resource_id, lexname['id'][lxn],
lxn, lexname['eng'][lxn], u))
# SS VERBFRAMES
sframes = ss.frame_ids()
for frame in sframes:
c.execute("""INSERT INTO ssxl (ss_id, resource_id, x1, x2, x3, u)
VALUES (?,?,?,?,?,?)
""", (ssid[ss2of(ss)], verbframes_resource_id, frame,
vframe['eng'][frame], vframe['engsym'][frame], u))
# SENSE LINKS
for l1 in ss.lemmas():
s1_id = ss_lemma_sense_id[(ss,l1)]
lframeids = l1.frame_ids() # lemma frames
for frame in lframeids:
c.execute("""INSERT INTO sxl (s_id, resource_id, x1, x2, x3, u)
VALUES (?,?,?,?,?,?)
""", (s1_id, verbframes_resource_id, frame,
vframe['eng'][frame], vframe['engsym'][frame], u))
for r in senslinks:
for l2 in getattr(l1, senslinks[r])():
s2_id = ss_lemma_sense_id[(l2.synset(),l2)]
c.execute("""INSERT INTO slink (s1_id, srel_id, s2_id, u)
VALUES (?,?,?,?)""",
(s1_id, srel_id[r], s2_id, u))
slink_id = c.lastrowid
c.execute("""INSERT INTO slink_src (slink_id, src_id, conf, u)
VALUES (?,?,?,?)""",
(slink_id, src_id, 1, u))
con.commit()
con.close()
sys.stderr.write('Loaded PWN30!')
| mit | -2,236,290,472,514,885,400 | 32.240367 | 333 | 0.54278 | false |
Kha/flask-admin | flask_admin/base.py | 1 | 20999 | import os.path as op
from functools import wraps
from flask import Blueprint, current_app, render_template, abort, g, url_for
from flask_admin import babel
from flask_admin._compat import with_metaclass, as_unicode
from flask_admin import helpers as h
# For compatibility reasons import MenuLink
from flask_admin.menu import MenuCategory, MenuView, MenuLink
def expose(url='/', methods=('GET',)):
"""
Use this decorator to expose views in your view classes.
:param url:
Relative URL for the view
:param methods:
Allowed HTTP methods. By default only GET is allowed.
"""
def wrap(f):
if not hasattr(f, '_urls'):
f._urls = []
f._urls.append((url, methods))
return f
return wrap
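# A minimal usage sketch (the ``ReportsView`` class is hypothetical;
# ``BaseView`` is defined further below in this module):
#
#     class ReportsView(BaseView):
#         @expose('/')
#         def index(self):
#             return self.render('reports/index.html')
#
#         @expose('/daily/', methods=('GET', 'POST'))
#         def daily(self):
#             return self.render('reports/daily.html')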
def expose_plugview(url='/'):
"""
Decorator to expose Flask's pluggable view classes
(``flask.views.View`` or ``flask.views.MethodView``).
:param url:
Relative URL for the view
.. versionadded:: 1.0.4
"""
def wrap(v):
handler = expose(url, v.methods)
if hasattr(v, 'as_view'):
return handler(v.as_view(v.__name__))
else:
return handler(v)
return wrap
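# A hedged sketch of exposing a Flask pluggable view (``API_v1`` is a
# hypothetical name; flask-admin passes the owning admin view to the
# exposed methods):
#
#     from flask.views import MethodView
#
#     class MyAdminView(BaseView):
#         @expose_plugview('/_api/1')
#         class API_v1(MethodView):
#             def get(self, cls):
#                 return cls.render('api_v1.html')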
# Base views
def _wrap_view(f):
# Avoid wrapping view method twice
if hasattr(f, '_wrapped'):
return f
@wraps(f)
def inner(self, *args, **kwargs):
# Store current admin view
h.set_current_view(self)
# Check if administrative piece is accessible
abort = self._handle_view(f.__name__, **kwargs)
if abort is not None:
return abort
return self._run_view(f, *args, **kwargs)
inner._wrapped = True
return inner
class AdminViewMeta(type):
"""
View metaclass.
    Does some precalculations (like getting the list of view methods from the
    class) to avoid recalculating them for each view class instance.
"""
def __init__(cls, classname, bases, fields):
type.__init__(cls, classname, bases, fields)
# Gather exposed views
cls._urls = []
cls._default_view = None
for p in dir(cls):
attr = getattr(cls, p)
if hasattr(attr, '_urls'):
# Collect methods
for url, methods in attr._urls:
cls._urls.append((url, p, methods))
if url == '/':
cls._default_view = p
# Wrap views
setattr(cls, p, _wrap_view(attr))
class BaseViewClass(object):
pass
class BaseView(with_metaclass(AdminViewMeta, BaseViewClass)):
"""
Base administrative view.
Derive from this class to implement your administrative interface piece. For example::
from flask_admin import BaseView, expose
class MyView(BaseView):
@expose('/')
def index(self):
return 'Hello World!'
Icons can be added to the menu by using `menu_icon_type` and `menu_icon_value`. For example::
admin.add_view(MyView(name='My View', menu_icon_type='glyph', menu_icon_value='glyphicon-home'))
"""
@property
def _template_args(self):
"""
Extra template arguments.
        If you need to pass some extra parameters to the template,
        you can override a particular view function, contribute the
        arguments you want to pass to the template and call the parent view.
These arguments are local for this request and will be discarded
in the next request.
Any value passed through ``_template_args`` will override whatever
parent view function passed to the template.
For example::
class MyAdmin(ModelView):
@expose('/')
def index(self):
self._template_args['name'] = 'foobar'
self._template_args['code'] = '12345'
super(MyAdmin, self).index()
"""
args = getattr(g, '_admin_template_args', None)
if args is None:
args = g._admin_template_args = dict()
return args
def __init__(self, name=None, category=None, endpoint=None, url=None,
static_folder=None, static_url_path=None,
menu_class_name=None, menu_icon_type=None, menu_icon_value=None):
"""
Constructor.
:param name:
Name of this view. If not provided, will default to the class name.
:param category:
View category. If not provided, this view will be shown as a top-level menu item. Otherwise, it will
be in a submenu.
:param endpoint:
Base endpoint name for the view. For example, if there's a view method called "index" and
endpoint is set to "myadmin", you can use `url_for('myadmin.index')` to get the URL to the
view method. Defaults to the class name in lower case.
:param url:
Base URL. If provided, affects how URLs are generated. For example, if the url parameter
is "test", the resulting URL will look like "/admin/test/". If not provided, will
use endpoint as a base url. However, if URL starts with '/', absolute path is assumed
and '/admin/' prefix won't be applied.
:param static_url_path:
Static URL Path. If provided, this specifies the path to the static url directory.
:param menu_class_name:
Optional class name for the menu item.
:param menu_icon_type:
Optional icon. Possible icon types:
- `flask_admin.consts.ICON_TYPE_GLYPH` - Bootstrap glyph icon
- `flask_admin.consts.ICON_TYPE_FONT_AWESOME` - Font Awesome icon
- `flask_admin.consts.ICON_TYPE_IMAGE` - Image relative to Flask static directory
- `flask_admin.consts.ICON_TYPE_IMAGE_URL` - Image with full URL
:param menu_icon_value:
Icon glyph name or URL, depending on `menu_icon_type` setting
"""
self.name = name
self.category = category
self.endpoint = self._get_endpoint(endpoint)
self.url = url
self.static_folder = static_folder
self.static_url_path = static_url_path
self.menu = None
self.menu_class_name = menu_class_name
self.menu_icon_type = menu_icon_type
self.menu_icon_value = menu_icon_value
# Initialized from create_blueprint
self.admin = None
self.blueprint = None
# Default view
if self._default_view is None:
raise Exception(u'Attempted to instantiate admin view %s without default view' % self.__class__.__name__)
def _get_endpoint(self, endpoint):
"""
        Generate the Flask endpoint name. By default, converts the class name
        to lower case if an endpoint is not explicitly provided.
"""
if endpoint:
return endpoint
return self.__class__.__name__.lower()
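    # Illustration: a subclass named ``UserAdminView`` gets the endpoint
    # 'useradminview' unless an explicit ``endpoint`` argument is supplied.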
def _get_view_url(self, admin, url):
"""
Generate URL for the view. Override to change default behavior.
"""
if url is None:
if admin.url != '/':
url = '%s/%s' % (admin.url, self.endpoint)
else:
if self == admin.index_view:
url = '/'
else:
url = '/%s' % self.endpoint
else:
if not url.startswith('/'):
url = '%s/%s' % (admin.url, url)
return url
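    # Worked examples of the rules above (assuming the admin is mounted at
    # '/admin' and this view's endpoint is 'myview'):
    #
    #     url=None      -> '/admin/myview'
    #     url='test'    -> '/admin/test'
    #     url='/custom' -> '/custom'  (absolute path, admin prefix not applied)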
def create_blueprint(self, admin):
"""
Create Flask blueprint.
"""
# Store admin instance
self.admin = admin
# If the static_url_path is not provided, use the admin's
if not self.static_url_path:
self.static_url_path = admin.static_url_path
# Generate URL
self.url = self._get_view_url(admin, self.url)
# If we're working from the root of the site, set prefix to None
if self.url == '/':
self.url = None
# prevent admin static files from conflicting with flask static files
if not self.static_url_path:
self.static_folder = 'static'
self.static_url_path = '/static/admin'
        # If name is not provided, use the prettified class name
if self.name is None:
self.name = self._prettify_class_name(self.__class__.__name__)
# Create blueprint and register rules
self.blueprint = Blueprint(self.endpoint, __name__,
url_prefix=self.url,
subdomain=self.admin.subdomain,
template_folder=op.join('templates', self.admin.template_mode),
static_folder=self.static_folder,
static_url_path=self.static_url_path)
for url, name, methods in self._urls:
self.blueprint.add_url_rule(url,
name,
getattr(self, name),
methods=methods)
return self.blueprint
def render(self, template, **kwargs):
"""
Render template
:param template:
Template path to render
:param kwargs:
Template arguments
"""
# Store self as admin_view
kwargs['admin_view'] = self
kwargs['admin_base_template'] = self.admin.base_template
# Provide i18n support even if flask-babel is not installed
# or enabled.
kwargs['_gettext'] = babel.gettext
kwargs['_ngettext'] = babel.ngettext
kwargs['h'] = h
# Expose get_url helper
kwargs['get_url'] = self.get_url
# Expose config info
kwargs['config'] = current_app.config
# Contribute extra arguments
kwargs.update(self._template_args)
return render_template(template, **kwargs)
def _prettify_class_name(self, name):
"""
Split words in PascalCase string into separate words.
:param name:
String to prettify
"""
return h.prettify_class_name(name)
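    # Sketch of the expected behaviour: 'MyDashboardView' becomes
    # 'My Dashboard View'.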
def is_visible(self):
"""
        Override this method if you want to dynamically hide or show
        administrative views in the Flask-Admin menu structure.
        By default, the item is visible in the menu.
        Please note that the item should be both visible and accessible to be
        displayed in the menu.
"""
return True
def is_accessible(self):
"""
Override this method to add permission checks.
Flask-Admin does not make any assumptions about the authentication system used in your application, so it is
up to you to implement it.
By default, it will allow access for everyone.
"""
return True
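    # A common override pattern (sketch; assumes flask-login supplies
    # ``current_user``):
    #
    #     from flask_login import current_user
    #
    #     class SecuredView(BaseView):
    #         def is_accessible(self):
    #             return current_user.is_authenticated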
def _handle_view(self, name, **kwargs):
"""
This method will be executed before calling any view method.
It will execute the ``inaccessible_callback`` if the view is not
accessible.
:param name:
View function name
:param kwargs:
View function arguments
"""
if not self.is_accessible():
return self.inaccessible_callback(name, **kwargs)
def _run_view(self, fn, *args, **kwargs):
"""
This method will run actual view function.
        While it is similar to ``_handle_view``, it can be used to change the
        arguments that are passed to the view.
:param fn:
View function
:param kwargs:
Arguments
"""
return fn(self, *args, **kwargs)
def inaccessible_callback(self, name, **kwargs):
"""
Handle the response to inaccessible views.
        By default, it throws an HTTP 403 error. Override this method to
        customize the behaviour.
"""
return abort(403)
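    # Sketch of a friendlier override that sends anonymous users to a login
    # page instead of a bare 403 (assumes a 'login' endpoint exists):
    #
    #     from flask import redirect, request, url_for
    #
    #     def inaccessible_callback(self, name, **kwargs):
    #         return redirect(url_for('login', next=request.url))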
def get_url(self, endpoint, **kwargs):
"""
Generate URL for the endpoint. If you want to customize URL generation
        logic (persist some query string argument, for example), this is the
        right place to do it.
:param endpoint:
Flask endpoint name
:param kwargs:
Arguments for `url_for`
"""
return url_for(endpoint, **kwargs)
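    # Sketch: persist a query-string argument across all generated URLs
    # (the 'lang' parameter is a hypothetical example):
    #
    #     from flask import request
    #
    #     class LocalizedView(BaseView):
    #         def get_url(self, endpoint, **kwargs):
    #             kwargs.setdefault('lang', request.args.get('lang'))
    #             return super(LocalizedView, self).get_url(endpoint, **kwargs)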
@property
def _debug(self):
if not self.admin or not self.admin.app:
return False
return self.admin.app.debug
class AdminIndexView(BaseView):
"""
Default administrative interface index page when visiting the ``/admin/`` URL.
It can be overridden by passing your own view class to the ``Admin`` constructor::
class MyHomeView(AdminIndexView):
@expose('/')
def index(self):
arg1 = 'Hello'
return self.render('admin/myhome.html', arg1=arg1)
admin = Admin(index_view=MyHomeView())
Also, you can change the root url from /admin to / with the following::
admin = Admin(
app,
index_view=AdminIndexView(
name='Home',
template='admin/myhome.html',
url='/'
)
)
Default values for the index page are:
* If a name is not provided, 'Home' will be used.
* If an endpoint is not provided, will default to ``admin``
* Default URL route is ``/admin``.
* Automatically associates with static folder.
* Default template is ``admin/index.html``
"""
def __init__(self, name=None, category=None,
endpoint=None, url=None,
template='admin/index.html',
menu_class_name=None,
menu_icon_type=None,
menu_icon_value=None):
super(AdminIndexView, self).__init__(name or babel.lazy_gettext('Home'),
category,
endpoint or 'admin',
url or '/admin',
'static',
menu_class_name=menu_class_name,
menu_icon_type=menu_icon_type,
menu_icon_value=menu_icon_value)
self._template = template
@expose()
def index(self):
return self.render(self._template)
class Admin(object):
"""
Collection of the admin views. Also manages menu structure.
"""
def __init__(self, app=None, name=None,
url=None, subdomain=None,
index_view=None,
translations_path=None,
endpoint=None,
static_url_path=None,
base_template=None,
template_mode=None,
category_icon_classes=None):
"""
Constructor.
:param app:
Flask application object
:param name:
Application name. Will be displayed in the main menu and as a page title. Defaults to "Admin"
:param url:
Base URL
:param subdomain:
Subdomain to use
:param index_view:
Home page view to use. Defaults to `AdminIndexView`.
:param translations_path:
Location of the translation message catalogs. By default will use the translations
shipped with Flask-Admin.
:param endpoint:
Base endpoint name for index view. If you use multiple instances of the `Admin` class with
a single Flask application, you have to set a unique endpoint name for each instance.
:param static_url_path:
Static URL Path. If provided, this specifies the default path to the static url directory for
all its views. Can be overridden in view configuration.
:param base_template:
Override base HTML template for all static views. Defaults to `admin/base.html`.
:param template_mode:
Base template path. Defaults to `bootstrap2`. If you want to use
Bootstrap 3 integration, change it to `bootstrap3`.
:param category_icon_classes:
A dict of category names as keys and html classes as values to be added to menu category icons.
Example: {'Favorites': 'glyphicon glyphicon-star'}
"""
self.app = app
self.translations_path = translations_path
self._views = []
self._menu = []
self._menu_categories = dict()
self._menu_links = []
if name is None:
name = 'Admin'
self.name = name
self.index_view = index_view or AdminIndexView(endpoint=endpoint, url=url)
self.endpoint = endpoint or self.index_view.endpoint
self.url = url or self.index_view.url
self.static_url_path = static_url_path
self.subdomain = subdomain
self.base_template = base_template or 'admin/base.html'
self.template_mode = template_mode or 'bootstrap2'
self.category_icon_classes = category_icon_classes or dict()
# Add predefined index view
self.add_view(self.index_view)
# Register with application
if app is not None:
self._init_extension()
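    # Sketch: two Admin instances on one Flask app need distinct endpoint
    # names and URLs (enforced by _init_extension below):
    #
    #     admin1 = Admin(app, name='Admin 1', url='/admin1', endpoint='admin1')
    #     admin2 = Admin(app, name='Admin 2', url='/admin2', endpoint='admin2')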
def add_view(self, view):
"""
Add a view to the collection.
:param view:
View to add.
"""
# Add to views
self._views.append(view)
# If app was provided in constructor, register view with Flask app
if self.app is not None:
self.app.register_blueprint(view.create_blueprint(self))
self._add_view_to_menu(view)
def add_link(self, link):
"""
Add link to menu links collection.
:param link:
Link to add.
"""
if link.category:
self._add_menu_item(link, link.category)
else:
self._menu_links.append(link)
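    # Example sketch using the MenuLink class imported at the top of this
    # module (names and URLs are arbitrary):
    #
    #     admin.add_link(MenuLink(name='Back to site', url='/'))
    #     admin.add_link(MenuLink(name='Docs', url='http://example.com/docs',
    #                             category='Links'))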
def _add_menu_item(self, menu_item, target_category):
if target_category:
cat_text = as_unicode(target_category)
category = self._menu_categories.get(cat_text)
# create a new menu category if one does not exist already
if category is None:
category = MenuCategory(target_category)
category.class_name = self.category_icon_classes.get(cat_text)
self._menu_categories[cat_text] = category
self._menu.append(category)
category.add_child(menu_item)
else:
self._menu.append(menu_item)
def _add_view_to_menu(self, view):
"""
Add a view to the menu tree
:param view:
View to add
"""
self._add_menu_item(MenuView(view.name, view), view.category)
def get_category_menu_item(self, name):
return self._menu_categories.get(name)
def init_app(self, app):
"""
Register all views with the Flask application.
:param app:
Flask application instance
"""
self.app = app
self._init_extension()
# Register views
for view in self._views:
app.register_blueprint(view.create_blueprint(self))
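    # Typical application-factory usage (sketch):
    #
    #     admin = Admin(name='My App')
    #
    #     def create_app():
    #         app = Flask(__name__)
    #         admin.init_app(app)
    #         return app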
def _init_extension(self):
if not hasattr(self.app, 'extensions'):
self.app.extensions = dict()
admins = self.app.extensions.get('admin', [])
for p in admins:
if p.endpoint == self.endpoint:
raise Exception(u'Cannot have two Admin() instances with same'
u' endpoint name.')
if p.url == self.url and p.subdomain == self.subdomain:
raise Exception(u'Cannot assign two Admin() instances with same'
u' URL and subdomain to the same application.')
admins.append(self)
self.app.extensions['admin'] = admins
def menu(self):
"""
Return the menu hierarchy.
"""
return self._menu
def menu_links(self):
"""
Return menu links.
"""
return self._menu_links
| bsd-3-clause | -4,105,701,313,336,007,000 | 32.437898 | 120 | 0.54455 | false |
Acidburn0zzz/servo | tests/wpt/web-platform-tests/tools/ci/tc/tests/test_valid.py | 1 | 10276 | import json
import os
from io import open
import jsone
import mock
import pytest
import requests
import yaml
from jsonschema import validate
from tools.ci.tc import decision
here = os.path.dirname(__file__)
root = os.path.abspath(os.path.join(here, "..", "..", "..", ".."))
def data_path(filename):
return os.path.join(here, "..", "testdata", filename)
def test_verify_taskcluster_yml():
"""Verify that the json-e in the .taskcluster.yml is valid"""
with open(os.path.join(root, ".taskcluster.yml"), encoding="utf8") as f:
template = yaml.safe_load(f)
events = [("pr_event.json", "github-pull-request", "Pull Request"),
("master_push_event.json", "github-push", "Push to master")]
for filename, tasks_for, title in events:
with open(data_path(filename), encoding="utf8") as f:
event = json.load(f)
context = {"tasks_for": tasks_for,
"event": event,
"as_slugid": lambda x: x}
jsone.render(template, context)
def test_verify_payload():
"""Verify that the decision task produces tasks with a valid payload"""
from tools.ci.tc.decision import decide
r = requests.get("https://community-tc.services.mozilla.com/schemas/queue/v1/create-task-request.json")
r.raise_for_status()
create_task_schema = r.json()
r = requests.get("https://raw.githubusercontent.com/taskcluster/taskcluster/master/workers/docker-worker/schemas/v1/payload.json")
r.raise_for_status()
payload_schema = r.json()
jobs = ["lint",
"manifest_upload",
"resources_unittest",
"tools_unittest",
"wpt_integration",
"wptrunner_infrastructure",
"wptrunner_unittest"]
for filename in ["pr_event.json", "master_push_event.json"]:
with open(data_path(filename), encoding="utf8") as f:
event = json.load(f)
with mock.patch("tools.ci.tc.decision.get_fetch_rev", return_value=(None, event["after"], None)):
with mock.patch("tools.ci.tc.decision.get_run_jobs", return_value=set(jobs)):
task_id_map = decide(event)
for name, (task_id, task_data) in task_id_map.items():
try:
validate(instance=task_data, schema=create_task_schema)
validate(instance=task_data["payload"], schema=payload_schema)
except Exception as e:
print("Validation failed for task '%s':\n%s" % (name, json.dumps(task_data, indent=2)))
raise e
@pytest.mark.parametrize("event_path,is_pr,files_changed,expected", [
("master_push_event.json", False, None,
{'download-firefox-nightly',
'wpt-firefox-nightly-testharness-1',
'wpt-firefox-nightly-testharness-2',
'wpt-firefox-nightly-testharness-3',
'wpt-firefox-nightly-testharness-4',
'wpt-firefox-nightly-testharness-5',
'wpt-firefox-nightly-testharness-6',
'wpt-firefox-nightly-testharness-7',
'wpt-firefox-nightly-testharness-8',
'wpt-firefox-nightly-testharness-9',
'wpt-firefox-nightly-testharness-10',
'wpt-firefox-nightly-testharness-11',
'wpt-firefox-nightly-testharness-12',
'wpt-firefox-nightly-testharness-13',
'wpt-firefox-nightly-testharness-14',
'wpt-firefox-nightly-testharness-15',
'wpt-firefox-nightly-testharness-16',
'wpt-chrome-dev-testharness-1',
'wpt-chrome-dev-testharness-2',
'wpt-chrome-dev-testharness-3',
'wpt-chrome-dev-testharness-4',
'wpt-chrome-dev-testharness-5',
'wpt-chrome-dev-testharness-6',
'wpt-chrome-dev-testharness-7',
'wpt-chrome-dev-testharness-8',
'wpt-chrome-dev-testharness-9',
'wpt-chrome-dev-testharness-10',
'wpt-chrome-dev-testharness-11',
'wpt-chrome-dev-testharness-12',
'wpt-chrome-dev-testharness-13',
'wpt-chrome-dev-testharness-14',
'wpt-chrome-dev-testharness-15',
'wpt-chrome-dev-testharness-16',
'wpt-firefox-nightly-reftest-1',
'wpt-firefox-nightly-reftest-2',
'wpt-firefox-nightly-reftest-3',
'wpt-firefox-nightly-reftest-4',
'wpt-firefox-nightly-reftest-5',
'wpt-chrome-dev-reftest-1',
'wpt-chrome-dev-reftest-2',
'wpt-chrome-dev-reftest-3',
'wpt-chrome-dev-reftest-4',
'wpt-chrome-dev-reftest-5',
'wpt-firefox-nightly-wdspec-1',
'wpt-chrome-dev-wdspec-1',
'wpt-firefox-nightly-crashtest-1',
'wpt-chrome-dev-crashtest-1',
'lint'}),
("pr_event.json", True, {".taskcluster.yml",".travis.yml","tools/ci/start.sh"},
{'download-firefox-nightly',
'lint',
'tools/ unittests (Python 2)',
'tools/ unittests (Python 3.6)',
'tools/ unittests (Python 3.8)',
'tools/wpt/ tests (Python 2)',
'tools/wpt/ tests (Python 3.6)',
'tools/wpt/ tests (Python 3.8)',
'resources/ tests',
'infrastructure/ tests',
'infrastructure/ tests (Python 3)'}),
# More tests are affected in the actual PR but it shouldn't affect the scheduled tasks
("pr_event_tests_affected.json", True, {"layout-instability/clip-negative-bottom-margin.html",
"layout-instability/composited-element-movement.html"},
{'download-firefox-nightly',
'wpt-firefox-nightly-stability',
'wpt-firefox-nightly-results',
'wpt-firefox-nightly-results-without-changes',
'wpt-chrome-dev-stability',
'wpt-chrome-dev-results',
'wpt-chrome-dev-results-without-changes',
'lint'}),
("epochs_daily_push_event.json", False, None,
{'download-firefox-stable',
'wpt-chrome-stable-reftest-1',
'wpt-chrome-stable-reftest-2',
'wpt-chrome-stable-reftest-3',
'wpt-chrome-stable-reftest-4',
'wpt-chrome-stable-reftest-5',
'wpt-chrome-stable-testharness-1',
'wpt-chrome-stable-testharness-10',
'wpt-chrome-stable-testharness-11',
'wpt-chrome-stable-testharness-12',
'wpt-chrome-stable-testharness-13',
'wpt-chrome-stable-testharness-14',
'wpt-chrome-stable-testharness-15',
'wpt-chrome-stable-testharness-16',
'wpt-chrome-stable-testharness-2',
'wpt-chrome-stable-testharness-3',
'wpt-chrome-stable-testharness-4',
'wpt-chrome-stable-testharness-5',
'wpt-chrome-stable-testharness-6',
'wpt-chrome-stable-testharness-7',
'wpt-chrome-stable-testharness-8',
'wpt-chrome-stable-testharness-9',
'wpt-chrome-stable-wdspec-1',
'wpt-chrome-stable-crashtest-1',
'wpt-firefox-stable-reftest-1',
'wpt-firefox-stable-reftest-2',
'wpt-firefox-stable-reftest-3',
'wpt-firefox-stable-reftest-4',
'wpt-firefox-stable-reftest-5',
'wpt-firefox-stable-testharness-1',
'wpt-firefox-stable-testharness-10',
'wpt-firefox-stable-testharness-11',
'wpt-firefox-stable-testharness-12',
'wpt-firefox-stable-testharness-13',
'wpt-firefox-stable-testharness-14',
'wpt-firefox-stable-testharness-15',
'wpt-firefox-stable-testharness-16',
'wpt-firefox-stable-testharness-2',
'wpt-firefox-stable-testharness-3',
'wpt-firefox-stable-testharness-4',
'wpt-firefox-stable-testharness-5',
'wpt-firefox-stable-testharness-6',
'wpt-firefox-stable-testharness-7',
'wpt-firefox-stable-testharness-8',
'wpt-firefox-stable-testharness-9',
'wpt-firefox-stable-wdspec-1',
'wpt-firefox-stable-crashtest-1',
'wpt-webkitgtk_minibrowser-nightly-reftest-1',
'wpt-webkitgtk_minibrowser-nightly-reftest-2',
'wpt-webkitgtk_minibrowser-nightly-reftest-3',
'wpt-webkitgtk_minibrowser-nightly-reftest-4',
'wpt-webkitgtk_minibrowser-nightly-reftest-5',
'wpt-webkitgtk_minibrowser-nightly-testharness-1',
'wpt-webkitgtk_minibrowser-nightly-testharness-10',
'wpt-webkitgtk_minibrowser-nightly-testharness-11',
'wpt-webkitgtk_minibrowser-nightly-testharness-12',
'wpt-webkitgtk_minibrowser-nightly-testharness-13',
'wpt-webkitgtk_minibrowser-nightly-testharness-14',
'wpt-webkitgtk_minibrowser-nightly-testharness-15',
'wpt-webkitgtk_minibrowser-nightly-testharness-16',
'wpt-webkitgtk_minibrowser-nightly-testharness-2',
'wpt-webkitgtk_minibrowser-nightly-testharness-3',
'wpt-webkitgtk_minibrowser-nightly-testharness-4',
'wpt-webkitgtk_minibrowser-nightly-testharness-5',
'wpt-webkitgtk_minibrowser-nightly-testharness-6',
'wpt-webkitgtk_minibrowser-nightly-testharness-7',
'wpt-webkitgtk_minibrowser-nightly-testharness-8',
'wpt-webkitgtk_minibrowser-nightly-testharness-9',
'wpt-webkitgtk_minibrowser-nightly-wdspec-1',
'wpt-webkitgtk_minibrowser-nightly-crashtest-1',
'wpt-servo-nightly-reftest-1',
'wpt-servo-nightly-reftest-2',
'wpt-servo-nightly-reftest-3',
'wpt-servo-nightly-reftest-4',
'wpt-servo-nightly-reftest-5',
'wpt-servo-nightly-testharness-1',
'wpt-servo-nightly-testharness-10',
'wpt-servo-nightly-testharness-11',
'wpt-servo-nightly-testharness-12',
'wpt-servo-nightly-testharness-13',
'wpt-servo-nightly-testharness-14',
'wpt-servo-nightly-testharness-15',
'wpt-servo-nightly-testharness-16',
'wpt-servo-nightly-testharness-2',
'wpt-servo-nightly-testharness-3',
'wpt-servo-nightly-testharness-4',
'wpt-servo-nightly-testharness-5',
'wpt-servo-nightly-testharness-6',
'wpt-servo-nightly-testharness-7',
'wpt-servo-nightly-testharness-8',
'wpt-servo-nightly-testharness-9',
'wpt-servo-nightly-wdspec-1',
'wpt-servo-nightly-crashtest-1',})
])
def test_schedule_tasks(event_path, is_pr, files_changed, expected):
with mock.patch("tools.ci.tc.decision.get_fetch_rev", return_value=(None, None, None)):
with mock.patch("tools.wpt.testfiles.repo_files_changed",
return_value=files_changed):
with open(data_path(event_path), encoding="utf8") as event_file:
event = json.load(event_file)
scheduled = decision.decide(event)
assert set(scheduled.keys()) == expected
| mpl-2.0 | -4,476,193,184,806,406,000 | 39.777778 | 134 | 0.64636 | false |
barentsen/iphas-dr2 | scripts/release-preparation/augment-image-metadata.py | 1 | 3046 | """Script to create a user-friendly index of IPHAS image meta data.
"""
import numpy as np
from astropy.table import Table
from astropy.table import Column
from dr2.constants import IPHASQC
# Index of images found by the DR2 pipeline
# ie. produced by dr2.images.prepare_images()
t = Table.read('iphas-images-pipeline.fits')
# Run 376022 on the disk received from CASU is a corrupt file
t.remove_row(np.argwhere(t['run'] == 376022)[0][0])
# Run 367744 appeared twice in iphas-qc.fits
t.remove_rows(np.argwhere(t['run'] == 367744)[4:])
# Add the URL of the image location
urldata = ['http://www.iphas.org/data/images/'+name[0:4]+'/'+name for name in t['filename']]
url = Column(name='url', data=urldata)
t.add_column(url, 0)
t.remove_column('filename')
# Load auxillary data from the IPHAS-QC file
runs = np.concatenate((IPHASQC['run_r'], IPHASQC['run_i'], IPHASQC['run_ha']))
fields = np.concatenate((IPHASQC['id'], IPHASQC['id'], IPHASQC['id']))
qflags = np.concatenate((IPHASQC['qflag'], IPHASQC['qflag'], IPHASQC['qflag']))
qcproblems = np.concatenate((IPHASQC['problems'], IPHASQC['problems'], IPHASQC['problems']))
depth5sig = np.concatenate((IPHASQC['r5sig_judged'],
IPHASQC['i5sig_judged'],
IPHASQC['h5sig_judged']))
field_dict = dict(zip(runs, fields))
qflag_dict = dict(zip(runs, qflags))
qcproblems_dict = dict(zip(runs, qcproblems))
depth5sig_dict = dict(zip(runs, depth5sig))
# Add the IPHAS field number
field = Column(name='fieldid', data=[field_dict[r] for r in t['run']])
t.add_column(field)
# Add the DR2 quality grade
qcgrade = Column(name='qcgrade', data=[qflag_dict[r] for r in t['run']])
t.add_column(qcgrade)
# Add the 'quality problems' summary
qcproblems = Column(name='qcproblems', data=[qcproblems_dict[r] for r in t['run']])
t.add_column(qcproblems)
# Add the 5-sigma detection limit
depth = Column(name='depth', data=[depth5sig_dict[r] for r in t['run']])
t.add_column(depth)
# Limit the number of decimals in the ascii output:
t['ra'].format = '{0:.3f}'
t['dec'].format = '{0:.3f}'
t.remove_column('airmass')
t.sort(['run', 'ccd'])
# We will export the resulting table to FITS, ASCII, and SQLITE
# First, export to FITS
columns = ['run', 'ccd', 'url', 'ra', 'dec', 'band', 'utstart',
'fieldid', 'in_dr2', 'qcgrade', 'qcproblems',
'exptime', 'seeing', 'elliptic', 'skylevel', 'skynoise',
'depth', 'photzp', 'confmap',
'ra_min', 'ra_max', 'dec_min', 'dec_max']
t[columns].write('iphas-images.fits.gz', overwrite=True)
# Export to ASCII
t['url', 'ra', 'dec', 'band', 'fieldid', 'in_dr2', 'qcgrade'].write('iphas-images.txt', format='ascii.fixed_width')
# Export to SQLITE (using atpy as astropy doesn't support sqlite yet)
import atpy
tbl = atpy.Table('iphas-images.fits.gz', name='images')
tbl.write('sqlite', 'iphas-images.sqlite', overwrite=True)
# For fast queries, you might want to do:
# CREATE INDEX images_ra_min_idx ON images(ra_min);
# CREATE INDEX images_ra_max_idx ON images(ra_max);
# VACUUM; | mit | 7,983,065,310,120,781,000 | 37.0875 | 115 | 0.673342 | false |
sieben/pycolo | tests/etsi/TestCore.py | 1 | 55943 | # coding=utf-8
"""
Implementing ETSI CoAP CoRE Tests
"""
import logging
import unittest
from pycolo.codes import codes, msgType, options
from pycolo.codes import mediaCodes
from pycolo.endpoint import Endpoint
from pycolo.message import Response
from pycolo.request import request as coap
from pycolo.resource import Resource
from pycolo.token import acquireToken
from tests.etsi import Default, LongPath, Query, Separate
from tests.examples.TestProxy import Proxy
class TestCore(unittest.TestCase):
"""
Test suite for ETSI CoAP Core Tests
- TD_COAP_CORE_01 Perform GET transaction (CON mode)
- TD_COAP_CORE_02 Perform POST transaction (CON mode)
- TD_COAP_CORE_03 Perform PUT transaction (CON mode)
- TD_COAP_CORE_04 Perform DELETE transaction (CON mode)
- TD_COAP_CORE_05 Perform GET transaction (NON mode)
- TD_COAP_CORE_06 Perform POST transaction (NON mode)
- TD_COAP_CORE_07 Perform PUT transaction (NON mode)
- TD_COAP_CORE_08 Perform DELETE transaction (NON mode)
- TD_COAP_CORE_09 Perform GET transaction with delayed response (CON mode, no piggyback)
- TD_COAP_CORE_10 Perform GET transaction containing Token option (CON mode)
- TD_COAP_CORE_11 Perform GET transaction containing token option with a separate response (CON mode)
- TD_COAP_CORE_12 Perform GET transaction not containing Token option (CON mode)
- TD_COAP_CORE_13 Perform GET transaction containing several URI-Path options (CON mode)
- TD_COAP_CORE_14 Perform GET transaction containing several URI-Query options (CON mode)
- TD_COAP_CORE_17 Perform GET transaction with a separate response (NON mode)
- TD_COAP_CORE_18 Perform POST transaction with responses containing several Location-Path options (CON mode)
- TD_COAP_CORE_19 Perform POST transaction with responses containing several Location-Query options (CON mode)
- TD_COAP_CORE_20 Perform GET transaction containing the Accept option (CON mode)
- TD_COAP_CORE_21 Perform GET transaction containing the ETag option (CON mode)
- TD_COAP_CORE_22 Perform GET transaction with responses containing the ETag option and requests containing the If-Match option (CON mode)
- TD_COAP_CORE_23 Perform GET transaction with responses containing the ETag option and requests containing the If-None-Match option (CON mode)
"""
def setUp(self):
"""
TODO
"""
server = Endpoint()
res = Default()
server.register(res)
def test_TD_COAP_CORE_01(self):
"""
:Identifier: TD_COAP_CORE_01
:Objective: Perform GET transaction (CON mode)
:Configuration: CoAP_CFG_01
        :Pre-test conditions: Server offers the resource /test that handles GET with an arbitrary payload
Step 1 (stimulus): Client is requested to send a GET request with:
- Type = 0(CON)
- Code = 1(GET)
Step 2 (check): Sent request contains Type value indicating 0 and Code value indicating 1
Step 3 (check): Server sends response containing:
- Code = 69(2.05 Content)
- The same Message ID as that of the previous request
- Content type option
Step 4 (verify): Client displays the received information
"""
r = coap.get(self.server.url + "/test", confirmable=True,
payload="TD_COAP_CORE_01",
trace=True)
self.assertEqual(r.sent.msgType, msgType.con)
self.assertEqual(r.sent.code, codes.GET)
self.assertEqual(r.code, codes.content)
self.assertEqual(r.sent.messageID, r.messageID)
self.assertEqual(r.sent.payload, r.payload)
logging.info(r)
def test_TD_COAP_CORE_02(self):
"""
:Identifier: TD_COAP_CORE_02
:Objective: Perform POST transaction (CON mode)
:Configuration: CoAP_CFG_01
:Pre-test conditions: Server accepts creation of new resource on /test (resource does not exists yet)
- Step 1 (stimulus) Client is requested to send a POST request with:
- Type = 0(CON)
- Code = 2(POST)
- An arbitrary payload
- Content type option
- Step 2 (check (CON)) Sent request contains Type value indicating 0 and Code value indicating 2
- Step 3 (verify (IOP)) Server displays received information
- Step 4 (check (CON)) Server sends response containing:
- Code = 65(2.01 Created)
- The same Message ID as that of the previous request
- Step 5 (verify (IOP)) Client displays the received response
"""
        r = coap.post(self.server.url + "/test", confirmable=True,
                      payload="TD_COAP_CORE_02")
self.assertEqual(r.code, codes.created)
self.assertEqual(r.msgType, msgType.ack)
self.assertEqual(r.sent.MID, r.MID)
logging.info(r)
def test_TD_COAP_CORE_03(self):
"""
:Identifier: TD_COAP_CORE_03
:Objective: Perform PUT transaction (CON mode)
:Configuration: CoAP_CFG_01
- Step 1 (stimulus) Client is requested to send a PUT request with:
- Type = 0(CON)
- Code = 3(PUT)
- An arbitrary payload
- Content type option
- Step 2 (check (CON)) Sent request contains Type value indicating 0 and Code value indicating 3
- Step 3 (verify (IOP)) Server displays received information
- Step 4 (check (CON)) Server sends response containing:
- Code = 68(2.04 Changed)
- The same Message ID as that of the previous request
- Step 5 (verify (IOP)) Client displays the received response
"""
r = coap.put(self.server.url + "/test",
confirmable=True,
payload="TD_COAP_CORE_02")
self.assertEqual(codes.changed, r.code)
self.assertEqual(r.msgType, msgType.ack)
def test_TD_COAP_CORE_04(self):
"""
:Identifier: TD_COAP_CORE_04
:Objective: Perform DELETE transaction (CON mode)
:Configuration: CoAP_CFG_01
Pre-test conditions:
- Server offers a /test resource that handles DELETE
Step 1 (stimulus) Client is requested to send a DELETE request with:
- Type = 0(CON)
- Code = 4(DELETE)
Step 2 (check (CON)) Sent request contains Type value indicating 0 and Code value indicating 4
Step 3 (check (CON)) Server sends response containing:
- Code = 66(2.02 Deleted)
- The same Message ID as that of the previous request
Step 4 (verify (IOP)) Client displays the received information
"""
r = coap.delete(self.server.url + "/test")
self.assertEqual(codes.deleted, r.code)
self.assertEqual(r.msgType, msgType.ack)
def test_TD_COAP_CORE_05(self):
"""
:Identifier: TD_COAP_CORE_05
:Objective: Perform GET transaction (NON mode)
:Configuration: CoAP_CFG_01
:Pre-test conditions:
- Server offers a /test resource that handles GET
- Step 1 (stimulus) Client is requested to send a GET request with:
- Type = 1(NON)
- Code = 1(GET)
- Step 2 (check (CON)) Sent request contains Type value indicating 1 and Code value indicating 1
- Step 3 (check (CON)) Server sends response containing:
- Type = 1(NON)
- Code= 69(2.05 Content)
- Content type option
- Step 4 (verify (IOP)) Client displays the received information
"""
r = coap.get(self.server.url + "/test", confirmable=False)
self.assertEqual(r.msgType, msgType.non)
self.assertEqual(r.code, codes.content)
self.assertIn("Content-Type", r)
def test_TD_COAP_CORE_06(self):
"""
:Identifier: TD_COAP_CORE_06
:Objective: Perform POST transaction (NON mode)
:Configuration: CoAP_CFG_01
:Pre-test conditions:
- Server accepts creation of new resource on /test (resource does not exists yet)
- Step 1 (stimulus) Client is requested to send a POST request with:
- Type = 1(NON)
- Code = 2(POST)
- An arbitrary payload
- Content type option
- Step 2 check (CON) Sent request contains Type value indicating 1 and Code value indicating 2
- Step 3 (verify) Server displays the received information
- Step 4 (check (CON)) Server sends response containing:
- Type = 1(NON)
- Code = 65(2.01 Created)
- Step 5 (verify (IOP)) Client displays the received response
"""
r = coap.post(self.server.url + "/test",
confirmable=False, payload="TD_COAP_CORE_06")
self.assertEqual(r.code, codes.created)
self.assertEqual(r.msgType, msgType.non)
def test_TD_COAP_CORE_07(self):
"""
:Identifier: TD_COAP_CORE_07
:Objective: Perform PUT transaction (NON mode)
:Configuration: CoAP_CFG_01
:Pre-test conditions:
- Server offers a /test resource that handles PUT
- Step 1 (stimulus) Client is requested to send a PUT request with:
- Type = 1(NON)
- Code = 3(PUT)
- An arbitrary payload
- Content type option
- Step 2 (check (CON)) Sent request contains Type value indicating 1 and Code value indicating 3
- Step 3 verify Server displays the received information
- Step 4 (check (CON)) Server sends response containing:
- Type = 1(NON)
- Code = 68(2.04 Changed)
- Step 5 (verify (IOP)) Client displays the received response
"""
r = coap.put(self.server.url + "/test",
confirmable=False, payload="TD_COAP_CORE_07")
self.assertEqual(r.msgType, msgType.non)
self.assertEqual(r.code, codes.changed)
def test_TD_COAP_CORE_08(self):
"""
:Identifier: TD_COAP_CORE_08
:Objective: Perform DELETE transaction (NON mode)
:Configuration: CoAP_CFG_01
:Pre-test conditions:
- Server offers a /test resource that handles DELETE
- Step 1 stimulus Client is requested to send a DELETE request with:
- Type = 1(NON)
- Code = 4(DELETE)
- Step 2 (check (CON)) Sent request contains Type value indicating 1 and Code value indicating 4
- Step 3 (check (CON)) Server sends response containing:
- Type = 1(NON)
- Code = 66(2.02 Deleted)
- Step 4 (verify (IOP)) Client displays the received information
"""
r = coap.delete(self.server.url + "/test", confirmable=False)
self.assertEqual(r.msgType, msgType.non)
self.assertEqual(r.code, codes.deleted)
def test_TD_COAP_CORE_09(self):
"""
:Identifier: TD_COAP_CORE_09
:Objective: Perform GET transaction with a separate response
:Configuration: CoAP_CFG_01
:Pre-test conditions:
- Server offers a resource /separate which cannot be served immediately and which
cannot be acknowledged in a piggy-backed way.
- Step 1 stimulus Client is requested to send a confirmable GET request to server’s resource
- Step 2 (Check (CON)) Sent request must contain:
- Type = 0 (CON)
- Code = 1 (GET)
- Client generated Message ID
- Step 3 (Check (CON)) Server sends response containing:
- Type = 2 (ACK)
- message ID same as the request
- empty Payload
- Step 4 (Check (CON)) Server sends response containing:
- Type = 0 (CON)
- Code = 69 (2.05 content)
- Payload = Content of the requested resource
- Content type option
- Step 5 (Check (CON)) Client sends response containing:
- Type = 2 (ACK)
- message ID same as the response
- empty Payload
- Step 6 (Verify (IOP)) Client displays the response
"""
r = coap.get(self.server.url + "/separate", confirmable=True)
        self.assertEqual(r.msgType, msgType.con)
self.assertIn("Content-Type", r)
self.assertEqual(r.code, codes.content)
def test_TD_COAP_CORE_10(self):
"""
Identifier: TD_COAP_CORE_10
Objective: Handle request containing Token option
Configuration: CoAP_CFG_01
Pre-test conditions:
- Server offers a /test resource that handles GET
Step 1 stimulus Client is requested to send a GET request to server’s
resource including Token option
Step 2 (Check (CON)) Sent request must contain:
- Type = 0 (CON)
- Code = 1 (GET)
- Client generated Token value
- Length of the token should be between 1 to 8 B
- Option Type = Token
Step 3 (Check (CON)) Server sends response containing:
- Code = 69 (2.05 content)
- Length of the token should be between 1 to 8 B
- Token value same as the requested
- Payload = Content of the requested resource
- Content type option
Step 4 (Verify (IOP)) Client displays the response
"""
token = acquireToken() # not preferring empty token
r = coap.get(self.server.url + "/test", confirmable=True, token=token,
trace=True)
self.assertEqual(r.code, codes.content)
self.assertEqual(r.msgType, msgType.ack)
self.assertEqual(
r.sent.options[options.token],
r.options[options.token])
self.assertIn("Content-Type", r)
def test_TD_COAP_CORE_11(self):
"""
:Identifier: TD_COAP_CORE_11
:Objective: Handle request not containing Token option
:Configuration: CoAP_CFG_01
:Pre-test conditions:
- Server offers a /test resource that handles GET
- Step 1 stimulus Client is requested to send a confirmable GET
request to server’s resource not containing Token option
- Step 2 (Check (CON)) Sent request must contain:
- Type = 0 (CON)
- Code = 1 (GET)
- No Token option
- Step 3 (Check (CON)) Server sends response containing:
- Code = 69 (2.05 content)
- No Token option
- Payload = Content of the requested resource
- Content type option
- Step 4 (Verify (IOP)) Client displays the response
"""
        r = coap.get(self.server.url + "/test", confirmable=True, trace=True)
        self.assertEqual(r.sent.msgType, msgType.con)
        self.assertEqual(r.msgType, msgType.ack)
        self.assertEqual(r.code, codes.content)
        self.assertNotIn(options.token, r.options)
        self.assertIn("Content-Type", r)
        logging.info(r)
def test_TD_COAP_CORE_12(self):
"""
Identifier: TD_COAP_CORE_12
Objective: Handle request containing several URI-Path options
Configuration: CoAP_CFG_01
Pre-test conditions:
- Server offers a /seg1/seg2/seg3 resource
Step 1 (stimulus) Client is requested to send a confirmable GET request to server’s resource
Step 2 (Check (CON)) Sent request must contain:
- Type = 0 (CON)
- Code = 1 (GET)
- Option type = URI-Path (one for each path segment)
Step 3 (Check (CON)) Server sends response containing:
- Code = 69 (2.05 content)
- Payload = Content of the requested resource
- Content type option
Step 4 (Verify (IOP)) Client displays the response
"""
r = coap.get(self.server.url + "/seg1/seg2/seg3", trace=True, confirmable=True)
self.assertEqual(r.sent.code, codes.GET)
self.assertEqual(r.sent.msgType, msgType.con)
        for segment in ("seg1", "seg2", "seg3"):
            self.assertIn(segment, r.sent.options)
self.assertEqual(r.code, codes.content)
self.assertEqual(r.payload, "TD_COAP_CORE_12")
logging.info(r)
def test_TD_COAP_CORE_13(self):
"""
:Identifier: TD_COAP_CORE_13
:Objective: Handle request containing several URI-Query options
:Configuration: CoAP_CFG_01
:Pre-test conditions:
- Server offers a /query resource
- Step 1: stimulus Client is requested to send a confirmable GET request with
three Query parameters (e.g. ?first=1&second=2&third=3) to
the server’s resource
- Step 2: (Check (CON)) Sent request must contain:
- Type = 0 (CON)
- Code = 1 (GET)
- Option type = URI-Query (More than one query parameter)
- Step 3: (Check (CON)) Server sends response containing:
- Type = 0/2 (CON/ACK)
- Code = 69 (2.05 content)
- Payload = Content of the requested resource
- Content type option
- Step 4 (Verify (IOP)) Client displays the response
"""
options = {"first": 1, "second": 2, "third": 3}
r = coap.get(self.server.url + "/query", trace=True, confirmable=True,
options=options)
self.assertEqual(r.sent.msgType, msgType.con)
self.assertEqual(r.sent.code, codes.GET)
self.assertIn(r.sent.options[options.query], ["first", "second", "third"])
self.assertEqual(r.code, codes.content)
self.assertIn("Content-Type", r)
self.assertTrue(r.msgType == msgType.con or r.msgType == msgType.ack)
logging.info(r)
def test_TD_COAP_CORE_16(self):
"""
:Identifier: TD_COAP_CORE_16
:Objective: Perform GET transaction with a separate response (NON mode)
:Configuration: CoAP_CFG_01
:Pre-test conditions:
- Server offers a resource /separate which cannot be served immediately.
        - Step 1 stimulus Client is requested to send a non-confirmable GET request to server’s resource
- Step 2 (Check (CON)) Sent request must contain:
- Type = 1 (NON)
- Code = 1 (GET)
- Client generated Message ID
- Step 3 (Check (CON)) Server does not send response containing:
- Type = 2 (ACK)
- message ID same as the request
- empty Payload
- Step 4 (Check (CON)) Server sends response containing:
- Type = 1 (NON)
- Code = 69 (2.05 content)
- Payload = Content of the requested resource
- Content type option
- Step 5 (Verify (IOP)) Client displays the response
"""
r = coap.get(self.server.url + "/separate", confirmable=False)
self.assertEqual(r.sent.code, codes.GET)
self.assertEqual(r.sent.msgType, msgType.non)
self.assertEqual(r.code, codes.content)
self.assertEqual(r.msgType, msgType.non)
self.assertIn("Content-Type", r)
logging.info(r)
def test_TD_COAP_CORE_17(self):
"""
:Identifier: TD_COAP_CORE_17
:Objective: Perform GET transaction with a separate response (NON mode)
:Configuration: CoAP_CFG_01
:Pre-test conditions:
- Server offers a resource /separate which cannot be served immediately.
- Step 1 (stimulus) Client is requested to send a non-confirmable GET request to server’s resource
- Step 2 (check) The request sent by the client contains:
- Type = 1 (NON)
- Code = 1 (GET)
- A message ID generated by the Client
- Step 3 (check) Server DOES NOT send response containing:
- Type = 2 (ACK)
- Same message ID as in the request in step 2
- empty Payload
- Step 4 (check) Server sends response containing:
- Type = 1 (NON)
- Code = 69 (2.05 content)
- Payload = Content of the requested resource
- Content format option
- Step 5 (verify) Client displays the response
"""
r = coap.get(self.server.url + "/separate")
self.assertEqual(r.sent.msgType, msgType.non)
self.assertEqual(r.sent.code, codes.GET)
self.assertNotEqual(r.msgType, msgType.ack)
self.assertNotEqual(r.sent.messageID, r.messageID)
self.assertNotEqual(r.payload, "")
self.assertEqual(r.msgType, msgType.non)
self.assertEqual(r.code, codes.content)
logging.info(r)
def test_TD_COAP_CORE_18(self):
"""
:Identifier: TD_COAP_CORE_18
:Objective: Perform POST transaction with responses containing several Location-Path options (CON mode)
:Configuration: CoAP_CFG_01
:Pre-test conditions:
- Server accepts creation of new resource on /test and the created resource is
located at /location1/location2/location3 (resource does not exist yet)
- Step 1 (Stimulus) Client is requested to send a confirmable POST request to server’s resource
- Step 2 (check) The request sent by the client contains:
- Type = 0 (CON)
- Code = 2 (POST)
- An arbitrary payload
- Content-format option
- Step 3 (check) Server sends response containing:
- Code = 65 (2.01 created)
- Option type = Location-Path (one for each segment)
- Option values must contain “location1”, “location2” &
“location3” without containing any ‘/’
- Step 4 (verify) Client displays the response
"""
r = coap.post(self.server.url + "/test", payload="TD_COAP_CORE_18")
self.assertEqual(r.sent.msgtype, msgType.con)
self.assertEqual(r.sent.code, codes.post)
self.assertEqual(r.code, codes.created)
self.assertIn(["location1", "location2", "location3"], r.options)
logging.info(r)
def test_TD_COAP_CORE_19(self):
"""
:Identifier: TD_COAP_CORE_19
:Objective: Perform POST transaction with responses containing several Location-Query options (CON mode)
:Configuration: CoAP_CFG_01
:Pre-test conditions: Server accepts creation of new resource on uri /location-query, the location of
the created resource contains two query parameters ?first=1&second=2
- Step 1 (stimulus) Client is requested to send a confirmable POST request to server’s resource
- Step 2 (check) The request sent by the client contains:
- Type = 0 (CON)
- Code = 2 (POST)
- An arbitrary payload
- Content-format option
- Step 3 (check) Server sends response containing:
- Code = 65 (2.01 created)
- Two options whose type is Location-Query:
- The first option contains first=1
- The second option contains second=2
- Step 4 (verify) Client displays the response
"""
r = coap.post(self.server.url + "/location-query",
query={"first": 1, "second": 2},
payload="TD_COAP_CORE_19")
self.assertEqual(r.sent.msgtype, msgType.con)
self.assertEqual(r.sent.code, codes.post)
self.assertEqual(r.code, codes.created)
self.assertEqual(r.options, {"first": 1, "second": 2})
logging.info(r)
def test_TD_COAP_CORE_20(self):
"""
:Identifier: TD_COAP_CORE_20
:Objective: Perform GET transaction containing the Accept option (CON mode)
:Configuration: CoAP_CFG_01
:Pre-test conditions: Server should provide a resource /multi-format which exists in two formats:
- text/plain;charset=utf-8
- application/xml
**Part A** Client requests a resource in text format
- Step 1 (stimulus) Client is requested to send a confirmable GET request to server’s resource
- Step 2 (check) The request sent by the client contains:
- Type = 0 (CON)
- Code = 1 (GET)
- Option: type = Accept, value = 1 (text/plain;charset=utf-8)
- Step 3 (check) Server sends response containing:
- Code = 69 (2.05 content)
- Option type = Content-Format, value = 1 (text/plain;charset=utf-8)
- Payload = Content of the requested resource in text/plain;charset=utf-8 format
- Step 4 (verify) Client displays the response
**Part B** Client requests a resource in xml format
- Step 5 (stimulus) Client is requested to send a confirmable GET request to server’s resource
- Step 6 (check) The request sent by the client contains:
- Type = 0 (CON)
- Code = 1 (GET)
- Option: type = Accept, value = 41 (application/xml)
- Step 7 (check) Server sends response containing:
- Code = 69 (2.05 content)
- Option: type = Content-Format, value = 41 (application/xml)
- Payload: Content of the requested resource in application/xml format
- Step 8 : Client displays the response
"""
r = coap.get(self.server.url + "/multi-format", confirmable=True,
options={"Accept": mediaCodes.text})
self.assertEqual(r.sent.msgType, msgType.confirmable)
self.assertEqual(r.sent.code, codes.GET)
self.assertIn({"Accept": 1}, r.sent.options)
self.assertEqual(r.code, codes.content)
self.assertIn({"Content-Format": 1}, r.options)
logging.info(r)
opt = {options.accept: mediaCodes.xml}
r = coap.get(self.server.url + "/multi-format", confirmable=True, options=opt)
def test_TD_COAP_CORE_21(self):
"""
:Identifier: TD_COAP_CORE_21
:Objective: Perform GET transaction containing the ETag option (CON mode)
:Configuration: CoAP_CFG_01
:Pre-test conditions:
- Server should offer a /test resource which vary in time
- Client & server supports ETag option
- The Client ‘s cache must be purged
**Part A** Verifying that client cache is empty
- Step 1 (stimulus) Client is requested to send a confirmable GET request to server’s resource
- Step 2 (check) The request sent request by the client contains:
- Type = 0 (CON)
- Code = 1 (GET)
- Step 3 (check) Server sends response containing:
- Code = 69 (2.05 content)
- Option type = ETag
- Option value = an arbitrary ETag value
- Step 4 (verify) Client displays the response
**Part B** Verifying client cache entry is still valid
- Step 5 (stimulus) Client is requested to send s confirmable GET request to
server’s resource so as to check if the resource was updated
- Step 6 (check) The request sent by the client contains:
- Type = 0 (CON)
- Code = 1 (GET)
- Option Type = ETag
- Option value=the ETag value received in step 3
- Step 7 (check) Server sends response containing:
- Code = 67 (2.03 Valid)
- Option type = ETag
- Option value = the ETag value sent in step 3
- Step 8 (verify) Client displays the response
**Part C** Verifying that client cache entry is no longer valid
- Step 9 (stimulus) Update the content of the server’s resource (either locally or from another CoAP client)
- Step 10 (stimulus) Client is requested to send a confirmable GET request to
server’s resource so as to check if the resource was updated
- Step 11 (check) The request sent by the client contains:
- Type = 0 (CON)
- Code = 1 (GET)
- Option Type=ETag
- Option value=the ETag value received in step 3
- Step 12 (check) Server sends response containing:
- Code = 69 (2.05 Content)
- Option type = ETag
- Option value = an arbitrary ETag value which differs from the ETag sent in step 3
- Step 13 (verify) Client displays the response
"""
pass
def test_TD_COAP_CORE_22(self):
"""
:Identifier: TD_COAP_CORE_22
:Objective: Perform GET transaction with responses containing the
ETag option and requests containing the If-Match option (CON mode)
:Configuration: CoAP_CFG_01
:Pre-test conditions:
- Server should offer a /validate resource
- Client & server supports ETag and If-Match option
- The Client ‘s cache must be purged
*Preamble* client gets the resource
- Step 1 (stimulus) Client is requested to send a confirmable GET
request to server’s resource
- Step 2 (check) The request sent by the client contains:
- Type = 0 (CON)
- Code = 1 (GET)
- Step 3 (check) Server sends response containing:
- Code = 69 (2.05 content)
- Option type = ETag
- Option value = an arbitrary Etag value
- Not empty payload
*Part A* single update
- Step 4 (Stimulus) Client is requested to send a confirmable PUT request to
server’s resource so as to perform an atomic update
- Step 5 (check) The request sent by the client contains:
- Type = 0 (CON)
- Code = 3 (PUT)
- Option Type=If-Match
- Option value=ETag value received in step 3
- An arbitrary payload (which differs from the payload received in step 3)
- Step 6 (check) Server sends response containing:
- Code = 68 (2.04 Changed)
- Step 7 (verify) Client displays the response and the server changed
its resource
*Part B* concurrent updates
- Step 8 (stimulus) Client is requested to send a confirmable GET request to
server’s resource
- Step 9 (check) The request sent by the client contains:
- Type = 0 (CON)
- Code = 1 (GET)
- Step 10 (check) The request sent by the client contains:
- Code: 69 (2.05 content)
- Option type: ETag
- Option value = an arbitrary Etag value which differs from the ETag sent in step 3
- The payload sent in step 5
- Step 11 (verify) Client displays the response
- Step 12 (stimulus) Update the content of the server’s resource from a CoAP client
- Step 13 (stimulus) Client is requested to send a confirmable PUT
request to server’s resource so as to perform an atomic update
- Step 14 (check) The request sent by the client contains:
- Type = 0 (CON)
- Code = 3 (PUT)
- Option Type=If-Match
- Option value=ETag value received in step 106
- An arbitrary payload (which differs from the previous payloads)
- Step 15 (check) Server sends response containing:
- Code = 140 (4.12 Precondition Failed)
- Step 16 (verify) Client displays the response and the server did
not update the content of the resource
"""
        # Preamble: client gets the /validate resource
        r = coap.get(self.server.url + "/validate", confirmable=True)
        self.assertEqual(r.sent.msgType, msgType.con)
        self.assertEqual(r.sent.code, codes.GET)
        self.assertEqual(r.code, codes.content)
        # Part A: atomic update (If-Match handling is still a stub here)
        r = coap.put(self.server.url + "/validate", confirmable=True,
                     payload="TD_COAP_CORE_22")
        logging.info(r)
def test_TD_COAP_CORE_23(self):
"""
:Identifier: TD_COAP_CORE_23
:Objective: Perform GET transaction with responses containing the ETag option and requests containing the
If-None-Match option (CON mode)
:Configuration: CoAP_CFG_01
:Pre-test conditions:
- Server should offer a /test resource, which does not exist and which can be created by the client
- Client & server supports If-Non-Match
*Part A* single creation
- Step 1 (stimulus) Client is requested to send a confirmable PUT request to server’s resource so as to
atomically create the resource.
- Step 2 (check) The request sent by the client contains:
- Type = 0 (CON)
- Code = 3 (PUT)
- Option Type=If-None-Match
- An arbitrary payload
- Step 3 (check) Server sends response containing:
- Code = 65 (2.01 Created)
- Step 4 (verify) Client displays the response and the server created a new resource
*Part B* concurrent creations
- Step 5 (stimulus) Client is requested to send a confirmable PUT request to server’s resource so as
to atomically create the resource.
- Step 6 (check) The request sent by the client contains:
- Type = 0 (CON)
- Code = 3 (PUT)
- Option Type=If-None-Match
- An arbitrary payload
- Step 7 (check) Server sends response containing:
- 140 (4.12 Precondition Failed)
- Step 8 (verify) Client displays the response
"""
pass
class TestLossy(unittest.TestCase):
"""
Test suite for ETSI CoAP Core Tests (Lossy Context)
- TD_COAP_CORE_15 Perform GET transaction (CON mode, piggybacked response)
in a lossy context
- TD_COAP_CORE_16 Perform GET transaction (CON mode, delayed response)
in a lossy context
"""
def setUp(self):
"""
Implement a CoAP_CFG_02:
Basic One-2-One CoAP client/server Configuration in lossy context
The Gateway emulates a lossy medium between the client and the server.
It does not implement the CoAP protocol itself (in other terms it is
not a CoAP proxy), but works at the transport layer. It provides two
features:
- It performs NAT-style UDP port redirections towards the server
(thus the client contacts the gateway and is transparently
redirected towards the server)
- It randomly drops packets that are forwarded between the client and
the server
"""
self.server = Endpoint(trace=True, lossy_factor=0.5)
def test_TD_COAP_CORE_14(self):
"""
:Identifier: TD_COAP_CORE_14
:Objective: Interoperate in lossy context (CON mode, piggybacked response)
:Configuration: CoAP_CFG_02
:Pre-test conditions:
- Gateway is introduced and configured to produce packet loss
- Server offers a /test resource that can handle GET
Need to observe :
- One dropped request
- One dropped request ACK
- One dropped response
- One dropped response ACK and its retransmission
- Test sequence should be executed several times
- Step 1 (stimulus) Client is requested to send a confirmable GET request to server’s resource
- Step 2 (Check (CON)) Sent request must contain:
- Type = 0
- Code = 1
- Client generated Message ID
- Step 3 (Check (CON)) Server sends response containing:
- Type = 2 (ACK)
- Code = 69 (2.05 content)
- Payload = Content of the requested resource
- Content type option
- Step 4 (Verify (IOP)) Client displays the response
"""
# Keep issuing requests until every loss event listed above has been
# observed at least once; `or` is required here, since `and` would stop
# the loop after the first observed event.
while self.server.drop_requests < 1\
or self.server.drop_requests_ack < 1\
or self.server.drop_response < 1\
or self.server.drop_response_ack < 1:
r = coap.get(self.server.url + "/test")
self.assertEqual(r.sent.msgType, msgType.con)
self.assertEqual(r.sent.code, codes.get)
self.assertIsNotNone(r.sent.messageID)
def test_TD_COAP_CORE_15(self):
"""
:Identifier: TD_COAP_CORE_15
:Objective: Interoperate in lossy context (CON mode, delayed response)
:Configuration: CoAP_CFG_02
:Pre-test conditions:
- Gateway is introduced and configured to produce packet loss
- Server offers a /separate resource which cannot be served immediately and which
cannot be acknowledged in a piggy-backed way.
:Need to observe:
- One dropped request
- One dropped request ACK
- One dropped response
- One dropped response ACK and its retransmission
- Test sequence should be executed several times
- Step 1 (stimulus) Client is requested to send a confirmable GET request to server’s resource
- Step 2 (Check (CON)) Sent request must contain:
- Type = 0
- Code = 1
- Client generated Message ID
- Step 3 (Check (CON)) Server sends response containing:
- Type = 2 (ACK)
- message ID same as the request
- empty Payload
- Step 4 (Check (CON)) Server sends response containing:
- Type = 0 (CON)
- Code = 69 (2.05 content)
- Payload = Content of the requested resource
- Content type option
- Step 5 (Check (CON)) Client sends response containing:
- Type = 2 (ACK)
- message ID same as the response
- empty Payload
- Step 6 (Verify (IOP)) Client displays the response
"""
class TestProxy(unittest.TestCase):
"""
Test suite for ETSI CoAP Core Tests Basic One-2-One CoAP proxy/server
Configuration
- TD_COAP_CORE_24 Perform POST transaction with responses containing
several Location-Path options (Reverse Proxy in CON mode)
- TD_COAP_CORE_25 Perform POST transaction with responses containing
several Location-Query options (Reverse proxy)
- TD_COAP_CORE_26 Perform GET transaction containing the Accept option
(CON mode) (Reverse proxy)
- TD_COAP_CORE_27 Perform GET transaction with responses containing the
ETag option and requests containing the If-Match option (CON mode)
(Reverse proxy)
- TD_COAP_CORE_28 Perform GET transaction with responses containing the
ETag option and requests containing the If-None-Match option
(CON mode) (Reverse proxy)
- TD_COAP_CORE_29 Perform GET transaction with responses containing the
Max-Age option (Reverse proxy)
"""
def setUp(self):
"""
Implement a CoAP_CFG_03:
Basic One-2-One CoAP proxy/server Configuration
The reverse proxy shown in Figure 3 is assumed to be a CoAP/CoAP proxy.
Test operator includes an interface (it can be a CoAP client) that
creates the stimulus to initiate the tests for reverse proxy.
More precisely, there are two ways to create the stimulus for the
reverse proxy:
1. Reverse proxy can provide a direct interface to create and
launch the stimulus
2. A CoAP client can be connected to reverse proxy to create and
launch the stimulus for the tests
In both cases, the reverse proxy and the client equally act as points
of observation.
"""
self.proxy = Proxy("coap://localhost", port=5684, trace=True) # Different port for the proxy
self.server = Endpoint(trace=True)
def test_TD_COAP_CORE_24(self):
"""
:Identifier: TD_COAP_CORE_24
:Objective: Perform POST transaction with responses containing
several Location-Path options (Reverse Proxy in CON mode)
:Configuration: CoAP_CFG_03
:Pre-test conditions:
- Proxy is configured as a reverse-proxy for the server
- Proxy’s cache is cleared
- Server accepts creation of new resource on /test and the created resource is
located at /location1/location2/location3 (resource does not exist yet)
- Step 1 (stimulus) Client is requested to send a confirmable POST request to proxy
- Step 2 (check) The POST sent by the client contains:
- Type = 0 (CON)
- Code = 2 (POST)
- An arbitrary payload
- Content-format option
- Step 3 (check) The Proxy forwards the POST request to server’s resource and that it contains:
- Type = 0 (CON)
- Code = 2 (POST)
- An arbitrary payload
- Content-format option
- Step 4 (check) Server sends a response to the proxy containing:
- Code = 65 (2.01 created)
- Option type = Location-Path (one for each segment)
- Option values must contain “location1”, “location2” & “location3” without containing any ‘/’
- Step 5 (check) Observe that the Proxy forwards the response (in step 4) to client and check that the
forwarded response contains:
- Code = 65 (2.01 created)
- Option type = Location-Path (one for each segment)
- Option values must contain “location1”, “location2” & “location3” without containing any ‘/’
- Step 6 (verify) Client displays the response
- Step 7 (verify) Client interface returns the response
- 2.01 created
- Location: coap://proxy/location1/location2/location3
"""
payload = "TD_COAP_CORE_24"
r = coap.post(self.proxy.url + "/test", confirmable=True, payload=payload) # send the payload asserted below
self.assertEqual(r.sent.msgType, msgType.con)
self.assertEqual(r.sent.code, codes.post)
self.assertEqual(r.sent.payload, payload)
self.assertEqual(self.proxy.msg_sent[0].msgType, msgType.con)
self.assertEqual(self.proxy.msg_sent[0].code, codes.post)
self.assertEqual(self.proxy.msg_sent[0].payload, payload)
self.assertEqual(self.proxy.msg_received[0].code, codes.created)
def test_TD_COAP_CORE_25(self):
"""
:Identifier: TD_COAP_CORE_25
:Objective: Perform POST transaction with responses containing several Location- Query option (Reverse proxy)
:Configuration: COAP_CFG_03
:Pre-test conditions:
- Proxy is configured as a reverse-proxy for the server
- Proxy’s cache is cleared
- Server accepts creation of new resource on uri /location-query, the location of
the created resource contains two query parameters ?first=1&second=2
- Step 1 (Stimulus): Client is requested to send a confirmable POST request to proxy
- Step 2 (check) Proxy receives the request from client & forwards it to server’s resource
- Step 3 (check) Forwarded request must contain:
- Type = 0 (CON)
- Code = 2 (POST)
- An arbitrary payload
- Content-format option
- Step 4 (check) Server sends response to proxy containing:
- Code = 65 (2.01 created)
- Two options whose type is Location-Query
- The first option contains first=1
- The second option contains second=2
- Step 5 (check) Proxy forwards the response to client
- Step 6 (check) Client displays the message
- Step 7 (verify) Client interface returns the response:
- 2.01 created
- Location: coap://proxy/?first=1&second=2
"""
r = coap.post(self.proxy.url + "/location-query", confirmable=True, query={"first": 1, "second": 2})
logging.info(r)
self.assertEqual(r.code, codes.created)
self.assertEqual(r.url, self.proxy.url + "?first=1&second=2")
def test_TD_COAP_CORE_26(self):
"""
:Identifier: TD_COAP_CORE_26
:Objective: Perform GET transaction containing the Accept option (CON mode)
:Configuration: CoAP_CFG_03
:Pre-test conditions:
- Proxy is configured as a reverse-proxy for the server
- Proxy’s cache is cleared
- Server should provide a resource /multi-format which exists in
two formats:
- text/plain;charset=utf-8
- application/xml
*Part A*: client requests text format
- Step 1 (stimulus) Client is requested to send a confirmable GET request to proxy
- Step 2 (check) Proxy receives the request from client & forwards it to server’s resource
- Step 3 (check) Forwarded request must contain:
- Type = 0 (CON)
- Code = 1 (GET)
- Option: type = Accept, value = 1 (text/plain;charset=utf-8)
- Step 4 (check) Server sends response containing:
- Code = 69 (2.05 content)
- Option: type = Content-Format, value = 1 (text/plain;charset=utf-8)
- Payload = Content of the requested resource in text/plain;charset=utf-8 format
- Step 5 (check) Proxy forwards the response to client
- Step 6 (verify) Client receives & displays the response
- Step 7 (check) Response contains:
- Code = 69 (2.05 content)
- Option: type = Content-Format, value = 1 (text/plain;charset=utf-8)
- Payload = Content of the requested resource in text/plain;charset=utf-8 format
*Part B*: client requests xml format
- Step 8 (stimulus) Client is requested to send a confirmable GET request to Proxy
- Step 9 (check) Proxy forwards the request to server
- Step 10 (check) Sent request must contain:
- Type = 0 (CON)
- Code = 1 (GET)
- Option: type = Accept, value = 41 (application/xml)
- Step 11 (check) Server sends response containing:
- Code = 69 (2.05 content)
- Option: type = Content-Format, value = 41 (application/xml)
- Payload = Content of the requested resource in application/xml format
- Step 12 (check) Proxy forwards the response to client
- Step 13 (verify) Client receives & displays the response
- Step 14 (check) Client displays the response received:
- Code = 69 (2.05 content)
- Option: type = Content-Format, value = 41 (application/xml)
- Payload = Content of the requested resource in application/xml format
"""
# Part A: request the text/plain representation. The `accept` kwarg and the
# `options.contentFormat` attribute are assumptions mirroring the option
# names already used in this suite.
r = coap.get(self.proxy.url + "/multi-format", confirmable=True, accept=mediaCodes.text)
self.assertEqual(len(self.proxy.msg_received), 1)
self.assertEqual(len(self.proxy.msg_sent), 1)
self.assertEqual(self.proxy.msg_sent[0].msgType, msgType.con)
self.assertEqual(self.proxy.msg_sent[0].code, codes.get)
self.assertEqual(self.proxy.msg_sent[0].options[options.accept], mediaCodes.text)
self.assertEqual(r.options[options.contentFormat], mediaCodes.text)
# Part B: request the application/xml representation
r = coap.get(self.proxy.url + "/multi-format", confirmable=True, accept=mediaCodes.xml)
self.assertEqual(r.code, codes.content)
self.assertEqual(r.options[options.contentFormat], mediaCodes.xml)
def test_TD_COAP_CORE_27(self):
"""
:Identifier: TD_COAP_CORE_27
:Objective: Perform GET transaction with responses containing the ETag option and requests containing the
If-Match option (CON mode)
:Configuration: CoAP_CFG_03
:Pre-test conditions:
- Proxy is configured as a reverse-proxy for the server
- Proxy’s cache is cleared
- Server should offer a /test resource
- Client & server supports ETag option and If-Match option
*Preamble* client gets the resource
- Step 1 (stimulus) Client is requested to send a confirmable GET request to proxy
- Step 2 (check) Proxy forwards the request to server
- Step 3 (check) Forwarded request must contain:
- Type = 0 (CON)
- Code = 1 (GET)
- Step 4 (check) Server sends response containing:
- Code = 69 (2.05 content)
- Option type = ETag
- Option value = an arbitrary ETag value
- Step 5 (check) Proxy forwards the response to client
*Part A*: single update
- Step 6 (stimulus) Client is requested to send a confirmable PUT request to Proxy
- Step 7 (check) Sent request must contain:
- Type = 0 (CON)
- Code = 3 (PUT)
- Option Type=If-Match
- Option value=ETag value received in step 4
- An arbitrary payload (which differs from the payload received in step 3)
- Step 8 (verify) Proxy forwards the request to server’s resource & server updates the resource
- Step 9 (check) Server sends response containing:
- Code = 68 (2.04 Changed)
- Option type = ETag
- Option value = an arbitrary ETag value which differs from the ETag received in step 4
- Step 10 (check) Proxy forwards the response to client
- Step 11 (check) Forwarded response contains:
- Code = 68 (2.04 Changed)
- Option type = ETag
- Option value = same ETag value found in step 9
- Step 12 (verify) Client displays the response
*Part B*: concurrent updates
- Step 13 (stimulus) Update the content of the server’s resource
(either locally or from another CoAP client)
- Step 14 (stimulus) Client is requested to send a confirmable PUT
request to proxy so as to perform an atomic update
- Step 15 (check) Sent request must contain:
- Type = 0 (CON)
- Code = 3 (PUT)
- Option Type=If-Match
- Option value=ETag value received in step 9
- An arbitrary payload (which differs from the previous payloads)
- Step 16 (check) Proxy forwards the request to server’s resource
- Step 17 (check) Sent request must contain:
- Type = 0 (CON)
- Code = 3 (PUT)
- Option Type=If-Match
- Option value=same ETag value found in step 15
- An arbitrary payload (which differs from the previous payloads)
- Step 18 (check) Server sends response containing:
- Code = 140 (4.12 Precondition Failed)
- Step 19 (Verify) Proxy forwards the response to client
- Step 20 (check) Response contains:
- Code = 140 (4.12 Precondition Failed)
- Step 21 (Verify) Client displays the response
"""
# Preamble: fetch the resource through the proxy and record its ETag
r = coap.get(self.proxy.url, confirmable=True)
self.assertEqual(self.proxy.msg_fowarded[0].url, self.server.url)
self.assertEqual(self.proxy.msg_fowarded[0].msgType, msgType.con)
self.assertEqual(self.proxy.msg_fowarded[0].code, codes.get)
self.assertEqual(self.server.msg_sent[0].code, codes.content)
self.assertEqual(self.proxy.msg_fowarded[1].code, codes.content)
self.assertIn(options.etag, self.proxy.msg_fowarded[1].options)
# Part A
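# Hedged sketch of steps 6-12; the `if_match` and `payload` kwargs and the
# `codes.changed` name are assumptions consistent with the rest of the suite.
etag = self.proxy.msg_fowarded[1].options[options.etag]
r = coap.put(self.proxy.url, confirmable=True, if_match=etag, payload="TD_COAP_CORE_27 A")
self.assertEqual(r.code, codes.changed)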
# Part B: a concurrent update (step 13) makes the stored ETag stale, so the
# atomic update must now be rejected (steps 14-21)
r = coap.put(self.proxy.url, confirmable=True, if_match=etag, payload="TD_COAP_CORE_27 B")
self.assertEqual(r.code, codes.precondition_failed)
logging.info(r)
def test_TD_COAP_CORE_28(self):
"""
:Identifier: TD_COAP_CORE_28
:Objective: Perform GET transaction with responses containing the
ETag option and requests containing the
If-None-Match option (CON mode) (Reverse proxy)
:Configuration: CoAP_CFG_03
:Pre-test conditions:
- Proxy is configured as a reverse-proxy for the server
- Proxy’s cache is cleared
- Server should offer a /test resource, which does not exist and
which can be created by the client
- Client & server supports If-None-Match
*Part A*: single creation
- Step 1 (stimulus) Client is requested to send a confirmable PUT
request to proxy to atomically create resource in server
- Step 2 (check) Proxy forwards the request to server
- Step 3 (check) Forwarded request must contain:
- Type = 0 (CON)
- Code = 3 (PUT)
- Option Type=If-None-Match
- An arbitrary payload
- Step 4 (check) Server sends response containing:
- Code = 65 (2.01 Created)
- Step 5 (check) Proxy forwards the response to client
- Step 6 (verify) Client displays the response & the server created a
new resource
*Part B*: concurrent creations
- Step 7 (stimulus) Client is requested to send a confirmable PUT
request to proxy to atomically create the resource in server
- Step 8 (check) Sent request must contain:
- Type = 0 (CON)
- Code = 3 (PUT)
- Option Type=If-None-Match
- An arbitrary payload
- Step 9 (check) Server sends response containing:
- 140 (4.12 Precondition Failed)
- Step 10 (verify) Proxy forwards the response to client
- Step 11 (check) Response contains:
- 140 (4.12 Precondition Failed)
- Step 12 (verify) Client displays the response
"""
# Part A: the first If-None-Match creation should succeed. The
# `if_none_match` kwarg is an assumption; `msg_fowarded` matches the
# attribute used in test_TD_COAP_CORE_27.
r = coap.put(self.proxy.url + "/test", confirmable=True, if_none_match=True)
self.assertEqual(len(self.proxy.msg_fowarded), 1)
self.assertEqual(self.proxy.msg_fowarded[0].code, codes.put)
self.assertEqual(r.code, codes.created)
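# Part B (hedged sketch, steps 7-12 above): repeating the If-None-Match
# creation against the now-existing resource should yield 4.12
# Precondition Failed.
r = coap.put(self.proxy.url + "/test", confirmable=True, if_none_match=True)
self.assertEqual(r.code, codes.precondition_failed)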
logging.info(r)
def test_TD_COAP_CORE_29(self):
"""
:Identifier: TD_COAP_CORE_29
:Objective: Perform GET transaction with responses containing the Max-Age option (Reverse proxy)
:Configuration: CoAP_CFG_03
:Pre-test conditions:
- Proxy offers a cache
- Proxy is configured as a reverse-proxy for the server
- Server’s resource varies in time and supports the Max-Age option
- Proxy’s cache is cleared
- Server offers a resource /test that varies in time, with a Max-Age set to 30s
- Step 1 (stimulus) A confirmable GET request is sent to Proxy from Client
- Step 2 (check) Proxy Sends request containing:
- Type = 0 (CON)
- Code = 1 (GET)
- Step 3 (check) Server sends response containing:
- Code = 69 (2.05 Content)
- Option type = ETag
- Option value = ETag value
- Option type = Max-age
- Option value
- Step 4 (verify) Proxy forwards response to client
- Step 5 (stimulus) A confirmable GET request is sent to proxy from Client before Max-Age expires
- Step 6 (check) Proxy does not forward any request to the server
- Step 7 (check) Proxy sends response to client
- Step 8 (verify) Response contains:
- Option type = Max-age
- Option Value = new Max-age
- Payload cached
"""
max_age = 30 # seconds
payload = "TD_COAP_CORE_29"
r = coap.get(self.proxy.url + "/test", confirmable=True)
self.assertEqual(self.proxy.msg_sent[0].msgType, msgType.con)
self.assertEqual(self.proxy.msg_sent[0].code, codes.get)
self.assertEqual(self.proxy.msg_received[0].code, codes.content)
self.assertEqual(self.proxy.msg_received[0].options[options.etag], 42) # TODO: Set this right
self.assertEqual(self.proxy.msg_received[0].options[options.maxAge], max_age)
r = coap.get(self.proxy.url + "/test", confirmable=True)
self.assertEqual(len(self.proxy.msg_sent), 1)
self.assertEqual(r.options[options.maxAge], max_age)
self.assertEqual(r.payload, payload)
if __name__ == '__main__':
unittest.main()
| isc | 4,649,022,669,152,286,000 | 38.337562 | 147 | 0.609459 | false |
r-owen/TUI | TUI/TCC/FocalPlaneWindow.py | 1 | 17426 | #!/usr/bin/env python
"""Display a cartoon of the focal plane showing:
* Instrument nominal center and x and y axes
* Boresight position
* Object orientation
* Spider orientation
To do:
- modify to use preferences for colors (need 3 colors for 3 axes,
also colors for current, target and ?potential? position).
- implement proper handling of TAI time in the angle tuplets;
for now angle is ignored
- add zero tick-mark to rotator wrap scale
- add display for boresight, scale, image limits,
or remove the stuff (it should be displayed somewhere, but how????)
History:
2002-12-05 ROwen Added URL-based help.
2002-12-23 ROwen Fixed a bug in setInstLim exposed by pychecker.
2003-03-05 ROwen Modified to use simplified KeyVariables.
2003-04-02 ROwen Modified to use the TCC model; added inst axis.
2003-04-12 ROwen Modified to open its own window with addWindow.
2003-06-09 ROwen Modified to not require dispatcher.
2003-06-25 ROwen Modified test case to handle message data as a dict
2003-10-30 ROwen Added display of rotator target position.
2004-05-18 ROwen Bug fix: resize was not handled well (I'm not sure why,
but I fixed it by switching from pack to grid).
Changed FocalPlaneWdg.configure to _configureEvt.
Stopped importing sys since it wasn't used.
2004-08-11 ROwen Modified for updated RO.Wdg.CtxMenu.
2004-10-22 ROwen Stopped using RO.Wdg.PatchedCanvas; it's no longer needed.
2005-06-06 ROwen Bug fix: if rotator limits changed the current and target
rotator position might not be centered on the spiral.
2005-06-08 ROwen Changed Axis to a new style class.
2011-06-17 ROwen Added WindowName constant.
Changed "type" to "msgType" in parsed message dictionaries (in test code only).
2012-07-10 ROwen Removed use of update_idletasks in test code.
"""
import Tkinter
import tkFont
import RO.MathUtil
import RO.Wdg
import RO.CanvasUtil
import TUI.TCC.TCCModel
_HelpPage = "Telescope/FocalPlaneWin.html"
WindowName = "TCC.Focal Plane"
def addWindow(tlSet):
"""Create the window for TUI.
"""
tlSet.createToplevel(
name = WindowName,
defGeom = "201x201+636+22",
wdgFunc = FocalPlaneWdg,
)
class Axis(object):
def __init__(self,
cnv,
name,
lengths,
labels,
ctr = [None, None],
color = "black",
arrowShape = (3,5,1),
longestLabel = None, # longest label this axis will ever have;
# if unspecified, uses the longest label in labels
mirrors = (1, 1),
):
"""Creates an axis display.
Inputs:
cnv: canvas on which to draw
name: name of axis (used as a tag on the canvas)
lengths (duple): length of axis display along x, y (pix)
labels (duple): text for x, y axis label
ctr (duple): center point on canvas (pix)
color: Tk color name
arrowShape: see Tkinter create_line arrowshape parameter; used for x and y
longestLabel: longest label this axis will ever have; if omitted,
the longest entry in labels determines the reserved width (pixels)
mirrors (duple): controls (left-right, up-down) mirroring;
1 to not mirror, -1 to mirror
"""
self.ang = None
self.cnv = cnv
self.name = name
self.lengths = lengths
self.labels = labels
self.ctr = ctr
self.color = color
self.arrowShape = arrowShape
self.mirrors = mirrors
self.font = Tkinter.Entry()["font"]
self.fontObj = tkFont.Font(font = self.font)
if longestLabel:
self.maxLabelWidth = self.fontObj.measure(longestLabel)
else:
self.maxLabelWidth = max(
self.fontObj.measure(labels[0]),
self.fontObj.measure(labels[1]),
)
def setAng(self, ang, isCurrent=True, **kargs):
self.ang = ang
self.draw()
def setMirrors(self, mirrors):
self.mirrors = mirrors
self.draw()
def getRadius(self):
return self.maxLabelWidth + max(self.lengths)
def draw(self):
self.cnv.delete(self.name)
if (None in self.ctr) or (self.ang is None):
return
self.cnv.create_line(
self.ctr[0] + self.lengths[0] * RO.MathUtil.cosd(self.ang) * self.mirrors[0],
self.ctr[1] - self.lengths[0] * RO.MathUtil.sind(self.ang) * self.mirrors[1],
self.ctr[0], self.ctr[1],
self.ctr[0] - self.lengths[1] * RO.MathUtil.sind(self.ang) * self.mirrors[0],
self.ctr[1] - self.lengths[1] * RO.MathUtil.cosd(self.ang) * self.mirrors[1],
arrow = "both",
arrowshape = self.arrowShape,
fill = self.color,
tag = self.name,
)
labelRads = [2 + (self.fontObj.measure(label) / 2) for label in self.labels]
self.cnv.create_text(
self.ctr[0] + (self.lengths[0] + labelRads[0]) * RO.MathUtil.cosd(self.ang) * self.mirrors[0],
self.ctr[1] - (self.lengths[0] + labelRads[0]) * RO.MathUtil.sind(self.ang) * self.mirrors[1],
text = self.labels[0],
font = self.font,
fill = self.color,
tag = self.name,
)
self.cnv.create_text(
self.ctr[0] - (self.lengths[1] + labelRads[1]) * RO.MathUtil.sind(self.ang) * self.mirrors[0],
self.ctr[1] - (self.lengths[1] + labelRads[1]) * RO.MathUtil.cosd(self.ang) * self.mirrors[1],
text = self.labels[1],
font = self.font,
fill = self.color,
tag = self.name,
)
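# Illustrative usage of Axis (a hedged sketch; `cnv` is any Tkinter canvas):
# axis = Axis(cnv, "demo", lengths=(40, 40), labels=("E", "N"), ctr=[100, 100])
# axis.setAng(30.0) # redraw with the axes rotated 30 degrees counterclockwise
# axis.setMirrors((-1, 1)) # flip left-right, e.g. for a mirror-imaged instrument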
class FocalPlaneWdg (Tkinter.Frame):
"""A widget for displaying relative angles on the focal plane,
e.g. the direction of North, and whether there is a mirror-image flip
(based on instrument scale).
To be done:
- Add boresight position and focal plane shape IF I can figure out
a way display this info without wasting a ton of space.
- Be smarter about sizing the widget or don't make it resizable.
At present axes are drawn at a fixed length and the rotator wrap
is drawn outside that area (and resizes with the window).
- Display numeric values for the various things.
- Allow showing north-based azimuth
- Consider moving rotator wrap elsewhere.
"""
FPMargin = 15 # number of pixels between FP and wrap displays
WrapMargin = 2 # number of pixels outside wrap display
WrapItemRad = 3 # radius of largest indicator in wrap display
WrapScaleDRad = 10 # change in radius from beginning to end of wrap scale
WrapDRad = WrapScaleDRad + (2 * WrapItemRad) # number of pixels for wrap display annulus, excluding margin
def __init__(self,
master,
width = 201,
height = 201,
**kargs
):
Tkinter.Frame.__init__(self, master)
self.model = TUI.TCC.TCCModel.getModel()
self.instNameWdg = RO.Wdg.StrLabel(
master = self,
helpURL = _HelpPage,
anchor="c",
)
self.instNameWdg.grid(row=0, column=0)
self.cnv = Tkinter.Canvas(
master = self,
width = width,
height = height,
selectborderwidth = 0,
highlightthickness = 0)
RO.Wdg.addCtxMenu(
wdg = self.cnv,
helpURL = _HelpPage,
)
self.cnv.grid(row=1, column=0, sticky="nsew")
self.rowconfigure(1, weight=1)
self.columnconfigure(0, weight=1)
# instance variables:
# ctr: position of center of canvas, in pixels
# size: size of canvas, in pixels
# scale: scale of canvas, in pixels per deg
# boresight: position of boresight, in deg
self.ctr = [None, None]
self.frameSize = [None, None]
self.fpRad = None
self.wrapRadIn = None
self.wrapRadOut = None
self.scale = None
self.border = int(self.cnv["highlightthickness"]) + int(self.cnv["selectborderwidth"])
self.rotCurrent = None
self.rotTarget = None
self.instAxis = Axis(
cnv = self.cnv,
name = "inst",
lengths = (50, 50),
labels = ("X", "Y"),
ctr = self.ctr,
color = "dark green",
)
self.instAxis.setAng(0.0)
self.horizonAxis = Axis(
cnv = self.cnv,
name = "horizon",
lengths = (20, 20),
labels = ("Az", "Alt"),
ctr = self.ctr,
color = "blue",
)
self.userAxis = Axis(
cnv = self.cnv,
name = "user",
lengths = (35, 35),
labels = ("E", "N"),
ctr = self.ctr,
color = "black",
longestLabel = "long", # longitude for galactic coords
)
self.sign = [None, None]
self.boresight = None
self.instScale = None
self.instCtr = None
self.instLim = None
self.instName = None
self.objInstAng = None
self.spiderInstAng = None
self.rotWrapGauge = RO.CanvasUtil.Spiral (
cnv = self.cnv,
xctr = 1, yctr = 1,
begRad = 0, endRad = 0, # not yet ready to draw; canvas size unknown
begAng = None, endAng = None, # rotator limits unknown
angOff = +90.0,
angScale = -1.0,
)
self.cnv.bind('<Configure>', self._configureEvt)
# create RO key variables for the various quanities being displayed
self.model.instName.addROWdg(self.instNameWdg)
self.model.objInstAng.addPosCallback(self.userAxis.setAng)
self.model.spiderInstAng.addPosCallback(self.horizonAxis.setAng)
self.model.axePos.addIndexedCallback(self.setRotCurrent, 2)
self.model.tccPos.addIndexedCallback(self.setRotTarget, 2)
self.model.rotLim.addCallback(self.setRotLim)
self.model.iimScale.addCallback(self.setInstScale)
self._setSize()
# def setBoresight(self, posDeg, isCurrent=True, **kargs):
# """Set the boresight position: degrees"""
# self.boresight = posDeg
# self.draw()
def setCoordSys(self, coordSys, isCurrent=True, **kargs):
"""Sets the coordinate system
Inputs:
coordSys: a duple consisting of:
coordinate system name
date (a number)
"""
lcname = coordSys[0].lower()
if lcname in ("icrs", "fk4", "fk5", "geo"):
userLabels = ("E", "N")
elif lcname in ("gal", "ecl"):
userLabels = ("Long", "Lat")
else:
# user coordinate system is az/alt, but that is already shown
userLabels = (None, None)
self.userAxis.labels = userLabels
self.userAxis.draw()
def setInstScale(self, instScale, isCurrent=True, **kargs):
"""Set the instrument scale: instrument pixels/degree on the sky"""
self.instScale = instScale
mirrors = [1, 1]
for ind in range(2):
if instScale[ind]:
mirrors[ind] = RO.MathUtil.sign(instScale[ind])
self.horizonAxis.setMirrors(mirrors)
self.userAxis.setMirrors(mirrors)
self.instAxis.setMirrors(mirrors)
self.draw()
# def setInstCtr(self, instCtr, isCurrent=True, **kargs):
# """Set the instrument center: instrument pixels"""
# self.instCtr = instCtr
# self.draw()
#
# def setInstLim(self, instLim, isCurrent=True, **kargs):
# """Set the instrument limits: [min x, min y, max x, max y] in inst pixels"""
# self.instLim = instLim
# self.draw()
def setRotLim(self, rotLim, isCurrent=True, **kargs):
"""Sets the rotator limits. rotLim = minPos, maxPos and other values which are ignored"""
self.rotWrapGauge.setAngLim(rotLim[0], rotLim[1])
self._drawRotCurrent()
self._drawRotTarget()
def setRotCurrent(self, rotCurrent, isCurrent=True, **kargs):
"""Update rotator's current mount position.
"""
self.rotCurrent = rotCurrent
self._drawRotCurrent()
def setRotTarget(self, rotTarget, isCurrent=True, **kargs):
"""Update rotator's target mount position.
"""
self.rotTarget = rotTarget
self._drawRotTarget()
def _setSize(self):
self.frameSize[0] = self.cnv.winfo_width() - (2 * self.border)
self.frameSize[1] = self.cnv.winfo_height() - (2 * self.border)
frameRad = min(self.frameSize) / 2
for ind in range(2):
self.ctr[ind] = self.frameSize[ind] / 2
endRad = frameRad - (FocalPlaneWdg.WrapMargin + FocalPlaneWdg.WrapItemRad)
endRad = max (endRad, 0)
begRad = endRad - FocalPlaneWdg.WrapScaleDRad
begRad = max (begRad, 0)
self.fpRad = begRad - (FocalPlaneWdg.WrapItemRad + FocalPlaneWdg.FPMargin)
self.fpRad = max(self.fpRad, 0)
# rotWrapGauge geometry; beg and end radius only refer to the spiral;
# WrapItemRad provides additional room for the items on the spiral
self.rotWrapGauge.setGeom(
xctr = self.ctr[0],
yctr = self.ctr[1],
begRad = begRad,
endRad = endRad,
redraw = 0,
)
# self._printInfo()
def _printInfo(self):
print "FocalPlaneWdg"
print "window size = ", self.cnv.winfo_width(), self.cnv.winfo_height()
print "frameSize = ", self.frameSize, "pixels"
print "ctr = ", self.ctr
print "fpRad = ", self.fpRad
print "border = ", self.border
print ""
print "boresight = ", self.boresight, "deg"
print "instScale = ", self.instScale
print "instCtr = ", self.instCtr
print "instLim = ", self.instLim
print "instName = ", self.instName
print "objInstAng = ", self.objInstAng
print "spiderInstAng = ", self.spiderInstAng
# drawing methods
def clear(self):
self.cnv.delete('all')
def _configureEvt(self, event = None):
"""Handle the <Configure> event.
"""
self._setSize()
self.clear()
self.draw()
def _drawRotCurrent(self):
"""Draw current rotator mount position on wrap gauge display"""
color = "black"
tag = "rotCurrent"
self.cnv.delete(tag)
if self.rotCurrent is None:
return
x, y = self.rotWrapGauge.angToXY(self.rotCurrent)
if None in (x, y):
return
RO.CanvasUtil.ctrCircle (self.cnv, x, y,
rad = FocalPlaneWdg.WrapItemRad,
width = 3,
outline = color,
tag = tag,
)
def _drawRotTarget(self):
"""Draw target rotator mount position on wrap gauge display"""
color = "black"
tag = "rotTarget"
self.cnv.delete(tag)
if self.rotTarget is None:
return
x, y = self.rotWrapGauge.angToXY(self.rotTarget)
if None in (x, y):
return
RO.CanvasUtil.ctrPlus (self.cnv, x, y,
rad = FocalPlaneWdg.WrapItemRad,
holeRad = 0,
width = 3,
fill = color,
tag = tag,
)
def _drawAxes(self):
"""Draw the focal plane x/y axes
(everything except the rotator wrap and points displayed on it).
"""
self.horizonAxis.draw()
self.userAxis.draw()
self.instAxis.draw()
def draw(self):
"""Redraw everything on the canvas.
"""
# print "draw called"
# self._printInfo()
self._drawAxes()
self.rotWrapGauge.draw()
self._drawRotCurrent()
self._drawRotTarget()
if __name__ == '__main__':
import random
import TUI.TUIModel
root = RO.Wdg.PythonTk()
# root = Tkinter.Tk()
kd = TUI.TUIModel.getModel(True).dispatcher
minAng = -350.0
maxAng = 350.0
def animFunc(ang1=0, ang2=0):
ang1 = ang1 + 45
if ang1 > 360:
ang1 = 45
ang2 = ang2 + 45
if ang2 > 360:
return
rotAng = float(random.randint(int(minAng), int(maxAng)))
dataDict = {
"ObjInstAng": (ang1, 0, 1),
"SpiderInstAng": (ang2, 0, 1),
"AxePos": (0, 0, rotAng),
"inst": ("SPICam",),
}
msgDict = {"cmdr":"me", "cmdID":11, "actor":"tcc", "msgType":":", "data":dataDict}
kd.dispatch(msgDict)
root.after(200, animFunc, ang1, ang2)
testFrame = FocalPlaneWdg (root)
testFrame.pack(fill = "both", expand = "yes")
Tkinter.Button(root, text="Demo", command=animFunc).pack(side="top")
# initial data
dataDict = {
"CoordSys": ("ICRS", None),
"RotLim": (-360, 360, 3, 0.3, 0.3),
"ObjInstAng": (0, 0, 1),
"SpiderInstAng": (0, 0, 1),
"AxePos": (0, 0, 45),
"inst": ("SPICam",),
"IImScale": (-3000, 3000),
}
msgDict = {"cmdr":"me", "cmdID":11, "actor":"tcc", "msgType":":", "data":dataDict}
kd.dispatch(msgDict)
root.mainloop()
| bsd-3-clause | 937,103,507,879,249,400 | 33.370809 | 110 | 0.571847 | false |
aio-libs/aiohttp | aiohttp/worker.py | 1 | 7750 | """Async gunicorn worker for aiohttp.web"""
import asyncio
import os
import re
import signal
import sys
from types import FrameType
from typing import Any, Awaitable, Callable, Optional, Union # noqa
from gunicorn.config import AccessLogFormat as GunicornAccessLogFormat
from gunicorn.workers import base
from aiohttp import web
from .helpers import set_result
from .web_app import Application
from .web_log import AccessLogger
try:
import ssl
SSLContext = ssl.SSLContext
except ImportError: # pragma: no cover
ssl = None # type: ignore[assignment]
SSLContext = object # type: ignore[misc,assignment]
__all__ = ("GunicornWebWorker", "GunicornUVLoopWebWorker", "GunicornTokioWebWorker")
class GunicornWebWorker(base.Worker): # type: ignore[misc,no-any-unimported]
DEFAULT_AIOHTTP_LOG_FORMAT = AccessLogger.LOG_FORMAT
DEFAULT_GUNICORN_LOG_FORMAT = GunicornAccessLogFormat.default
def __init__(self, *args: Any, **kw: Any) -> None: # pragma: no cover
super().__init__(*args, **kw)
self._task = None # type: Optional[asyncio.Task[None]]
self.exit_code = 0
self._notify_waiter = None # type: Optional[asyncio.Future[bool]]
def init_process(self) -> None:
# create new event_loop after fork
self.loop = asyncio.new_event_loop()
asyncio.set_event_loop(self.loop)
super().init_process()
def run(self) -> None:
self._task = self.loop.create_task(self._run())
try: # ignore all finalization problems
self.loop.run_until_complete(self._task)
except Exception:
self.log.exception("Exception in gunicorn worker")
self.loop.run_until_complete(self.loop.shutdown_asyncgens())
self.loop.close()
sys.exit(self.exit_code)
async def _run(self) -> None:
if isinstance(self.wsgi, Application):
app = self.wsgi
elif asyncio.iscoroutinefunction(self.wsgi):
app = await self.wsgi()
else:
raise RuntimeError(
"wsgi app should be either Application or "
"async function returning Application, got {}".format(self.wsgi)
)
access_log = self.log.access_log if self.cfg.accesslog else None
runner = web.AppRunner(
app,
logger=self.log,
keepalive_timeout=self.cfg.keepalive,
access_log=access_log,
access_log_format=self._get_valid_log_format(self.cfg.access_log_format),
)
await runner.setup()
ctx = self._create_ssl_context(self.cfg) if self.cfg.is_ssl else None
assert runner is not None
server = runner.server
assert server is not None
for sock in self.sockets:
site = web.SockSite(
runner,
sock,
ssl_context=ctx,
shutdown_timeout=self.cfg.graceful_timeout / 100 * 95,
)
await site.start()
# If our parent changed then we shut down.
pid = os.getpid()
try:
while self.alive: # type: ignore[has-type]
self.notify()
cnt = server.requests_count
if self.cfg.max_requests and cnt > self.cfg.max_requests:
self.alive = False
self.log.info("Max requests, shutting down: %s", self)
elif pid == os.getpid() and self.ppid != os.getppid():
self.alive = False
self.log.info("Parent changed, shutting down: %s", self)
else:
await self._wait_next_notify()
except BaseException:
pass
await runner.cleanup()
def _wait_next_notify(self) -> "asyncio.Future[bool]":
self._notify_waiter_done()
loop = self.loop
assert loop is not None
self._notify_waiter = waiter = loop.create_future()
self.loop.call_later(1.0, self._notify_waiter_done, waiter)
return waiter
def _notify_waiter_done(
self, waiter: Optional["asyncio.Future[bool]"] = None
) -> None:
if waiter is None:
waiter = self._notify_waiter
if waiter is not None:
set_result(waiter, True)
if waiter is self._notify_waiter:
self._notify_waiter = None
def init_signals(self) -> None:
# Set up signals through the event loop API.
self.loop.add_signal_handler(
signal.SIGQUIT, self.handle_quit, signal.SIGQUIT, None
)
self.loop.add_signal_handler(
signal.SIGTERM, self.handle_exit, signal.SIGTERM, None
)
self.loop.add_signal_handler(
signal.SIGINT, self.handle_quit, signal.SIGINT, None
)
self.loop.add_signal_handler(
signal.SIGWINCH, self.handle_winch, signal.SIGWINCH, None
)
self.loop.add_signal_handler(
signal.SIGUSR1, self.handle_usr1, signal.SIGUSR1, None
)
self.loop.add_signal_handler(
signal.SIGABRT, self.handle_abort, signal.SIGABRT, None
)
# Don't let SIGTERM and SIGUSR1 disturb active requests
# by interrupting system calls
signal.siginterrupt(signal.SIGTERM, False)
signal.siginterrupt(signal.SIGUSR1, False)
def handle_quit(self, sig: int, frame: FrameType) -> None:
self.alive = False
# worker_int callback
self.cfg.worker_int(self)
# wakeup closing process
self._notify_waiter_done()
def handle_abort(self, sig: int, frame: FrameType) -> None:
self.alive = False
self.exit_code = 1
self.cfg.worker_abort(self)
sys.exit(1)
@staticmethod
def _create_ssl_context(cfg: Any) -> "SSLContext":
"""Creates SSLContext instance for usage in asyncio.create_server.
See ssl.SSLSocket.__init__ for more details.
"""
if ssl is None: # pragma: no cover
raise RuntimeError("SSL is not supported.")
ctx = ssl.SSLContext(cfg.ssl_version)
ctx.load_cert_chain(cfg.certfile, cfg.keyfile)
ctx.verify_mode = cfg.cert_reqs
if cfg.ca_certs:
ctx.load_verify_locations(cfg.ca_certs)
if cfg.ciphers:
ctx.set_ciphers(cfg.ciphers)
return ctx
def _get_valid_log_format(self, source_format: str) -> str:
if source_format == self.DEFAULT_GUNICORN_LOG_FORMAT:
return self.DEFAULT_AIOHTTP_LOG_FORMAT
elif re.search(r"%\([^\)]+\)", source_format):
raise ValueError(
"Gunicorn's style options in form of `%(name)s` are not "
"supported for the log formatting. Please use aiohttp's "
"format specification to configure access log formatting: "
"http://docs.aiohttp.org/en/stable/logging.html"
"#format-specification"
)
else:
return source_format
class GunicornUVLoopWebWorker(GunicornWebWorker):
def init_process(self) -> None:
import uvloop
# Setup uvloop policy, so that every
# asyncio.get_event_loop() will create an instance
# of uvloop event loop.
asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
super().init_process()
class GunicornTokioWebWorker(GunicornWebWorker):
def init_process(self) -> None: # pragma: no cover
import tokio
# Setup tokio policy, so that every
# asyncio.get_event_loop() will create an instance
# of tokio event loop.
asyncio.set_event_loop_policy(tokio.EventLoopPolicy())
super().init_process()
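# Example gunicorn invocation for these worker classes (the module path
# "my_app:app" is illustrative):
# gunicorn my_app:app --worker-class aiohttp.GunicornWebWorker
# gunicorn my_app:app --worker-class aiohttp.GunicornUVLoopWebWorker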
| apache-2.0 | -3,391,215,136,565,814,000 | 31.291667 | 85 | 0.600129 | false |
geometalab/OSMTagFinder | OSMTagFinder/test/idpresettest.py | 1 | 4794 | # -*- coding: utf-8 -*-
'''
Created on 01.12.2014
@author: Simon Gwerder
'''
import json
import timeit
import requests
from ordered_set import OrderedSet
from utilities import utils
from utilities.retry import retry
class IDPreset:
name = None
terms = []
tags = []
def __init__(self):
self.name = None
self.terms = []
self.tags = []
class IDPresetsSetup:
idPresets = OrderedSet()
def getIDPresets(self):
return self.idPresets
def __init__(self, presetFilePath):
if presetFilePath is None: return
fileHandler = open(presetFilePath)
jsonData = json.load(fileHandler)
iterList = []
for item in jsonData:
iterList.append(item)
iterList.sort()
for item in iterList:
if item.count('/') > 1: continue
idPreset = IDPreset()
if 'name' in jsonData[item]:
idPreset.name = jsonData[item]['name']
else:
continue
if 'tags' in jsonData[item]:
for key in jsonData[item]['tags']:
if key != 'name': # string comparison; `is not` only checks identity
tag = key + '=' + jsonData[item]['tags'][key]
idPreset.tags.append(tag)
else:
continue
idPreset.terms.append(idPreset.name)
if 'terms' in jsonData[item]:
for term in jsonData[item]['terms']:
idPreset.terms.append(term)
self.idPresets.append(idPreset)
fileHandler.close()
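# Expected shape of the preset JSON (illustrative, following the iD editor
# preset format; the key and values below are made up):
# {"amenity/bar": {"name": "Bar", "tags": {"amenity": "bar"}, "terms": ["pub"]}}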
class TestRun:
tagFinderAPI = 'http://localhost:5000/api/search?q='
@retry(Exception, tries=3)
def apiCallTagfinder(self, searchTerm):
response = requests.get(self.tagFinderAPI + searchTerm)
#response = urllib.urlopen(self.tagFinderAPI + searchTerm)
if response.status_code < 400:
return response.json()
return None
def getTagDictFromCall(self, responseJson):
retDict = { }
for tfTag in responseJson:
prefLabel = tfTag['prefLabel']
if '=' not in prefLabel: # is a key
prefLabel = prefLabel + '=*'
retDict[prefLabel] = tfTag['searchMeta']
return retDict
def __init__(self, idPresetsSetup):
print 'IDEDITOR PRESET TESTS'
current = 1
testTotal = len(idPresetsSetup.getIDPresets())
nameTotal = len(idPresetsSetup.getIDPresets())
altTermTotal = -nameTotal # they are also contained in the terms list, for convenient search
testFound = 0
nameFound = 0
altTermFound = 0
for idPreset in idPresetsSetup.getIDPresets():
titleStr = '\n\nTest ' + str(current) + '/' + str(len(idPresetsSetup.getIDPresets())) + ' - Name: ' + idPreset.name
print titleStr
print '=' * 60
print 'Tags: ' + ", ".join(idPreset.tags)
found = False
for term in idPreset.terms:
responseJson = self.apiCallTagfinder(term)
if responseJson is None:
print 'Call failed!'
else:
foundList = self.getTagDictFromCall(responseJson)
interSectionSet = set(idPreset.tags).intersection(set(foundList.keys()))
if len(interSectionSet) == 0:
print '{0}{1:<20s}{2}'.format('Term: ', term, ' > none found')
else:
found = True
print '{0}{1:<20s}{2}{3}'.format('Term: ', term, ' > found: ', ', '.join(interSectionSet))
#for searchMeta in foundList.values():
# print searchMeta
if term is idPreset.name:
nameFound = nameFound + 1
else:
altTermFound = altTermFound + 1
altTermTotal = altTermTotal + 1
if found:
testFound = testFound + 1
current = current + 1
print '\n\n'
print '=' * 60
print '=' * 60
print 'Found test tags : ' + str(testFound) + '/' + str(testTotal)
print 'Found \"names\" : ' + str(nameFound) + '/' + str(nameTotal)
print 'Found \"terms\" : ' + str(altTermFound) + '/' + str(altTermTotal)
if __name__ == '__main__':
startTime = timeit.default_timer()
setup = IDPresetsSetup(utils.testDir() + 'blackboxtests.json')
#TagFinder needs to be running. Can also start TagFinder locally here.
TestRun(setup)
endTime = timeit.default_timer()
elapsed = endTime - startTime
print '\nTime elapsed running test: ' + str(elapsed / 60) + ' mins'
| mit | -5,014,648,183,321,253,000 | 29.341772 | 127 | 0.534209 | false |
mbourqui/django-publications-bootstrap | publications_bootstrap/migrations/0004_catalog_fk_publication.py | 1 | 1253 | # -*- coding: utf-8 -*-
# Generated by Django 1.11.1 on 2017-07-04 09:18
from __future__ import unicode_literals
from django.db import migrations, models
app_label = 'publications_bootstrap'
def forwards(apps, schema_editor):
Catalog = apps.get_model(app_label, "Catalog")
for catalog in Catalog.objects.all():
for publication in catalog.publication_set.all():
catalog.publications.add(publication)
def backwards(apps, schema_editor):
Catalog = apps.get_model(app_label, "Catalog")
for catalog in Catalog.objects.all():
for publication in catalog.publications.all():
# Copy the relation back into the old `catalogs` field; adding to
# `catalog_set` would only re-add rows to `publications` itself.
publication.catalogs.add(catalog)
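# Note: when this migration is unapplied, Django reverses the operations
# below, so the RemoveField is undone first and `backwards` runs while both
# the old `catalogs` field and the new `publications` field exist.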
class Migration(migrations.Migration):
dependencies = [
('publications_bootstrap', '0003_db_index'),
]
operations = [
migrations.AddField(
model_name='catalog',
name='publications',
field=models.ManyToManyField(blank=True, db_index=True, to='publications_bootstrap.Publication'),
),
migrations.RunPython(forwards, backwards),
migrations.RemoveField(
model_name='publication',
name='catalogs',
),
]
| mit | 7,605,762,743,376,821,000 | 28.139535 | 109 | 0.652035 | false |
undocume/undocume | home/migrations/0004_auto__add_field_service_city.py | 1 | 6463 | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Service.city'
db.add_column(u'home_service', 'city',
self.gf('django.db.models.fields.CharField')(max_length=50, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Service.city'
db.delete_column(u'home_service', 'city')
models = {
u'home.category': {
'Meta': {'ordering': "['name']", 'object_name': 'Category'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'image': ('django.db.models.fields.CharField', [], {'max_length': '200', 'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
},
u'home.categorytranslate': {
'Meta': {'object_name': 'CategoryTranslate'},
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['home.Category']"}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['home.Language']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'home.information': {
'Meta': {'ordering': "['name']", 'object_name': 'Information'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'informationtype': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['home.InformationType']"}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
},
u'home.informationtranslate': {
'Meta': {'object_name': 'InformationTranslate'},
'description': ('django.db.models.fields.CharField', [], {'max_length': '500'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'information': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['home.Information']"}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['home.Language']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'home.informationtype': {
'Meta': {'ordering': "['name']", 'object_name': 'InformationType'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
},
u'home.language': {
'Meta': {'ordering': "['name']", 'object_name': 'Language'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
},
u'home.service': {
'Meta': {'ordering': "['name']", 'object_name': 'Service'},
'Type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['home.TypeOrganization']"}),
'address': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'category': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['home.Category']"}),
'city': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'contact': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'contactemail': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'contactnumber': ('django.db.models.fields.CharField', [], {'max_length': '30', 'null': 'True', 'blank': 'True'}),
'created': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'fee': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '100'}),
'phone': ('django.db.models.fields.CharField', [], {'max_length': '50', 'null': 'True', 'blank': 'True'}),
'ss': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'state': ('django.db.models.fields.CharField', [], {'max_length': '2', 'null': 'True', 'blank': 'True'}),
'updated': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'blank': 'True'}),
'web': ('django.db.models.fields.CharField', [], {'max_length': '100', 'null': 'True', 'blank': 'True'}),
'zipcode': ('django.db.models.fields.CharField', [], {'max_length': '5', 'null': 'True', 'blank': 'True'})
},
u'home.servicetranslate': {
'Meta': {'object_name': 'ServiceTranslate'},
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['home.Language']"}),
'service': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['home.Service']"})
},
u'home.typeorganization': {
'Meta': {'ordering': "['name']", 'object_name': 'TypeOrganization'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '50'})
}
}
complete_apps = ['home']
| mit | -1,723,778,893,577,949,000 | 64.292929 | 126 | 0.540771 | false |
Pr0Ger/PyAPNs2 | test/test_credentials.py | 1 | 1110 | # This only tests the TokenCredentials test case, since the
# CertificateCredentials would be mocked out anyway.
# Namely:
# - timing out of the token
# - creating multiple tokens for different topics
import pytest
from freezegun import freeze_time
from apns2.credentials import TokenCredentials
TOPIC = 'com.example.first_app'
@pytest.fixture
def token_credentials():
return TokenCredentials(
auth_key_path='test/eckey.pem',
auth_key_id='1QBCDJ9RST',
team_id='3Z24IP123A',
token_lifetime=30, # seconds
)
def test_token_expiration(token_credentials):
with freeze_time('2012-01-14 12:00:00'):
header1 = token_credentials.get_authorization_header(TOPIC)
# 20 seconds later, before expiration, same JWT
with freeze_time('2012-01-14 12:00:20'):
header2 = token_credentials.get_authorization_header(TOPIC)
assert header1 == header2
# 35 seconds later, after expiration, new JWT
with freeze_time('2012-01-14 12:00:40'):
header3 = token_credentials.get_authorization_header(TOPIC)
assert header3 != header1
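def test_multiple_topics(token_credentials):
# Hedged smoke test for the "multiple tokens for different topics" point in
# the header comment: both topics should yield usable authorization headers.
# Whether the underlying JWT is shared across topics is an implementation
# detail of TokenCredentials that this sketch deliberately does not assume.
second_topic = 'com.example.second_app' # hypothetical topic for illustration
with freeze_time('2012-01-14 12:00:00'):
header1 = token_credentials.get_authorization_header(TOPIC)
header2 = token_credentials.get_authorization_header(second_topic)
assert header1 is not None
assert header2 is not None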
| mit | -5,559,139,220,243,753,000 | 29 | 67 | 0.703604 | false |
PlanTool/plantool | GUI/Environment.py | 1 | 14771 | #coding:utf-8
import re
import nltk
import gensim
import logging
import numpy as np
import mysql.connector
from gensim.models.word2vec import Word2Vec
class Environment:
def __init__(self, args):
#logging.debug( 'Initializing the Environment...' )
model_dir = args.model_dir
vec_model = args.vec_model
self.words_num = args.words_num
self.wordvec = args.wordvec
self.vec_length = args.vec_length
self.num_actions = args.num_actions
self.action_rate = args.action_rate
self.penal_radix = args.penal_radix
self.action_label = args.action_label
self.non_action_label = args.non_action_label
self.reward_assign = [float(r) for r in args.reward_assign.split()]
assert len(self.reward_assign) == 4
self.db = mysql.connector.connect(user=args.user,password=args.passwd,database=args.db)
self.cur = self.db.cursor()
self.actionDB = args.actionDB.split()
self.max_text_num = [int(i) for i in args.max_text_num.split()]
self.test_text_num = args.test_text_num
self.size = sum(self.max_text_num) - self.test_text_num*len(self.max_text_num)
#logging.debug( 'self.actionDB',self.actionDB )
#logging.debug( 'env.save_state.size',self.size )
self.test_text_name = []
for i in range(len(self.max_text_num)):
temp = []
while(len(temp) < self.test_text_num):
rand = np.random.randint(1, self.max_text_num[i])
if rand not in temp:
temp.append(rand)
#temp = np.random.randint(1, self.max_text_num[i], size=self.test_text_num)
self.test_text_name.append(list(temp))
#logging.debug( 'self.test_text_name',self.test_text_name )
assert len(self.test_text_name) == len(self.max_text_num) == len(self.actionDB)
self.model = Word2Vec.load_word2vec_format(model_dir + vec_model, binary=False)
self.saved_states = np.zeros((self.size,self.words_num,self.wordvec))
self.saved_text_vec = np.zeros((self.size,self.words_num,self.wordvec))
self.saved_text_length = [] #np.zeros(self.size,dtype=int)
self.text_length = 0
self.total_text = 0
self.current_text = 0
self.half_tags = 1
self.test_index = 0
def create_text_matrix(self, text_dir):
tags = []
word_vec = []
text_vec = np.zeros((self.words_num,self.wordvec))
raw_text = open(text_dir).read()
words = []
w_of_s = []
#words = re.findall(r'[\w\-\_]+', raw_text)
a = re.sub(r'\. ', '.\n', raw_text)
b = re.sub(r'\? ', '?\n', a)
c = re.sub(r'\! ', '!\n', b)
d = re.split(r'\n', c)
for e in d:
#print '\n',e
tokens = nltk.tokenize.word_tokenize(e)
#print tokens
w_of_s.append(tokens)
words.extend(tokens)
#assert 1==0
self.text_length = len(words)
self.saved_text_length.append(self.text_length)
for j in range(self.words_num):
if j < self.text_length:
w = words[j]
if w in self.model: #indexing an out-of-vocabulary word would raise KeyError
word_vec = self.model[w]
#concatenate the word vectors and tags
word_vec = np.concatenate((word_vec, np.zeros(self.wordvec-self.vec_length)))
else:
#if a word is not in the word2vec model, make it zeros
word_vec = np.zeros(self.vec_length)
word_vec = np.concatenate((word_vec, np.zeros(self.wordvec-self.vec_length)))
else:
#a text shorter than words_num will be padded with zeros
word_vec = np.zeros(self.wordvec)
text_vec[j] = word_vec
word_vec = []
return words, d, w_of_s, text_vec
def _getTaggedtexts(self):
logging.debug( 'Getting tagged texts in Environment...' )
get_data = "select * from " + self.actionDB[self.text_num[0]] + " where text_num=" + str(self.text_num[1]) + " order by sent_num"
self.cur.execute(get_data)
result = self.cur.fetchall()
assert len(result) > 0
tags = []
words = []
text_vec = np.zeros((self.words_num,self.wordvec))
word_vec = []
for i in range(len(result)):
#get sentences from database
sent_lower = result[i][2][0].lower() + result[i][2][1:]
words_of_sent = re.split(r' ',sent_lower)
temp_tags_of_sent = re.split(r' ',result[i][3])
#get the tags from database
tags_of_sent = []
for t in temp_tags_of_sent:
if t == '1':
tags_of_sent.append(self.action_label)
else:
tags_of_sent.append(self.non_action_label)
words.extend(words_of_sent)
tags.extend(tags_of_sent)
self.text_length = len(words)
self.saved_text_length.append(self.text_length)
for j in range(self.words_num):
if j < self.text_length:
w = words[j]
if w in self.model: #indexing an out-of-vocabulary word would raise KeyError
word_vec = self.model[w]
#concatenate the word vectors and tags
if self.half_tags:
word_vec = np.concatenate((word_vec,[tags[j] for ind in xrange(self.wordvec-self.vec_length)]))
else:
word_vec = np.concatenate((word_vec,[tags[j]]))
else:
#if a word is not in the word2vec model, make it zeros
if self.half_tags:
word_vec = np.zeros(self.vec_length)
word_vec = np.concatenate((word_vec,[self.non_action_label for ind in xrange(self.wordvec-self.vec_length)]))
else:
word_vec = np.zeros(self.wordvec-1)
word_vec = np.concatenate((word_vec,[self.non_action_label]))
else:
#a text shorter than words_num will be padded with zeros
if self.half_tags:
word_vec = np.concatenate((np.zeros(self.vec_length),[self.non_action_label for ind in xrange(self.wordvec-self.vec_length)]))
else:
word_vec = np.concatenate((np.zeros(self.wordvec-1),[self.non_action_label]))
#word_vec = np.ones(self.wordvec)
text_vec[j] = word_vec
word_vec = []
return text_vec
def train_init(self):
self.text_num = [0, 0] #select the first text of the first table in database
#get the word vectos of tagged text
self.text_vec = self._getTaggedtexts()
self.state = self.text_vec.copy()#!!!!!NB!!!NB!!!!NB!!!!
if self.half_tags:
self.state[:,self.vec_length:] = 0
else:
self.state[:,-1] = 0
def test_one_init(self, text_dir):
print '\ntest_one_init.....\n'
self.words, self.sents, self.w_of_s, self.text_vec = self.create_text_matrix(text_dir)
self.state = self.text_vec.copy()#!!!!!NB!!!NB!!!!NB!!!!
self.state[:,self.vec_length:] = 0
def test_init(self):
self.text_num[0] = 0
self.text_num[1] = self.test_text_name[0][0]
self.test_index += 1
self.text_vec = self._getTaggedtexts()
self.state = self.text_vec.copy()#!!!!!NB!!!NB!!!!NB!!!!
if self.half_tags:
self.state[:,self.vec_length:] = 0
else:
self.state[:,-1] = 0
def restart_test(self):
if self.test_index < self.test_text_num -1:
self.text_num[1] = self.test_text_name[self.text_num[0]][self.test_index]
self.test_index += 1
else:
if self.text_num[0] < len(self.test_text_name) -1:
self.text_num[0] += 1
self.text_num[1] = self.test_text_name[self.text_num[0]][0]
self.test_index = 1
else:
self.text_num[0] = 0
self.text_num[1] = self.test_text_name[0][0]
self.test_index = 1
self.text_vec = self._getTaggedtexts()
self.state = self.text_vec.copy()#!!!!!NB!!!NB!!!!NB!!!!
if self.half_tags:
self.state[:,self.vec_length:] = 0
else:
self.state[:,-1] = 0
def restart(self):
self.saved_states[self.current_text] = self.state
self.saved_text_vec[self.current_text] = self.text_vec
self.total_text = max(self.total_text,self.current_text + 1)
self.current_text = (self.current_text + 1)%self.size
if self.text_num[1] < self.max_text_num[self.text_num[0]]-self.test_text_num-1:
self.text_num[1] += 1
else:
#choose the first text in the next table
if self.text_num[0] < len(self.actionDB)-1:
self.text_num[0] += 1
self.text_num[1] = 0
else:
self.text_num = [0, 0] #return to the initial text
while(self.text_num[1] in self.test_text_name[self.text_num[0]]):
if self.text_num[1] < self.max_text_num[self.text_num[0]]-self.test_text_num-1:
self.text_num[1] += 1
else:
#choose the first text in the next table
if self.text_num[0] < len(self.actionDB)-1:
self.text_num[0] += 1
self.text_num[1] = 0
else:
self.text_num = [0, 0] #return to the initial text
self.text_vec = self._getTaggedtexts()
self.state = self.text_vec.copy()#!!!!!NB!!!NB!!!!NB!!!!
if self.half_tags:
self.state[:,self.vec_length:] = 0
else:
self.state[:,-1] = 0
def act(self, action):
# Performs action and returns reward
action = int(action)
        #odd num refers to a tagging action, even num to a non-action
if action%2 == 1:
if self.half_tags:
self.state[action/2,self.vec_length:] = self.action_label
else:
self.state[action/2,-1] = self.action_label
#if action/2 < len(self.words):
# print '-----tag an action %s-----'%self.words[action/2]
else:
if self.half_tags:
self.state[action/2,self.vec_length:] = self.non_action_label
else:
self.state[action/2,-1] = self.non_action_label
        t_a_count = 0 #number of tagged actions
for t in self.state[:,-1]:
if t == self.action_label:
t_a_count += 1
t_a_rate = float(t_a_count)/self.words_num
if self.text_vec[action/2,-1] == self.state[action/2,-1]:
if self.text_vec[action/2,-1] == self.action_label:
reward = self.reward_assign[0]
else:
reward = self.reward_assign[1]
if t_a_rate <= self.action_rate:
reward += self.penal_radix*t_a_rate*t_a_rate #2 1 -1 -2 5*0.15*0.15
else:
reward -= self.penal_radix*t_a_rate*t_a_rate
else:
if self.text_vec[action/2,-1] == self.non_action_label:
reward = self.reward_assign[2]
else:
reward = self.reward_assign[3]
reward -= self.penal_radix*t_a_rate*t_a_rate
return reward
def getState(self):
# Gets current text state
return self.state
def isTerminal(self):
# Returns if tag_actions is done
#if all the words of a text have been tagged, then terminate
if 0 in self.state[:,-1]:
return False
else:
return True
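# Minimal self-test: parse the default arguments, build an Environment and
# print the initial state matrix for one sample text.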
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("--num_actions", type=int, default=10, help="Total actions of this task.")
parser.add_argument("--words_num", type=int, default=5, help="")
parser.add_argument("--wordvec", type=int, default=22, help="")
parser.add_argument("--vec_length", type=int, default=20, help="Word vector dimension.")
parser.add_argument("--channel", type=int, default=1, help="")
parser.add_argument("--batch_size", type=int, default=32, help="Batch size for neural network.")
parser.add_argument("--loops", type=int, default=10000, help="Number of loops in testing.")
parser.add_argument("--model_dir", default="/home/fengwf/Documents/", help="The directory of word vector model.")
parser.add_argument("--vec_model", default='mymodel5-5-20', help="Word vector model name.")
parser.add_argument("--actionDB", default='tag_actions tag_actions1 tag_actions2 tag_actions4', help="Tables' names in database test.")
parser.add_argument("--max_text_num", default='35 20 33 111', help="Max text num of database tables.")
parser.add_argument("--reward_assign", default='2.0 1.0 -1.0 -2.0', help="How the assign the rewards.")
parser.add_argument("--action_rate", type=float, default=0.15, help="Average actions percentage in a text.")
parser.add_argument("--action_label", type=int, default=2, help="An integer refer to the label of actions.")
parser.add_argument("--non_action_label", type=int, default=1, help="An integer refer to the label of non-actions.")
parser.add_argument("--test_text_num", type=int, default=8, help="How many testing steps after each epoch.")
parser.add_argument("--penal_radix", type=float, default=5.0, help="Penalty radix according to action rate.")
parser.add_argument("--user", default='fengwf', help="Mysql account, user name.")
parser.add_argument("--passwd", default='123', help="Mysql password.")
parser.add_argument("--db", default='test', help="Mysql database name.")
args = parser.parse_args()
env = Environment(args)
env.test_one_init('./test_inputs/1.txt')
print env.state
#ws, tm = env.create_text_matrix('./test_inputs/1.txt')
#for w in ws:
# print w
#print tm
'''
for i in range(200):
env.restart()
env.test_init()
for j in range(40):
env.restart_test()
'''
| gpl-2.0 | -2,504,602,556,269,840,000 | 41.190058 | 147 | 0.534899 | false |
dchad/malware-detection | vs/unpack.py | 1 | 37916 | ##############################################################
# Python script to attempt automatic unpacking/decrypting of #
# malware samples using WinAppDbg. #
# #
# unpack.py v2016.01.25 #
# http://malwaremusings.com/scripts/unpack.py #
##############################################################
import sys
import traceback
import winappdbg
import time
import struct
import ctypes
# Log file which we log info to
logfile = None
class MyEventHandler(winappdbg.EventHandler):
###
# A. Declaring variables
###
# A.1 used to keep track of allocated executable memory
allocedmem = {}
# A.2 used to indicate that we've found the entry point
entrypt = 0x00000000
#
# variables used to find and disassemble unpacking loop
#
# A.3 used to indicate that we're single stepping
tracing = -1
# A.4 remember the last two eip values
lasteip = [0x00000000,0x00000000]
# A.5 lowest eip address we see
lowesteip = 0xffffffff
# A.6 highest eip address we see
highesteip = 0x00000000
# A.7 list of addresses which we've disassembled
disasmd = []
# A.8 keeps track of addresses and instructions
# that write to the allocated memory block(s)
writeaddrs = {}
#
# variables used to keep track of created processes
#
# A.9 keeps track of created processes to map
# hProcess from WriteProcessMemory() back to
# process name
createdprocesses = {}
# A.10 keeps track of processes that were created
# with the CREATE_SUSPENDED flag set
createsuspended = {}
#
# variables used for logging
#
# A.11 used to keep a log of events
eventlog = []
###
# B. Class methods (functions)
###
### B.1
# get_funcargs(event)
# query winappdbg to get the function arguments
#
# return a tuple consisting of the return address
# and a sub-tuple of function arguments
###
def get_funcargs(self,event):
h = event.hook
t = event.get_thread()
tid = event.get_tid()
return (t.get_pc(),h.get_params(tid))
### B.2
# guarded_read(d,t,addr,size)
# read memory after checking for, and if necessary,
# disabling memory breakpoints
#
# returns a string of data
###
def guarded_read(self,d,t,addr,size):
# keep track of breakpoints that we disabled
# so that we can enable them again after we've
# finished
reenablebps = []
# initialise the variable to hold the read
# memory data
data = ""
# check that the requested size is sane
if (size > 0):
p = t.get_process()
# check to see if the requested address falls within
# any of the existing memory breakpoints by checking
# if either the requested start address or end address
# is covered by any breakpoint
mem_bps = d.get_all_page_breakpoints()
for (pid,pgbp) in mem_bps:
(startaddr,endaddr) = pgbp.get_span()
if (pid == p.get_pid()) and (pgbp.is_here(addr) or pgbp.is_here(addr + size - 1)):
log("[D] Memory read in guarded memory. Disabling breakpoint: %s" % pgbp)
pgbp.disable(p,t)
reenablebps.append(pgbp)
# read the memory
data = p.read(addr,size)
# enable all of the breakpoints that we disabled
if (len(reenablebps) > 0):
for pgbp in reenablebps:
log("[D] Re-enabling breakpoint: %s" % pgbp)
pgbp.enable(p,t)
# return the read memory as a string
return data
###
# C. API Hooks
###
### C.1
# apiHooks: winappdbg defined hash of API calls to hook
#
# Each entry is indexed by library name and is an array of
# tuples consisting of API call name and number of args
###
apiHooks = {
"kernel32.dll":[
("VirtualAlloc",4),
("VirtualAllocEx",5),
("IsDebuggerPresent",0),
("CreateProcessA",10),
("CreateProcessW",10),
("WriteProcessMemory",5)
],
"advapi32.dll":[
("CryptDecrypt",6)
],
"wininet.dll":[
("InternetOpenA",5),
("InternetOpenW",5)
],
"ntdll.dll":[
("RtlDecompressBuffer",6)
],
"secur32.dll":[
("EncryptMessage",4),
("DecryptMessage",4)
]
}
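    # To hook another API, add a ("Name", paramCount) tuple under its DLL and
    # define matching pre_/post_ callbacks; winappdbg wires them up by name.
    # A minimal sketch (hypothetical, not used by this script):
    #
    #   "user32.dll":[
    #       ("MessageBoxA",4)
    #   ]
    #
    # def post_MessageBoxA(self,event,retval):
    #     (ra,(hWnd,lpText,lpCaption,uType)) = self.get_funcargs(event)
    #     log("[*] 0x%x: MessageBoxA(...) = %d" % (ra,retval))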
###
# API hook callback functions
#
# These are defined by winappdbg and consist of functions
# named pre_<apifuncname> and post_<apifuncname> which are
# called on entry to, and on exit from, the given API
# function (<apifuncname>), respectively.
###
# C.2
# VirtualAlloc() hook(s)
#
def post_VirtualAllocEx(self,event,retval):
try:
# C.2.1 Get the return address and arguments
(ra,(hProcess,lpAddress,dwSize,flAllocationType,flProtect)) = self.get_funcargs(event)
# Get an instance to the debugger which triggered the event
# and also the process id and thread id of the process to which
# the event pertains
d = event.debug
pid = event.get_pid()
tid = event.get_tid()
# Log the fact that we've seen a VirtualAllocEx() call
log("[*] <%d:%d> 0x%x: VirtualAllocEx(0x%x,0x%x,0x%x (%d),0x%x,0x%03x) = 0x%x" % (pid,tid,ra,hProcess,lpAddress,dwSize,dwSize,flAllocationType,flProtect,retval))
# C.2.2 All the memory protection bits which include EXECUTE
# permission use bits 4 - 7, which is nicely matched
# by masking (ANDing) it with 0xf0 and checking for a
# non-zero result
if (flProtect & 0x0f0):
log("[-] Request for EXECUTEable memory")
# We can only set page guards on our own process
# otherwise page guard exception will occur in
# system code when this process attempts to write
# to the allocated memory.
# This causes ZwWriteVirtualMemory() to fail
# We can, however, set a page guard on it when
# this process creates the remote thread, as it
# will have presumably stopped writing to the
# other process' memory at that point.
# C.2.2.1 Check that this VirtualAllocEx() call is for
# the current process (hProcess == -1), and if
# so, ask the winappdbg debugger instance to
# create a page guard on the memory region.
# Also add information about the allocated region
# to our allocedmem hash, indexed by pid and
# base address.
if (hProcess == 0xffffffff):
d.watch_buffer(pid,retval,dwSize - 1,self.guard_page_exemem)
self.allocedmem[(pid,retval)] = dwSize
# C.2.3 Create a JSON event log entry
self.eventlog.append({
"time": time.time(),
"name": "VirtualAllocEx",
"type": "Win32 API",
"pid": pid,
"tid": tid,
"addr": ra,
"args": {
"hProcess": hProcess,
"lpAddress": lpAddress,
"dwSize": dwSize,
"flAllocationType": flAllocationType,
"flProtect": flProtect
},
"ret": retval
})
except:
traceback.print_exc()
raise
def post_VirtualAlloc(self,event,retval):
try:
# C.2.4 Get the return address and arguments
(ra,(lpAddress,dwSize,flAllocationType,flProtect)) = self.get_funcargs(event)
# Get an instance to the debugger which triggered the event
# and also the process id and thread id of the process to which
# the event pertains
d = event.debug
pid = event.get_pid()
tid = event.get_tid()
# Log the fact that we've seen a VirtualAlloc() call
# This is so that we get the address in the debuggee code from which it was called
# where as if we just let the VirtualAllocEx() hook log it, the address from
# which it was called is inside the VirtualAlloc() code in kernel32.dll
log("[*] <%d:%d> 0x%x: VirtualAlloc(0x%x,0x%x (%d),0x%x,0x%03x) = 0x%x" % (pid,tid,ra,lpAddress,dwSize,dwSize,flAllocationType,flProtect,retval))
# C.2.5 Create a JSON event log entry
self.eventlog.append({
"time": time.time(),
"name": "VirtualAlloc",
"type": "Win32 API",
"pid": pid,
"tid": tid,
"addr": ra,
"args": {
"lpAddress": lpAddress,
"dwSize": dwSize,
"flAllocationType": flAllocationType,
"flProtect": flProtect
},
"ret": retval
})
except:
traceback.print_exc()
raise
# C.3
# CryptDecrypt() hook(s)
#
def pre_CryptDecrypt(self,event,*args):
# C.3.1 Get the return address and arguments
(ra,hKey,hHash,Final,dwFlags,pbData,pdwDataLen) = (args[0],args[1],args[2],args[3],args[4],args[5],args[6])
# C.3.2 Get a Process object and dereference the pdwDataLen argument to read the buffer size
p = event.get_process()
buffsize = p.read_uint(pdwDataLen)
# C.3.3 Save a copy of the encrypted data
filename = "%s.memblk0x%x.enc" % (sys.argv[1],pbData)
log("[-] Dumping %d bytes of encrypted memory at 0x%x to %s" % (buffsize,pbData,filename))
databuff = open(filename,"wb")
databuff.write(p.read(pbData,buffsize));
databuff.close()
def post_CryptDecrypt(self,event,retval):
# C.3.4 Get the return address and arguments
(ra,(hKey,hHash,Final,dwFlags,pbData,pdwDataLen)) = self.get_funcargs(event)
# Get a Process object, and dereference the pdwDataLen argument
p = event.get_process()
buffsize = p.read_uint(pdwDataLen)
pid = event.get_pid()
tid = event.get_tid()
log("[*] <%d:%d> 0x%x: CryptDecrypt(0x%x,0x%x,0x%x,0x%x,0x%x,0x%x (%d)) = %d" % (pid,tid,ra,hKey,hHash,Final,dwFlags,pbData,buffsize,buffsize,retval))
# C.3.5 Save a copy of the decrypted data
filename_enc = "%s.memblk0x%x.enc" % (sys.argv[1],pbData)
filename = "%s.memblk0x%x.dec" % (sys.argv[1],pbData)
log("[-] Dumping %d bytes of decrypted memory at 0x%x to %s" % (buffsize,pbData,filename))
databuff = open(filename,"wb")
databuff.write(p.read(pbData,buffsize))
databuff.close()
# C.3.6 Create a JSON event log entry
pid = event.get_pid()
tid = event.get_tid()
self.eventlog.append({
"time": time.time(),
"name": "CryptDecrypt",
"type": "Win32 API",
"pid": pid,
"tid": tid,
"addr": ra,
"args": {
"hKey": hKey,
"hHash": hHash,
"Final": Final,
"dwFlags": dwFlags,
"pbData": pdwDataLen
},
"ret": retval,
"info": {
"filename_enc": filename_enc,
"filename_dec": filename
}
})
# C.4
# RtlDecompressBuffer() hook(s)
#
def pre_RtlDecompressBuffer(self,event,*args):
try:
# C.4.1 Get the return address and arguments
(ra,CompressionFormat,UncompressedBuffer,UncompressedBufferSize,CompressedBuffer,CompressedBufferSize,FinalUncompressedSize) = (args[0],args[1],args[2],args[3],args[4],args[5],args[6])
p = event.get_process()
# C.4.2 Save a copy of the compressed data
filename = "%s.memblk0x%x.comp" % (sys.argv[1],CompressedBuffer)
log("[-] Dumping %d bytes of compressed memory at 0x%x to %s" % (CompressedBufferSize,CompressedBuffer,filename))
databuff = open(filename,"wb")
databuff.write(p.read(CompressedBuffer,CompressedBufferSize));
databuff.close()
except:
traceback.print_exc()
raise
def post_RtlDecompressBuffer(self,event,retval):
try:
# C.4.3 Get the return address and arguments
(ra,(CompressionFormat,UncompressedBuffer,UncompressedBufferSize,CompressedBuffer,CompressedBufferSize,FinalUncompressedSize)) = self.get_funcargs(event)
pid = event.get_pid()
tid = event.get_tid()
log("[*] <%d:%d> 0x%x: RtlDecompressBuffer(0x%x,0x%x,0x%x,0x%x,0x%x,0x%x): %d" % (pid,tid,ra,CompressionFormat,UncompressedBuffer,UncompressedBufferSize,CompressedBuffer,CompressedBufferSize,FinalUncompressedSize,retval))
# Get a Process object, and dereference the FinalUncompressedSize argument
p = event.get_process()
buffsize = p.read_uint(FinalUncompressedSize)
# C.4.4 save a copy of the decompressed data
filename_comp = "%s.memblk0x%x.comp" % (sys.argv[1],CompressedBuffer)
filename = "%s.memblk0x%x.decomp" % (sys.argv[1],UncompressedBuffer)
log("[-] Dumping %d bytes of decompressed memory at 0x%x to %s" % (buffsize,UncompressedBuffer,filename))
databuff = open(filename,"wb")
databuff.write(p.read(UncompressedBuffer,buffsize))
databuff.close()
# C.4.5 Create a JSON event log entry
self.eventlog.append({
"time": time.time(),
"name": "RtlDecompressBuffer",
"type": "Win32 API",
"pid": pid,
"tid": tid,
"addr": ra,
"args": {
"CompressionFormat": CompressionFormat,
"UncompressedBuffer": UncompressedBuffer,
"UncompressedBufferSize": UncompressedBufferSize,
"CompressedBuffer": CompressedBuffer,
"CompressedBufferSize": CompressedBufferSize,
"FinalUncompressedSize": FinalUncompressedSize
},
"ret": retval,
"info": {
"filename_comp": filename_comp,
"filename_decomp": filename
}
})
except:
traceback.print_exc()
raise
# C.5
# CreateProcess() hook(s)
#
def post_CreateProcess(self,event,retval,fUnicode):
try:
# C.5.1 Get the return address and arguments
(ra,(lpApplicationName,lpCommandLine,lpProcessAttributes,lpThreadAttributes,bInheritHandles,dwCreationFlags,lpEnvironment,lpCurrentDirectory,lpStartupInfo,lpProcessInformation)) = self.get_funcargs(event)
p = event.get_process()
t = event.get_thread()
pid = event.get_pid()
tid = event.get_tid()
# C.5.2 Dereference arguments
# Use the Process object to dereference the lpApplicationName and lpCommandLine arguments
# as either ASCII or WCHAR depending on the fUnicode argument
# (and hence whether we were called from post_CreateProcessA() or post_CreateProcessW() respectively
szApplicationName = p.peek_string(lpApplicationName,fUnicode)
szCommandLine = p.peek_string(lpCommandLine,fUnicode)
# If the lpProcessInformation argument is a valid pointer...
if (lpProcessInformation):
# ... dereference it to get the ProcessInformation structure
d = event.debug
ProcessInformation = self.guarded_read(d,t,lpProcessInformation,16)
# Extract the various fields from the ProcessInformation structure
hProcess = struct.unpack("<L",ProcessInformation[0:4])[0]
hThread = struct.unpack("<L",ProcessInformation[4:8])[0]
dwProcessId = struct.unpack("<L",ProcessInformation[8:12])[0]
dwThreadId = struct.unpack("<L",ProcessInformation[12:16])[0]
            else:
                log("[E] lpProcessInformation is null")
                # set defaults so the logging and bookkeeping below
                # don't raise a NameError when the pointer was null
                hProcess = hThread = dwProcessId = dwThreadId = 0
log("[*] <%d:%d> 0x%x: CreateProcess(\"%s\",\"%s\",0x%x): %d (0x%x, 0x%x, <%d:%d>)" % (pid,tid,ra,szApplicationName,szCommandLine,dwCreationFlags,retval,hProcess,hThread,dwProcessId,dwThreadId))
# C.5.3 Check if the process is being created in a suspended state (CREATE_SUSPENDED flag)...
if (dwCreationFlags & 0x4):
# ... hook the ResumeThread() API call
# so that we are notified when it is resumed
d = event.debug
stat = d.hook_function(pid,"ResumeThread",preCB = self.hook_createsuspendedresume,paramCount = 1)
self.createsuspended[(pid,hThread)] = dwProcessId
log("[-] CREATE_SUSPENDED. Hooking ResumeThread() (%d)" % stat)
# C.5.4 Keep track of processes that were created, so we know which
# process any WriteProcessMemory() calls are writing to
self.createdprocesses[hProcess] = {
"time": time.time(),
"ppid": pid,
"ptid": tid,
"paddr": ra,
"ApplicationName":szApplicationName,
"CommandLine": szCommandLine,
"CreationFlags": dwCreationFlags,
"hProcess": hProcess,
"hThread": hThread,
"ProcessId": dwProcessId,
"ThreadId": dwThreadId
}
# C.5.5 Create a JSON event log entry
self.eventlog.append({
"time": time.time(),
"name": "CreateProcess",
"type": "Win32 API",
"pid": pid,
"tid": tid,
"addr": ra,
"args": {
"ApplicationName":szApplicationName,
"CommandLine": szCommandLine,
"CreationFlags": dwCreationFlags,
"hProcess": hProcess,
"hThread": hThread,
"ProcessId": dwProcessId,
"ThreadId": dwThreadId
},
"info": {
"fUnicode":fUnicode
},
"ret": retval
})
except:
traceback.print_exc()
raise
# C.5.6 post_CreateProcessA() and post_CreateProcessW()
# Actual hook call-back function called by WinAppDbg
# To save duplicating code between this and post_CreateProcessW()
# both of them call post_CreateProcess() with a parameter, fUnicode,
# which specifies whether the strings are ASCII (CreateProcessA())
# or WCHAR (CreateProcessW())
def post_CreateProcessA(self,event,retval):
self.post_CreateProcess(event,retval,False)
def post_CreateProcessW(self,event,retval):
self.post_CreateProcess(event,retval,True)
    # hook_createsuspendedresume() is a call-back function called when
    # ResumeThread() is called by a process which has created a suspended
    # process
def hook_createsuspendedresume(self,event,*args):
# C.5.7 Get the return address and arguments
(ra,(hThread,)) = self.get_funcargs(event)
pid = event.get_pid()
tid = event.get_tid()
log("[*] <%d:%d> 0x%x: ResumeThread(0x%x)" % (pid,tid,ra,hThread))
# C.5.8 Find the process id of the resumed process
if ((pid,hThread) in self.createsuspended):
pidresumed = self.createsuspended[(pid,hThread)]
log("[-] New suspended process (pid %d) resumed" % pidresumed)
# C.6
# WriteProcessMemory() hook(s)
#
def post_WriteProcessMemory(self,event,retval):
# C.6.1 Get the return address and arguments
try:
(ra,(hProcess,lpBaseAddress,lpBuffer,nSize,lpNumberOfBytesWritten)) = self.get_funcargs(event)
pid = event.get_pid()
tid = event.get_tid()
log("[*] <%d:%d> 0x%x: WriteProcessMemory(0x%x,0x%x,0x%x,0x%x,0x%x): %d" % (pid,tid,ra,hProcess,lpBaseAddress,lpBuffer,nSize,lpNumberOfBytesWritten,retval))
d = event.debug
t = event.get_thread()
# C.6.2 Dereference lpNumberOfBytesWritten to get the number of bytes written to the target process'
# address space
if (lpNumberOfBytesWritten):
NumberOfBytesWritten = struct.unpack("<L",self.guarded_read(d,t,lpNumberOfBytesWritten,4))[0]
else:
NumberOfBytesWritten = None
# C.6.3 Get process information that was saved by CreateProcess() hook
if (hProcess in self.createdprocesses):
ProcessId = self.createdprocesses[hProcess]["ProcessId"]
ApplicationName = self.createdprocesses[hProcess]["ApplicationName"]
CommandLine = self.createdprocesses[hProcess]["CommandLine"]
else:
log("[W] hProcess not in createdprocesses[]")
ProcessId = None
ApplicationName = None
CommandLine = None
d = event.debug
t = event.get_thread()
# C.6.4 Save a copy of the written memory
pid = event.get_pid()
tid = event.get_tid()
filename = "%s.memblk0x%x-%d.wpm" % (sys.argv[1],lpBaseAddress,ProcessId)
log("[-] Dumping %d bytes of memory at %d:0x%x written to %d:0x%x to %s" % (nSize,pid,lpBuffer,ProcessId,lpBaseAddress,filename))
databuff = open(filename,"wb")
databuff.write(self.guarded_read(d,t,lpBuffer,nSize))
databuff.close()
# C.6.5 Create a JSON event log entry
self.eventlog.append({
"time": time.time(),
"name": "WriteProcessMemory",
"type": "Win32 API",
"pid": pid,
"tid": tid,
"addr": ra,
"args": {
"hProcess": hProcess,
"lpBaseAddress": lpBaseAddress,
"lpBuffer": lpBuffer,
"nSize": nSize,
"lpNumberOfBytesWritten": lpNumberOfBytesWritten,
"NumberOfBytesWritten": NumberOfBytesWritten
},
"ret": retval,
"info": {
"filename": filename,
"targetprocesspid": ProcessId,
"targetprocessname": ApplicationName,
"targetprocesscmdline": CommandLine
}
})
except:
traceback.print_exc()
raise
# C.7
# IsDebuggerPresent() hook(s)
# (mainly added so that AutoIt compiled scripts would run, but also useful
# as an anti-anti-malware technique)
#
def post_IsDebuggerPresent(self,event,retval):
# C.7.1 Get the return address and arguments
(ra,noargs) = self.get_funcargs(event)
pid = event.get_pid()
tid = event.get_tid()
log("[*] <%d:%d> 0x%x: IsDebuggerPresent(): 0x%x" % (pid,tid,ra,retval))
log("[-] Returning 0")
        # C.7.2 Change the 'eax' register (return value) to '0' (no debugger present)
        # just before we continue running the calling thread
t = event.get_thread()
t.set_register("Eax",0x0)
# C.7.3 Create a JSON event log entry
self.eventlog.append({
"time": time.time(),
"name": "IsDebuggerPresent",
"type": "Win32 API",
"pid": pid,
"tid": tid,
"addr": ra,
"args": {},
"ret": retval,
"info": {}
})
# C.8
# InternetOpen() hook(s)
#
def post_InternetOpen(self,event,retval,fUnicode):
# C.8.1 Get the return address and arguments
(ra,(lpszAgent,dwAccessType,lpszProxyName,lpszProxyBypass,dwFlags)) = self.get_funcargs(event)
pid = event.get_pid()
tid = event.get_tid()
# C.8.2 Dereference arguments
p = event.get_process()
szAgent = p.peek_string(lpszAgent,fUnicode)
szProxyName = p.peek_string(lpszProxyName,fUnicode)
szProxyBypass = p.peek_string(lpszProxyBypass,fUnicode)
log("[*] <%d:%d> 0x%x: InternetOpen(\"%s\",0x%x,\"%s\",\"%s\",0x%x) = 0x%x" % (pid,tid,ra,szAgent,dwAccessType,szProxyName,szProxyBypass,dwFlags,retval))
# C.8.3 Create a JSON event log entry
self.eventlog.append({
"time": time.time(),
"name": "InternetOpen",
"type": "Win32 API",
"pid": pid,
"tid": tid,
"addr": ra,
"args": {},
"ret": retval,
"info": {}
})
def post_InternetOpenA(self,event,retval):
self.post_InternetOpen(event,retval,False)
def post_InternetOpenW(self,event,retval):
self.post_InternetOpen(event,retval,True)
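    # C.9
    # EncryptMessage() and DecryptMessage() hook(s)
    # (dump the SChannel data buffers so SSL/TLS traffic can be inspected
    # in plaintext before encryption and after decryption)
    #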
def pre_EncryptMessage(self,event,*args):
        # C.9.1 Get the return address and arguments
try:
(ra,phContext,fQOP,pMessage,MessageSeqNo) = (args[0],args[1],args[2],args[3],args[4])
pid = event.get_pid()
tid = event.get_tid()
# Right -- this is going to get annoying
# pMessage is a pointer to a SecBufferDesc structure
# which describes an array of SecBuffer structures
p = event.get_process()
l = p.get_label_at_address(ra)
# really ought to use a ctypes struct for this!
ulVersion = p.peek_uint(pMessage)
cBuffers = p.peek_uint(pMessage + 4)
pBuffers = p.peek_uint(pMessage + 8)
log("[*] <%d:%d> %s 0x%x: EncryptMessage(...)" % (pid,tid,l,ra))
log("[D] ulVersion: %d" % ulVersion)
log("[D] cBuffers: %d" % cBuffers)
log("[D] pBuffers: 0x%x" % pBuffers)
# dump buffer list
for i in range(0,cBuffers):
cbBuffer = p.peek_uint(pBuffers + (i * 12) + 0)
BufferType = p.peek_uint(pBuffers + (i * 12) + 4)
pvBuffer = p.peek_uint(pBuffers + (i * 12) + 8)
if (BufferType == 1): # SECBUFFER_DATA
# we have data to save
filename = sys.argv[1] + ".encmsg0x%08x-%d" % (pvBuffer,pid)
f = open(filename,"ab")
f.write(p.peek(pvBuffer,cbBuffer))
f.close()
log("[D]")
log("[D] cbBuffer: 0x%x (%d)" % (cbBuffer,cbBuffer))
log("[D] BufferType: 0x%x" % BufferType)
log("[D] pvBuffer: 0x%x" % pvBuffer)
except:
traceback.print_exc()
raise
def post_DecryptMessage(self,event,retval):
        # C.9.2 Get the return address and arguments
try:
(ra,(phContext,pMessage,MessageSeqNo,pfQOP)) = self.get_funcargs(event)
pid = event.get_pid()
tid = event.get_tid()
# Right -- this is going to get annoying
# pMessage is a pointer to a SecBufferDesc structure
# which describes an array of SecBuffer structures
p = event.get_process()
# really ought to use a ctypes struct for this!
ulVersion = p.peek_uint(pMessage)
cBuffers = p.peek_uint(pMessage + 4)
pBuffers = p.peek_uint(pMessage + 8)
log("[*] <%d:%d> 0x%x: DecryptMessage(...)" % (pid,tid,ra))
log("[D] ulVersion: %d" % ulVersion)
log("[D] cBuffers: %d" % cBuffers)
log("[D] pBuffers: 0x%x" % pBuffers)
# dump buffer list
for i in range(0,cBuffers):
cbBuffer = p.peek_uint(pBuffers + (i * 12) + 0)
BufferType = p.peek_uint(pBuffers + (i * 12) + 4)
pvBuffer = p.peek_uint(pBuffers + (i * 12) + 8)
if (BufferType == 1): # SECBUFFER_DATA
# we have data to save
filename = sys.argv[1] + ".decmsg0x%08x-%d" % (pvBuffer,pid)
f = open(filename,"ab")
f.write(p.peek(pvBuffer,cbBuffer))
f.close()
log("[D]")
log("[D] cbBuffer: 0x%x (%d)" % (cbBuffer,cbBuffer))
log("[D] BufferType: 0x%x" % BufferType)
log("[D] pvBuffer: 0x%x" % pvBuffer)
except:
traceback.print_exc()
raise
###
# D. winappdbg debug event handlers
###
### D.1
# create_process
#
# winappdbg defined callback function to handle process creation events
###
def create_process(self,event):
p = event.get_process()
pid = event.get_pid()
tid = event.get_tid()
log("[*] <%d:%d> Create process event for pid %d (%s)" % (pid,tid,p.get_pid(),p.get_image_name()))
log("[-] command line: %s" % p.get_command_line())
#log("[D] Create process event for pid %d (%d)" % (pid,tid))
self.eventlog.append({
"time": time.time(),
"name": event.get_event_name(),
"type": "WinAppDbg Event",
"pid": pid,
"tid": tid,
"info": {
"pid": p.get_pid(),
"module_base": event.get_module_base(),
"filename": event.get_filename(),
"cmdline": p.get_command_line()
},
})
### D.2
# exit_process
#
# winappdbg defined callback function to handle process exit events
###
def exit_process(self,event):
pid = event.get_pid()
tid = event.get_tid()
log("[*] <%d:%d> Exit process event for %s: 0x%x" % (pid,tid,event.get_filename(),event.get_exit_code()))
self.eventlog.append({
"time": time.time(),
"name": event.get_event_name(),
"type": "WinAppDbg Event",
"pid": pid,
"tid": tid,
"info": {
"module_base": event.get_module_base(),
"filename": event.get_filename(),
"exitcode": event.get_exit_code()
},
})
### D.3
# create_thread
#
# winappdbg defined callback function to handle thread creation events
###
def create_thread(self,event):
pid = event.get_pid()
tid = event.get_tid()
t = event.get_thread()
name = t.get_name()
log("[*] <%d:%d> Create thread event \"%s\" @ 0x%x" % (pid,tid,name,event.get_start_address()))
self.eventlog.append({
"time": time.time(),
"name": event.get_event_name(),
"type": "WinAppDbg Event",
"pid": pid,
"tid": tid,
"info": {
"startaddress": event.get_start_address(),
"threadname": name
},
})
### D.4
# exit_thread
#
# winappdbg defined callback function to handle thread exit events
###
def exit_thread(self,event):
pid = event.get_pid()
tid = event.get_tid()
t = event.get_thread()
name = t.get_name()
log("[*] <%d:%d> Exit thread event \"%s\"" % (pid,tid,name,))
self.eventlog.append({
"time": time.time(),
"name": event.get_event_name(),
"type": "WinAppDbg Event",
"pid": pid,
"tid": tid,
"info": {
"threadname": name
},
})
### D.5
# load_dll
#
# winappdbg defined callback function to handle DLL load events
###
def load_dll(self,event):
pid = event.get_pid()
tid = event.get_tid()
log("[*] <%d:%d> Load DLL event: %s" % (pid,tid,event.get_filename()))
self.eventlog.append({
"time": time.time(),
"name": event.get_event_name(),
"type": "WinAppDbg Event",
"pid": pid,
"tid": tid,
"info": {
"module_base": event.get_module_base(),
"filename": event.get_filename(),
},
})
### D.6
# event
#
# winappdbg defined callback function to handle any remaining events
###
def event(self,event):
pid = event.get_pid()
tid = event.get_tid()
log("[*] <%d:%d> Unhandled event: %s" % (pid,tid,event.get_event_name()))
###
# E. winappdbg debug exception handlers
###
### E.1
# guard_page
#
# winappdbg defined callback function to handle guard page exceptions
###
def guard_page_exemem(self,exception):
try:
f_type = exception.get_fault_type()
e_addr = exception.get_exception_address()
f_addr = exception.get_fault_address()
# get the process and thread ids
pid = exception.get_pid()
tid = exception.get_tid()
# It is interesting to log this, but it generates a lot of log
# output and slows the whole process down
#log("[!] <%d:%d> 0x%x: GUARD_PAGE(%d) exception for address 0x%x" % (pid,tid,e_addr,f_type,f_addr))
#log("[*] VirtualAlloc()d memory address 0x%x accessed (%d) from 0x%x (%s)" % (f_addr,f_type,e_addr,instr))
# E.1.2 Was it a memory write operation?
if (f_type == winappdbg.win32.EXCEPTION_WRITE_FAULT):
# E.1.2.1 Use the writeaddrs[] array to check to see
# if we have already logged access from this
# address, as unpacking is generally done in
# a loop and we don't want to log the same
# instructions for each iteration
if not e_addr in self.writeaddrs:
p = exception.get_process()
t = exception.get_thread()
label = p.get_label_at_address(e_addr)
instr = t.disassemble_instruction(e_addr)[2].lower()
log("[*] VirtualAlloc()d memory address 0x%x written from 0x%x (%s): %s" % (f_addr,e_addr,label,instr))
self.writeaddrs[e_addr] = instr
# E.1.2.2 Use the tracing variable to see if we have
# already started tracing, that is single
# stepping. If not, enable it, and make a note
# of the fact by setting the tracing variable
# to True
if (self.tracing == -1):
self.tracing = 0
d = exception.debug
log("[-] Enabling tracing")
d.start_tracing(exception.get_tid())
# E.1.3 Was it a memory instruction fetch (execute) operation,
# and if so, are we still looking for the entry point address?
if (f_type == winappdbg.win32.EXCEPTION_EXECUTE_FAULT) and (self.entrypt == 0):
self.entrypt = e_addr
t = exception.get_thread()
jmpinstr = t.disassemble_instruction(self.lasteip[0])[2].lower()
# E.1.3.1 Log what we've found
#log("[D] lasteip[1]: 0x%x" % self.lasteip[1])
log("[*] Found unpacked entry point at 0x%x called from 0x%x (%s) (after executing %d instructions)" % (self.entrypt,self.lasteip[0],jmpinstr,self.tracing))
log("[-] Unpacking loop at 0x%x - 0x%x" % (self.lowesteip,self.highesteip))
pid = exception.get_pid()
tid = exception.get_tid()
elog = ({
"time": time.time(),
"name": "unpacking loop found",
"type": "unpack event",
"pid": pid,
"tid": tid,
"info": {
"unpacked_entry_point": self.entrypt,
"callingaddr": self.lasteip[0],
"callinginstr": jmpinstr
},
})
# E.1.3.2
for (mem_pid,memblk) in self.allocedmem:
if (mem_pid == pid):
size = self.allocedmem[(mem_pid,memblk)]
endaddr = memblk + size - 1
if (e_addr >= memblk) and (e_addr <= endaddr):
# E.1.3.3 Log what we're doing and delete the memory breakpoint
log("[-] Dumping %d bytes of memory range 0x%x - 0x%x" % (size,memblk,endaddr))
d = exception.debug
d.dont_watch_buffer(exception.get_pid(),memblk,size - 1)
# E.1.3.4 Disable single-step debugging
self.tracing = -1
d.stop_tracing(exception.get_tid())
# E.1.3.5 Reset unpacking loop variables
self.entrypt = 0x00000000
#del self.lasteip
self.lasteip = [0x00000000,0x00000000]
self.lowesteip = 0xffffffff
                            self.highesteip = 0x00000000
# E.1.3.6 Dump the memory block to a file
p = exception.get_process()
filename = sys.argv[1] + ".memblk0x%08x" % memblk
dumpfile = open(filename,"wb")
dumpfile.write(p.read(memblk,size))
dumpfile.close()
elog["info"]["filename"] = filename
self.eventlog.append(elog)
except Exception as e:
traceback.print_exc()
raise
### E.2
# single_step
#
# winappdbg defined callback function to handle single step exceptions
###
def single_step(self,exception):
try:
# E.2.1 Get the exception address
e_addr = exception.get_exception_address()
# E.2.2 If we have just looped back (eip has gone backward)
if (e_addr < self.lasteip[1]):
# Remember this lower address as the lowest loop address
if self.lowesteip == 0xffffffff: self.lowesteip = e_addr
# ... and the address we just jumped from as the highest loop address
if self.highesteip == 0x00000000: self.highesteip = self.lasteip[1]
# E.2.3 If we are executing an instruction within the bounds of the loop
# and we haven't already disassembled this address, then do so
if (e_addr >= self.lowesteip) and (e_addr <= self.highesteip) and (not e_addr in self.disasmd):
t = exception.get_thread()
disasm = t.disassemble_instruction(e_addr)
instr = disasm[2].lower()
log(" 0x%x: %s" % (e_addr,instr))
self.disasmd.append(e_addr)
# E.2.4 Remember the last two instruction addresses (eip values)
# We need to remember the last two in order to be able to
# disassemble the instruction that jumped to the original
# entry point in the unpacked code
self.lasteip[0] = self.lasteip[1]
self.lasteip[1] = e_addr
# E.2.5 Increment the instruction counter, and check to see if
# we have reached our limit of 250,000 instructions.
# If so, assume that there is no unpacking loop and stop
# tracing (to speed up execution).
self.tracing += 1
if (self.tracing >= 250000):
log("[E] Reached tracing limit of 250000 instructions")
d = exception.debug
pid = exception.get_pid()
d.break_at(pid,e_addr,self.bp_stoptracing)
self.tracing = -1
except Exception as e:
traceback.print_exc()
raise
# E.2.6 bp_stoptracing()
# Set as a breakpoint handler when we want to stop tracing, as we can't
# disable single-step tracing from within the single-step call-back function.
def bp_stoptracing(self,exception):
log("[D] Single-step instruction limit reached -- stopping tracing")
d = exception.debug
tid = exception.get_tid()
pid = exception.get_pid()
d.stop_tracing(tid)
d.dont_break_at(pid,exception.get_exception_address())
### E.3
# exception
#
# winappdbg defined callback function to handle remaining exceptions
###
def exception(self,exception):
log("[*] Unhandled exception at 0x%x: %s" % (exception.get_exception_address(),exception.get_exception_name()))
#log("[-] 0x%x fault at 0x%x" % (exception.get_fault_type(),exception.get_fault_address()))
#
#### end of MyEventHandler class
#
###
# F. Miscellaneous functions
###
### F.1
# log(msg):
###
def log(msg):
global logfile
print(msg)
if not logfile:
logfile = open(sys.argv[1] + ".log","w")
if logfile:
logfile.write(msg + "\n")
logfile.flush()
#logfile.log_text(msg)
### F.2
# simple_debugger(argv):
###
def simple_debugger(filename):
global logfile
try:
handler = MyEventHandler()
#logfile = winappdbg.textio.Logger(filename + ".log",verbose = True)
except:
traceback.print_exc()
with winappdbg.Debug(handler,bKillOnExit = True, bHostileCode = False) as debug:
log("[*] Starting %s" % filename)
debug.execl(filename,bFollow = False)
log("[*] Starting debug loop")
debug.loop()
log("[*] Terminating")
log("[D] Number of created processes: %d" % len(handler.createdprocesses))
for i in range(0,len(handler.eventlog)):
log("%s" % handler.eventlog[i])
###
# G. Start of script execution
###
log("[*] Started at %s" % time.strftime("%Y-%m-%d %H:%M:%S"))
simple_debugger(sys.argv[1])
log("[*] Completed at %s" % time.strftime("%Y-%m-%d %H:%M:%S"))
| gpl-3.0 | -5,576,724,572,586,196,000 | 29.260176 | 227 | 0.600406 | false |
MyRobotLab/pyrobotlab | home/CheekyMonkey/tracking-arduino.py | 1 | 2678 | # A script to test opencv tracking in MyRobotLab with an Arduino connected to a Raspberry Pi 3
# as at mrl development build version 2489
# a mashup of code taken from Mats:
# https://github.com/MyRobotLab/pyrobotlab/blob/master/home/Mats/Tracking.py
# and also from Grog:
# http://myrobotlab.org/content/tracking-results
#
from org.myrobotlab.opencv import OpenCVFilterPyramidDown
#Define the x and y tracking servo pins
#articulated neck servos
centreneckPin = 1 # vertical motion
mainneckPin = 2 # horizontal motion
xPin = 9; # horizontal motion
yPin = 10; # vertical motion
#set which camera to use. In my case, 0 references the Raspberry Pi camera
cameraIndex = 0
# set the port to which the Arduino is connected
arduinoPort = '/dev/ttyUSB0'
# start a tracker service instance
tracker = Runtime.start("tracker", "Tracking");
tracker.connect(arduinoPort, xPin, yPin, cameraIndex);
x = tracker.getX();
# invert if necessary
# x.setInverted(True);
x.setVelocity(20)
x.setMinMax(60,90)
#x.setInverted(True);
x.setRest(85)
x.rest()
y = tracker.getY();
y.setVelocity(20)
y.setInverted(True);
y.setMinMax(60,75)
y.setRest(70)
y.rest()
#start an Arduino service instance
#arduino = Runtime.start("tracker.controller","Arduino")
#define a tracker PID instance
pid = Runtime.start("tracker.pid","Pid")
#set the x and y PID values
#pid.setPID("x", 20.0, 5.0, 0.1);
#pid.setPID("y", 20.0, 5.0, 0.1);
opencv = Runtime.start("tracker.opencv","OpenCV")
pid.setPID("x", 5.0, 1.0, 0.1);
pid.setPID("y", 5.0, 1.0, 0.1);
#get the tracker opencv service instance
#opencv = Runtime.getService("tracker.opencv")
sleep(2);
#opencv.addFilter("PyramidDown1","PyramidDown")
#opencv.addFilter("Gray1","Gray")
#as at mrl development build 2423 this next piece is required on the Raspberry Pi (3) under javacv1.3
#for opencv to return video frames
#frameGrabberType = "org.bytedeco.javacv.FFmpegFrameGrabber";
#opencv.captureFromResourceFile("/dev/video0");
#opencv.setFrameGrabberType(frameGrabberType);
#opencv.broadcastState();
#sleep(3);
#rest for a bit
#sleep(3);
tracker.y.setInverted(True);
# additional PyramidDown filter for improved framerate on the Pi (~15 fps)
PreFilterPyramidDown = OpenCVFilterPyramidDown("PreFilterPyramidDown")
tracker.preFilters.add(PreFilterPyramidDown)
tracker.opencv.setDisplayFilter("PreFilterPyramidDown")
#start the opencv video frame capture
opencv.capture();
#opencv.addFilter("lkOpticalTrack1","LKOpticalTrack")
#opencv.setDisplayFilter("lkOpticalTrack1")
#sleep(1)
#opencv.invokeFilterMethod("lkOpticalTrack1","samplePoint",160,120)
#start tracking
#1# tracker.startLKTracking()
#2# tracker.findFace()
#3# tracker.faceDetect()
| apache-2.0 | -6,644,644,271,138,608,000 | 25.514851 | 102 | 0.754668 | false |
ehooo/email_backup | email_backup/core/test/test_connector.py | 1 | 29237 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from unittest import TestCase
from mock import Mock, patch, call
from email_backup.core.connector import (
get_email_content,
Email,
EmailConnectorInterface
)
from email.parser import Parser
from datetime import date, datetime
import binascii
import imaplib
import locale
import six
import os
BASE_DIR = os.path.dirname(os.path.abspath(__file__))
if not six.PY2: # pragma: no cover
unicode = str
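# get_email_content() should return the decoded text/plain payload of a
# message, whatever the charset or MIME structure.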
class GetEmailContentTest(TestCase):
def test_multi(self):
multi_email_file = os.path.join(BASE_DIR, 'files', 'multi_email.eml')
multi_email = Parser().parse(open(multi_email_file))
email_content = '*Test Body*\r\n\r\n-- \r\nSignature with link <http://domain.test>\r\n'
read_content = get_email_content(multi_email)
self.assertEqual(email_content, read_content)
def test_plain(self):
plain_email_file = os.path.join(BASE_DIR, 'files', 'plain_email.eml')
plain_email = Parser().parse(open(plain_email_file))
email_content = 'Plain body\r\n'
read_content = get_email_content(plain_email)
self.assertEqual(email_content, read_content)
def test_japan(self):
japan_email_file = os.path.join(BASE_DIR, 'files', 'japan_email.eml')
japan_email = Parser().parse(open(japan_email_file))
email_content = u'テスト body\r\n'
read_content = get_email_content(japan_email)
self.assertEqual(email_content, read_content)
class EmailTest(TestCase):
def setUp(self):
self.multi_email_file = os.path.join(BASE_DIR, 'files', 'multi_email.eml')
self.plain_email_file = os.path.join(BASE_DIR, 'files', 'plain_email.eml')
self.japan_email_file = os.path.join(BASE_DIR, 'files', 'japan_email.eml')
self.connector = Mock(spec=EmailConnectorInterface)
self.connector.header.return_value = ''
self.connector.read.return_value = ''
self.email = Email(self.connector, 1, 'test')
def test_string(self):
self.assertEqual(str(self.email), '[1] test')
self.assertEqual(unicode(self.email), u'[1] test')
def test_sever_id(self):
self.assertEqual(self.email.server_id, 1)
def _test_load(self):
self.assertFalse(self.email._header)
self.assertFalse(self.email._full)
self.email.load()
self.assertEqual(self.connector.chdir.call_count, 1)
self.assertEqual(self.connector.chdir.call_args, call(self.email.directory))
self.assertEqual(self.connector.read.call_count, 1)
self.assertEqual(self.connector.read.call_args, call(self.email.id))
self.assertTrue(self.email._header)
self.assertTrue(self.email._full)
def test_load_plain(self):
self.connector.read.return_value = open(self.plain_email_file).read()
self._test_load()
def test_load_multi(self):
self.connector.read.return_value = open(self.multi_email_file).read()
self._test_load()
def test_load_japan(self):
self.connector.read.return_value = open(self.japan_email_file).read()
self._test_load()
def _test_load_headers(self):
self.assertFalse(self.email._header)
self.assertFalse(self.email._full)
self.email.load(True)
self.assertEqual(self.connector.chdir.call_count, 1)
self.assertEqual(self.connector.chdir.call_args, call(self.email.directory))
self.assertEqual(self.connector.header.call_count, 1)
self.assertEqual(self.connector.header.call_args, call(self.email.id))
self.assertTrue(self.email._header)
self.assertFalse(self.email._full)
def test_load_header_plain(self):
self.connector.header.return_value = open(self.plain_email_file).read()
self._test_load_headers()
def test_load_header_multi(self):
self.connector.header.return_value = open(self.multi_email_file).read()
self._test_load_headers()
def test_load_header_japan(self):
self.connector.header.return_value = open(self.japan_email_file).read()
self._test_load_headers()
def test_load_no_msg(self):
self._test_load()
def _test_get_common(self):
self.assertEqual(self.connector.chdir.call_count, 2)
self.assertEqual(self.connector.chdir.call_args, call(self.email.directory))
self.assertEqual(self.connector.header.call_count, 1)
self.assertEqual(self.connector.header.call_args, call(self.email.id))
def test_get_date_no_msg(self):
value = self.email.get('date')
self._test_get_common()
self.assertIsNone(value)
def test_get_date_plain(self):
self.connector.header.return_value = open(self.plain_email_file).read()
value = self.email.get('date')
self._test_get_common()
self.assertEqual(value, datetime(2017, 7, 31, 20, 25, 18))
def test_get_date_multi(self):
self.connector.header.return_value = open(self.multi_email_file).read()
value = self.email.get('date')
self._test_get_common()
self.assertEqual(value, datetime(2017, 7, 31, 14, 18, 46))
def test_get_date_japan(self):
self.connector.header.return_value = open(self.japan_email_file).read()
value = self.email.get('date')
self._test_get_common()
self.assertEqual(value, datetime(2017, 7, 31, 11, 30, 37))
def test_get_from_plain(self):
self.connector.header.return_value = open(self.plain_email_file).read()
value = self.email.get('from')
self._test_get_common()
self.assertEqual(value, '[email protected]')
def test_get_from_multi(self):
self.connector.header.return_value = open(self.multi_email_file).read()
value = self.email.get('from')
self._test_get_common()
self.assertEqual(value, '[email protected]')
def test_get_from_japan(self):
self.connector.header.return_value = open(self.japan_email_file).read()
value = self.email.get('from')
self._test_get_common()
self.assertEqual(value, '[email protected]')
def test_get_subject_plain(self):
self.connector.header.return_value = open(self.plain_email_file).read()
value = self.email.get('subject')
self._test_get_common()
self.assertEqual(value, 'Test plain')
def test_get_subject_multi(self):
self.connector.header.return_value = open(self.multi_email_file).read()
value = self.email.get('subject')
self._test_get_common()
self.assertEqual(value, 'Test subject')
def test_get_subject_japan(self):
self.connector.header.return_value = open(self.japan_email_file).read()
value = self.email.get('subject')
self._test_get_common()
self.assertEqual(value, u'テスト')
def test_subject_encode_error(self):
self.email.email = Mock()
self.email.email.get.return_value = '=?UTF-8?B?test?='
subject = self.email.subject()
self.assertEqual(self.email.email.get.return_value, subject)
@patch('email_backup.core.connector.base64.decodestring')
def test_subject_binascii_error(self, raise_call_mock):
self.email.email = Mock()
self.email.email.get.return_value = '=?UTF-8?B?test?='
raise_call_mock.side_effect = binascii.Error
subject = self.email.subject()
self.assertEqual(raise_call_mock.call_count, 1)
self.assertEqual(raise_call_mock.call_args, call(six.b('test')))
self.assertEqual(self.email.email.get.return_value, subject)
def test_get_generic_plain(self):
self.connector.header.return_value = open(self.plain_email_file).read()
value = self.email.get('Message-id')
self.assertEqual(self.connector.chdir.call_count, 1)
self.assertEqual(self.connector.chdir.call_args, call(self.email.directory))
self.assertEqual(self.connector.header.call_count, 1)
self.assertEqual(self.connector.header.call_args, call(self.email.id))
self.assertEqual(value, '<[email protected]>')
def test_get_generic_multi(self):
self.connector.header.return_value = open(self.multi_email_file).read()
value = self.email.get('message-ID')
self.assertEqual(self.connector.chdir.call_count, 1)
self.assertEqual(self.connector.chdir.call_args, call(self.email.directory))
self.assertEqual(self.connector.header.call_count, 1)
self.assertEqual(self.connector.header.call_args, call(self.email.id))
self.assertEqual(value, '<[email protected]>')
def test_get_generic_japan(self):
self.connector.header.return_value = open(self.japan_email_file).read()
value = self.email.get('Message-ID')
self.assertEqual(self.connector.chdir.call_count, 1)
self.assertEqual(self.connector.chdir.call_args, call(self.email.directory))
self.assertEqual(self.connector.header.call_count, 1)
self.assertEqual(self.connector.header.call_args, call(self.email.id))
self.assertEqual(value, u'<[email protected]>')
def _test_content_common(self):
self.assertEqual(self.connector.chdir.call_count, 1)
self.assertEqual(self.connector.chdir.call_args, call(self.email.directory))
self.assertEqual(self.connector.read.call_count, 1)
self.assertEqual(self.connector.read.call_args, call(self.email.id))
def test_content_plain(self):
self.connector.read.return_value = open(self.plain_email_file).read()
value = self.email.content()
self._test_content_common()
email_content = 'Plain body\r\n'
self.assertEqual(email_content, value)
def test_content_multi(self):
self.connector.read.return_value = open(self.multi_email_file).read()
value = self.email.content()
self._test_content_common()
email_content = '*Test Body*\r\n\r\n-- \r\nSignature with link <http://domain.test>\r\n'
self.assertEqual(email_content, value)
def test_content_japan(self):
self.connector.read.return_value = open(self.japan_email_file).read()
value = self.email.content()
self._test_content_common()
email_content = u'テスト body\r\n'
self.assertEqual(email_content, value)
def test_attaches_plain(self):
self.connector.read.return_value = open(self.plain_email_file).read()
value = self.email.attaches()
self.assertEqual(value, 0)
def test_attaches_multi(self):
self.connector.read.return_value = open(self.multi_email_file).read()
value = self.email.attaches()
self.assertEqual(value, 2)
def test_attaches_japan(self):
self.connector.read.return_value = open(self.japan_email_file).read()
value = self.email.attaches()
self.assertEqual(value, 0)
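# Connection lifecycle: open() must pick IMAP4 or IMAP4_SSL depending on the
# ssl flag and log in with the given credentials.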
class OpenTest(TestCase):
@patch('email_backup.core.connector.imaplib')
def test_open(self, imap4_mock):
host, port = 'imap.host.test', 143
user, password = 'user', 'password'
login_mock = Mock()
login_mock.login = Mock()
imap4_mock.IMAP4.return_value = login_mock
conn = EmailConnectorInterface(host, port, False, user, password)
conn.open()
self.assertEqual(imap4_mock.IMAP4.call_count, 1)
self.assertEqual(imap4_mock.IMAP4.call_args, call(host, port))
self.assertEqual(login_mock.login.call_count, 1)
self.assertEqual(login_mock.login.call_args, call(user, password))
@patch('email_backup.core.connector.imaplib')
def test_open_ssl(self, imap4_mock):
host, port = 'imap.host.test', 993
user, password = 'user', 'password'
login_mock = Mock()
login_mock.login = Mock()
imap4_mock.IMAP4_SSL.return_value = login_mock
conn = EmailConnectorInterface(host, port, True, user, password)
conn.open()
self.assertEqual(imap4_mock.IMAP4_SSL.call_count, 1)
self.assertEqual(imap4_mock.IMAP4_SSL.call_args, call(host, port))
self.assertEqual(login_mock.login.call_count, 1)
self.assertEqual(login_mock.login.call_args, call(user, password))
class CloseTest(TestCase):
def setUp(self):
host, port, ssl, user, password = 'imap.host.test', 143, False, 'user', 'password'
self.conn = EmailConnectorInterface(host, port, ssl, user, password)
def test_close(self):
connection = Mock()
self.conn.connection = connection
sock_mock = Mock()
self.conn.connection.socket.return_value = sock_mock
self.conn.close()
self.assertEqual(connection.close.call_count, 1)
self.assertEqual(connection.close.call_args, call())
self.assertEqual(connection.logout.call_count, 1)
self.assertEqual(connection.logout.call_args, call())
self.assertEqual(connection.socket.call_count, 1)
self.assertEqual(connection.socket.call_args, call())
self.assertEqual(sock_mock.close.call_count, 1)
self.assertEqual(sock_mock.close.call_args, call())
self.assertIsNone(self.conn.connection)
def test_close_not_open(self):
self.conn.close()
def test_close_wrong(self):
self.conn.connection = Mock()
self.conn.connection.close.side_effect = imaplib.IMAP4.error
self.conn.close()
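# directories() should parse the LIST response and keep only well-formed,
# quoted mailbox names.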
class DirectoriesTest(TestCase):
def setUp(self):
host, port, ssl, user, password = 'imap.host.test', 143, False, 'user', 'password'
self.conn = EmailConnectorInterface(host, port, ssl, user, password)
self.conn.connection = Mock()
def test_directories_not_open(self):
self.conn.connection = None
result = self.conn.directories()
self.assertEqual(result, [])
def test_directories_empty(self):
self.conn.connection.list = Mock()
self.conn.connection.list.return_value = ('OK', [])
result = self.conn.directories()
self.assertEqual(self.conn.connection.list.call_count, 1)
self.assertEqual(self.conn.connection.list.call_args, call())
self.assertEqual(result, [])
def test_directories_with_valid(self):
valid_without_tildes = ['valid dir', '[Also]valid', 'And_this', 'or-this', 'or/dir', 'with.points']
valid = []
for v in valid_without_tildes:
valid.append('"{}"'.format(v))
invalid = ['"\tinvalid"', '"invalid\r"', '"invalid\n"', '"invalid?"', 'invalid']
data = valid[:]
data.extend(invalid)
self.conn.connection.list = Mock()
self.conn.connection.list.return_value = ('OK', data)
result = self.conn.directories()
self.assertEqual(result, valid_without_tildes)
self.assertEqual(self.conn.connection.list.call_count, 1)
self.assertEqual(self.conn.connection.list.call_args, call())
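# read() fetches the full message with (RFC822); header() uses
# (BODY.PEEK[HEADER]) so that peeking does not mark the message as seen.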
class FetchTest(TestCase):
def setUp(self):
host, port, ssl, user, password = 'imap.host.test', 143, False, 'user', 'password'
self.conn = EmailConnectorInterface(host, port, ssl, user, password)
self.conn.connection = Mock()
self.conn.connection.fetch = Mock()
def test_read_not_open(self):
self.conn.connection = None
email_id = 1
result = self.conn.read(email_id)
self.assertEqual(result, None)
def test_read(self):
msg = 'Message'
data = (('', msg), '')
email_id = 1
self.conn.connection.fetch.return_value = ('OK', data)
result = self.conn.read(email_id)
self.assertEqual(result, msg)
self.assertEqual(self.conn.connection.fetch.call_count, 1)
self.assertEqual(self.conn.connection.fetch.call_args, call(email_id, '(RFC822)'))
def test_header_not_open(self):
self.conn.connection = None
email_id = 1
result = self.conn.header(email_id)
self.assertEqual(result, None)
def test_header(self):
msg = 'Message'
data = (('', msg), '')
email_id = 1
self.conn.connection.fetch.return_value = ('OK', data)
result = self.conn.header(email_id)
self.assertEqual(result, msg)
self.assertEqual(self.conn.connection.fetch.call_count, 1)
self.assertEqual(self.conn.connection.fetch.call_args, call(email_id, '(BODY.PEEK[HEADER])'))
def test_wrong_zero(self):
email_id = 0
result = self.conn.header(email_id)
self.assertIsNone(result)
self.assertEqual(self.conn.connection.fetch.call_count, 0)
def test_wrong_index_out(self):
data = [None]
email_id = 99999
self.conn.connection.fetch.return_value = ('OK', data)
result = self.conn.header(email_id)
self.assertIsNone(result)
self.assertEqual(self.conn.connection.fetch.call_count, 1)
self.assertEqual(self.conn.connection.fetch.call_args, call(email_id, '(BODY.PEEK[HEADER])'))
result = self.conn.read(email_id)
self.assertIsNone(result)
self.assertEqual(self.conn.connection.fetch.call_count, 2)
self.assertIn(call(email_id, '(RFC822)'), self.conn.connection.fetch.call_args_list)
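# chdir() wraps IMAP SELECT and returns the mailbox message count, or 0 when
# the mailbox cannot be selected.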
class ChDirTest(TestCase):
def setUp(self):
host, port, ssl, user, password = 'imap.host.test', 143, False, 'user', 'password'
self.conn = EmailConnectorInterface(host, port, ssl, user, password)
self.conn.connection = Mock()
self.conn.connection.select = Mock()
def test_chdir_not_open(self):
self.conn.connection = None
result = self.conn.chdir('dir')
self.assertEqual(result, 0)
def test_chdir(self):
directory = 'dir'
ret_val = '5'
self.conn.connection.select.return_value = ('OK', [ret_val])
result = self.conn.chdir(directory)
self.assertEqual(result, 5)
self.assertEqual(self.conn.connection.select.call_count, 1)
self.assertEqual(self.conn.connection.select.call_args, call(directory))
def test_wrong_chdir(self):
directory = 'not exist'
ret_val = ['[NONEXISTENT] Unknown Mailbox: (Failure)']
self.conn.connection.select.return_value = ('NO', ret_val)
result = self.conn.chdir(directory)
self.assertEqual(result, 0)
self.assertEqual(self.conn.connection.select.call_count, 1)
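# mark_delete() sets the \Deleted flag on a message; do_delete() expunges all
# flagged messages in the selected mailbox.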
class DeleteTest(TestCase):
def setUp(self):
host, port, ssl, user, password = 'imap.host.test', 143, False, 'user', 'password'
self.conn = EmailConnectorInterface(host, port, ssl, user, password)
self.conn.connection = Mock()
self.conn.connection.store = Mock()
def test_mark_delete_not_open(self):
self.conn.connection = None
self.conn.mark_delete(1)
def test_do_delete_not_open(self):
self.conn.connection = None
self.conn.do_delete()
def test_mark_delete(self):
email_id = 5
self.conn.mark_delete(email_id)
self.assertEqual(self.conn.connection.store.call_count, 1)
self.assertEqual(self.conn.connection.store.call_args, call(email_id, '+FLAGS', '\\Deleted'))
def test_do_delete(self):
self.conn.do_delete()
self.assertEqual(self.conn.connection.expunge.call_count, 1)
self.assertEqual(self.conn.connection.expunge.call_args, call())
def test_mark_delete_wrong(self):
email_id = 0
self.conn.mark_delete(email_id)
self.assertEqual(self.conn.connection.store.call_count, 0)
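# get_emails() builds an IMAP SEARCH query from its keyword arguments
# (just_read -> SEEN, before -> a "before" date normalised to the IMAP
# DD-Mon-YYYY format regardless of the active locale).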
class GetEmailsTest(TestCase): # pragma: no cover
def setUp(self):
host, port, ssl, user, password = 'imap.host.test', 143, False, 'user', 'password'
self.conn = EmailConnectorInterface(host, port, ssl, user, password)
self.conn.connection = Mock()
def test_get_emails_not_open(self):
self.conn.connection = None
generator = self.conn.get_emails(None)
if not six.PY2:
self.assertRaises(StopIteration, next, generator)
else:
self.assertRaises(StopIteration, generator.next)
def test_get_emails_without_extra(self):
directory = 'directory'
self.conn.chdir = Mock()
self.conn.chdir.return_value = 0
generator = self.conn.get_emails(directory)
if not six.PY2:
self.assertRaises(StopIteration, next, generator)
else:
self.assertRaises(StopIteration, generator.next)
self.assertEqual(self.conn.chdir.call_count, 1)
self.assertEqual(self.conn.chdir.call_args, call(directory))
def test_get_emails_with_just_read(self):
directory = 'directory'
ret_val = ''
self.conn.chdir = Mock()
self.conn.chdir.return_value = 0
self.conn.connection.search = Mock()
self.conn.connection.search.return_value = ('OK', (ret_val, ))
generator = self.conn.get_emails(directory, just_read=True)
if not six.PY2:
self.assertRaises(StopIteration, next, generator)
else:
self.assertRaises(StopIteration, generator.next)
self.assertEqual(self.conn.chdir.call_count, 1)
self.assertEqual(self.conn.chdir.call_args, call(directory))
self.assertEqual(self.conn.connection.search.call_count, 1)
self.assertEqual(self.conn.connection.search.call_args, call(None, '(SEEN)'))
def test_get_emails_with_before(self):
directory = 'directory'
ret_val = ''
before = date(2017, 1, 1)
before_str = '01-Jan-2017'
self.conn.chdir = Mock()
self.conn.chdir.return_value = 0
self.conn.connection.search = Mock()
self.conn.connection.search.return_value = ('OK', (ret_val, ))
generator = self.conn.get_emails(directory, before=before)
if not six.PY2:
self.assertRaises(StopIteration, next, generator)
else:
self.assertRaises(StopIteration, generator.next)
self.assertEqual(self.conn.chdir.call_count, 1)
self.assertEqual(self.conn.chdir.call_args, call(directory))
self.assertEqual(self.conn.connection.search.call_count, 1)
self.assertEqual(self.conn.connection.search.call_args, call(None, '(before "{}")'.format(before_str)))
def test_get_emails_with_before_as_string_on_esES(self):
directory = 'directory'
ret_val = ''
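        # NOTE: requires the es_ES.UTF-8 locale to be generated on the test host;
        # locale.setlocale raises locale.Error when it is missing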
locale.setlocale(locale.LC_TIME, 'es_ES.UTF-8')
before_str_es = '1-Ene-2017'
before_str = '01-Jan-2017'
self.conn.chdir = Mock()
self.conn.chdir.return_value = 0
self.conn.connection.search = Mock()
self.conn.connection.search.return_value = ('OK', (ret_val, ))
generator = self.conn.get_emails(directory, before=before_str_es)
if not six.PY2:
self.assertRaises(StopIteration, next, generator)
else:
self.assertRaises(StopIteration, generator.next)
self.assertEqual(self.conn.chdir.call_count, 1)
self.assertEqual(self.conn.chdir.call_args, call(directory))
self.assertEqual(self.conn.connection.search.call_count, 1)
self.assertEqual(self.conn.connection.search.call_args, call(None, '(before "{}")'.format(before_str)))
def test_get_emails_with_before_as_string(self):
directory = 'directory'
ret_val = ''
before_str = '01-Jan-2017'
self.conn.chdir = Mock()
self.conn.chdir.return_value = 0
self.conn.connection.search = Mock()
self.conn.connection.search.return_value = ('OK', (ret_val, ))
generator = self.conn.get_emails(directory, before=before_str)
if not six.PY2:
self.assertRaises(StopIteration, next, generator)
else:
self.assertRaises(StopIteration, generator.next)
self.assertEqual(self.conn.chdir.call_count, 1)
self.assertEqual(self.conn.chdir.call_args, call(directory))
self.assertEqual(self.conn.connection.search.call_count, 1)
self.assertEqual(self.conn.connection.search.call_args, call(None, '(before "{}")'.format(before_str)))
def test_get_emails_with_wrong_before_as_string(self):
directory = 'directory'
before_str = 'THIS_IS_NOT_DATE'
self.conn.chdir = Mock()
self.conn.chdir.return_value = 0
generator = self.conn.get_emails(directory, before=before_str)
if not six.PY2:
self.assertRaises(ValueError, next, generator)
else:
self.assertRaises(ValueError, generator.next)
def test_get_emails_with_wrong_before(self):
directory = 'directory'
before_str = object
self.conn.chdir = Mock()
self.conn.chdir.return_value = 0
generator = self.conn.get_emails(directory, before=before_str)
if not six.PY2:
self.assertRaises(ValueError, next, generator)
else:
self.assertRaises(ValueError, generator.next)
@patch('email_backup.core.connector.locale.setlocale')
def test_get_emails_with_wrong_locale(self, setlocale_mock):
setlocale_mock.side_effect = locale.Error
directory = 'directory'
ret_val = ''
before_str = '01-Jan-2017'
self.conn.chdir = Mock()
self.conn.chdir.return_value = 0
self.conn.connection.search = Mock()
self.conn.connection.search.return_value = ('OK', (ret_val, ))
generator = self.conn.get_emails(directory, before=before_str)
if not six.PY2:
self.assertRaises(StopIteration, next, generator)
else:
self.assertRaises(StopIteration, generator.next)
self.assertEqual(self.conn.chdir.call_count, 1)
self.assertEqual(self.conn.chdir.call_args, call(directory))
self.assertEqual(self.conn.connection.search.call_count, 0)
def test_get_emails_with_before_and_just_read(self):
directory = 'directory'
ret_val = ''
before_str = '01-Jan-2017'
self.conn.chdir = Mock()
self.conn.chdir.return_value = 0
self.conn.connection.search = Mock()
self.conn.connection.search.return_value = ('OK', (ret_val, ))
generator = self.conn.get_emails(directory, before=before_str, just_read=True)
if not six.PY2:
self.assertRaises(StopIteration, next, generator)
else:
self.assertRaises(StopIteration, generator.next)
self.assertEqual(self.conn.chdir.call_count, 1)
self.assertEqual(self.conn.chdir.call_args, call(directory))
self.assertEqual(self.conn.connection.search.call_count, 1)
self.assertEqual(self.conn.connection.search.call_args,
call(None, '(before "{}")'.format(before_str), '(SEEN)'))
def test_get_emails_with_response(self):
directory = 'directory'
self.conn.chdir = Mock()
self.conn.chdir.return_value = 1
generator = self.conn.get_emails(directory)
if not six.PY2:
email = next(generator)
else:
email = generator.next()
self.assertEqual(email.id, 1)
self.assertEqual(email.connector, self.conn)
self.assertEqual(email.directory, directory)
if not six.PY2:
self.assertRaises(StopIteration, next, generator)
else:
self.assertRaises(StopIteration, generator.next)
def test_get_emails_with_response_and_query(self):
directory = 'directory'
ret_val = '1 10'
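        # imaplib's search returns message ids as one space-separated string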
self.conn.chdir = Mock()
self.conn.chdir.return_value = 0
self.conn.connection.search = Mock()
self.conn.connection.search.return_value = ('OK', (ret_val, ))
generator = self.conn.get_emails(directory, just_read=True)
if not six.PY2:
email = next(generator)
else:
email = generator.next()
self.assertEqual(email.id, '1')
self.assertEqual(email.connector, self.conn)
self.assertEqual(email.directory, directory)
if not six.PY2:
email = next(generator)
else:
email = generator.next()
self.assertEqual(email.id, '10')
self.assertEqual(email.connector, self.conn)
self.assertEqual(email.directory, directory)
self.assertEqual(self.conn.chdir.call_count, 1)
self.assertEqual(self.conn.chdir.call_args, call(directory))
self.assertEqual(self.conn.connection.search.call_count, 1)
self.assertEqual(self.conn.connection.search.call_args, call(None, '(SEEN)'))
if not six.PY2:
self.assertRaises(StopIteration, next, generator)
else:
self.assertRaises(StopIteration, generator.next)
def test_get_emails_wrong_dir(self):
directory = 'not exist'
self.conn.chdir = Mock()
self.conn.chdir.return_value = 0
generator = self.conn.get_emails(directory)
if not six.PY2:
self.assertRaises(StopIteration, next, generator)
else:
self.assertRaises(StopIteration, generator.next)
self.assertEqual(self.conn.chdir.call_count, 1)
self.assertEqual(self.conn.chdir.call_args, call(directory))
| gpl-3.0 | 7,655,963,208,932,830,000 | 38.916667 | 111 | 0.640611 | false |
heiths/allura | Allura/allura/tests/unit/spam/test_mollom.py | 1 | 4255 | # -*- coding: utf-8 -*-
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import mock
import unittest
import urllib
from bson import ObjectId
from allura.lib.spam.mollomfilter import MOLLOM_AVAILABLE, MollomSpamFilter
@unittest.skipIf(not MOLLOM_AVAILABLE, "Mollom not available")
class TestMollom(unittest.TestCase):
@mock.patch('allura.lib.spam.mollomfilter.Mollom')
def setUp(self, mollom_lib):
self.mollom = MollomSpamFilter({})
def side_effect(*args, **kw):
# side effect to test that data being sent to
# mollom can be successfully urlencoded
urllib.urlencode(kw.get('data', {}))
return dict(spam=2)
self.mollom.service.checkContent = mock.Mock(side_effect=side_effect,
return_value=dict(spam=2))
self.fake_artifact = mock.Mock(**{'url.return_value': 'artifact url'})
self.fake_user = mock.Mock(display_name=u'Søme User',
email_addresses=['user@domain'],
_id=ObjectId())
self.fake_headers = dict(
USER_AGENT='some browser',
REFERER='some url')
self.content = u'spåm text'
self.expected_data = dict(
postBody=self.content.encode('utf8'),
authorIP='some ip')
self.artifact = mock.Mock()
self.artifact.spam_check_id = 'test_id'
self.artifact.project_id = ObjectId()
self.artifact.ref = None
@mock.patch('allura.lib.spam.mollomfilter.c')
@mock.patch('allura.lib.spam.mollomfilter.request')
def test_check(self, request, c):
request.headers = self.fake_headers
request.remote_addr = 'some ip'
c.user = None
self.mollom.check(self.content, artifact=self.artifact)
self.mollom.service.checkContent.assert_called_once_with(
**self.expected_data)
@mock.patch('allura.lib.spam.mollomfilter.c')
@mock.patch('allura.lib.spam.mollomfilter.request')
def test_check_with_user(self, request, c):
request.headers = self.fake_headers
request.remote_addr = 'some ip'
c.user = None
self.mollom.check(self.content, user=self.fake_user,
artifact=self.artifact)
expected_data = self.expected_data
expected_data.update(authorName=u'Søme User'.encode('utf8'),
authorMail='user@domain')
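        # expected_data aliases self.expected_data, so the update above is
        # visible in the assertion below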
self.mollom.service.checkContent.assert_called_once_with(
**self.expected_data)
@mock.patch('allura.lib.spam.mollomfilter.c')
@mock.patch('allura.lib.spam.mollomfilter.request')
def test_check_with_implicit_user(self, request, c):
request.headers = self.fake_headers
request.remote_addr = 'some ip'
c.user = self.fake_user
self.mollom.check(self.content, artifact=self.artifact)
expected_data = self.expected_data
expected_data.update(authorName=u'Søme User'.encode('utf8'),
authorMail='user@domain')
self.mollom.service.checkContent.assert_called_once_with(
**self.expected_data)
def test_submit_spam(self):
self.mollom.submit_spam('test', artifact=self.artifact)
assert self.mollom.service.sendFeedback.call_args[0] == (
'test_id', 'spam'), self.mollom.service.sendFeedback.call_args[0]
| apache-2.0 | 4,667,967,762,782,171,000 | 41.51 | 79 | 0.630205 | false |
tomsercu/metarunlog | metarunlog/util.py | 1 | 1967 | # Metarunlog, experiment management tool.
# Author: Tom Sercu
# Date: 2015-01-23
import datetime
import subprocess
def nowstring(sec=True, ms=False):
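    # e.g. '2015-01-23T14:05:59' (defaults), '2015-01-23T14:05' (sec=False)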
tstr = datetime.datetime.now().isoformat()
if not ms:
tstr = tstr.split('.')[0]
if not sec:
tstr = tstr.rsplit(':',1)[0]
return tstr
def sshify(cmd, sshHost, sshPass, vfh=None):
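    # Wrap cmd for remote execution: 'ssh host "cmd"' when sshHost is set,
    # prefixed with sshpass when a password is given; cleancmd masks the
    # password in the copy written to the verbose file handle.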
cleancmd = ''
if sshHost:
#cmd = 'ssh -t {} "{}"'.format(sshHost, cmd) #works but messes up terminal
#cmd = 'ssh {} "shopt -s huponexit; {}"'.format(sshHost, cmd) # doesnt work to kill job on exit
cmd = 'ssh {} "{}"'.format(sshHost, cmd)
#TODO use paramiko or pexpect see http://stackoverflow.com/questions/4669204/send-ctrl-c-to-remote-processes-started-via-subprocess-popen-and-ssh
if sshPass:
cleancmd = "sshpass -p '{}' {}".format('***', cmd)
cmd = "sshpass -p '{}' {}".format(sshPass, cmd)
# printing
if not cleancmd: cleancmd = cmd
if vfh: vfh.write(cleancmd + '\n')
return cmd
def _decode_list(data):
rv = []
for item in data:
if isinstance(item, unicode):
item = item.encode('utf-8')
elif isinstance(item, list):
item = _decode_list(item)
elif isinstance(item, dict):
item = _decode_dict(item)
rv.append(item)
return rv
def _decode_dict(data):
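    # Recursively UTF-8-encode unicode keys and values (Python 2); commonly
    # passed as the object_hook to json.loads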
rv = {}
for key, value in data.iteritems():
if isinstance(key, unicode):
key = key.encode('utf-8')
if isinstance(value, unicode):
value = value.encode('utf-8')
elif isinstance(value, list):
value = _decode_list(value)
elif isinstance(value, dict):
value = _decode_dict(value)
rv[key] = value
return rv
def get_commit():
cline = subprocess.check_output("git log -n1 --oneline", shell=True)
#print "cline: ", cline
cline = cline.split()
return (cline[0], " ".join(cline[1:]))
| mit | -735,737,418,120,828,800 | 30.725806 | 153 | 0.582613 | false |
Seedstars/python-iso8583 | ISO8583/ISO8583.py | 1 | 56605 | """
(C) Copyright 2009 Igor V. Custodio
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
__author__ = 'Igor Vitorio Custodio <[email protected]>'
__version__ = '1.3.1'
__licence__ = 'GPL V3'
from ISOErrors import *
import struct
class ISO8583:
"""Main Class to work with ISO8583 packages.
Used to create, change, send, receive, parse or work with ISO8593 Package version 1993.
It's 100% Python :)
Enjoy it!
Thanks to: Vulcanno IT Solutions <http://www.vulcanno.com.br>
Licence: GPL Version 3
More information: http://code.google.com/p/iso8583py/
Example:
from ISO8583.ISO8583 import ISO8583
from ISO8583.ISOErrors import *
iso = ISO8583()
try:
iso.setMTI('0800')
iso.setBit(2,2)
iso.setBit(4,4)
iso.setBit(12,12)
iso.setBit(21,21)
iso.setBit(17,17)
iso.setBit(49,986)
iso.setBit(99,99)
except ValueToLarge, e:
print ('Value too large :( %s' % e)
except InvalidMTI, i:
print ('This MTI is wrong :( %s' % i)
print ('The Message Type Indication is = %s' %iso.getMTI())
print ('The Bitmap is = %s' %iso.getBitmap())
iso.showIsoBits();
print ('This is the ISO8583 complete package %s' % iso.getRawIso())
print ('This is the ISO8583 complete package to sent over the TCPIP network %s' % iso.getNetworkISO())
"""
# Attributes
    # Bits to be set 00000000 -> _BIT_POSITION_1 ... _BIT_POSITION_8
_BIT_POSITION_1 = 128 # 10 00 00 00
_BIT_POSITION_2 = 64 # 01 00 00 00
_BIT_POSITION_3 = 32 # 00 10 00 00
_BIT_POSITION_4 = 16 # 00 01 00 00
_BIT_POSITION_5 = 8 # 00 00 10 00
_BIT_POSITION_6 = 4 # 00 00 01 00
_BIT_POSITION_7 = 2 # 00 00 00 10
_BIT_POSITION_8 = 1 # 00 00 00 01
# Array to translate bit to position
_TMP = [0, _BIT_POSITION_8, _BIT_POSITION_1, _BIT_POSITION_2, _BIT_POSITION_3, _BIT_POSITION_4, _BIT_POSITION_5,
_BIT_POSITION_6, _BIT_POSITION_7]
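    # _TMP[(bit % 8) + 1] yields the mask for a bit inside its byte:
    # _TMP[2].._TMP[8] cover positions 1..7 and _TMP[1] covers position 8
    # (the bit % 8 == 0 case)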
_BIT_DEFAULT_VALUE = 0
# ISO8583 contants
_BITS_VALUE_TYPE = {}
# Every _BITS_VALUE_TYPE has:
# _BITS_VALUE_TYPE[N] = [ X,Y, Z, W,K]
# N = bitnumber
    # X = smallStr representation of the bit meaning
# Y = large str representation
# Z = type of the bit (B, N, A, AN, ANS, LL, LLL)
# W = size of the information that N need to has
    # K = type of values: a, an, n, ansb, b
_BITS_VALUE_TYPE[1] = ['BME', 'Bit Map Extended', 'B', 16, 'b']
_BITS_VALUE_TYPE[2] = ['2', 'Primary account number (PAN)', 'LL', 19, 'n']
    _BITS_VALUE_TYPE[3] = ['3', 'Processing code', 'N', 6, 'n']
_BITS_VALUE_TYPE[4] = ['4', 'Amount transaction', 'N', 12, 'n']
_BITS_VALUE_TYPE[5] = ['5', 'Amount reconciliation', 'N', 12, 'n']
_BITS_VALUE_TYPE[6] = ['6', 'Amount cardholder billing', 'N', 12, 'n']
_BITS_VALUE_TYPE[7] = ['7', 'Date and time transmission', 'N', 10, 'n']
_BITS_VALUE_TYPE[8] = ['8', 'Amount cardholder billing fee', 'N', 8, 'n']
_BITS_VALUE_TYPE[9] = ['9', 'Conversion rate reconciliation', 'N', 8, 'n']
_BITS_VALUE_TYPE[10] = ['10', 'Conversion rate cardholder billing', 'N', 8, 'n']
_BITS_VALUE_TYPE[11] = ['11', 'Systems trace audit number', 'N', 6, 'n']
_BITS_VALUE_TYPE[12] = ['12', 'Date and time local transaction', 'N', 6, 'n']
_BITS_VALUE_TYPE[13] = ['13', 'Date effective', 'N', 4, 'n']
_BITS_VALUE_TYPE[14] = ['14', 'Date expiration', 'N', 4, 'n']
_BITS_VALUE_TYPE[15] = ['15', 'Date settlement', 'N', 4, 'n']
_BITS_VALUE_TYPE[16] = ['16', 'Date conversion', 'N', 4, 'n']
_BITS_VALUE_TYPE[17] = ['17', 'Date capture', 'N', 4, 'n']
_BITS_VALUE_TYPE[18] = ['18', 'Message error indicator', 'N', 4, 'n']
_BITS_VALUE_TYPE[19] = ['19', 'Country code acquiring institution', 'N', 3, 'n']
_BITS_VALUE_TYPE[20] = ['20', 'Country code primary account number (PAN)', 'N', 3, 'n']
_BITS_VALUE_TYPE[21] = ['21', 'Transaction life cycle identification data', 'ANS', 3, 'n']
_BITS_VALUE_TYPE[22] = ['22', 'Point of service data code', 'N', 3, 'n']
_BITS_VALUE_TYPE[23] = ['23', 'Card sequence number', 'N', 3, 'n']
_BITS_VALUE_TYPE[24] = ['24', 'Function code', 'N', 3, 'n']
_BITS_VALUE_TYPE[25] = ['25', 'Message reason code', 'N', 2, 'n']
_BITS_VALUE_TYPE[26] = ['26', 'Merchant category code', 'N', 2, 'n']
_BITS_VALUE_TYPE[27] = ['27', 'Point of service capability', 'N', 1, 'n']
_BITS_VALUE_TYPE[28] = ['28', 'Date reconciliation', 'N', 8, 'n']
_BITS_VALUE_TYPE[29] = ['29', 'Reconciliation indicator', 'N', 8, 'n']
_BITS_VALUE_TYPE[30] = ['30', 'Amounts original', 'N', 8, 'n']
_BITS_VALUE_TYPE[31] = ['31', 'Acquirer reference number', 'N', 8, 'n']
_BITS_VALUE_TYPE[32] = ['32', 'Acquiring institution identification code', 'LL', 11, 'n']
_BITS_VALUE_TYPE[33] = ['33', 'Forwarding institution identification code', 'LL', 11, 'n']
_BITS_VALUE_TYPE[34] = ['34', 'Electronic commerce data', 'LL', 28, 'n']
_BITS_VALUE_TYPE[35] = ['35', 'Track 2 data', 'LL', 37, 'n']
_BITS_VALUE_TYPE[36] = ['36', 'Track 3 data', 'LLL', 104, 'n']
_BITS_VALUE_TYPE[37] = ['37', 'Retrieval reference number', 'N', 12, 'an']
_BITS_VALUE_TYPE[38] = ['38', 'Approval code', 'N', 6, 'an']
_BITS_VALUE_TYPE[39] = ['39', 'Action code', 'A', 2, 'an']
_BITS_VALUE_TYPE[40] = ['40', 'Service code', 'N', 3, 'an']
_BITS_VALUE_TYPE[41] = ['41', 'Card acceptor terminal identification', 'N', 8, 'ans']
_BITS_VALUE_TYPE[42] = ['42', 'Card acceptor identification code', 'A', 15, 'ans']
    _BITS_VALUE_TYPE[43] = ['43', 'Card acceptor name/location', 'A', 40, 'ans']
_BITS_VALUE_TYPE[44] = ['44', 'Additional response data', 'LL', 25, 'an']
_BITS_VALUE_TYPE[45] = ['45', 'Track 1 data', 'LL', 76, 'an']
_BITS_VALUE_TYPE[46] = ['46', 'Amounts fees', 'LLL', 999, 'an']
_BITS_VALUE_TYPE[47] = ['47', 'Additional data national', 'LLL', 999, 'an']
_BITS_VALUE_TYPE[48] = ['48', 'Additional data private', 'LLL', 999, 'an']
_BITS_VALUE_TYPE[49] = ['49', 'Verification data', 'A', 3, 'a']
_BITS_VALUE_TYPE[50] = ['50', 'Currency code, settlement', 'AN', 3, 'an']
_BITS_VALUE_TYPE[51] = ['51', 'Currency code, cardholder billing', 'A', 3, 'a']
_BITS_VALUE_TYPE[52] = ['52', 'Personal identification number (PIN) data', 'B', 16, 'b']
_BITS_VALUE_TYPE[53] = ['53', 'Security related control information', 'LL', 18, 'n']
_BITS_VALUE_TYPE[54] = ['54', 'Amounts additional', 'LLL', 120, 'an']
_BITS_VALUE_TYPE[55] = ['55', 'Integrated circuit card (ICC) system related data', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[56] = ['56', 'Original data elements', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[57] = ['57', 'Authorisation life cycle code', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[58] = ['58', 'Authorising agent institution identification code', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[59] = ['59', 'Transport data', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[60] = ['60', 'Reserved for national use', 'LL', 7, 'ans']
_BITS_VALUE_TYPE[61] = ['61', 'Reserved for national use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[62] = ['62', 'Reserved for private use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[63] = ['63', 'Reserved for private use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[64] = ['64', 'Message authentication code (MAC) field', 'B', 16, 'b']
_BITS_VALUE_TYPE[65] = ['65', 'Bitmap tertiary', 'B', 16, 'b']
_BITS_VALUE_TYPE[66] = ['66', 'Settlement code', 'N', 1, 'n']
_BITS_VALUE_TYPE[67] = ['67', 'Extended payment data', 'N', 2, 'n']
_BITS_VALUE_TYPE[68] = ['68', 'Receiving institution country code', 'N', 3, 'n']
_BITS_VALUE_TYPE[69] = ['69', 'Settlement institution county code', 'N', 3, 'n']
_BITS_VALUE_TYPE[70] = ['70', 'Network management Information code', 'N', 3, 'n']
_BITS_VALUE_TYPE[71] = ['71', 'Message number', 'N', 4, 'n']
_BITS_VALUE_TYPE[72] = ['72', 'Data record', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[73] = ['73', 'Date action', 'N', 6, 'n']
_BITS_VALUE_TYPE[74] = ['74', 'Credits, number', 'N', 10, 'n']
_BITS_VALUE_TYPE[75] = ['75', 'Credits, reversal number', 'N', 10, 'n']
_BITS_VALUE_TYPE[76] = ['76', 'Debits, number', 'N', 10, 'n']
_BITS_VALUE_TYPE[77] = ['77', 'Debits, reversal number', 'N', 10, 'n']
_BITS_VALUE_TYPE[78] = ['78', 'Transfer number', 'N', 10, 'n']
_BITS_VALUE_TYPE[79] = ['79', 'Transfer, reversal number', 'N', 10, 'n']
_BITS_VALUE_TYPE[80] = ['80', 'Inquiries number', 'N', 10, 'n']
_BITS_VALUE_TYPE[81] = ['81', 'Authorizations, number', 'N', 10, 'n']
_BITS_VALUE_TYPE[82] = ['82', 'Credits, processing fee amount', 'N', 12, 'n']
_BITS_VALUE_TYPE[83] = ['83', 'Credits, transaction fee amount', 'N', 12, 'n']
_BITS_VALUE_TYPE[84] = ['84', 'Debits, processing fee amount', 'N', 12, 'n']
_BITS_VALUE_TYPE[85] = ['85', 'Debits, transaction fee amount', 'N', 12, 'n']
_BITS_VALUE_TYPE[86] = ['86', 'Credits, amount', 'N', 15, 'n']
_BITS_VALUE_TYPE[87] = ['87', 'Credits, reversal amount', 'N', 15, 'n']
_BITS_VALUE_TYPE[88] = ['88', 'Debits, amount', 'N', 15, 'n']
_BITS_VALUE_TYPE[89] = ['89', 'Debits, reversal amount', 'N', 15, 'n']
_BITS_VALUE_TYPE[90] = ['90', 'Original data elements', 'N', 42, 'n']
_BITS_VALUE_TYPE[91] = ['91', 'File update code', 'AN', 1, 'an']
_BITS_VALUE_TYPE[92] = ['92', 'File security code', 'N', 2, 'n']
_BITS_VALUE_TYPE[93] = ['93', 'Response indicator', 'N', 5, 'n']
_BITS_VALUE_TYPE[94] = ['94', 'Service indicator', 'AN', 7, 'an']
_BITS_VALUE_TYPE[95] = ['95', 'Replacement amounts', 'AN', 42, 'an']
_BITS_VALUE_TYPE[96] = ['96', 'Message security code', 'AN', 8, 'an']
_BITS_VALUE_TYPE[97] = ['97', 'Amount, net settlement', 'N', 16, 'n']
_BITS_VALUE_TYPE[98] = ['98', 'Payee', 'ANS', 25, 'ans']
_BITS_VALUE_TYPE[99] = ['99', 'Settlement institution identification code', 'LL', 11, 'n']
_BITS_VALUE_TYPE[100] = ['100', 'Receiving institution identification code', 'LL', 11, 'n']
_BITS_VALUE_TYPE[101] = ['101', 'File name', 'ANS', 17, 'ans']
_BITS_VALUE_TYPE[102] = ['102', 'Account identification 1', 'LL', 28, 'ans']
_BITS_VALUE_TYPE[103] = ['103', 'Account identification 2', 'LL', 28, 'ans']
_BITS_VALUE_TYPE[104] = ['104', 'Transaction description', 'LLL', 100, 'ans']
_BITS_VALUE_TYPE[105] = ['105', 'Reserved for ISO use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[106] = ['106', 'Reserved for ISO use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[107] = ['107', 'Reserved for ISO use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[108] = ['108', 'Reserved for ISO use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[109] = ['109', 'Reserved for ISO use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[110] = ['110', 'Reserved for ISO use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[111] = ['111', 'Reserved for private use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[112] = ['112', 'Reserved for private use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[113] = ['113', 'Reserved for private use', 'LL', 11, 'n']
_BITS_VALUE_TYPE[114] = ['114', 'Reserved for national use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[115] = ['115', 'Reserved for national use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[116] = ['116', 'Reserved for national use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[117] = ['117', 'Reserved for national use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[118] = ['118', 'Reserved for national use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[119] = ['119', 'Reserved for national use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[120] = ['120', 'Reserved for private use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[121] = ['121', 'Reserved for private use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[122] = ['122', 'Reserved for national use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[123] = ['123', 'Reserved for private use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[124] = ['124', 'Info Text', 'LLL', 255, 'ans']
_BITS_VALUE_TYPE[125] = ['125', 'Network management information', 'LL', 50, 'ans']
_BITS_VALUE_TYPE[126] = ['126', 'Issuer trace id', 'LL', 6, 'ans']
_BITS_VALUE_TYPE[127] = ['127', 'Reserved for private use', 'LLL', 999, 'ans']
_BITS_VALUE_TYPE[128] = ['128', 'Message authentication code (MAC) field', 'B', 16, 'b']
################################################################################################
# Default constructor of the ISO8583 Object
def __init__(self, iso="", debug=False):
"""Default Constructor of ISO8583 Package.
        It initializes a "brand new" ISO8583 package
Example: To Enable debug you can use:
pack = ISO8583(debug=True)
@param: iso a String that represents the ASCII of the package. The same that you need to pass to setIsoContent() method.
        @param: debug (True or False) default False -> Used to print some debug info. Only use it if you want those messages!
"""
# Bitmap internal representation
self.BITMAP = []
# Values
self.BITMAP_VALUES = []
        # Bitmap ASCII representation
        self.BITMAP_HEX = ''
        # MTI
        self.MESSAGE_TYPE_INDICATION = ''
# Debug ?
self.DEBUG = debug
self.__inicializeBitmap()
self.__inicializeBitmapValues()
if iso != "":
self.setIsoContent(iso)
################################################################################################
################################################################################################
# Return bit type
def getBitType(self, bit):
"""Method that return the bit Type
@param: bit -> Bit that will be searched and whose type will be returned
@return: str that represents the type of the bit
"""
return self._BITS_VALUE_TYPE[bit][2]
################################################################################################
################################################################################################
# Return bit limit
def getBitLimit(self, bit):
"""Method that return the bit limit (Max size)
@param: bit -> Bit that will be searched and whose limit will be returned
@return: int that indicate the limit of the bit
"""
return self._BITS_VALUE_TYPE[bit][3]
################################################################################################
################################################################################################
# Return bit value type
def getBitValueType(self, bit):
"""Method that return the bit value type
@param: bit -> Bit that will be searched and whose value type will be returned
@return: str that indicate the valuye type of the bit
"""
return self._BITS_VALUE_TYPE[bit][4]
################################################################################################
################################################################################################
# Return large bit name
def getLargeBitName(self, bit):
"""Method that return the large bit name
@param: bit -> Bit that will be searched and whose name will be returned
@return: str that represents the name of the bit
"""
return self._BITS_VALUE_TYPE[bit][1]
################################################################################################
################################################################################################
# Set the MTI
def setTransationType(self, type):
"""Method that set Transation Type (MTI)
@param: type -> MTI to be setted
@raise: ValueToLarge Exception
"""
type = "%s" % type
if len(type) > 4:
type = type[0:3]
raise ValueToLarge('Error: value up to size! MTI limit size = 4')
typeT = "";
if len(type) < 4:
for cont in range(len(type), 4):
typeT += "0"
self.MESSAGE_TYPE_INDICATION = "%s%s" % (typeT, type)
################################################################################################
################################################################################################
# setMTI too
def setMTI(self, type):
"""Method that set Transation Type (MTI)
In fact, is an alias to "setTransationType" method
@param: type -> MTI to be setted
"""
self.setTransationType(type)
################################################################################################
################################################################################################
# Method that put "zeros" inside bitmap
def __inicializeBitmap(self):
"""Method that inicialize/reset a internal bitmap representation
It's a internal method, so don't call!
"""
if self.DEBUG == True:
print('Init bitmap')
if len(self.BITMAP) == 16:
for cont in range(0, 16):
self.BITMAP[cont] = self._BIT_DEFAULT_VALUE
else:
for cont in range(0, 16):
self.BITMAP.append(self._BIT_DEFAULT_VALUE)
################################################################################################
################################################################################################
# init with "0" the array of values
def __inicializeBitmapValues(self):
"""Method that inicialize/reset a internal array used to save bits and values
It's a internal method, so don't call!
"""
if self.DEBUG == True:
print('Init bitmap_values')
        if len(self.BITMAP_VALUES) == 129:  # indices 0..128 (index 0 is unused)
for cont in range(0, 129):
self.BITMAP_VALUES[cont] = self._BIT_DEFAULT_VALUE
else:
for cont in range(0, 129):
self.BITMAP_VALUES.append(self._BIT_DEFAULT_VALUE)
################################################################################################
################################################################################################
# Set a value to a bit
def setBit(self, bit, value):
"""Method used to set a bit with a value.
It's one of the most important method to use when using this library
        @param: bit -> bit number to be set
@param: value -> the value of the bit
@return: True/False default True -> To be used in the future!
@raise: BitInexistent Exception, ValueToLarge Exception
"""
        if self.DEBUG == True:
            print('Setting bit inside bitmap bit[%s] = %s' % (bit, value))
        if bit < 1 or bit > 128:
            raise BitInexistent("Bit number %s doesn't exist!" % bit)
        # calculate the position inside the bitmap
        pos = 1
if self.getBitType(bit) == 'LL':
self.__setBitTypeLL(bit, value)
if self.getBitType(bit) == 'LLL':
self.__setBitTypeLLL(bit, value)
if self.getBitType(bit) == 'N':
self.__setBitTypeN(bit, value)
if self.getBitType(bit) == 'A':
self.__setBitTypeA(bit, value)
        if self.getBitType(bit) == 'AN' or self.getBitType(bit) == 'ANS':
            self.__setBitTypeANS(bit, value)
        if self.getBitType(bit) == 'B':
            self.__setBitTypeB(bit, value)
# Continuation bit?
if bit > 64:
self.BITMAP[0] = self.BITMAP[0] | self._TMP[2] # need to set bit 1 of first "bit" in bitmap
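        # locate the byte that holds this bit: bits 1-8 live in BITMAP[0],
        # 9-16 in BITMAP[1], and so on; exact multiples of 8 belong to the
        # previous byte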
        if (bit % 8) == 0:
            pos = (bit // 8) - 1
        else:
            pos = (bit // 8)
# need to check if the value can be there .. AN , N ... etc ... and the size
self.BITMAP[pos] = self.BITMAP[pos] | self._TMP[(bit % 8) + 1]
return True
################################################################################################
################################################################################################
# print bitmap
def showBitmap(self):
"""Method that print the bitmap in ASCII form
Hint: Try to use getBitmap method and format your own print :)
"""
self.__buildBitmap()
# printing
print(self.BITMAP_HEX)
################################################################################################
################################################################################################
# Build a bitmap
def __buildBitmap(self):
"""Method that build the bitmap ASCII
It's a internal method, so don't call!
"""
self.BITMAP_HEX = ''
for c in range(0, 16):
if (self.BITMAP[0] & self._BIT_POSITION_1) != self._BIT_POSITION_1:
# Only has the first bitmap
if self.DEBUG == True:
print('%d Bitmap = %d(Decimal) = %s (hexa) ' % (c, self.BITMAP[c], hex(self.BITMAP[c])))
tm = hex(self.BITMAP[c])[2:]
if len(tm) != 2:
tm = '0' + tm
self.BITMAP_HEX += tm
if c == 7:
break
else: # second bitmap
if self.DEBUG == True:
print('%d Bitmap = %d(Decimal) = %s (hexa) ' % (c, self.BITMAP[c], hex(self.BITMAP[c])))
tm = hex(self.BITMAP[c])[2:]
if len(tm) != 2:
tm = '0' + tm
self.BITMAP_HEX += tm
################################################################################################
################################################################################################
# Get a bitmap from str
def __getBitmapFromStr(self, bitmap):
"""Method that receive a bitmap str and transfor it to ISO8583 object readable.
@param: bitmap -> bitmap str to be readable
It's a internal method, so don't call!
"""
# Need to check if the size is correct etc...
cont = 0
if self.BITMAP_HEX != '':
self.BITMAP_HEX = ''
for x in range(0, 32, 2):
if (int(bitmap[0:2], 16) & self._BIT_POSITION_1) != self._BIT_POSITION_1: # Only 1 bitmap
if self.DEBUG == True:
print('Token[%d] %s converted to int is = %s' % (x, bitmap[x:x + 2], int(bitmap[x:x + 2], 16)))
self.BITMAP_HEX += bitmap[x:x + 2]
self.BITMAP[cont] = int(bitmap[x:x + 2], 16)
if x == 14:
break
else: # Second bitmap
if self.DEBUG == True:
print('Token[%d] %s converted to int is = %s' % (x, bitmap[x:x + 2], int(bitmap[x:x + 2], 16)))
self.BITMAP_HEX += bitmap[x:x + 2]
self.BITMAP[cont] = int(bitmap[x:x + 2], 16)
cont += 1
################################################################################################
################################################################################################
# print bit array that is present in the bitmap
def showBitsFromBitmapStr(self, bitmap):
"""Method that receive a bitmap str, process it, and print a array with bits this bitmap string represents.
Usualy is used to debug things.
@param: bitmap -> bitmap str to be analized and translated to "bits"
"""
bits = self.__inicializeBitsFromBitmapStr(bitmap)
print('Bits inside %s = %s' % (bitmap, bits))
################################################################################################
################################################################################################
# inicialize a bitmap using ASCII str
def __inicializeBitsFromBitmapStr(self, bitmap):
"""Method that receive a bitmap str, process it, and prepare ISO8583 object to understand and "see" the bits and values inside the ISO ASCII package.
It's a internal method, so don't call!
@param: bitmap -> bitmap str to be analized and translated to "bits"
"""
bits = []
for c in range(0, 16):
for d in range(1, 9):
if self.DEBUG == True:
print('Value (%d)-> %s & %s = %s' % (
d, self.BITMAP[c], self._TMP[d], (self.BITMAP[c] & self._TMP[d])))
if (self.BITMAP[c] & self._TMP[d]) == self._TMP[d]:
                    if d == 1:  # it's the 8th bit
if self.DEBUG == True:
print('Bit %s is present !!!' % ((c + 1) * 8))
bits.append((c + 1) * 8)
self.BITMAP_VALUES[(c + 1) * 8] = 'X'
else:
                        if (c == 0) and (d == 2):  # Continuation bit
if self.DEBUG == True:
print('Bit 1 is present !!!')
bits.append(1)
else:
if self.DEBUG == True:
print('Bit %s is present !!!' % (c * 8 + d - 1))
bits.append(c * 8 + d - 1)
self.BITMAP_VALUES[c * 8 + d - 1] = 'X'
bits.sort()
return bits
################################################################################################
################################################################################################
# return a array of bits, when processing the bitmap
def __getBitsFromBitmap(self):
"""Method that process the bitmap and return a array with the bits presents inside it.
It's a internal method, so don't call!
"""
bits = []
for c in range(0, 16):
for d in range(1, 9):
if self.DEBUG == True:
print('Value (%d)-> %s & %s = %s' % (
d, self.BITMAP[c], self._TMP[d], (self.BITMAP[c] & self._TMP[d])))
if (self.BITMAP[c] & self._TMP[d]) == self._TMP[d]:
                    if d == 1:  # it's the 8th bit
if self.DEBUG == True:
print('Bit %s is present !!!' % ((c + 1) * 8))
bits.append((c + 1) * 8)
else:
                        if (c == 0) and (d == 2):  # Continuation bit
if self.DEBUG == True:
print('Bit 1 is present !!!')
bits.append(1)
else:
if self.DEBUG == True:
print('Bit %s is present !!!' % (c * 8 + d - 1))
bits.append(c * 8 + d - 1)
bits.sort()
return bits
################################################################################################
################################################################################################
# Set of type LL
def __setBitTypeLL(self, bit, value):
"""Method that set a bit with value in form LL
It put the size in front of the value
Example: pack.setBit(99,'123') -> Bit 99 is a LL type, so this bit, in ASCII form need to be 03123. To understand, 03 is the size of the information and 123 is the information/value
@param: bit -> bit to be setted
@param: value -> value to be setted
@raise: ValueToLarge Exception
It's a internal method, so don't call!
"""
value = "%s" % value
        if len(value) > 99:
            raise ValueToLarge('Error: value exceeds size! Bit[%s] of type %s limit size = %s' % (
                bit, self.getBitType(bit), self.getBitLimit(bit)))
if len(value) > self.getBitLimit(bit):
            raise ValueToLarge('Error: value exceeds size! Bit[%s] of type %s limit size = %s' % (
bit, self.getBitType(bit), self.getBitLimit(bit)))
size = "%s" % len(value)
self.BITMAP_VALUES[bit] = "%s%s" % (size.zfill(2), value)
################################################################################################
################################################################################################
# Set of type LLL
def __setBitTypeLLL(self, bit, value):
"""Method that set a bit with value in form LLL
It put the size in front of the value
Example: pack.setBit(104,'12345ABCD67890') -> Bit 104 is a LLL type, so this bit, in ASCII form need to be 01412345ABCD67890.
To understand, 014 is the size of the information and 12345ABCD67890 is the information/value
@param: bit -> bit to be setted
@param: value -> value to be setted
@raise: ValueToLarge Exception
It's a internal method, so don't call!
"""
value = "%s" % value
if len(value) > 999:
            raise ValueToLarge('Error: value exceeds size! Bit[%s] of type %s limit size = %s' % (
bit, self.getBitType(bit), self.getBitLimit(bit)))
if len(value) > self.getBitLimit(bit):
            raise ValueToLarge('Error: value exceeds size! Bit[%s] of type %s limit size = %s' % (
bit, self.getBitType(bit), self.getBitLimit(bit)))
size = "%s" % len(value)
self.BITMAP_VALUES[bit] = "%s%s" % (size.zfill(3), value)
################################################################################################
################################################################################################
# Set of type N,
def __setBitTypeN(self, bit, value):
"""Method that set a bit with value in form N
It complete the size of the bit with a default value
Example: pack.setBit(3,'30000') -> Bit 3 is a N type, so this bit, in ASCII form need to has size = 6 (ISO especification) so the value 30000 size = 5 need to receive more "1" number.
In this case, will be "0" in the left. In the package, the bit will be sent like '030000'
@param: bit -> bit to be setted
@param: value -> value to be setted
@raise: ValueToLarge Exception
It's a internal method, so don't call!
"""
value = "%s" % value
        if len(value) > self.getBitLimit(bit):
            raise ValueToLarge('Error: value exceeds size! Bit[%s] of type %s limit size = %s' % (
                bit, self.getBitType(bit), self.getBitLimit(bit)))
self.BITMAP_VALUES[bit] = value.zfill(self.getBitLimit(bit))
################################################################################################
################################################################################################
# Set of type A
def __setBitTypeA(self, bit, value):
"""Method that set a bit with value in form A
It complete the size of the bit with a default value
Example: pack.setBit(3,'30000') -> Bit 3 is a A type, so this bit, in ASCII form need to has size = 6 (ISO especification) so the value 30000 size = 5 need to receive more "1" number.
In this case, will be "0" in the left. In the package, the bit will be sent like '030000'
@param: bit -> bit to be setted
@param: value -> value to be setted
@raise: ValueToLarge Exception
It's a internal method, so don't call!
"""
value = "%s" % value
        if len(value) > self.getBitLimit(bit):
            raise ValueToLarge('Error: value exceeds size! Bit[%s] of type %s limit size = %s' % (
                bit, self.getBitType(bit), self.getBitLimit(bit)))
self.BITMAP_VALUES[bit] = value.zfill(self.getBitLimit(bit))
################################################################################################
################################################################################################
# Set of type B
def __setBitTypeB(self, bit, value):
"""Method that set a bit with value in form B
It complete the size of the bit with a default value
Example: pack.setBit(3,'30000') -> Bit 3 is a B type, so this bit, in ASCII form need to has size = 6 (ISO especification) so the value 30000 size = 5 need to receive more "1" number.
In this case, will be "0" in the left. In the package, the bit will be sent like '030000'
@param: bit -> bit to be setted
@param: value -> value to be setted
@raise: ValueToLarge Exception
It's a internal method, so don't call!
"""
value = "%s" % value
        if len(value) > self.getBitLimit(bit):
            raise ValueToLarge('Error: value exceeds size! Bit[%s] of type %s limit size = %s' % (
                bit, self.getBitType(bit), self.getBitLimit(bit)))
self.BITMAP_VALUES[bit] = value.zfill(self.getBitLimit(bit))
################################################################################################
################################################################################################
# Set of type ANS
def __setBitTypeANS(self, bit, value):
"""Method that set a bit with value in form ANS
It complete the size of the bit with a default value
Example: pack.setBit(3,'30000') -> Bit 3 is a ANS type, so this bit, in ASCII form need to has size = 6 (ISO especification) so the value 30000 size = 5 need to receive more "1" number.
In this case, will be "0" in the left. In the package, the bit will be sent like '030000'
@param: bit -> bit to be setted
@param: value -> value to be setted
@raise: ValueToLarge Exception
It's a internal method, so don't call!
"""
value = "%s" % value
        if len(value) > self.getBitLimit(bit):
            raise ValueToLarge('Error: value exceeds size! Bit[%s] of type %s limit size = %s' % (
                bit, self.getBitType(bit), self.getBitLimit(bit)))
self.BITMAP_VALUES[bit] = value.zfill(self.getBitLimit(bit))
################################################################################################
################################################################################################
    # print the bits inside the iso
def showIsoBits(self):
"""Method that show in detail a list of bits , values and types inside the object
Example: output to
(...)
iso.setBit(2,2)
iso.setBit(4,4)
(...)
iso.showIsoBits()
(...)
Bit[2] of type LL has limit 19 = 012
Bit[4] of type N has limit 12 = 000000000004
(...)
"""
for cont in range(0, 129):
if self.BITMAP_VALUES[cont] != self._BIT_DEFAULT_VALUE:
print("Bit[%s] of type %s has limit %s = %s" % (
cont, self.getBitType(cont), self.getBitLimit(cont), self.BITMAP_VALUES[cont]))
################################################################################################
################################################################################################
# print Raw iso
def showRawIso(self):
"""Method that print ISO8583 ASCII complete representation
Example:
iso = ISO8583()
iso.setMTI('0800')
iso.setBit(2,2)
iso.setBit(4,4)
iso.setBit(12,12)
iso.setBit(17,17)
iso.setBit(99,99)
iso.showRawIso()
output (print) -> 0800d010800000000000000000002000000001200000000000400001200170299
Hint: Try to use getRawIso method and format your own print :)
"""
resp = self.getRawIso()
print(resp)
################################################################################################
################################################################################################
# Return raw iso
def getRawIso(self):
"""Method that return ISO8583 ASCII complete representation
Example:
iso = ISO8583()
iso.setMTI('0800')
iso.setBit(2,2)
iso.setBit(4,4)
iso.setBit(12,12)
iso.setBit(17,17)
iso.setBit(99,99)
str = iso.getRawIso()
print ('This is the ASCII package %s' % str)
output (print) -> This is the ASCII package 0800d010800000000000000000002000000001200000000000400001200170299
@return: str with complete ASCII ISO8583
@raise: InvalidMTI Exception
"""
self.__buildBitmap()
if self.MESSAGE_TYPE_INDICATION == '':
            raise InvalidMTI('Check MTI! Did you set it?')
        resp = ""
resp += self.MESSAGE_TYPE_INDICATION
resp += self.BITMAP_HEX
for cont in range(0, 129):
if self.BITMAP_VALUES[cont] != self._BIT_DEFAULT_VALUE:
resp = "%s%s" % (resp, self.BITMAP_VALUES[cont])
return resp
################################################################################################
################################################################################################
# Redefine a bit
def redefineBit(self, bit, smallStr, largeStr, bitType, size, valueType):
"""Method that redefine a bit structure in global scope!
Can be used to personalize ISO8583 structure to another specification (ISO8583 1987 for example!)
Hint: If you have a lot of "ValueToLarge Exception" maybe the especification that you are using is different of mine. So you will need to use this method :)
@param: bit -> bit to be redefined
@param: smallStr -> a small String representantion of the bit, used to build "user friendly prints", example "2" for bit 2
@param: largeStr -> a large String representantion of the bit, used to build "user friendly prints" and to be used to inform the "main use of the bit",
example "Primary account number (PAN)" for bit 2
@param: bitType -> type the bit, used to build the values, example "LL" for bit 2. Need to be one of (B, N, AN, ANS, LL, LLL)
@param: size -> limit size the bit, used to build/complete the values, example "19" for bit 2.
@param: valueType -> value type the bit, used to "validate" the values, example "n" for bit 2. This mean that in bit 2 we need to have only numeric values.
Need to be one of (a, an, n, ansb, b)
@raise: BitInexistent Exception, InvalidValueType Exception
"""
if self.DEBUG == True:
print('Trying to redefine the bit with (self,%s,%s,%s,%s,%s,%s)' % (
bit, smallStr, largeStr, bitType, size, valueType))
# validating bit position
        if bit == 1 or bit == 64 or bit < 1 or bit > 128:
            raise BitInexistent("Error: bit %d cannot be changed because it has an invalid number!" % bit)
# need to validate if the type and size is compatible! example slimit = 100 and type = LL
if bitType == "B" or bitType == "N" or bitType == "AN" or bitType == "ANS" or bitType == "LL" or bitType == "LLL":
if valueType == "a" or valueType == "n" or valueType == "ansb" or valueType == "ans" or valueType == "b" or valueType == "an":
self._BITS_VALUE_TYPE[bit] = [smallStr, largeStr, bitType, size, valueType]
if self.DEBUG == True:
print('Bit %d redefined!' % bit)
else:
raise InvalidValueType(
"Error bit %d cannot be changed because %s is not a valid valueType (a, an, n ansb, b)!" % (
bit, valueType))
else:
raise InvalidBitType(
"Error bit %d cannot be changed because %s is not a valid bitType (Hex, N, AN, ANS, LL, LLL)!" % (
bit, bitType))
################################################################################################
################################################################################################
    # from a string stream, extract the MTI
def __setMTIFromStr(self, iso):
"""Method that get the first 4 characters to be the MTI.
It's a internal method, so don't call!
"""
self.MESSAGE_TYPE_INDICATION = iso[0:4]
if self.DEBUG == True:
print('MTI found was %s' % self.MESSAGE_TYPE_INDICATION)
################################################################################################
################################################################################################
# return the MTI
def getMTI(self):
"""Method that return the MTI of the package
@return: str -> with the MTI
"""
        # Need to validate if the MTI was set ... etc ...
return self.MESSAGE_TYPE_INDICATION
################################################################################################
################################################################################################
# Return the bitmap
def getBitmap(self):
"""Method that return the ASCII Bitmap of the package
@return: str -> with the ASCII Bitmap
"""
if self.BITMAP_HEX == '':
self.__buildBitmap()
return self.BITMAP_HEX
################################################################################################
################################################################################################
    # return the array of values
def getValuesArray(self):
"""Method that return an internal array of the package
@return: array -> with all bits, presents or not in the bitmap
"""
return self.BITMAP_VALUES
################################################################################################
################################################################################################
# Receive a str and interpret it to bits and values
def __getBitFromStr(self, strWithoutMtiBitmap):
"""Method that receive a string (ASCII) without MTI and Bitmaps (first and second), understand it and remove the bits values
@param: str -> with all bits presents whithout MTI and bitmap
It's a internal method, so don't call!
"""
if self.DEBUG == True:
print('This is the input string <%s>' % strWithoutMtiBitmap)
        offset = 0
        # skip bit 1 because it was already handled in "__inicializeBitsFromBitmapStr"
for cont in range(2, 129):
if self.BITMAP_VALUES[cont] != self._BIT_DEFAULT_VALUE:
if self.DEBUG == True:
print('String = %s offset = %s bit = %s' % (strWithoutMtiBitmap[offset:], offset, cont))
if self.getBitType(cont) == 'LL':
valueSize = int(strWithoutMtiBitmap[offset:offset + 2])
if self.DEBUG == True:
print('Size of the message in LL = %s' % valueSize)
if valueSize > self.getBitLimit(cont):
                        print('This bit is larger than the specification.')
# raise ValueToLarge("This bit is larger than the especification!")
self.BITMAP_VALUES[cont] = strWithoutMtiBitmap[offset:offset + 2] + strWithoutMtiBitmap[
offset + 2:offset + 2 + valueSize]
if self.DEBUG == True:
print('\tSetting bit %s value %s' % (cont, self.BITMAP_VALUES[cont]))
# fix for AppZone - their responses don't comply with specifications
if cont == 33:
offset += valueSize + 2 # replace with 17 if it fails
else:
offset += valueSize + 2
if self.getBitType(cont) == 'LLL':
valueSize = int(strWithoutMtiBitmap[offset:offset + 3])
if self.DEBUG == True:
print('Size of the message in LLL = %s' % valueSize)
if valueSize > self.getBitLimit(cont):
raise ValueToLarge("This bit is larger than the especification!")
self.BITMAP_VALUES[cont] = strWithoutMtiBitmap[offset:offset + 3] + strWithoutMtiBitmap[
offset + 3:offset + 3 + valueSize]
if self.DEBUG == True:
print('\tSetting bit %s value %s' % (cont, self.BITMAP_VALUES[cont]))
offset += valueSize + 3
# if self.getBitType(cont) == 'LLLL':
# valueSize = int(strWithoutMtiBitmap[offset:offset +4])
# if valueSize > self.getBitLimit(cont):
# raise ValueToLarge("This bit is larger than the especification!")
# self.BITMAP_VALUES[cont] = '(' + strWithoutMtiBitmap[offset:offset+4] + ')' + strWithoutMtiBitmap[offset+4:offset+4+valueSize]
# offset += valueSize + 4
if self.getBitType(cont) == 'N' or self.getBitType(cont) == 'A' or self.getBitType(
cont) == 'ANS' or self.getBitType(cont) == 'B' or self.getBitType(cont) == 'AN':
self.BITMAP_VALUES[cont] = strWithoutMtiBitmap[offset:self.getBitLimit(cont) + offset]
if self.DEBUG == True:
print('\tSetting bit %s value %s' % (cont, self.BITMAP_VALUES[cont]))
offset += self.getBitLimit(cont)
################################################################################################
################################################################################################
# Parse a ASCII iso to object
def setIsoContent(self, iso):
"""Method that receive a complete ISO8583 string (ASCII) understand it and remove the bits values
Example:
iso = '0210B238000102C080040000000000000002100000000000001700010814465469421614465701081100301000000N399915444303500019991544986020 Value not allowed009000095492'
i2 = ISO8583()
        # in this case, we need to redefine a bit because default bit 42 is LL and in this specification it is "N"
        # the rest remain, so we use "get" :)
        i2.redefineBit(42, '42', i2.getLargeBitName(42), 'N', i2.getBitLimit(42), i2.getBitValueType(42) )
        i2.setIsoContent(iso)
print ('Bitmap = %s' %i2.getBitmap())
print ('MTI = %s' %i2.getMTI() )
print ('This ISO has bits:')
v3 = i2.getBitsAndValues()
for v in v3:
print ('Bit %s of type %s with value = %s' % (v['bit'],v['type'],v['value']))
@param: str -> complete ISO8583 string
@raise: InvalidIso8583 Exception
"""
if len(iso) < 20:
raise InvalidIso8583('This is not a valid iso!!')
if self.DEBUG == True:
print('ASCII to process <%s>' % iso)
self.__setMTIFromStr(iso)
isoT = iso[4:]
self.__getBitmapFromStr(isoT)
self.__inicializeBitsFromBitmapStr(self.BITMAP_HEX)
if self.DEBUG == True:
print('This is the array of bits (before) %s ' % self.BITMAP_VALUES)
self.__getBitFromStr(iso[4 + len(self.BITMAP_HEX):])
if self.DEBUG == True:
print('This is the array of bits (after) %s ' % self.BITMAP_VALUES)
################################################################################################
################################################################################################
# Method that compare 2 isos
def __cmp__(self, obj2):
"""Method that compare two objects in "==", "!=" and other things
Example:
p1 = ISO8583()
p1.setMTI('0800')
p1.setBit(2,2)
p1.setBit(4,4)
p1.setBit(12,12)
p1.setBit(17,17)
p1.setBit(99,99)
#get the rawIso and save in the iso variable
iso = p1.getRawIso()
p2 = ISO8583()
p2.setIsoContent(iso)
print ('Is equivalent?')
        if p1 == p2:
print ('Yes :)')
else:
print ('Noooooooooo :(')
@param: obj2 -> object that will be compared
        @return: <0 if they are not equal, 0 if they are equal
"""
        ret = -1  # by default, they are different
if (self.getMTI() == obj2.getMTI()) and (self.getBitmap() == obj2.getBitmap()) and (
self.getValuesArray() == obj2.getValuesArray()):
ret = 0
return ret
################################################################################################
################################################################################################
    # Method that returns an array with the bits and values inside the iso package
def getBitsAndValues(self):
"""Method that return an array of bits, values, types etc.
Each array value is a dictionary with: {'bit':X ,'type': Y, 'value': Z} Where:
bit: is the bit number
type: is the bit type
value: is the bit value inside this object
so the Generic array returned is: [ (...),{'bit':X,'type': Y, 'value': Z}, (...)]
Example:
p1 = ISO8583()
p1.setMTI('0800')
p1.setBit(2,2)
p1.setBit(4,4)
p1.setBit(12,12)
p1.setBit(17,17)
p1.setBit(99,99)
v1 = p1.getBitsAndValues()
for v in v1:
print ('Bit %s of type %s with value = %s' % (v['bit'],v['type'],v['value']))
@return: array of values.
"""
ret = []
for cont in range(2, 129):
if self.BITMAP_VALUES[cont] != self._BIT_DEFAULT_VALUE:
_TMP = {}
_TMP['bit'] = "%d" % cont
_TMP['type'] = self.getBitType(cont)
_TMP['value'] = self.BITMAP_VALUES[cont]
ret.append(_TMP)
return ret
################################################################################################
################################################################################################
    # Method that returns the value of a bit inside the iso package
def getBit(self, bit):
"""Return the value of the bit
@param: bit -> the number of the bit that you want the value
@raise: BitInexistent Exception, BitNotSet Exception
"""
if bit < 1 or bit > 128:
raise BitInexistent("Bit number %s dosen't exist!" % bit)
# Is that bit set?
isThere = False
arr = self.__getBitsFromBitmap()
if self.DEBUG == True:
print('This is the array of bits inside the bitmap %s' % arr)
for v in arr:
if v == bit:
value = self.BITMAP_VALUES[bit]
isThere = True
break
if isThere:
return value
else:
raise BitNotSet("Bit number %s was not set!" % bit)
################################################################################################
################################################################################################
    # Method that returns the ISO8583 in TCP/IP network form, with the size at the beginning.
def getNetworkISO(self, bigEndian=True):
"""Method that return ISO8583 ASCII package with the size in the beginning
By default, it return the package with size represented with big-endian.
Is the same that:
import struct
(...)
iso = ISO8583()
iso.setBit(3,'300000')
(...)
ascii = iso.getRawIso()
# Example: big-endian
# To little-endian, replace '!h' with '<h'
        netIso = struct.pack('!h',len(ascii))
netIso += ascii
# Example: big-endian
# To little-endian, replace 'iso.getNetworkISO()' with 'iso.getNetworkISO(False)'
print ('This <%s> the same that <%s>' % (iso.getNetworkISO(),netIso))
        @param: bigEndian (True|False) -> whether the size should be represented in big-endian.
@return: size + ASCII ISO8583 package ready to go to the network!
@raise: InvalidMTI Exception
"""
netIso = ""
asciiIso = self.getRawIso()
if bigEndian:
netIso = struct.pack('!h', len(asciiIso))
if self.DEBUG:
print('Pack Big-endian')
else:
netIso = struct.pack('<h', len(asciiIso))
if self.DEBUG:
print('Pack Little-endian')
netIso += asciiIso
return netIso
################################################################################################
################################################################################################
# Method that receives an ISO8583 ASCII package in network form and parses it.
def setNetworkISO(self, iso, bigEndian=True):
"""Method that receive sie + ASCII ISO8583 package and transfor it in the ISO8583 object.
By default, it recieve the package with size represented with big-endian.
Is the same that:
import struct
(...)
iso = ISO8583()
iso.setBit(3,'300000')
(...)
# Example: big-endian
# To little-endian, replace 'iso.getNetworkISO()' with 'iso.getNetworkISO(False)'
netIso = iso.getNetworkISO()
newIso = ISO8583()
# Example: big-endian
# To little-endian, replace 'newIso.setNetworkISO()' with 'newIso.setNetworkISO(False)'
newIso.setNetworkISO(netIso)
#Which is equivalent to:
#size = netIso[0:2]
## To little-endian, replace '!h' with '<h'
#size = struct.unpack('!h',size )
#newIso.setIsoContent(netIso[2:2 + size[0]])
arr = newIso.getBitsAndValues()
for v in arr:
print ('Bit %s Type %s Value = %s' % (v['bit'],v['type'],v['value']))
@param: iso -> str that represents size + ASCII ISO8583 package
@param: bigEndian (True|False) -> encoding of the size (big-endian if True).
@raise: InvalidIso8583 Exception
"""
if len(iso) < 24:
raise InvalidIso8583('This is not a valid iso!! Invalid size.')
size = iso[0:2]
if bigEndian:
size = struct.unpack('!h', size)
if self.DEBUG:
print('Unpack Big-endian')
else:
size = struct.unpack('<h', size)
if self.DEBUG:
print('Unpack Little-endian')
if len(iso) != (size[0] + 2):
raise InvalidIso8583(
'This is not a valid iso!! The ISO8583 ASCII length (%s) does not match the declared size %s!' % (len(iso[2:]), size[0]))
self.setIsoContent(iso[2:])
################################################################################################
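################################################################################################
# A hypothetical end-to-end sketch (not part of the class): frame a package with
# getNetworkISO() and parse the reply with setNetworkISO(). HOST and PORT are
# placeholders; only methods defined in this library are used.
#
# import socket
# iso = ISO8583()
# iso.setMTI('0800')
# iso.setBit(3, '300000')
# s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
# s.connect((HOST, PORT))
# s.send(iso.getNetworkISO()) # 2-byte big-endian size prefix + ASCII package
# ans = s.recv(2048)
# reply = ISO8583()
# reply.setNetworkISO(ans) # unpacks the size prefix, then parses the package
# s.close()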
| gpl-3.0 | -8,084,716,628,131,540,000 | 45.057771 | 193 | 0.478809 | false |
python-openxml/python-docx | docs/conf.py | 1 | 10904 | # -*- coding: utf-8 -*-
#
# python-docx documentation build configuration file, created by
# sphinx-quickstart on Sat Jun 29 17:34:36 2013.
#
# This file is execfile()d with the current directory set to its containing
# dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
sys.path.insert(0, os.path.abspath('..'))
from docx import __version__ # noqa
# -- General configuration ---------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
# needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.intersphinx',
'sphinx.ext.todo',
'sphinx.ext.coverage',
'sphinx.ext.viewcode'
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
# source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'python-docx'
copyright = u'2013, Steve Canny'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = __version__
# The full version, including alpha/beta/rc tags.
release = __version__
# A string of reStructuredText that will be included at the end of every source
# file that is read. This is the right place to add substitutions that should
# be available in every file.
rst_epilog = """
.. |api-Document| replace:: :class:`docx.api.Document`
.. |AttributeError| replace:: :exc:`.AttributeError`
.. |BaseStyle| replace:: :class:`.BaseStyle`
.. |BlockItemContainer| replace:: :class:`.BlockItemContainer`
.. |_Body| replace:: :class:`._Body`
.. |_Cell| replace:: :class:`._Cell`
.. |_CharacterStyle| replace:: :class:`._CharacterStyle`
.. |Cm| replace:: :class:`.Cm`
.. |ColorFormat| replace:: :class:`.ColorFormat`
.. |_Column| replace:: :class:`._Column`
.. |_Columns| replace:: :class:`._Columns`
.. |CoreProperties| replace:: :class:`.CoreProperties`
.. |datetime| replace:: :class:`.datetime.datetime`
.. |Document| replace:: :class:`.Document`
.. |DocumentPart| replace:: :class:`.DocumentPart`
.. |docx| replace:: ``python-docx``
.. |Emu| replace:: :class:`.Emu`
.. |False| replace:: :class:`False`
.. |float| replace:: :class:`.float`
.. |Font| replace:: :class:`.Font`
.. |_Footer| replace:: :class:`._Footer`
.. |FooterPart| replace:: :class:`.FooterPart`
.. |_Header| replace:: :class:`._Header`
.. |HeaderPart| replace:: :class:`.HeaderPart`
.. |ImageParts| replace:: :class:`.ImageParts`
.. |Inches| replace:: :class:`.Inches`
.. |InlineShape| replace:: :class:`.InlineShape`
.. |InlineShapes| replace:: :class:`.InlineShapes`
.. |InvalidSpanError| replace:: :class:`.InvalidSpanError`
.. |int| replace:: :class:`.int`
.. |_LatentStyle| replace:: :class:`._LatentStyle`
.. |LatentStyles| replace:: :class:`.LatentStyles`
.. |Length| replace:: :class:`.Length`
.. |None| replace:: :class:`.None`
.. |NumberingPart| replace:: :class:`.NumberingPart`
.. |_NumberingStyle| replace:: :class:`._NumberingStyle`
.. |OpcPackage| replace:: :class:`.OpcPackage`
.. |Paragraph| replace:: :class:`.Paragraph`
.. |ParagraphFormat| replace:: :class:`.ParagraphFormat`
.. |_ParagraphStyle| replace:: :class:`._ParagraphStyle`
.. |Part| replace:: :class:`.Part`
.. |Pt| replace:: :class:`.Pt`
.. |_Relationship| replace:: :class:`._Relationship`
.. |Relationships| replace:: :class:`._Relationships`
.. |RGBColor| replace:: :class:`.RGBColor`
.. |_Row| replace:: :class:`._Row`
.. |_Rows| replace:: :class:`._Rows`
.. |Run| replace:: :class:`.Run`
.. |Section| replace:: :class:`.Section`
.. |Sections| replace:: :class:`.Sections`
.. |Settings| replace:: :class:`.Settings`
.. |str| replace:: :class:`.str`
.. |Styles| replace:: :class:`.Styles`
.. |StylesPart| replace:: :class:`.StylesPart`
.. |Table| replace:: :class:`.Table`
.. |_TableStyle| replace:: :class:`._TableStyle`
.. |TabStop| replace:: :class:`.TabStop`
.. |TabStops| replace:: :class:`.TabStops`
.. |_Text| replace:: :class:`._Text`
.. |True| replace:: :class:`True`
.. |ValueError| replace:: :class:`ValueError`
"""
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['.build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
# default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
# add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
# add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
# show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
# modindex_common_prefix = []
# -- Options for HTML output ------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'armstrong'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
# html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
html_theme_path = ['_themes']
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
# html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
# html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
# html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
# html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
# html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
# html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
# html_sidebars = {}
html_sidebars = {
'**': ['localtoc.html', 'relations.html', 'sidebarlinks.html',
'searchbox.html']
}
# Additional templates that should be rendered to pages, maps page names to
# template names.
# html_additional_pages = {}
# If false, no module index is generated.
# html_domain_indices = True
# If false, no index is generated.
# html_use_index = True
# If true, the index is split into individual pages for each letter.
# html_split_index = False
# If true, links to the reST sources are added to the pages.
# html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
# html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
# html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
# html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
# html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'python-docxdoc'
# -- Options for LaTeX output -----------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
# 'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
# 'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
# 'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file,
# target name,
# title,
# author,
# documentclass [howto/manual]).
latex_documents = [
('index', 'python-docx.tex', u'python-docx Documentation',
u'Steve Canny', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
# latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
# latex_use_parts = False
# If true, show page references after internal links.
# latex_show_pagerefs = False
# If true, show URL addresses after external links.
# latex_show_urls = False
# Documents to append as an appendix to all manuals.
# latex_appendices = []
# If false, no module index is generated.
# latex_domain_indices = True
# -- Options for manual page output -----------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'python-docx', u'python-docx Documentation',
[u'Steve Canny'], 1)
]
# If true, show URL addresses after external links.
# man_show_urls = False
# -- Options for Texinfo output ---------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'python-docx', u'python-docx Documentation',
u'Steve Canny', 'python-docx', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
# texinfo_appendices = []
# If false, no module index is generated.
# texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
# texinfo_show_urls = 'footnote'
# Example configuration for intersphinx: refer to the Python standard library.
intersphinx_mapping = {'http://docs.python.org/3/': None}
| mit | -3,895,910,832,250,938,000 | 27.395833 | 79 | 0.680026 | false |
internap/arsenal | cellar/tests/adapters/test_memory_datastore.py | 1 | 1399 | # -*- coding: utf-8 -*-
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from cellar import adapters
from cellar.adapters.memory_datastore import MemoryDatastore
from cellar.core.resource import Resource
from cellar.core.resource_type import ResourceType
from oslotest import base
class TestManager(base.BaseTestCase):
def setUp(self):
super().setUp()
self.datastore = MemoryDatastore()
def test_save_and_load_a_resource(self):
resource = Resource("uuid", resource_type=ResourceType('pdu'), attributes={'ironic_driver': 'test'})
self.datastore.save(resource)
self.assertEqual(resource, self.datastore.load("uuid"))
self.assertEqual([resource], self.datastore.load_all())
def test_resource_not_found(self):
self.assertRaises(adapters.ResourceNotFound,
self.datastore.load, 'something that doesnt exist')
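# For reference, a sketch of the datastore contract these tests exercise (method
# names are taken from the calls above; the shapes of the return values are
# assumptions based on the assertions):
#
# datastore.save(resource) # stores the resource, keyed by its uuid
# datastore.load(uuid) # returns the resource or raises adapters.ResourceNotFound
# datastore.load_all() # returns a list of every stored resource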
| apache-2.0 | -9,220,681,339,651,441,000 | 38.971429 | 108 | 0.723374 | false |
jim-easterbrook/pyctools | src/pyctools/components/photo/unsharpmask.py | 1 | 4337 | # Pyctools - a picture processing algorithm development kit.
# http://github.com/jim-easterbrook/pyctools
# Copyright (C) 2019-20 Pyctools contributors
#
# This program is free software: you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see
# <http://www.gnu.org/licenses/>.
__all__ = ['UnsharpMask']
__docformat__ = 'restructuredtext en'
import cv2
import numpy
from pyctools.components.interp.gaussianfilter import GaussianFilter
from pyctools.components.interp.resizecore import resize_frame
from pyctools.core.config import ConfigBool, ConfigFloat
from pyctools.core.base import Transformer
from pyctools.core.types import pt_float
class UnsharpMask(Transformer):
"""Enhance image detail using an unsharp mask.
The `unsharp mask`_ is computed by subtracting a `Gaussian blurred`_
image from the original image. Low amplitude detail can be removed
before the mask is added back to the image to sharpen it. This can
reduce the increase in noise when lots of sharpening is applied.
The ``amount`` parameter specifies how much sharpening to apply. It
is a real number rather than the percentage used in some software.
The ``radius`` parameter sets the standard deviation of the Gaussian
blurring filter.
To avoid discontinuities in the mask the ``threshold`` is used in a
"coring" function. Detail lower than the threshold is ignored,
detail above the threshold is reduced by the threshold value.
Another option to reduce noise is ``denoise``. This uses a 5x5
median filter as part of the mask computation.
Note that this component can also be used to soften an image using a
Gaussian filter. Set ``amount = -1``, ``threshold = 0``, and
``denoise = False``.
============= ===== ====
Config
============= ===== ====
``amount`` float Amount of sharpening to apply.
``radius`` float Size of blurring function.
``threshold`` float Don't sharpen low amplitude detail.
``denoise`` bool Median filter the detail to suppress noise.
============= ===== ====
.. _Gaussian blurred: https://en.wikipedia.org/wiki/Gaussian_blur
.. _unsharp mask: https://en.wikipedia.org/wiki/Unsharp_masking
"""
def initialise(self):
self.config['amount'] = ConfigFloat(value=1.0, decimals=2)
self.config['radius'] = ConfigFloat(value=2.0, decimals=1)
self.config['threshold'] = ConfigFloat(value=0.0, decimals=1)
self.config['denoise'] = ConfigBool()
def transform(self, in_frame, out_frame):
self.update_config()
amount = self.config['amount']
radius = self.config['radius']
threshold = self.config['threshold']
denoise = self.config['denoise']
data = in_frame.as_numpy(dtype=pt_float)
# median filter image before computing mask
if denoise:
mask = cv2.medianBlur(data, 5)
else:
mask = data
# blur data with Gaussian and subtract to make mask
h_filter = GaussianFilter.core(x_sigma=radius).as_numpy(dtype=pt_float)
v_filter = GaussianFilter.core(y_sigma=radius).as_numpy(dtype=pt_float)
mask = mask - resize_frame(resize_frame(
mask, h_filter, 1, 1, 1, 1), v_filter, 1, 1, 1, 1)
# core out mask values below threshold
if threshold > 0.0:
mask_p = mask - pt_float(threshold)
mask_p *= mask_p > pt_float(0)
mask_n = mask + pt_float(threshold)
mask_n *= mask_n < pt_float(0)
mask = mask_p + mask_n
# add some mask back to image
out_frame.data = data + (mask * amount)
# add audit
out_frame.set_audit(
self, 'data = UnsharpMask(data)\n', with_config=self.config)
return True
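# A standalone numpy sketch of the unsharp-mask arithmetic used above, for
# clarity only. It substitutes a uniform blur for the component's separable
# Gaussian (scipy is an assumed dependency here), so the output is illustrative:
#
# import numpy
# from scipy import ndimage
#
# amount, threshold = 1.0, 4.0
# image = numpy.random.rand(64, 64) * 255.0
# mask = image - ndimage.uniform_filter(image, size=5)
# mask_p = numpy.where(mask > threshold, mask - threshold, 0.0) # core positive detail
# mask_n = numpy.where(mask < -threshold, mask + threshold, 0.0) # core negative detail
# sharpened = image + (mask_p + mask_n) * amount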
| gpl-3.0 | -3,528,944,633,430,766,000 | 40.304762 | 79 | 0.661517 | false |
bewiwi/python-ovh | docs/conf.py | 1 | 8279 | # -*- coding: utf-8 -*-
#
# Python-OVH documentation build configuration file, created by
# sphinx-quickstart on Tue Aug 26 13:44:18 2014.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.doctest',
'sphinx.ext.coverage',
'sphinx.ext.viewcode',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'Python-OVH'
copyright = u'2013-2014, OVH SAS'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '0.3'
# The full version, including alpha/beta/rc tags.
release = '0.3.3'
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'python-ovh-doc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
('index', 'Python-OVH.tex', u'Python-OVH Documentation',
u'Jean-Tiare Le Bigot', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'python-ovh', u'Python-OVH Documentation',
[u'Jean-Tiare Le Bigot'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'Python-OVH', u'Python-OVH Documentation',
u'Jean-Tiare Le Bigot', 'Python-OVH', 'OVH Rest API wrapper.',
'API'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| bsd-3-clause | -8,003,277,080,217,921,000 | 30.479087 | 79 | 0.705037 | false |
prattl/teamfinder | api/teams/api/views.py | 1 | 6335 | from common.api.permissions import IsStaffOrTeamCaptain
from common.models import Interest, Language, Position, TeamMember, Region
from teams.api.serializers import EditableFlatTeamSerializer, TeamSerializer, PlayerMembershipSerializer
from teams.models import Team
from rest_framework import permissions, status, viewsets
from rest_framework.decorators import detail_route
from rest_framework.response import Response
from .serializers import FlatTeamSerializer
class TeamViewSet(viewsets.ModelViewSet):
queryset = Team.objects.all()
serializer_class = TeamSerializer
model = Team
permission_classes = (IsStaffOrTeamCaptain, ) # TODO: Create IsStaffOrTeamCaptain permission for put/patch/delete
# TODO: Create IsStaffOrPlayer permission for post
@staticmethod
def setup_eager_loading(queryset):
queryset = queryset.select_related(
'captain',
'captain__user',
'creator',
'creator__user',
).prefetch_related(
'regions',
'available_positions',
'captain__regions',
'captain__positions',
'captain__teams',
'creator__regions',
'creator__positions',
'creator__teams',
'players__regions',
'teammember_set__player',
'teammember_set__player__user',
'teammember_set__player__regions',
'teammember_set__player__positions',
'teammember_set__player__teams',
)
return queryset
def get_serializer_class(self):
"""
Choose the serializer based on the request method and permissions:
* If the method is safe (GET, HEAD, or OPTIONS), return the nested TeamSerializer
* Else, for POST, PUT, PATCH, or DELETE, if the user is the team captain, return EditableFlatTeamSerializer
* Otherwise, return FlatTeamSerializer
"""
def _get_serializer_class():
if self.request.method in permissions.SAFE_METHODS:
return TeamSerializer
try:
instance = self.get_object()
except AssertionError:
pass
else:
if self.request.user == instance.captain.user:
return EditableFlatTeamSerializer
return FlatTeamSerializer
serializer_class = _get_serializer_class()
return serializer_class
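# Illustrative outcomes of the rules above (request shapes are hypothetical):
# GET /teams/1/ -> TeamSerializer (nested, for reading)
# PATCH /teams/1/ as the captain -> EditableFlatTeamSerializer
# PATCH /teams/1/ as anyone else -> FlatTeamSerializer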
def get_queryset_for_search(self, queryset):
keywords = self.request.query_params.get('keywords')
regions = self.request.query_params.getlist('regions[]')
available_positions = self.request.query_params.getlist('available_positions[]')
interests = self.request.query_params.getlist('interests[]')
languages = self.request.query_params.getlist('languages[]')
if keywords:
queryset = queryset.filter(name__icontains=keywords)
if regions:
queryset = queryset.filter(regions__in=Region.objects.filter(pk__in=regions))
if available_positions:
queryset = queryset.filter(available_positions__in=Position.objects.filter(pk__in=available_positions))
if interests:
queryset = queryset.filter(interests__in=Interest.objects.filter(pk__in=interests))
if languages:
queryset = queryset.filter(languages__in=Language.objects.filter(pk__in=languages))
return queryset.order_by('-search_score', '-updated', )
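# For example, a search request could look like the following (the URL prefix is
# an assumption; the query parameter names match those read above):
# GET /teams/?search=1&keywords=dota&regions[]=3&available_positions[]=2&languages[]=1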
def get_queryset(self):
queryset = super().get_queryset()
queryset = self.setup_eager_loading(queryset)
search = self.request.query_params.get('search')
if search:
queryset = self.get_queryset_for_search(queryset)
return queryset
def create(self, request, *args, **kwargs):
data = request.data
# Validate with the flat serializer
serializer = FlatTeamSerializer(data=data, context={'request': request})
serializer.is_valid(raise_exception=True)
new_team = self.perform_create(serializer)
try:
player_position = Position.objects.get(pk=request.data.get('player_position'))
except Position.DoesNotExist:
player_position = None
TeamMember.objects.create(team=new_team, player=request.user.player, position=player_position)
headers = self.get_success_headers(serializer.data)
# Return a nested serializer
full_team = TeamSerializer(instance=new_team, context={'request': request})
return Response(full_team.data, status=status.HTTP_201_CREATED, headers=headers)
def perform_create(self, serializer):
return serializer.save(creator=self.request.user.player, captain=self.request.user.player)
def update(self, request, *args, **kwargs):
partial = kwargs.pop('partial', False)
instance = self.get_object()
serializer_class = self.get_serializer_class()
serializer = serializer_class(instance, data=request.data, partial=partial, context={'request': request})
serializer.is_valid(raise_exception=True)
updated_team = self.perform_update(serializer)
try:
# Try to update the requesting user's position within the team
player_position = Position.objects.get(pk=request.data.get('player_position'))
team_member = TeamMember.objects.get(team=updated_team, player=request.user.player)
if player_position != team_member.position:
team_member.position = player_position
team_member.save()
except (Position.DoesNotExist, TeamMember.DoesNotExist):
pass
full_team = TeamSerializer(instance=updated_team, context={'request': request})
return Response(full_team.data)
def perform_update(self, serializer):
return serializer.save()
@detail_route(permission_classes=(permissions.IsAuthenticated,), methods=('GET',))
def memberships(self, request, pk=None):
team = self.get_object()
serializer = PlayerMembershipSerializer(
team.teammember_set.all(), many=True, context={'request': request}
)
return Response(serializer.data, status=status.HTTP_200_OK)
| apache-2.0 | 5,376,675,978,768,944,000 | 42.993056 | 119 | 0.64562 | false |
eduble/SimpleFilesystems | taggerfs/id3library.py | 1 | 2766 | #!/usr/bin/env python
"""
This module implements a management library for your
collection of ID3-tagged mp3 files.
"""
import os
from os.path import join
from mutagen.easyid3 import EasyID3
class ID3Library:
"""Library of ID3-tagged mp3 files."""
def __init__(self):
"""Constructor."""
self._data = {}
def getTag(self, mp3file):
try:
tag = EasyID3(mp3file)
except Exception: # no ID3 tag
tag = EasyID3()
return tag
def getTagElement(self, tag, elem):
"""Sub-routine to get one element of an ID3 tag (i.e. artist, album, ...)."""
value = None
if elem in tag:
value = tag[elem][0].encode('utf8').strip()
if value == '':
value = None
return value
def registerMP3File(self, path):
"""Registers the ID3 tag of a given mp3 file into the library."""
tag = self.getTag(path)
artist = self.getTagElement(tag,'artist')
album = self.getTagElement(tag,'album')
if artist == None:
artist = 'UnknownArtist'
if album == None:
album = 'UnknownAlbum'
if artist not in self._data:
self._data[artist] = {}
allAlbumsOfArtist = self._data[artist]
if album not in allAlbumsOfArtist:
allAlbumsOfArtist[album] = set({})
allTracksOfAlbum = allAlbumsOfArtist[album]
allTracksOfAlbum.add(path)
def registerMP3FilesFromDir(self, d):
"""Registers all files in a given directory (including files in sub-directories)."""
for dirname, dirnames, filenames in os.walk(d):
for filename in filenames:
if filename.endswith('.mp3'):
print 'adding file:', filename
path = join(dirname, filename)
self.registerMP3File(path)
def getArtists(self):
"""Outputs the list of artists the library knows about."""
return self._data.keys()
def getAlbums(self, artist):
"""Outputs the list of albums the library knows about for a given artist."""
return self._data[artist].keys()
def getFiles(self, artist, album):
"""Outputs the list of files the library knows about for a given album."""
return self._data[artist][album]
def registerArtist(self, artist):
"""Registers an artist into the library."""
self._data[artist] = {}
def registerAlbum(self, artist, album):
"""Registers an album into the library."""
self._data[artist][album] = set({})
def update(self, fullpath, old_artist, old_album,
new_artist, new_album):
"""
Updates the data (artist & album) about a given song.
In-memory and in-file (i.e. the ID3 tag) data will both be updated.
"""
# update current hierarchy
self._data[new_artist][new_album].add(fullpath)
self._data[old_artist][old_album].remove(fullpath)
# update ID3 tag
tag = self.getTag(fullpath)
tag['artist'] = new_artist
tag['album'] = new_album
tag.save(fullpath)
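# A minimal usage sketch (for illustration; '/music' is a placeholder path):
#
# lib = ID3Library()
# lib.registerMP3FilesFromDir('/music')
# for artist in lib.getArtists():
# for album in lib.getAlbums(artist):
# print '%s / %s: %d track(s)' % (artist, album, len(lib.getFiles(artist, album)))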
| lgpl-3.0 | -6,175,146,277,138,132,000 | 28.741935 | 86 | 0.665944 | false |
carpedm20/fbchat | tests/online/test_client.py | 1 | 3005 | import pytest
import fbchat
import os
pytestmark = pytest.mark.online
def test_fetch(client):
client.fetch_users()
def test_search_for_users(client):
list(client.search_for_users("test", 10))
def test_search_for_pages(client):
list(client.search_for_pages("test", 100))
def test_search_for_groups(client):
list(client.search_for_groups("test", 1000))
def test_search_for_threads(client):
list(client.search_for_threads("test", 1000))
with pytest.raises(fbchat.HTTPError, match="rate limited"):
list(client.search_for_threads("test", 10000))
def test_message_search(client):
list(client.search_messages("test", 500))
def test_fetch_thread_info(client):
list(client.fetch_thread_info(["4"]))[0]
def test_fetch_threads(client):
list(client.fetch_threads(20))
list(client.fetch_threads(200))
def test_undocumented(client):
client.fetch_unread()
client.fetch_unseen()
@pytest.fixture
def open_resource(pytestconfig):
def get_resource_inner(filename):
path = os.path.join(pytestconfig.rootdir, "tests", "resources", filename)
return open(path, "rb")
return get_resource_inner
def test_upload_and_fetch_image_url(client, open_resource):
with open_resource("image.png") as f:
((id, mimetype),) = client.upload([("image.png", f, "image/png")])
assert mimetype == "image/png"
assert client.fetch_image_url(id).startswith("http")
def test_upload_image(client, open_resource):
with open_resource("image.png") as f:
_ = client.upload([("image.png", f, "image/png")])
def test_upload_many(client, open_resource):
with open_resource("image.png") as f_png, open_resource(
"image.jpg"
) as f_jpg, open_resource("image.gif") as f_gif, open_resource(
"file.json"
) as f_json, open_resource(
"file.txt"
) as f_txt, open_resource(
"audio.mp3"
) as f_mp3, open_resource(
"video.mp4"
) as f_mp4:
_ = client.upload(
[
("image.png", f_png, "image/png"),
("image.jpg", f_jpg, "image/jpeg"),
("image.gif", f_gif, "image/gif"),
("file.json", f_json, "application/json"),
("file.txt", f_txt, "text/plain"),
("audio.mp3", f_mp3, "audio/mpeg"),
("video.mp4", f_mp4, "video/mp4"),
]
)
def test_mark_as_read(client, user, group):
client.mark_as_read([user, group], fbchat._util.now())
def test_mark_as_unread(client, user, group):
client.mark_as_unread([user, group], fbchat._util.now())
def test_move_threads(client, user, group):
client.move_threads(fbchat.ThreadLocation.PENDING, [user, group])
client.move_threads(fbchat.ThreadLocation.INBOX, [user, group])
@pytest.mark.skip(reason="need to have threads to delete")
def test_delete_threads():
pass
@pytest.mark.skip(reason="need to have messages to delete")
def test_delete_messages():
pass
| bsd-3-clause | 7,939,270,098,044,976,000 | 24.905172 | 81 | 0.628952 | false |
HugoMMRabson/fonsa | src/my/installer/__init__.py | 1 | 14952 | #!/usr/bin/python3
"""
my.installer.__init__
# TESTING "PHASE ONE", ONE LINE AT A TIME...
import os
from willywonka_installer import *
from my.installer import *
args = Object()
args.skipalltools = True
args.platform = 'RaspberryPi3'
args.debugip = '192.168.251.112'
args.usegzip = True
args.outfile = '%s/testout.img' % WONKADIR
from my.v2and3 import connect_to_pydev_remote_debugger
connect_to_pydev_remote_debugger(args.debugip)
our_working_image = '%s/tmp/tmpfs/%s.wkg' % (WONKADIR, os.path.basename(args.outfile))
our_pristine_image = '%s/%s.pstn' % (WONKADIR, os.path.basename(args.outfile)) # TODO: %s/tmp/%s
our_golden_tarball = '%s/old/20180000_golden/%s.golden.tar.lzo' % (WONKADIR, args.platform)
for i in (our_working_image, our_pristine_image, our_golden_tarball):
os.system('mkdir -p %s' % os.path.dirname(i))
os.system('rm -f "%s"' % args.outfile)
if not os.path.exists(our_pristine_image):
obtain_decompressed_pristine_image(our_pristine_image, args.platform)
os.system('rm -f %s' % our_golden_tarball) # TEST PORPOISES
if not os.path.exists(our_golden_tarball) or 0 != os.system('find %s -mtime -%d 2>/dev/null' % (our_golden_tarball, DAYS_BEFORE_FORCING_REBUILD)):
from_a_pristine_image_generate_a_golden_tarball(pristine_image=our_pristine_image,
save_golden_tarball_here=our_golden_tarball,
skipalltools=args.skipalltools)
build_folder = '%s/mnt/src.%s' % (WONKADIR, args.platform)
print('Extracting golden tarball to our build folder => %s' % build_folder)
os.system('mkdir -p %s' % build_folder)
system_or_die('pv %s | lzop -d -c | tar -x -C %s' % (our_golden_tarball, build_folder))
if not args.skipalltools:
i_shall_call_willywonka_installer_phase_two(build_folder)
migrate_all_data_from_folder_to_resized_image(pristine_image=our_pristine_image,
src_folder=build_folder,
output_img_name=our_working_image,
use_gzip=args.usegzip,
silent=False)
"""
import datetime
import os
import sys
from my.globals.consts import RC_LOCAL_FNAME, FONSA_LOCKFILENAME, HOSTNAME_FNAME, PRODUCTION_MODEL, WONKADIR
from my.globals.exceptions import CannotFindSpareLoopdevError, WhatDoIDoWithThisPartitionError, ExternalCallBinaryError
from my.installer.rscripts import MAXIMUM_LIKELY_BOOT_PARTITION_SIZE_IN_MB, APT_GET_OPTIONALPKGS, APT_GET_PACKAGES, PIP3PACKAGES
from my.miscellany import call_binary, system_or_die, generate_temporary_filename, chroot_this, sleep
def dissociate_loopdevs_en_masse(attempts=3):
for i in range(0, 32):
os.system("umount /dev/loop%d 2> /dev/null " % i)
free_up_loopdev('/dev/loop%d' % i, attempts=attempts, silent=True)
def generate_128MB_randomized_data_file(random_data_fname):
os.system('''
fname=%s
dd if=/dev/urandom of=$fname bs=1024k count=8 2>/dev/null
cat $fname $fname $fname $fname > $fname.big 2>/dev/null
sync
cat $fname.big $fname.big $fname.big $fname.big > $fname 2>/dev/null
sync
rm -f $fname.big
''' % random_data_fname)
def download_pristine_copy_of_the_OS(downloaded_pristine_image_xz):
# print('''Beginning step 0; mtpt="%s"; goldenf="%s"; our_working_image="%s"''' % (mtpt, goldenf, our_working_image))
if not os.path.exists(downloaded_pristine_image_xz):
raise SystemError('%s not found; NEFARIOUS PORPOISES; re-enable downloader, please.' % downloaded_pristine_image_xz)
# rm -f $destfile
# echo "*** Downloading pristine OS ***"
# mount | grep "$mtpt" && echo "WARNING --- mountpoint is still mounted (start of part 0)" || true
# losetup $loopdev 2>/dev/null && die "Loop device is loopy. Why? (start of part 0)" || true
# # Have we downloaded and compressed a pristine disk image yet? (By 'pristine,' I mean 'freshly
# # downloaded from the RPi3/ODroid/NanoPi website.') If we haven't, let's do that now.
# if [ ! -e "$DOWNLOADED_PRISTINE_IMAGE.xz" ] ; then
# echo "$DOWNLOADED_PRISTINE_IMAGE.xz not found; NEFARIOUS PORPOISES; re-enable please_download_pristine_image() eventually."
# exit 111
# please_download_pristine_image $DOWNLOADED_PRISTINE_IMAGE $pristine_url
# rm -f $GOLDENF.xz
# else
# echo "$DOWNLOADED_PRISTINE_IMAGE.xz exists. Good."
# fi
# losetup $loopdev 2>/dev/null && die "Loop device is loopy. Why? (end of part 0)" || true
# fi
#
def free_up_loopdev(sparedev, attempts=3, silent=True):
if not silent:
print("Freeing up %s" % sparedev)
os.system('umount %s 2> /dev/null' % sparedev)
while attempts >= 0 and 0 == os.system('losetup | grep "%s " >/dev/null 2>/dev/null' % sparedev):
attempts -= 1
if not silent:
print('Waiting for %s to become free...' % sparedev)
os.system('umount %s 2> /dev/null' % sparedev)
os.system('sync;sync;sync; losetup -d %s 2>/dev/null; sync;sync;sync' % sparedev)
sleep(.5)
if 0 == os.system('losetup | grep %s >/dev/null 2>/dev/null' % sparedev):
print("Warning - failed to dissociated %s" % sparedev)
elif not silent:
print('%s is free. Good.' % sparedev)
def get_a_spare_loopdev():
spare_loopdev = None
for i in range(0, 32):
a_loopdev = '/dev/loop%d' % i
if 0 != os.system('losetup %s > /dev/null 2> /dev/null' % a_loopdev):
spare_loopdev = a_loopdev
break
if spare_loopdev is None:
raise CannotFindSpareLoopdevError('Unable to find a spare /dev/loop entry')
return spare_loopdev
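# Typical pairing of the two loop-device helpers above (sketch only; needs root,
# and the image path is a placeholder):
#
# loopdev = get_a_spare_loopdev()
# system_or_die('losetup %s /path/to/disk.img' % loopdev)
# ... partition / mount work ...
# free_up_loopdev(loopdev, attempts=3, silent=False)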
def get_total_RAM_in_MB():
retcode, textout = call_binary(['free'])
if 0 != retcode:
raise ExternalCallBinaryError("Failed to get total RAM in MB")
return int(textout.split('\n')[1].split(':')[1].strip(' ').split(' ')[0])
# def is_this_my_first_run():
# return False if os.path.exists(HOSTAPDCONF_FNAME) else True
def get_all_partitions_lines_from_fdisk(imagef):
retcode, textout = call_binary(['fdisk', '-l', imagef])
if 0 != retcode:
raise ExternalCallBinaryError("Failed to get fdisk info")
fdisk_output = textout.split('\n')
return [r for r in fdisk_output if r.split(' ')[0].find('/') == 0]
def get_sector_size(imagef):
retcode, textout = call_binary(['fdisk', '-l', imagef])
if 0 != retcode:
raise ExternalCallBinaryError("Failed to run fdisk -l")
fdisk_output = textout.split('\n')
return int([r for r in fdisk_output if r.find(' * ') >= 0 and r.find(':') >= 0][0].split('=')[1].strip(' ').split(' ')[0])
def calculate_sectorsize_and_partition_size_and_stuff(imagef):
'''
diskinfo.sectorsize size of each sector (usually 512 bytes)
diskinfo.noof_parts how many partitions are there?
diskinfo.root_partno which partition# is root?
diskinfo.boot_partno which partition# is boot?
diskinfo.usr_partno which partition# is usr?
diskinfo.partitions info on specific partitions (see below)
diskinfo.partitions[1].start_sector
diskinfo.partitions[1].start_in_bytes
diskinfo.partitions[1].end_sector
diskinfo.partitions[1].size_in_bytes
diskinfo.partitions[1].format_hexcode
diskinfo.partitions[1].format_name
...etc...
'''
diskinfo = Object()
all_lines = get_all_partitions_lines_from_fdisk(imagef)
diskinfo.sectorsize = get_sector_size(imagef)
diskinfo.boot_partno = None
diskinfo.root_partno = None
diskinfo.usr_partno = None
diskinfo.noof_parts = len(all_lines)
diskinfo.partitions = [None, None, None, None, None, None, None, None, None, None, None,
None, None, None, None, None, None, None, None, None, None, None, None, None, None]
for this_line in all_lines:
# <diskimage fname><partno> (*?) <start> <end> <sectors> <size in text> <format_hexcode> <format_name>
# drop the boot-flag column and collapse runs of whitespace into single spaces
this_line = ' '.join(this_line.replace(' * ', ' ').split())
fname_and_partno, start_sector, end_sector, noof_sectors, size_str, format_hexcode = this_line.split(' ')[:6]
format_name = ' '.join(this_line.split(' ')[6:]).strip()
partno = int(fname_and_partno.split(imagef)[1])
diskinfo.partitions[partno] = Object()
diskinfo.partitions[partno].start_sector = int(start_sector)
diskinfo.partitions[partno].end_sector = int(end_sector)
diskinfo.partitions[partno].noof_sectors = int(noof_sectors)
diskinfo.partitions[partno].size_str = size_str
diskinfo.partitions[partno].format_hexcode = format_hexcode
diskinfo.partitions[partno].format_name = format_name
diskinfo.partitions[partno].start_in_bytes = diskinfo.partitions[partno].start_sector * diskinfo.sectorsize
diskinfo.partitions[partno].size_in_bytes = diskinfo.partitions[partno].noof_sectors * diskinfo.sectorsize
diskinfo.partitions[partno].size_in_MBs = diskinfo.partitions[partno].size_in_bytes / 1024 / 1024
if diskinfo.root_partno is None and diskinfo.boot_partno is None\
and diskinfo.partitions[partno].size_in_MBs <= MAXIMUM_LIKELY_BOOT_PARTITION_SIZE_IN_MB:
# print('Partition #%d is probably /boot' % partno)
diskinfo.boot_partno = partno
elif diskinfo.root_partno is None:
# print('Partition #%d is probably root' % partno)
diskinfo.root_partno = partno
elif diskinfo.usr_partno is None:
# print('Partition #%d is probably /usr' % partno)
diskinfo.usr_partno = partno
else:
raise WhatDoIDoWithThisPartitionError("I do not know what to do with partition #%d of %s; \
surely we have found all the partitions already; what is this sorcery?" % (partno, imagef))
return diskinfo
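# Sketch of how the returned diskinfo object is meant to be consumed (field names
# as documented above; the image path is a placeholder):
#
# di = calculate_sectorsize_and_partition_size_and_stuff('/tmp/os.img')
# root = di.partitions[di.root_partno]
# offset = root.start_in_bytes # e.g. for 'mount -o loop,offset=...'
# print('root partition: %s, %d MB' % (root.format_name, root.size_in_MBs))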
class Object(object):
pass
# def DOWNLOADED_PRISTINE_IMAGE(imagefile, storage_folder):
# if 0 != os.system('''
# local imagefile=%s our_mtpt=$WONKADIR/tmp/our_mtpt.$RANDOM$RANDOM storage_folder%s
# mkdir -p $our_mtpt
# mount_the_disk_image $imagefile $our_mtpt
# umount $our_mtpt/{dev/pts,dev,proc,tmp,var,sys,proc} 2> /dev/null || true
# cd $our_mtpt
# echo "Making a copy of the filesystem from the original image file --- from $our_mtpt to $storage_folder"
# mkdir -p $storage_folder
# cp -af * $storage_folder/
# cd /
# unmount_disk_image $our_mtpt
# rmdir $our_mtpt || true''' % (imagefile, storage_folder)):
# raise SystemError("Failed to download pristine image")
def copy_first_few_MB_of_existing_imagefile_and_add_zeros_to_pad_it_out(imagefile, truncatedimgfile, finalsizeofourimage):
print("Creating truncated copy of existing imagefile")
os.system('dd if=%s of=%s bs=1024k count=%d' % (imagefile, truncatedimgfile, finalsizeofourimage))
os.system('''
truncatedimgfile=%s
finalsizeofourimage=%d
echo -en "Adding zeroes to end of pristine file, to pad it out"
while [ "$(($(ls -l $truncatedimgfile | tr -s '\t' ' ' | cut -d' ' -f5)/1024/1024))" -lt "$finalsizeofourimage" ] ; do
echo -en "."
dd if=/dev/zero bs=1024 count=8192 >> $truncatedimgfile 2> /dev/null # Don't use conv=sparse, please. Don't.
[ "$?" -eq "0" ] || echo "Failed to finish resizing image. Did we run out of disk space?"
done
echo "...Padded. Yay."
''' % (truncatedimgfile, finalsizeofourimage))
def please_download_pristine_image(downloaded_pristine_image, pristine_url):
raise SystemError('not written yet')
'''
local DOWNLOADED_PRISTINE_IMAGE="$1" pristine_url="$2"
rm -f "$DOWNLOADED_PRISTINE_IMAGE"
echo "Downloading pristine image"
if echo "$pristine_url" | grep -F .tar.xz >/dev/null ; then
suffix=tar.xz
die "I have no idea how to handle .tar.xz endings. This may be an eMMC thing. Help!"
elif echo "$pristine_url" | grep -F .xz >/dev/null ; then
suffix=xz
elif echo "$pristine_url" | grep -F .gz >/dev/null ; then
suffix=gz
elif echo "$pristine_url" | grep -F .7z >/dev/null ; then
suffix=7z
elif echo "$pristine_url" | grep -F .zip >/dev/null ; then
suffix=zip
else
die "Failed to handle type of compression that $DOWNLOADED_PRISTINE_IMAGE.* uses."
fi
rm -f "$DOWNLOADED_PRISTINE_IMAGE".$suffix.tmp
die "wget $pristine_url -O "$DOWNLOADED_PRISTINE_IMAGE".$suffix.tmp"
mv -f "$DOWNLOADED_PRISTINE_IMAGE".$suffix.tmp "$DOWNLOADED_PRISTINE_IMAGE".$suffix
echo "Unzipping it"
if [ "$suffix" = "xz" ] ; then
xz -d "$DOWNLOADED_PRISTINE_IMAGE".$suffix
elif [ "$suffix" = "gz" ] ; then
gunzip "$DOWNLOADED_PRISTINE_IMAGE".$suffix
elif [ "$suffix" = "7z" ] ; then
mkdir -p $(dirname "$DOWNLOADED_PRISTINE_IMAGE")/aaaaa
7z x -o$(dirname "$DOWNLOADED_PRISTINE_IMAGE")/aaaaa "$DOWNLOADED_PRISTINE_IMAGE".7z
mv $(dirname "$DOWNLOADED_PRISTINE_IMAGE")/aaaaa/*.* "$DOWNLOADED_PRISTINE_IMAGE"
rmdir $(dirname "$DOWNLOADED_PRISTINE_IMAGE")/aaaaa/*
rmdir $(dirname "$DOWNLOADED_PRISTINE_IMAGE")/aaaaa || true
elif [ "$suffix" = "zip" ] ; then
mkdir -p $(dirname "$DOWNLOADED_PRISTINE_IMAGE")/aaaaa
unzip "$DOWNLOADED_PRISTINE_IMAGE".zip -d $(dirname "$DOWNLOADED_PRISTINE_IMAGE")/aaaaa
mv $(dirname "$DOWNLOADED_PRISTINE_IMAGE")/aaaaa/*.* "$DOWNLOADED_PRISTINE_IMAGE"
rmdir $(dirname "$DOWNLOADED_PRISTINE_IMAGE")/aaaaa/*
rmdir $(dirname "$DOWNLOADED_PRISTINE_IMAGE")/aaaaa || true
else
die "Failed to handle type of compression that $DOWNLOADED_PRISTINE_IMAGE.* uses."
fi
[ -e "$DOWNLOADED_PRISTINE_IMAGE" ] || die "Unable to decompress $DOWNLOADED_PRISTINE_IMAGE image"
echo "Compressing it (again)"
pv -p $DOWNLOADED_PRISTINE_IMAGE | xz -5e > $DOWNLOADED_PRISTINE_IMAGE.xz
rm -f $DOWNLOADED_PRISTINE_IMAGE $DOWNLOADED_PRISTINE_IMAGE.$suffix
echo "Finished compressing it. We now have a (usable) pristine disk image."
'''
# ------------------------------------------------------------------------------------------------------------------------------------------------------------
if __name__ == "__main__":
raise SystemExit('Do not run me!')
| gpl-3.0 | -8,482,496,389,659,124,000 | 47.07717 | 158 | 0.617442 | false |
fumitoh/modelx | modelx/tests/core/cells/test_cells_setitem.py | 1 | 2424 | import pytest
import modelx as mx
from modelx import new_model, defcells
from modelx.testing.testutil import SuppressFormulaError
@pytest.fixture
def setitemsample():
space = new_model(name="samplemodel").new_space(name="samplespace")
funcdef = """def func(x): return 2 * x"""
space.new_cells(formula=funcdef)
@defcells
def fibo(x):
if x == 0 or x == 1:
return x
else:
return fibo(x - 1) + fibo[x - 2]
@defcells
def double(x):
double[x] = 2 * x
@defcells
def return_last(x):
return return_last(x - 1)
@defcells
def balance(x):
return balance(x-1) + flow(x-1)
@defcells
def flow(x):
return 10
return space
def test_setitem(setitemsample):
setitemsample.fibo[0] = 1
setitemsample.return_last[4] = 5
assert setitemsample.fibo[2] == 2
assert setitemsample.return_last(5) == 5
def test_setitem_str(setitemsample):
cells = setitemsample.new_cells(formula="lambda s: 2 * s")
cells["ABC"] = "DEF"
assert cells["ABC"] == "DEF"
def test_setitem_in_cells(setitemsample):
assert setitemsample.double[3] == 6
def test_setitem_in_formula_invalid_assignment_error(setitemsample):
def invalid_in_formula_assignment(x):
invalid_in_formula_assignment[x + 1] = 3 * x
setitemsample.new_cells(formula=invalid_in_formula_assignment)
with SuppressFormulaError():
with pytest.raises(KeyError):
setitemsample.invalid_in_formula_assignment[3]
def test_setitem_in_formula_duplicate_assignment_error(setitemsample):
def duplicate_assignment(x):
duplicate_assignment[x] = 4 * x
return 4 * x
setitemsample.new_cells(formula=duplicate_assignment)
with SuppressFormulaError():
with pytest.raises(ValueError):
setitemsample.duplicate_assignment[4]
@pytest.mark.parametrize("recalc", [True, False])
def test_setitem_recalc(setitemsample, recalc):
last_recalc = mx.get_recalc()
try:
mx.set_recalc(recalc)
setitemsample.balance[0] = 0
assert setitemsample.balance[10] == 100
setitemsample.balance[0] = 100
if recalc:
assert len(setitemsample.balance) == 11
else:
assert len(setitemsample.balance) == 1
assert setitemsample.balance[10] == 200
finally:
mx.set_recalc(last_recalc)
| gpl-3.0 | 5,309,869,218,973,825,000 | 22.533981 | 71 | 0.640264 | false |
kichiki/stokes | python/stnc2pov.py | 1 | 25797 | # stokes-netcdf to pov converter
# Copyright (C) 2006-2008 Kengo Ichiki <[email protected]>
# $Id: stnc2pov.py,v 1.9 2008/06/03 02:57:43 kichiki Exp $
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
import sys
#sys.path.append('/somewhere/ryuon/stokes/python')
import stokes
def write_T_Balls(f, n):
f.write('#declare T_Ball_%d = texture {\n'%(n))
f.write(' pigment {\n'\
' image_map {\n'\
' jpeg \"textures/%d.jpg\"\n'\
' map_type 1\n'\
' interpolate 2\n'\
' }\n'\
' rotate <0, 270, 0>\n'\
' }\n'%(n))
f.write(' finish {\n'\
' phong 0.9\n'\
' ambient 0.5\n'\
' reflection 0.2\n'\
' }\n'\
'}\n')
def write_T_Particle(f):
f.write('#declare T_Particle = texture {\n'\
' pigment { color White }\n'\
' //finish { ambient 0.2 diffuse 0 reflection 0.6 }\n'\
' finish {\n'\
' ambient .1\n'\
' diffuse .1\n'\
' specular 1\n'\
' roughness .001\n'\
' metallic\n'\
' reflection {\n'\
' .75\n'\
' metallic\n'\
' }\n'\
' }\n'\
'}\n')
def write_T_Particles_with_Bonds(f, color):
f.write('#declare T_Particles_with_Bonds = texture {\n'\
' pigment { color %s }\n'\
' finish {\n'\
' ambient .1\n'\
' diffuse .4\n'\
' reflection {\n'\
' .75\n'\
' metallic\n'\
' }\n'\
' specular 1\n'\
' }\n'\
'}\n'%(color))
def write_M_RYUON(f):
# M_RYUON
f.write('#declare M_RYUON = material {\n'\
' texture {\n'\
' pigment {\n'\
' color <0.4, 0.5, 1.0>\n'\
' filter 1\n'\
' }\n'\
' finish {\n'\
' ambient 0\n'\
' diffuse 0\n'\
' reflection .25\n'\
' specular 1\n'\
' roughness .001\n'\
' }\n'\
' } // end of texture\n'\
' interior { ior 1.33 }\n'
'}\n')
def write_T_CHECKER(f):
f.write('#declare T_CHECKER = texture {\n'\
' pigment{\n'\
' checker\n'\
' color <0.4, 0.5, 1.0>\n'\
' color White\n'\
' }\n'\
' scale 0.01\n'\
' finish{\n'\
' phong 0.9\n'\
' metallic\n'\
' }\n'\
'}\n')
def bounding_box (np, x):
(cx,cy,cz) = (0.0, 0.0, 0.0)
for i in range(np):
xx = x[i*3]
yy = x[i*3+1]
zz = x[i*3+2]
cx = cx + xx
cy = cy + yy
cz = cz + zz
if i == 0:
lx0 = lx1 = xx
ly0 = ly1 = yy
lz0 = lz1 = zz
else:
if lx0 > xx:
lx0 = xx
if lx1 < xx:
lx1 = xx
if ly0 > yy:
ly0 = yy
if ly1 < yy:
ly1 = yy
if lz0 > zz:
lz0 = zz
if lz1 < zz:
lz1 = zz
cx = cx / float(np)
cy = cy / float(np)
cz = cz / float(np)
lx = lx1 - lx0
ly = ly1 - ly0
lz = lz1 - lz0
return (cx,cy,cz, lx,ly,lz)
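# For instance, the bounding box can seed a default camera placement
# (illustration only; 'pos' stands for the particle coordinate array):
#
# cx, cy, cz, lx, ly, lz = bounding_box(np, pos)
# camera = (cx, cy - 2.0 * max(lx, ly, lz), cz) # back the camera off along y
# lookat = (cx, cy, cz)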
# INPUT
# f : file
# lattice = (lx,ly,lz) in simulation coordinates
# camera = (cx,cy,cz)
# lookat = (lax,lay,laz)
# flag_ball : 0 => checker, 1 => pool balls
# flag_bonds :
# bond_color : '' is accepted (for Red)
def write_pov_header (f, lattice, camera, lookat,
flag_ball=0, flag_bonds=0, bond_color=''):
# note that in POVRAY,
# y is the vertical direction
# z is the depth direction
# scale factor = 1/100 (0.01 radius = 1 in POV)
lx = lattice[0]/100.0
lz = lattice[1]/100.0
ly = lattice[2]/100.0
cx = camera[0]/100.0
cz = camera[1]/100.0
cy = camera[2]/100.0
lax = lookat[0]/100.0
laz = lookat[1]/100.0
lay = lookat[2]/100.0
if flag_bonds == 0:
f.write('#include "colors.inc"\n')
#f.write('#include "woods.inc"\n\n')
# place the ground
f.write('// floor\nplane {\n'\
' y, -0.1\n'\
' texture {\n'\
#' T_Wood6\n'\
#' finish{ ambient 1 }\n'\
' pigment { checker color White, color <.7, .7, .7> }\n'\
' scale .3\n'\
' finish{ ambient .4 }\n'\
' }\n'\
'}\n')
# place the walls
f.write('// back wall\n'\
'plane {\n'\
' z, 1\n'\
' pigment { color rgb <1,1,0.8> }\n'\
' finish{ ambient 0.4 }\n'\
'}\n')
f.write('// ceiling\n'\
'plane {\n'\
' y, 5\n'\
' pigment { color White }\n'\
'}\n')
f.write('// right wall\n'\
'plane {\n'\
' x, 5\n'\
' pigment { color White }\n'\
'}\n')
f.write('// left wall\n'\
'plane {\n'\
' x, -5\n'\
' pigment { color White }\n'\
'}\n')
f.write('// behind wall\n'\
'plane {\n z, -5\n'\
' pigment { color White }\n'\
'}\n\n')
# place the box
f.write('box {\n'\
' <0, 0, 0>, // Near lower left corner\n'\
' <%f, %f, %f> // Far upper right corner\n'\
' pigment { color rgbf <0.9, 0.99, 1, 1> }\n'\
'}\n\n'%(lx, ly, lz))
f.write('camera {\n location <%f, %f, %f>\n'%(cx, cy, cz))
f.write(' look_at <%f, %f, %f>\n}\n\n'%(lax, lay, laz))
f.write('light_source { <2, 4.9, -3> color White}\n\n')
write_T_Particle(f)
else:
f.write('#include "colors.inc"\n')
f.write('background { color White }\n')
f.write('camera {\n location <%f, %f, %f>\n'%(cx, cy, cz))
f.write(' look_at <%f, %f, %f>\n}\n\n'%(lax, lay, laz))
f.write('light_source { <2, 4.9, -3> color White}\n\n')
write_T_Particle(f)
if bond_color == '':
write_T_Particles_with_Bonds(f, 'Red')
else:
write_T_Particles_with_Bonds(f, bond_color)
if flag_ball == 0:
write_M_RYUON (f)
write_T_CHECKER(f)
else:
for i in range(16):
write_T_Balls(f,i)
# INPUT
# f : file
# lattice = (lx,ly,lz) in simulation coordinates
# camera = (cx,cy,cz)
# lookat = (lax,lay,laz)
#  flag_ball : 0 => checker, 1 => pool balls
#  flag_bonds : 0 => plain particles, 1 => particles connected by bonds
# bond_color : '' is accepted (for Red)
def write_pov_header_open (f, lattice, camera, lookat,
flag_ball=0, flag_bonds=0, bond_color=''):
# note that in POVRAY,
# y is the vertical direction
# z is the depth direction
# scale factor = 1/100 (0.01 radius = 1 in POV)
lx = lattice[0]/100.0
lz = lattice[1]/100.0
ly = lattice[2]/100.0
cx = camera[0]/100.0
cz = camera[1]/100.0
cy = camera[2]/100.0
lax = lookat[0]/100.0
laz = lookat[1]/100.0
lay = lookat[2]/100.0
f.write('#include \"colors.inc\"\n')
f.write('#include "woods.inc"\n\n')
if flag_bonds == 0:
# place the walls
f.write('// back wall\n'\
'plane {\n'\
' z, 2\n'\
' pigment { checker color White, color <0.6, 0.8, 1> }\n'\
' scale 0.1\n}\n')
f.write('// behind wall\n'\
'plane {\n'\
' z, -5\n'\
' pigment { color White }\n'\
'}\n\n')
f.write('camera {\n location <%f, %f, %f>\n'%(cx, cy, cz))
f.write(' look_at <%f, %f, %f>\n}\n\n'%(lax, lay, laz))
f.write('light_source { <2, 4.9, -3> color White}\n\n')
write_T_Particle(f)
else:
f.write('#include "colors.inc"\n')
f.write('background { color White }\n')
f.write('camera {\n location <%f, %f, %f>\n'%(cx, cy, cz))
f.write(' look_at <%f, %f, %f>\n}\n\n'%(lax, lay, laz))
f.write('light_source { <2, 4.9, -3> color White}\n\n')
write_T_Particle(f)
if bond_color == '':
write_T_Particles_with_Bonds(f, 'Red')
else:
write_T_Particles_with_Bonds(f, bond_color)
if flag_ball == 0:
write_M_RYUON (f)
write_T_CHECKER(f)
else:
for i in range(15):
write_T_Balls(f,i+1)
def write_pov_particle (f, x, y, z, a):
# note that in POVRAY,
# y is the vertical direction
# z is the depth direction
# scale factor = 1/100 (0.01 radius = 1 in POV)
f.write('sphere {\n')
f.write(' <%f, %f, %f>, %f\n'%(x/100.0, z/100.0, y/100.0, a/100.0))
f.write(' material { M_RYUON }\n}\n')
def write_pov_particle_fixed (f, x, y, z, a):
# note that in POVRAY,
# y is the vertical direction
# z is the depth direction
# scale factor = 1/100 (0.01 radius = 1 in POV)
f.write('sphere {\n')
f.write(' <%f, %f, %f>, %f\n'%(x/100.0, z/100.0, y/100.0, a/100.0))
f.write(' texture { T_Particle }\n}\n')
def write_pov_particles_with_bonds (f, nm, pos, a, br):
# note that in POVRAY,
# y is the vertical direction
# z is the depth direction
# scale factor = 1/100 (0.01 radius = 1 in POV)
# all objects are merged
f.write('merge {\n')
    # spheres
for j in range(nm):
if a == []: rad = 1.0
else: rad = a[j]
x = pos[j*3]
y = pos[j*3+1]
z = pos[j*3+2]
f.write(' sphere {\n')
f.write(' <%f, %f, %f>, %f\n'\
%(x/100.0, z/100.0, y/100.0, rad/100.0))
f.write(' }\n')
# bonds
for j in range(nm-1):
if a == []: rad = 1.0
else: rad = a[j]
if br > 0.0:
rad = br / 100.0
else:
# set cylinder's radius the half
rad = 0.5 * rad / 100.0
x0 = pos[j*3 ] / 100.0
y0 = pos[j*3+1] / 100.0
z0 = pos[j*3+2] / 100.0
x1 = pos[(j+1)*3 ] / 100.0
y1 = pos[(j+1)*3+1] / 100.0
z1 = pos[(j+1)*3+2] / 100.0
f.write(' cylinder {\n')
f.write(' <%f, %f, %f>, <%f, %f, %f>, %f\n'\
%(x0, z0, y0, x1, z1, y1, rad))
f.write(' }\n')
f.write(' texture { T_Particles_with_Bonds }\n')
f.write('}\n')
# make transform matrix (3x3) by quaternion
def Q2M (q1,q2,q3,q4):
m = []
# parity change
q4 *= -1.0
m.append(2.0*(q1*q1 + q4*q4 - .5))
m.append(2.0*(q1*q2 + q3*q4))
m.append(2.0*(q1*q3 - q2*q4))
m.append(2.0*(q1*q2 - q3*q4))
m.append(2.0*(q2*q2 + q4*q4 - .5))
m.append(2.0*(q2*q3 + q1*q4))
m.append(2.0*(q1*q3 + q2*q4))
m.append(2.0*(q2*q3 - q1*q4))
m.append(2.0*(q3*q3 + q4*q4 - .5))
# note that in POVRAY,
# y is the vertical direction
# z is the depth direction
t = [1.0, 0.0, 0.0,\
0.0, 0.0, 1.0,\
0.0, 1.0, 0.0]
x = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
y = [0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0, 0.0]
for i in range(3):
for j in range(3):
for k in range(3):
x[i*3+j] += t[i*3+k] * m[k*3+j]
for i in range(3):
for j in range(3):
for k in range(3):
y[i*3+j] += x[i*3+k] * t[k*3+j]
# therefore, Y = T . M . T
return y
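# Sanity check (hedged, added for illustration): the identity rotation,
# quaternion (q1,q2,q3,q4) = (0,0,0,1), must survive both the parity
# change and the T . M . T axis swap unchanged:
#   Q2M(0.0, 0.0, 0.0, 1.0) -> [1,0,0, 0,1,0, 0,0,1]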
def write_pov_particle_Q (f, x, y, z, a, q):
# note that in POVRAY,
# y is the vertical direction
# z is the depth direction
# scale factor = 1/100 (0.01 radius = 1 in POV)
m = Q2M (q[0], q[1], q[2], q[3])
f.write('sphere {\n')
f.write(' <0, 0, 0>, %f\n'%(a/100.0))
f.write(' texture { T_CHECKER }\n')
f.write(' transform {\n')
f.write(' matrix <%f, %f, %f,\n'%(m[0], m[3], m[6]))
f.write(' %f, %f, %f,\n'%(m[1], m[4], m[7]))
f.write(' %f, %f, %f,\n'%(m[2], m[5], m[8]))
f.write(' %f, %f, %f> }\n'%(x/100.0, z/100.0, y/100.0))
f.write('}\n')
def write_pov_particle_Balls_Q (f, x, y, z, a, q, i):
n = i%15 + 1
# note that in POVRAY,
# y is the vertical direction
# z is the depth direction
# scale factor = 1/100 (0.01 radius = 1 in POV)
m = Q2M (q[0], q[1], q[2], q[3])
f.write('sphere {\n')
f.write(' <0, 0, 0>, %f\n'%(a/100.0))
f.write(' texture { T_Ball_%d }\n'%(n))
f.write(' transform {\n')
f.write(' matrix <%f, %f, %f,\n'%(m[0], m[3], m[6]))
f.write(' %f, %f, %f,\n'%(m[1], m[4], m[7]))
f.write(' %f, %f, %f,\n'%(m[2], m[5], m[8]))
f.write(' %f, %f, %f> }\n'%(x/100.0, z/100.0, y/100.0))
f.write('}\n')
# now camera angle
# init: <0.17, 0.50, -1.10> <0.17, 0.50, 0.0>
# target: <0.17, 0.22, -0.28> <0.17, 0.15, 0.0>
# those are in POVRAY coordinates
# in simulation coordinates,
# init: <17, -110, 50> <17, 0, 50>
# target: <17, -28, 22> <17, 0, 15>
# diff < 0, 82, -28> < 0, 0, -35>
# let's say the target is reached in the first 200 steps
# d/step= < 0, .41,-.14> < 0, 0,-.175>
def move_camera (camera, lookat):
if (camera[2] <= 22.0): return
camera[1] += .41
camera[2] -= .14
lookat[2] -= .175
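# Illustrative usage (hypothetical starting pose): each call nudges the
# camera toward the target until camera[2] reaches 22:
#   camera = [17.0, -110.0, 50.0]
#   lookat = [17.0, 0.0, 50.0]
#   move_camera(camera, lookat)  # camera -> [17.0, -109.59, 49.86]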
def usage():
print '$Id: stnc2pov.py,v 1.9 2008/06/03 02:57:43 kichiki Exp $'
print 'USAGE:'
print '\t-f or --file : stokes-nc-file'
print '\t-o or --out : output filename'
print '\t-ball : use pool balls'
print '\t-bonds : connect particles with bonds'
print '\t-br : radius of bond cylinder'
print '\t-bc : bond color (default: Red)\n'\
'\t\t ex.1 -bc \'rgb <0, .5, 1>\'\n'\
'\t\t ex.2 -bc \'color red 0.623529 green 0.623529 blue 0.372549\'\n'
print '\t-step n : output the config only at the step n\n'\
'\t\t n starts from 1 and ends at 1001 for 1000-step run.\n'
print '\t-nsteps n : output every n steps\n'
print '\t-sphere r : draw sphere with radius r'
print '\t-cylinder r : draw cylinder with radius r'
print '\t-camera r : set the distance to the camera by r'
print '\t-top : the top-view (default: side view)'
print '\t-bottom : the bottom-view (default: side view)'
print '\t-lookat x y z : set the lookat point fixed by (x,y,z)'
sys.exit ()
def render_one_step(str_argv, outfile, i,
nc, pos, a, q, xf0, af, lattice,
flag_ball, flag_bonds, bond_radius, bond_color,
flag_lookat, lk_arg, camera_dist, camera_dir,
sphere_radius, cylinder_radius, flag_bb):
    # renamed from 'file' to avoid shadowing the builtin
    povfile = '%s-%05d.pov'%(outfile, i)
    try:
        f = open(povfile, 'w')
    except IOError:
        print 'cannot open', povfile
        sys.exit()
# write argv[0]
f.write('/* generated for %d step by\n'%(i))
f.write(' * %s\n'%(str_argv))
f.write(' */\n')
stokes.stokes_nc_get_data (nc, "x", i, pos)
# set camera direction
(cx,cy,cz, lx,ly,lz) = bounding_box (nc.np, pos)
if flag_lookat == 0:
if cylinder_radius > 0.0:
# only x is adjustable
lk = [cx, 0, 0]
else:
lk = [cx, cy, cz]
else:
lk = [lk_arg[0], lk_arg[1], lk_arg[2]]
if camera_dir == 'top':
# top view
if camera_dist == 0.0:
if lx > ly:
l = lx
else:
l = ly
            # prevent the camera from going too far away
if l > 50: l = 50
camera = [lk[0], lk[1], lk[2]+2*l]
else:
camera = [lk[0], lk[1], lk[2]+camera_dist]
elif camera_dir == 'bottom':
# bottom view
if camera_dist == 0.0:
if lx > ly:
l = lx
else:
l = ly
            # prevent the camera from going too far away
if l > 50: l = 50
camera = [lk[0], lk[1], lk[2]-2*l]
else:
camera = [lk[0], lk[1], lk[2]-camera_dist]
else:
# side view
if camera_dist == 0.0:
if lx > lz:
l = lx
else:
l = lz
            # prevent the camera from going too far away
if l > 50: l = 50
camera = [lk[0], lk[1]-2*l, lk[2]]
else:
camera = [lk[0], lk[1]-camera_dist, lk[2]]
# write header part
if lattice[0] == 0.0 and lattice[1] == 0.0 and lattice[2] == 0.0:
# non-periodic boundary
write_pov_header_open (f, lattice, camera, lk,
flag_ball, flag_bonds, bond_color)
else:
# periodic boundary
#move_camera (camera, lk)
write_pov_header (f, lattice, camera, lk,
flag_ball, flag_bonds, bond_color)
if flag_bb != 0:
# write bounding box for periodic system
if lattice[0] != 0.0 or lattice[1] != 0.0 or lattice[2] != 0.0:
f.write('box {\n')
f.write(' <0, 0, 0>,\n')
f.write(' <%f, %f, %f>\n'\
%(lattice[0]/100.0, lattice[2]/100.0, lattice[1]/100.0))
f.write(' pigment {\n')
f.write(' rgbf <.9,1,.9, .95>\n')
f.write(' }\n')
f.write(' finish {\n')
f.write(' ambient .2\n')
f.write(' diffuse .6\n')
f.write(' }\n')
f.write('}\n')
# write confinement
if sphere_radius > 0.0:
# draw sphere
f.write('sphere {\n')
f.write(' <0, 0, 0>, %f\n'%(sphere_radius/100.0)) # scale factor 100
f.write(' pigment {\n')
f.write(' rgbf <.9,1,.9, .95>\n')
f.write(' }\n')
f.write(' finish {\n')
f.write(' ambient .2\n')
f.write(' diffuse .6\n')
f.write(' }\n')
f.write('}\n')
if cylinder_radius > 0.0:
# draw cylinder
f.write('cylinder {\n')
f.write(' <%f, 0, 0>,\n'%((cx-lx)*0.01)) # scale factor 0.01
f.write(' <%f, 0, 0>,\n'%((cx+lx)*0.01)) # scale factor 0.01
f.write(' %f\n'%(cylinder_radius*0.01)) # scale factor 0.01
f.write(' pigment {\n')
f.write(' rgbf <.9,1,.9, .95>\n')
f.write(' }\n')
f.write(' finish {\n')
f.write(' ambient .2\n')
f.write(' diffuse .6\n')
f.write(' }\n')
f.write('}\n')
# write mobile particles
if flag_bonds == 0:
# no bond
if nc.flag_q != 0:
# with quaternion
stokes.stokes_nc_get_data (nc, "q", i, q)
for j in range(nc.np):
x = pos[j*3]
y = pos[j*3+1]
z = pos[j*3+2]
if a != []:
rad = a[j]
else:
rad = 1.0
if flag_ball == 0:
write_pov_particle_Q (f, x, y, z, rad,\
[q[j*4+0],q[j*4+1],\
q[j*4+2],q[j*4+3]])
else:
write_pov_particle_Balls_Q (f, x, y, z, rad,\
[q[j*4+0],q[j*4+1],\
q[j*4+2],q[j*4+3]],\
j)
else:
# no quaternion
for j in range(nc.np):
x = pos[j*3]
y = pos[j*3+1]
z = pos[j*3+2]
if a != []:
write_pov_particle (f, x, y, z, a[j])
else:
write_pov_particle (f, x, y, z, 1.0)
else:
# bond
write_pov_particles_with_bonds (f, nc.np, pos, a, bond_radius)
# write fixed particles
for j in range(nc.npf):
x = xf0[j*3]
y = xf0[j*3+1]
z = xf0[j*3+2]
if af != []:
write_pov_particle_fixed (f, x, y, z, af[j])
else:
write_pov_particle_fixed (f, x, y, z, 1.0)
# done
f.close()
def main():
filename = ''
outfile = ''
flag_ball = 0
flag_bonds = 0
bond_radius = 0.0
bond_color = ''
sphere_radius = 0.0
cylinder_radius = 0.0
flag_bb = 0
camera_dist = 0.0
flag_lookat = 0
lk_x = 0.0
lk_y = 0.0
lk_z = 0.0
camera_dir = ''
step = -1
nsteps = 1
nm = 0
i = 1
while i < len(sys.argv):
if sys.argv[i] == '-f' or sys.argv[i] == '--file':
filename = sys.argv[i+1]
i += 2
elif sys.argv[i] == '-o' or sys.argv[i] == '--out':
outfile = sys.argv[i+1]
i += 2
elif sys.argv[i] == '-step':
step = int(sys.argv[i+1])
step -= 1
i += 2
elif sys.argv[i] == '-nsteps':
nsteps = int(sys.argv[i+1])
i += 2
elif sys.argv[i] == '-ball':
flag_ball = 1
i += 1
elif sys.argv[i] == '-bonds':
flag_bonds = 1
i += 1
elif sys.argv[i] == '-br':
bond_radius = float(sys.argv[i+1])
i += 2
elif sys.argv[i] == '-bc':
bond_color = sys.argv[i+1]
i += 2
elif sys.argv[i] == '-sphere':
sphere_radius = float(sys.argv[i+1])
i += 2
elif sys.argv[i] == '-cylinder':
cylinder_radius = float(sys.argv[i+1])
i += 2
elif sys.argv[i] == '-bbox':
flag_bb = 1
i += 1
elif sys.argv[i] == '-camera':
camera_dist = float(sys.argv[i+1])
i += 2
elif sys.argv[i] == '-top':
camera_dir = 'top'
i += 1
elif sys.argv[i] == '-bottom':
camera_dir = 'bottom'
i += 1
elif sys.argv[i] == '-lookat':
flag_lookat = 1
lk_x = float(sys.argv[i+1])
lk_y = float(sys.argv[i+2])
lk_z = float(sys.argv[i+3])
i += 4
else:
usage()
if filename == '': usage()
if outfile == '': outfile = 'test'
str_argv = ''
for i in range(len(sys.argv)):
str_argv += ' %s'%(sys.argv[i])
nc = stokes.stokes_nc_open (filename)
#stokes.stokes_nc_print_actives(nc, stokes.get_stdout())
lattice = stokes.darray(3)
stokes.stokes_nc_get_array1d (nc, 'l', lattice)
# x[] : center of particles
pos = stokes.darray(nc.np * nc.nvec)
# q[] : quaternion
if nc.flag_q != 0:
q = stokes.darray(nc.np * nc.nquat)
else:
q = []
# a[] : radius of mobile particles
if nc.flag_a != 0:
a = stokes.darray(nc.np)
stokes.stokes_nc_get_array1d (nc, "a", a)
else:
a = []
# af[] : radius of fixed particles
if nc.flag_af != 0:
af = stokes.darray(nc.npf)
stokes.stokes_nc_get_array1d (nc, "af", af)
else:
af = []
# xf0[]
if nc.npf > 0:
xf0 = stokes.darray(nc.npf * nc.nvec)
stokes.stokes_nc_get_data0 (nc, "xf0", xf0)
else:
xf0 = []
if lattice[0] != 0.0 or lattice[1] != 0.0 or lattice[2] != 0.0:
# periodic boundary
if lattice[0] > lattice[2]:
l = lattice[0]
else:
l = lattice[2]
#camera = [0.5 * lattice[0], -1.7*l, 0.5 * lattice[2]]
#camera = [0.5 * lattice[0], -1.1*l, 0.5 * lattice[2]]
#lookat = [0.5 * lattice[0], 0.0, 0.5 * lattice[2]]
camera = [0.5 * lattice[0], -0.8*l, 0.28 * lattice[2]]
lookat = [0.5 * lattice[0], 0.0, 0.3 * lattice[2]]
# extract the config at the step
if step >= 0:
if step > nc.ntime:
            print 'step %d is out of range (max %d)'%(step, nc.ntime)
sys.exit(1)
render_one_step(str_argv, outfile, step, nc,
pos, a, q, xf0, af, lattice,
flag_ball, flag_bonds, bond_radius, bond_color,
flag_lookat, (lk_x, lk_y, lk_z),
camera_dist, camera_dir,
sphere_radius, cylinder_radius, flag_bb)
else:
nloop = nc.ntime / nsteps
for i in range(nloop):
ii = i * nsteps
print '%d step'%(ii)
render_one_step(str_argv, outfile, ii, nc,
pos, a, q, xf0, af, lattice,
flag_ball, flag_bonds, bond_radius, bond_color,
flag_lookat, (lk_x, lk_y, lk_z),
camera_dist, camera_dir,
sphere_radius, cylinder_radius, flag_bb)
if __name__ == "__main__":
main()
| gpl-2.0 | 2,668,609,146,056,144,000 | 30.730627 | 79 | 0.43571 | false |
Tanmay28/coala | coalib/output/dbus/BuildDbusService.py | 1 | 1436 | from distutils.core import Command
from distutils.errors import DistutilsOptionError
from coalib.misc.Constants import Constants
class BuildDbusService(Command):
"""
Add a `build_dbus` command to your setup.py.
To use this Command class add a command to call this class::
# For setuptools
setup(
entry_points={
"distutils.commands": [
"build_dbus = "
"coalib.misc.BuildDbusService:BuildDbusService"
]
}
)
# For distutils
from coalib.misc.BuildDbusService import BuildDbusService
setup(
cmdclass={'build_dbus': BuildDbusService}
)
You can then use the following setup command to produce a dbus service::
$ python setup.py build_dbus
"""
user_options = [('output=', 'O', 'output file')]
def initialize_options(self):
self.output = None
def finalize_options(self):
if self.output is None:
raise DistutilsOptionError('\'output\' option is required')
self.announce('Writing dbus service %s' % self.output)
def run(self):
dist = self.distribution
dbus_service = ("[D-BUS Service]\n"
"Names=" + Constants.BUS_NAME + "\n"
"Exec=coala-dbus")
with open(self.output, 'w') as f:
f.write(dbus_service)
| agpl-3.0 | 1,617,566,290,701,137,700 | 27.72 | 76 | 0.571727 | false |
jorgebaier/iic1103-s4-2016 | clase0830/suma_digitos_primos.py | 1 | 1132 | import math
def suma_digitos(numero):
suma = 0
while numero > 0:
suma = suma + numero%10
numero = numero // 10
return suma
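# Example: suma_digitos peels off the last digit each pass,
# so suma_digitos(1103) == 3 + 0 + 1 + 1 == 5.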
def esPrimo(n):
i = 1
divisores = 0
while i <= n:
if n%i == 0:
divisores = divisores + 1
i = i + 1
return divisores == 2
def esPrimo2(n):
i = 2
if n < 2:
return False
while i < n:
if n%i == 0:
return False
i = i + 1
return True
def esPrimo3(n):
i = 2
if n < 2:
return False
while i <= math.sqrt(n):
if n%i == 0:
return False
i = i + 1
return True
def esPrimo4(n):
if n < 2:
return False
elif n == 2:
return True
if n%2 == 0:
return False
i = 3
while i <= math.sqrt(n):
if n%i == 0:
return False
i = i + 2
return True
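# Summary of the four primality tests above (inferred from the code):
#   esPrimo  counts every divisor from 1 to n      -> O(n)
#   esPrimo2 stops at the first divisor found      -> O(n) worst case
#   esPrimo3 only tries divisors up to sqrt(n)     -> O(sqrt(n))
#   esPrimo4 additionally skips even candidates    -> about half of that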
limite = int(input("how many numbers do you want? "))
numero = 0
contador = 0
while contador < limite:
suma = suma_digitos(numero)
if esPrimo3(suma):
print(numero)
contador = contador + 1
numero = numero + 1
| unlicense | -9,141,152,381,898,231,000 | 16.6875 | 48 | 0.484982 | false |
nsfmc/swatch | swatch/writer.py | 1 | 4463 | # encoding: utf-8
"""
swatch, a parser for adobe swatch exchange files
Copyright (c) 2014 Marcos A Ojeda http://generic.cx/
With notes from
http://iamacamera.org/default.aspx?id=109 and
http://www.colourlovers.com/ase.phps
All Rights Reserved
MIT Licensed, see LICENSE.TXT for details
"""
import logging
import struct
import os
def chunk_count(swatch):
"""return the number of byte-chunks in a swatch object
this recursively walks the swatch list, returning 1 for a single color &
returns 2 for each folder plus 1 for each color it contains
"""
if type(swatch) is dict:
if 'data' in swatch:
return 1
if 'swatches' in swatch:
return 2 + len(swatch['swatches'])
else:
return sum(map(chunk_count, swatch))
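# Illustrative sketch (input shape assumed from the docstring): a folder
# holding two colors counts 2 for the folder plus 1 per color,
#   chunk_count([{'swatches': [color_a, color_b]}]) == 4
# where color_a and color_b are hypothetical color dicts.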
def chunk_for_object(obj):
type = obj.get('type')
if type == 'Color Group':
return chunk_for_folder(obj)
if type in ['Process', 'Spot', 'Global']:
return chunk_for_color(obj)
def chunk_for_color(obj):
"""builds up a byte-chunk for a color
the format for this is
b'\x00\x01' +
Big-Endian Unsigned Int == len(bytes that follow in this block)
• Big-Endian Unsigned Short == len(color_name)
in practice, because utf-16 takes up 2 bytes per letter
this will be 2 * (len(name) + 1)
so a color named 'foo' would be 8 bytes long
• UTF-16BE Encoded color_name terminated with '\0'
using 'foo', this yields '\x00f\x00o\x00o\x00\x00'
• A 4-byte char for Color mode ('RGB ', 'Gray', 'CMYK', 'LAB ')
note the trailing spaces
• a variable-length number of 4-byte length floats
this depends entirely on the color mode of the color.
• A Big-Endian short int for either a global, spot, or process color
global == 0, spot == 1, process == 2
    the chunk has no terminating string. Although other sites have
    indicated that the global/spot/process short is a terminator, it is
    actually used to indicate how Illustrator should deal with the color.
"""
title = obj['name'] + '\0'
title_length = len(title)
chunk = struct.pack('>H', title_length)
chunk += title.encode('utf-16be')
mode = obj['data']['mode'].encode()
values = obj['data']['values']
color_type = obj['type']
fmt = {b'RGB': '!fff', b'Gray': '!f', b'CMYK': '!ffff', b'LAB': '!fff'}
if mode in fmt:
padded_mode = mode.decode().ljust(4).encode()
chunk += struct.pack('!4s', padded_mode) # the color mode
chunk += struct.pack(fmt[mode], *values) # the color values
color_types = ['Global', 'Spot', 'Process']
if color_type in color_types:
color_int = color_types.index(color_type)
chunk += struct.pack('>h', color_int) # append swatch mode
chunk = struct.pack('>I', len(chunk)) + chunk # prepend the chunk size
return b'\x00\x01' + chunk # swatch color header
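# Worked example (hedged; the input shape is assumed from the docstring):
#   chunk_for_color({'name': 'foo', 'type': 'Process',
#                    'data': {'mode': 'RGB', 'values': [1.0, 0.0, 0.0]}})
# packs, in order: b'\x00\x01' (color header), b'\x00\x00\x00\x1c'
# (28 bytes follow), b'\x00\x04' (len('foo\0')), the UTF-16BE title
# b'\x00f\x00o\x00o\x00\x00', the padded mode b'RGB ', three 4-byte
# floats, and b'\x00\x02' for a process color.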
def chunk_for_folder(obj):
"""produce a byte-chunk for a folder of colors
the structure is very similar to a color's data:
• Header
b'\xC0\x01' +
Big Endian Unsigned Int == len(Bytes in the Header Block)
note _only_ the header, this doesn't include the length of color data
• Big Endian Unsigned Short == len(Folder Name + '\0')
Note that Folder Name is assumed to be utf-16be so this
will always be an even number
• Folder Name + '\0', encoded UTF-16BE
• body
chunks for each color, see chunk_for_color
• folder terminator
b'\xC0\x02' +
b'\x00\x00\x00\x00'
Perhaps the four null bytes represent something, but i'm pretty sure
they're just a terminating string, but there's something nice about
how the b'\xC0\x02' matches with the folder's header
"""
title = obj['name'] + '\0'
title_length = len(title)
chunk_body = struct.pack('>H', title_length) # title length
chunk_body += title.encode('utf-16be') # title
chunk_head = b'\xC0\x01' # folder header
chunk_head += struct.pack('>I', len(chunk_body))
# precede entire chunk by folder header and size of folder
chunk = chunk_head + chunk_body
chunk += b''.join([chunk_for_color(c) for c in obj['swatches']])
chunk += b'\xC0\x02' # folder terminator chunk
chunk += b'\x00\x00\x00\x00' # folder terminator
return chunk
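# Putting it together (hedged sketch; 'Brand' and color are hypothetical):
#   folder = {'name': 'Brand', 'type': 'Color Group', 'swatches': [color]}
#   chunk_for_object(folder)
# yields the b'\xC0\x01' folder header block, one color chunk, and the
# b'\xC0\x02' + four-NUL folder terminator described above.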
| mit | -4,765,099,113,358,125,000 | 35.719008 | 79 | 0.622327 | false |
amenonsen/ansible | lib/ansible/modules/network/fortios/fortios_system_replacemsg_nac_quar.py | 1 | 10019 | #!/usr/bin/python
from __future__ import (absolute_import, division, print_function)
# Copyright 2019 Fortinet, Inc.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
__metaclass__ = type
ANSIBLE_METADATA = {'status': ['preview'],
'supported_by': 'community',
'metadata_version': '1.1'}
DOCUMENTATION = '''
---
module: fortios_system_replacemsg_nac_quar
short_description: Replacement messages in Fortinet's FortiOS and FortiGate.
description:
- This module is able to configure a FortiGate or FortiOS device by allowing the
user to set and modify system_replacemsg feature and nac_quar category.
Examples include all parameters and values need to be adjusted to datasources before usage.
Tested with FOS v6.0.5
version_added: "2.9"
author:
- Miguel Angel Munoz (@mamunozgonzalez)
- Nicolas Thomas (@thomnico)
notes:
- Requires fortiosapi library developed by Fortinet
- Run as a local_action in your playbook
requirements:
- fortiosapi>=0.9.8
options:
host:
description:
- FortiOS or FortiGate IP address.
type: str
required: false
username:
description:
- FortiOS or FortiGate username.
type: str
required: false
password:
description:
- FortiOS or FortiGate password.
type: str
default: ""
vdom:
description:
- Virtual domain, among those defined previously. A vdom is a
virtual instance of the FortiGate that can be configured and
used as a different unit.
type: str
default: root
https:
description:
- Indicates if the requests towards FortiGate must use HTTPS protocol.
type: bool
default: true
ssl_verify:
description:
- Ensures FortiGate certificate must be verified by a proper CA.
type: bool
default: true
state:
description:
- Indicates whether to create or remove the object.
type: str
choices:
- present
- absent
system_replacemsg_nac_quar:
description:
- Replacement messages.
default: null
type: dict
suboptions:
buffer:
description:
- Message string.
type: str
format:
description:
- Format flag.
type: str
choices:
- none
- text
- html
- wml
header:
description:
- Header flag.
type: str
choices:
- none
- http
- 8bit
msg_type:
description:
- Message type.
type: str
'''
EXAMPLES = '''
- hosts: localhost
vars:
host: "192.168.122.40"
username: "admin"
password: ""
vdom: "root"
ssl_verify: "False"
tasks:
- name: Replacement messages.
fortios_system_replacemsg_nac_quar:
host: "{{ host }}"
username: "{{ username }}"
password: "{{ password }}"
vdom: "{{ vdom }}"
https: "False"
state: "present"
system_replacemsg_nac_quar:
buffer: "<your_own_value>"
format: "none"
header: "none"
msg_type: "<your_own_value>"
'''
RETURN = '''
build:
description: Build number of the fortigate image
returned: always
type: str
sample: '1547'
http_method:
description: Last method used to provision the content into FortiGate
returned: always
type: str
sample: 'PUT'
http_status:
description: Last result given by FortiGate on last operation applied
returned: always
type: str
sample: "200"
mkey:
description: Master key (id) used in the last call to FortiGate
returned: success
type: str
sample: "id"
name:
description: Name of the table used to fulfill the request
returned: always
type: str
sample: "urlfilter"
path:
description: Path of the table used to fulfill the request
returned: always
type: str
sample: "webfilter"
revision:
description: Internal revision number
returned: always
type: str
sample: "17.0.2.10658"
serial:
description: Serial number of the unit
returned: always
type: str
sample: "FGVMEVYYQT3AB5352"
status:
description: Indication of the operation's result
returned: always
type: str
sample: "success"
vdom:
description: Virtual domain used
returned: always
type: str
sample: "root"
version:
description: Version of the FortiGate
returned: always
type: str
sample: "v5.6.3"
'''
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.connection import Connection
from ansible.module_utils.network.fortios.fortios import FortiOSHandler
from ansible.module_utils.network.fortimanager.common import FAIL_SOCKET_MSG
def login(data, fos):
host = data['host']
username = data['username']
password = data['password']
ssl_verify = data['ssl_verify']
fos.debug('on')
if 'https' in data and not data['https']:
fos.https('off')
else:
fos.https('on')
fos.login(host, username, password, verify=ssl_verify)
def filter_system_replacemsg_nac_quar_data(json):
option_list = ['buffer', 'format', 'header',
'msg_type']
dictionary = {}
for attribute in option_list:
if attribute in json and json[attribute] is not None:
dictionary[attribute] = json[attribute]
return dictionary
def underscore_to_hyphen(data):
if isinstance(data, list):
        for i, elem in enumerate(data):
            data[i] = underscore_to_hyphen(elem)
elif isinstance(data, dict):
new_data = {}
for k, v in data.items():
new_data[k.replace('_', '-')] = underscore_to_hyphen(v)
data = new_data
return data
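# e.g. (illustrative input) underscore_to_hyphen({'msg_type': 'x'})
# returns {'msg-type': 'x'}, recursing into nested dicts and lists.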
def system_replacemsg_nac_quar(data, fos):
vdom = data['vdom']
state = data['state']
system_replacemsg_nac_quar_data = data['system_replacemsg_nac_quar']
filtered_data = underscore_to_hyphen(filter_system_replacemsg_nac_quar_data(system_replacemsg_nac_quar_data))
if state == "present":
return fos.set('system.replacemsg',
'nac-quar',
data=filtered_data,
vdom=vdom)
elif state == "absent":
return fos.delete('system.replacemsg',
'nac-quar',
mkey=filtered_data['msg-type'],
vdom=vdom)
def is_successful_status(status):
return status['status'] == "success" or \
status['http_method'] == "DELETE" and status['http_status'] == 404
def fortios_system_replacemsg(data, fos):
    if data['system_replacemsg_nac_quar']:
        resp = system_replacemsg_nac_quar(data, fos)
    else:
        raise ValueError('missing task body: system_replacemsg_nac_quar')
return not is_successful_status(resp), \
resp['status'] == "success", \
resp
def main():
fields = {
"host": {"required": False, "type": "str"},
"username": {"required": False, "type": "str"},
"password": {"required": False, "type": "str", "no_log": True},
"vdom": {"required": False, "type": "str", "default": "root"},
"https": {"required": False, "type": "bool", "default": True},
"ssl_verify": {"required": False, "type": "bool", "default": True},
"state": {"required": True, "type": "str",
"choices": ["present", "absent"]},
"system_replacemsg_nac_quar": {
"required": False, "type": "dict", "default": None,
"options": {
"buffer": {"required": False, "type": "str"},
"format": {"required": False, "type": "str",
"choices": ["none", "text", "html",
"wml"]},
"header": {"required": False, "type": "str",
"choices": ["none", "http", "8bit"]},
"msg_type": {"required": False, "type": "str"}
}
}
}
module = AnsibleModule(argument_spec=fields,
supports_check_mode=False)
legacy_mode = 'host' in module.params and module.params['host'] is not None and \
'username' in module.params and module.params['username'] is not None and \
'password' in module.params and module.params['password'] is not None
if not legacy_mode:
if module._socket_path:
connection = Connection(module._socket_path)
fos = FortiOSHandler(connection)
is_error, has_changed, result = fortios_system_replacemsg(module.params, fos)
else:
module.fail_json(**FAIL_SOCKET_MSG)
else:
try:
from fortiosapi import FortiOSAPI
except ImportError:
module.fail_json(msg="fortiosapi module is required")
fos = FortiOSAPI()
login(module.params, fos)
is_error, has_changed, result = fortios_system_replacemsg(module.params, fos)
fos.logout()
if not is_error:
module.exit_json(changed=has_changed, meta=result)
else:
module.fail_json(msg="Error in repo", meta=result)
if __name__ == '__main__':
main()
| gpl-3.0 | -8,324,616,875,064,963,000 | 28.72997 | 113 | 0.584489 | false |
SCSSoftware/BlenderTools | addon/io_scs_tools/internals/containers/sii.py | 1 | 7631 | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Copyright (C) 2013-2017: SCS Software
import os
from io_scs_tools.utils import path as _path_utils
from io_scs_tools.utils.printout import lprint
from io_scs_tools.internals.containers.parsers import sii as _sii_reader
from io_scs_tools.internals.containers.writers import sii as _sii_writer
def get_data_from_file(filepath, is_sui=False):
"""Returns entire data in data container from specified SII definition file.
:param filepath: absolute file path where SII should be read from
:type filepath: str
    :param is_sui: True if file should be read as SUI; in that case only one unit will be returned
    :type is_sui: bool
    :return: list of SII Units if parsing succeeded; otherwise None
:rtype: list[io_scs_tools.internals.structure.UnitData] | None
"""
container = None
if filepath:
if os.path.isfile(filepath):
container = _sii_reader.parse_file(filepath, is_sui=is_sui)
if container:
if len(container) < 1:
lprint('D SII file "%s" is empty!', (_path_utils.readable_norm(filepath),))
return None
else:
lprint('D SII file "%s" is empty!', (_path_utils.readable_norm(filepath),))
return None
else:
lprint('W Invalid SII file path %r!', (_path_utils.readable_norm(filepath),))
else:
lprint('I No SII file path provided!')
return container
def write_data_to_file(filepath, container, is_sui=False, create_dirs=False):
"""Write given unit data container into SII file.
:param filepath: absolute file path where SII should be written to
:type filepath: str
:param container: iterable of unit data objects to be written
:type container: tuple[io_scs_tools.internals.structure.UnitData]|list[io_scs_tools.internals.structure.UnitData]
:param is_sui: True if unit should be written as SUI, meaning without SiiNunit header
:type is_sui: bool
:param create_dirs: True if directories should be created before export
:type create_dirs: bool
:return: True if container was successfully written; otherwise False
:rtype: bool
"""
file_type = "SUI" if is_sui else "SII"
if filepath:
if container:
return _sii_writer.write_data(filepath, container, is_sui=is_sui, create_dirs=create_dirs)
else:
lprint("W Empty %s container, abort file write: %r!", (file_type, _path_utils.readable_norm(filepath),))
else:
lprint('I No %s file path provided!', (file_type,))
return False
def has_valid_unit_instance(container, unit_type, req_props=tuple(), one_of_props=tuple(), unit_instance=0):
"""Valides unit instance with given unit type, required properties and one of properties lists.
:param container: container as list of unit instances
:type container: list[io_scs_tools.internals.structure.UnitData]
:param unit_type: type of the unit we are validating represented in string
:type unit_type: str
    :param req_props: required properties that have to be present in the unit instance for it to be valid
    :type req_props: iterable
    :param one_of_props: at least one property from this list has to be present in the unit instance for it to be valid
:type one_of_props: iterable
:param unit_instance: index of unit instance in container list that we are validating
:type unit_instance: int
:return: True if valid; False otherwise
:rtype: bool
"""
if container is None:
lprint("D Validation failed: None SII container!")
return False
# there should be only one unit instance inside file
if len(container) < unit_instance + 1:
lprint("D Validation failed: Not enough unit instances!")
return False
# invalid unit type
if unit_type != "" and container[unit_instance].type != unit_type:
lprint("D Validation failed: Invalid unit instance type!")
return False
for prop in req_props:
if prop not in container[unit_instance].props:
lprint("D Validation failed: Required prop %r not found!", (prop,))
return False
one_of_props_found = False
for prop in one_of_props:
if prop in container[unit_instance].props:
one_of_props_found = True
break
if not one_of_props_found and len(one_of_props) > 0:
lprint("D Validation failed: None property found from one of: %r!", (one_of_props,))
return False
return True
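# Illustrative usage (hedged; the unit type and property names here are
# hypothetical, not taken from this module):
#   if has_valid_unit_instance(container, "mat_effect", req_props=("effect",)):
#       effect = get_unit_property(container, "effect")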
def get_unit_property(container, prop, unit_instance=0):
"""Gets property value from unit instance.
    NOTE: No check is made that the unit instance exists in the container,
    so make sure to run a validation function first.
:param container: container as list of unit instances
:type container: list[io_scs_tools.internals.structure.UnitData]
:param prop: name of the property we are looking for
:type prop: str
:param unit_instance: index of unit instance in container list that we are validating
:type unit_instance: int
    :return: None if property is not found inside the unit instance; otherwise value of the property
:rtype: None|any
"""
value = None
if prop in container[unit_instance].props:
value = container[unit_instance].props[prop]
return value
def get_direct_unit_property(unit, prop):
"""Gets property value from unit instance.
    NOTE: The given unit is assumed to be a valid unit instance;
    no validation is done here.
    :param unit: unit instance to read the property from
:type unit: io_scs_tools.internals.structure.UnitData
:param prop: name of the property we are looking for
:type prop: str
:return: None if property is not found insde unit instance; otherwise value of the property
:rtype: None|any
"""
value = None
if prop in unit.props:
value = unit.props[prop]
return value
def get_unit_by_id(container, unit_id, unit_type):
"""Gets first found unit instance from container with given id and type.
:param container: container as list of unit instances
:type container: list[io_scs_tools.internals.structure.UnitData]
    :param unit_id: id of the unit we are searching for, e.g. ".truck.cabin"
    :type unit_id: str
    :param unit_type: type of the unit (its class name) we are searching for
    :type unit_type: str
    :return: None if unit is not found; otherwise unit data representation of its content
:rtype: None|io_scs_tools.internals.structure.UnitData
"""
unit = None
for unit_instance in range(0, len(container)):
if container[unit_instance].type != unit_type:
continue
if container[unit_instance].id != unit_id:
continue
unit = container[unit_instance]
break
return unit
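# e.g. get_unit_by_id(container, ".truck.cabin", "part") returns the first
# unit whose id and type both match, or None ("part" is a hypothetical
# unit type used only for illustration).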
| gpl-2.0 | -3,206,173,954,677,148,700 | 36.406863 | 117 | 0.68261 | false |
shoopio/shoop | shuup_tests/front/test_middleware.py | 2 | 5530 | # This file is part of Shuup.
#
# Copyright (c) 2012-2019, Shoop Commerce Ltd. All rights reserved.
#
# This source code is licensed under the OSL-3.0 license found in the
# LICENSE file in the root directory of this source tree.
import pytest
from django.conf import settings
from django.contrib.auth import logout
from django.contrib.auth.models import AnonymousUser
from django.utils import timezone
import shuup.core.models
from shuup.admin.urls import login
from shuup.core.models import (
AnonymousContact, CompanyContact, Contact, get_company_contact,
get_person_contact, PersonContact, Shop
)
from shuup.front.middleware import ShuupFrontMiddleware
from shuup.front.views.index import IndexView
from shuup.testing.factories import create_random_company, get_default_shop
from shuup.testing.utils import apply_request_middleware
from shuup_tests.utils.fixtures import regular_user
from .fixtures import get_request
__all__ = ("regular_user",) # noqa
def get_unprocessed_request():
request = get_request()
for attrname in ['shop', 'person', 'customer', 'basket']:
assert not hasattr(request, attrname)
return request
def check_request_attribute_basics(request):
assert isinstance(request.shop, Shop)
assert isinstance(request.person, Contact)
assert isinstance(request.customer, Contact)
assert isinstance(request.basket, shuup.front.basket.objects.BaseBasket)
# TODO: Make these tests faster by faking the Shop and not using database
@pytest.mark.django_db
def test_with_anonymous_user():
get_default_shop() # Create a shop
mw = ShuupFrontMiddleware()
request = get_unprocessed_request()
mw.process_request(request)
check_request_attribute_basics(request)
assert isinstance(request.person, AnonymousContact)
assert isinstance(request.customer, AnonymousContact)
assert request.person == request.customer
@pytest.mark.django_db
def test_with_logged_in_user(regular_user):
get_default_shop() # Create a shop
mw = ShuupFrontMiddleware()
request = get_unprocessed_request()
request.user = regular_user
mw.process_request(request)
check_request_attribute_basics(request)
assert isinstance(request.person, PersonContact)
assert isinstance(request.customer, PersonContact)
assert request.person == request.customer
@pytest.mark.django_db
def test_customer_company_member(regular_user):
get_default_shop() # Create a shop
mw = ShuupFrontMiddleware()
request = get_unprocessed_request()
request.user = regular_user
person = get_person_contact(regular_user)
company = create_random_company()
company.members.add(person)
assert get_company_contact(regular_user) == company
mw.process_request(request)
check_request_attribute_basics(request)
assert isinstance(request.person, PersonContact)
assert isinstance(request.customer, CompanyContact)
company = get_company_contact(request.user)
assert company and (company == request.customer)
@pytest.mark.django_db
def test_timezone_setting(regular_user):
get_default_shop() # Create a shop
mw = ShuupFrontMiddleware()
request = get_unprocessed_request()
request.user = regular_user
some_tz = ('US/Hawaii' if settings.TIME_ZONE == 'UTC' else 'UTC')
person = get_person_contact(regular_user)
person.timezone = some_tz
person.save()
original_tz = timezone.get_current_timezone_name()
assert timezone.get_current_timezone_name() != some_tz
mw.process_request(request)
assert timezone.get_current_timezone_name() == some_tz
timezone.activate(original_tz)
@pytest.mark.django_db
def test_intra_request_user_changing(rf, regular_user):
get_default_shop() # Create a shop
mw = ShuupFrontMiddleware()
request = apply_request_middleware(rf.get("/"), user=regular_user)
mw.process_request(request)
assert request.person == get_person_contact(regular_user)
logout(request)
assert request.user == AnonymousUser()
assert request.person == AnonymousContact()
assert request.customer == AnonymousContact()
@pytest.mark.django_db
def test_maintenance_mode(rf, regular_user, admin_user):
shop = get_default_shop()
shop.maintenance_mode = True
shop.save()
mw = ShuupFrontMiddleware()
request = apply_request_middleware(rf.get("/"), user=regular_user)
maintenance_response = mw.process_view(request, IndexView)
assert maintenance_response is not None
assert maintenance_response.status_code == 503
assert mw._get_maintenance_response(request, IndexView).content == maintenance_response.content
login_response = mw.process_view(request, login)
assert login_response is None
request = apply_request_middleware(rf.get("/"), user=admin_user)
admin_response = mw.process_view(request, IndexView)
assert admin_response is None
shop.maintenance_mode = False
shop.save()
@pytest.mark.django_db
def test_with_inactive_contact(rf, regular_user, admin_user):
get_default_shop() # Create a shop
# Get or create contact for regular user
contact = get_person_contact(regular_user)
assert contact.is_active
contact.is_active = False
contact.save()
request = apply_request_middleware(rf.get("/"), user=regular_user)
mw = ShuupFrontMiddleware()
mw.process_request(request)
assert request.user == AnonymousUser()
assert request.person == AnonymousContact()
assert request.customer == AnonymousContact()
| agpl-3.0 | -1,290,846,705,797,852,200 | 29.722222 | 99 | 0.730561 | false |
hmpf/nav | python/nav/event2.py | 2 | 3807 | #
# Copyright (C) 2015 Uninett AS
#
# This file is part of Network Administration Visualized (NAV).
#
# NAV is free software: you can redistribute it and/or modify it under
# the terms of the GNU General Public License version 3 as published by
# the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details. You should have received a copy of the GNU General Public License
# along with NAV. If not, see <http://www.gnu.org/licenses/>.
#
"""
Next generation event factory functionality for NAV, based on the Django ORM
models from nav.models.event.
"""
from __future__ import absolute_import
from django.utils import six
from nav.models.event import EventQueue
class EventFactory(object):
"""A factory for NAV event dispatching"""
def __init__(self, source, target, event_type,
start_type=None, end_type=None):
"""
Initialize a template for event generation.
:param source: An event source string (e.g. 'ipdevpoll')
:param target: An event target string (e.g. 'eventEngine')
:param event_type: An event type name.
:param start_type: An optional start alert type hint for eventengine
:param end_type: An optional end alert type hint for eventengine
"""
self.source = source
self.target = target
self.event_type = event_type
self.start_type = start_type
self.end_type = end_type
def base(self, device=None, netbox=None, subid='', varmap=None,
alert_type=None):
"""Creates and returns an event base template
:param device: A nav.models.manage.Device object or primary key.
:param netbox: A nav.models.manage.Netbox object or primary key.
:param subid: A subid string, if applicable.
:param varmap: A dictionary of arbitrary event variables to attach.
        :param alert_type: An optional alert type hint for eventEngine; useful
for cases where eventEngine has no specific plugin.
        :return: An unsaved EventQueue instance built from this factory's template
"""
event = EventQueue()
event.source_id = self.source
event.target_id = self.target
event.event_type_id = self.event_type
if isinstance(device, int):
event.device_id = device
else:
event.device = device
if isinstance(netbox, int):
event.netbox_id = netbox
else:
event.netbox = netbox
event.subid = six.text_type(subid)
var = dict(varmap or {})
if alert_type:
var['alerttype'] = alert_type
event.varmap = var
return event
def start(self, device=None, netbox=None, subid='', varmap=None,
alert_type=None):
"""Creates and returns a start event"""
event = self.base(device, netbox, subid, varmap,
alert_type or self.start_type)
event.state = event.STATE_START
return event
def end(self, device=None, netbox=None, subid='', varmap=None,
alert_type=None):
"""Creates and returns an end event"""
event = self.base(device, netbox, subid, varmap,
alert_type or self.end_type)
event.state = event.STATE_END
return event
def notify(self, device=None, netbox=None, subid='', varmap=None,
alert_type=None):
"""Creates and returns a stateless event"""
event = self.base(device, netbox, subid, varmap,
alert_type or self.start_type)
        event.state = event.STATE_STATELESS
return event
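# Minimal usage sketch (hedged; the source/target/type strings and the
# netbox/ifindex variables are illustrative, not defined in this module):
#   link_event = EventFactory('ipdevpoll', 'eventEngine', 'linkState',
#                             'linkDown', 'linkUp')
#   event = link_event.start(netbox=netbox, subid=ifindex)
#   event.save()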
| gpl-3.0 | -7,836,915,006,604,692,000 | 35.605769 | 79 | 0.628054 | false |