Each record has the following columns (ranges are the min–max reported by the dataset viewer; ⌀ marks a nullable column):

| Column | Type | Range / values |
|---|---|---|
| blob_id | string | length 40–40 |
| directory_id | string | length 40–40 |
| path | string | length 3–616 |
| content_id | string | length 40–40 |
| detected_licenses | list | length 0–112 |
| license_type | string | 2 classes |
| repo_name | string | length 5–115 |
| snapshot_id | string | length 40–40 |
| revision_id | string | length 40–40 |
| branch_name | string | 777 classes |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 | 4.92k – 681M, ⌀ |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | string | 22 classes |
| gha_event_created_at | timestamp[us] | 2012-06-04 01:52:49 – 2023-09-14 21:59:50, ⌀ |
| gha_created_at | timestamp[us] | 2008-05-22 07:58:19 – 2023-08-21 12:35:19, ⌀ |
| gha_language | string | 149 classes |
| src_encoding | string | 26 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | string | 188 classes |
| content | string | length 3 – 10.2M |
| authors | list | length 1–1 |
| author_id | string | length 1–132 |
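A dataset with this schema can be streamed and inspected with the Hugging Face `datasets` library. The sketch below is illustrative only: the dataset id `org/dataset-name` is a placeholder assumption, since the real id is not shown in this excerpt.

```python
# Minimal sketch: stream one record of a dataset with the schema above.
# "org/dataset-name" is a placeholder id, not taken from this page.
from datasets import load_dataset

ds = load_dataset("org/dataset-name", split="train", streaming=True)
record = next(iter(ds))

print(record["repo_name"], record["path"], record["license_type"])
print(record["length_bytes"], "bytes,", record["extension"])
print(record["content"][:200])  # first 200 characters of the stored file
```
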
Example 1
path: /aliyun-python-sdk-ons/aliyunsdkons/request/v20190214/OnsMessagePageQueryByTopicRequest.py
repo_name: liumihust/aliyun-openapi-python-sdk | branch_name: refs/heads/master
blob_id: 96a31bd87d182e38af66c9502dda52cbddd18184 | directory_id: 9405aa570ede31a9b11ce07c0da69a2c73ab0570 | content_id: 18afe3920e90c8d8b1f8495a693430691275ac77
snapshot_id: 7fa3f5b7ea5177a9dbffc99e73cf9f00e640b72b | revision_id: c7b5dd4befae4b9c59181654289f9272531207ef
detected_licenses: ["Apache-2.0"] | license_type: permissive | gha_license_id: NOASSERTION
visit_date: 2020-09-25T12:10:14.245354 | revision_date: 2019-12-04T14:43:27 | committer_date: 2019-12-04T14:43:27
github_id: 226,002,339 | star_events_count: 1 | fork_events_count: 0
gha_event_created_at: 2019-12-05T02:50:35 | gha_created_at: 2019-12-05T02:50:34 | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 2,300 | extension: py
content:
```python
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
#
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from aliyunsdkcore.request import RpcRequest
class OnsMessagePageQueryByTopicRequest(RpcRequest):
def __init__(self):
RpcRequest.__init__(self, 'Ons', '2019-02-14', 'OnsMessagePageQueryByTopic','ons')
def get_PreventCache(self):
return self.get_query_params().get('PreventCache')
def set_PreventCache(self,PreventCache):
self.add_query_param('PreventCache',PreventCache)
def get_InstanceId(self):
return self.get_query_params().get('InstanceId')
def set_InstanceId(self,InstanceId):
self.add_query_param('InstanceId',InstanceId)
def get_PageSize(self):
return self.get_query_params().get('PageSize')
def set_PageSize(self,PageSize):
self.add_query_param('PageSize',PageSize)
def get_Topic(self):
return self.get_query_params().get('Topic')
def set_Topic(self,Topic):
self.add_query_param('Topic',Topic)
def get_EndTime(self):
return self.get_query_params().get('EndTime')
def set_EndTime(self,EndTime):
self.add_query_param('EndTime',EndTime)
def get_BeginTime(self):
return self.get_query_params().get('BeginTime')
def set_BeginTime(self,BeginTime):
self.add_query_param('BeginTime',BeginTime)
def get_CurrentPage(self):
return self.get_query_params().get('CurrentPage')
def set_CurrentPage(self,CurrentPage):
self.add_query_param('CurrentPage',CurrentPage)
def get_TaskId(self):
return self.get_query_params().get('TaskId')
def set_TaskId(self,TaskId):
        self.add_query_param('TaskId',TaskId)
```
authors: ["[email protected]"]

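The record above is a generated request class from the Alibaba Cloud ONS SDK. As a hedged usage sketch, such a request is typically dispatched through `aliyunsdkcore`'s `AcsClient`; the credentials, region, and parameter values below are placeholders, and the millisecond time range is an assumption about the ONS API rather than something stated in the record.

```python
# Hedged usage sketch for OnsMessagePageQueryByTopicRequest (placeholders marked).
from aliyunsdkcore.client import AcsClient
from aliyunsdkons.request.v20190214.OnsMessagePageQueryByTopicRequest import (
    OnsMessagePageQueryByTopicRequest,
)

client = AcsClient("<access-key-id>", "<access-key-secret>", "cn-hangzhou")

request = OnsMessagePageQueryByTopicRequest()
request.set_InstanceId("<instance-id>")
request.set_Topic("<topic>")
request.set_BeginTime(1575400000000)  # assumed: epoch milliseconds
request.set_EndTime(1575403600000)    # assumed: epoch milliseconds
request.set_PageSize(50)

print(client.do_action_with_exception(request))
```
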
Example 2
path: /src/euler_python_package/euler_python/medium/p194.py
repo_name: wilsonify/euler | branch_name: refs/heads/master
blob_id: 3055dd516e30c7c07aa5907cd892549bbcf990a3 | directory_id: 1fccf52e0a694ec03aac55e42795487a69ef1bd4 | content_id: 8d5449446e6261f855f92632dbc27e892a2191d5
snapshot_id: 3b7e742b520ee3980e54e523a018cd77f7246123 | revision_id: 5214b776175e6d76a7c6d8915d0e062d189d9b79
detected_licenses: ["MIT"] | license_type: permissive | gha_license_id: null
visit_date: 2020-05-27T12:15:50.417469 | revision_date: 2019-09-14T22:42:35 | committer_date: 2019-09-14T22:42:35
github_id: 188,614,451 | star_events_count: 0 | fork_events_count: 0
gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 27 | extension: py
content:
```python
def problem194():
pass
```
authors: ["[email protected]"]

Example 3
path: /Code/CodeRecords/2524/60618/287300.py
repo_name: AdamZhouSE/pythonHomework | branch_name: refs/heads/master
blob_id: 967b041df488aabe60dd198a880e7b38e873786f | directory_id: 163bbb4e0920dedd5941e3edfb2d8706ba75627d | content_id: 48f5b7e392369d39af807d2dddd3b1123248d6db
snapshot_id: a25c120b03a158d60aaa9fdc5fb203b1bb377a19 | revision_id: ffc5606817a666aa6241cfab27364326f5c066ff
detected_licenses: [] | license_type: no_license | gha_license_id: null
visit_date: 2022-11-24T08:05:22.122011 | revision_date: 2020-07-28T16:21:24 | committer_date: 2020-07-28T16:21:24
github_id: 259,576,640 | star_events_count: 2 | fork_events_count: 1
gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 16 | extension: py
content:
```python
print("1 3 2 4")
```
authors: ["[email protected]"]

Example 4
path: /sdk/cosmos/azure-mgmt-cosmosdb/azure/mgmt/cosmosdb/operations/_private_endpoint_connections_operations.py
repo_name: Azure/azure-sdk-for-python | branch_name: refs/heads/main
blob_id: 31f6289142f123b7aa1f3408d8c68b0a4c08744b | directory_id: f576f0ea3725d54bd2551883901b25b863fe6688 | content_id: 4dae2b4662729d3e2714e093eb7ff038afd17b81
snapshot_id: 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | revision_id: c2ca191e736bb06bfbbbc9493e8325763ba990bb
detected_licenses: ["LicenseRef-scancode-generic-cla", "MIT", "LGPL-2.1-or-later"] | license_type: permissive | gha_license_id: MIT
visit_date: 2023-09-06T09:30:13.135012 | revision_date: 2023-09-06T01:08:06 | committer_date: 2023-09-06T01:08:06
github_id: 4,127,088 | star_events_count: 4,046 | fork_events_count: 2,755
gha_event_created_at: 2023-09-14T21:48:49 | gha_created_at: 2012-04-24T16:46:12 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 34,172 | extension: py
content:
```python
# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from io import IOBase
from typing import Any, Callable, Dict, IO, Iterable, Optional, TypeVar, Union, cast, overload
import urllib.parse
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
from .._serialization import Serializer
from .._vendor import _convert_request, _format_url_section
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
_SERIALIZER = Serializer()
_SERIALIZER.client_side_validation = False
def build_list_by_database_account_request(
resource_group_name: str, account_name: str, subscription_id: str, **kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"accountName": _SERIALIZER.url(
"account_name", account_name, "str", max_length=50, min_length=3, pattern=r"^[a-z0-9]+(-[a-z0-9]+)*"
),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_get_request(
resource_group_name: str,
account_name: str,
private_endpoint_connection_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"accountName": _SERIALIZER.url(
"account_name", account_name, "str", max_length=50, min_length=3, pattern=r"^[a-z0-9]+(-[a-z0-9]+)*"
),
"privateEndpointConnectionName": _SERIALIZER.url(
"private_endpoint_connection_name", private_endpoint_connection_name, "str"
),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="GET", url=_url, params=_params, headers=_headers, **kwargs)
def build_create_or_update_request(
resource_group_name: str,
account_name: str,
private_endpoint_connection_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-03-15-preview"))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"accountName": _SERIALIZER.url(
"account_name", account_name, "str", max_length=50, min_length=3, pattern=r"^[a-z0-9]+(-[a-z0-9]+)*"
),
"privateEndpointConnectionName": _SERIALIZER.url(
"private_endpoint_connection_name", private_endpoint_connection_name, "str"
),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
if content_type is not None:
_headers["Content-Type"] = _SERIALIZER.header("content_type", content_type, "str")
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="PUT", url=_url, params=_params, headers=_headers, **kwargs)
def build_delete_request(
resource_group_name: str,
account_name: str,
private_endpoint_connection_name: str,
subscription_id: str,
**kwargs: Any
) -> HttpRequest:
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", "2023-03-15-preview"))
accept = _headers.pop("Accept", "application/json")
# Construct URL
_url = kwargs.pop(
"template_url",
"/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}",
) # pylint: disable=line-too-long
path_format_arguments = {
"subscriptionId": _SERIALIZER.url("subscription_id", subscription_id, "str", min_length=1),
"resourceGroupName": _SERIALIZER.url(
"resource_group_name", resource_group_name, "str", max_length=90, min_length=1
),
"accountName": _SERIALIZER.url(
"account_name", account_name, "str", max_length=50, min_length=3, pattern=r"^[a-z0-9]+(-[a-z0-9]+)*"
),
"privateEndpointConnectionName": _SERIALIZER.url(
"private_endpoint_connection_name", private_endpoint_connection_name, "str"
),
}
_url: str = _format_url_section(_url, **path_format_arguments) # type: ignore
# Construct parameters
_params["api-version"] = _SERIALIZER.query("api_version", api_version, "str")
# Construct headers
_headers["Accept"] = _SERIALIZER.header("accept", accept, "str")
return HttpRequest(method="DELETE", url=_url, params=_params, headers=_headers, **kwargs)
class PrivateEndpointConnectionsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.cosmosdb.CosmosDBManagementClient`'s
:attr:`private_endpoint_connections` attribute.
"""
models = _models
def __init__(self, *args, **kwargs):
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list_by_database_account(
self, resource_group_name: str, account_name: str, **kwargs: Any
) -> Iterable["_models.PrivateEndpointConnection"]:
"""List all private endpoint connections on a Cosmos DB account.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param account_name: Cosmos DB database account name. Required.
:type account_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either PrivateEndpointConnection or the result of
cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.cosmosdb.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PrivateEndpointConnectionListResult] = kwargs.pop("cls", None)
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_database_account_request(
resource_group_name=resource_group_name,
account_name=account_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_by_database_account.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
else:
# make call to next link with the client's api-version
_parsed_next_link = urllib.parse.urlparse(next_link)
_next_request_params = case_insensitive_dict(
{
key: [urllib.parse.quote(v) for v in value]
for key, value in urllib.parse.parse_qs(_parsed_next_link.query).items()
}
)
_next_request_params["api-version"] = self._config.api_version
request = HttpRequest(
"GET", urllib.parse.urljoin(next_link, _parsed_next_link.path), params=_next_request_params
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
request.method = "GET"
return request
def extract_data(pipeline_response):
deserialized = self._deserialize("PrivateEndpointConnectionListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem) # type: ignore
return None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(get_next, extract_data)
list_by_database_account.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections"
}
@distributed_trace
def get(
self, resource_group_name: str, account_name: str, private_endpoint_connection_name: str, **kwargs: Any
) -> _models.PrivateEndpointConnection:
"""Gets a private endpoint connection.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param account_name: Cosmos DB database account name. Required.
:type account_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection. Required.
:type private_endpoint_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: PrivateEndpointConnection or the result of cls(response)
:rtype: ~azure.mgmt.cosmosdb.models.PrivateEndpointConnection
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
request = build_get_request(
resource_group_name=resource_group_name,
account_name=account_name,
private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}"
}
def _create_or_update_initial(
self,
resource_group_name: str,
account_name: str,
private_endpoint_connection_name: str,
parameters: Union[_models.PrivateEndpointConnection, IO],
**kwargs: Any
) -> Optional[_models.PrivateEndpointConnection]:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[Optional[_models.PrivateEndpointConnection]] = kwargs.pop("cls", None)
content_type = content_type or "application/json"
_json = None
_content = None
if isinstance(parameters, (IOBase, bytes)):
_content = parameters
else:
_json = self._serialize.body(parameters, "PrivateEndpointConnection")
request = build_create_or_update_request(
resource_group_name=resource_group_name,
account_name=account_name,
private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
content_type=content_type,
json=_json,
content=_content,
template_url=self._create_or_update_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}"
}
@overload
def begin_create_or_update(
self,
resource_group_name: str,
account_name: str,
private_endpoint_connection_name: str,
parameters: _models.PrivateEndpointConnection,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.PrivateEndpointConnection]:
"""Approve or reject a private endpoint connection with a given name.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param account_name: Cosmos DB database account name. Required.
:type account_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection. Required.
:type private_endpoint_connection_name: str
:param parameters: Required.
:type parameters: ~azure.mgmt.cosmosdb.models.PrivateEndpointConnection
:keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either PrivateEndpointConnection or the result
of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@overload
def begin_create_or_update(
self,
resource_group_name: str,
account_name: str,
private_endpoint_connection_name: str,
parameters: IO,
*,
content_type: str = "application/json",
**kwargs: Any
) -> LROPoller[_models.PrivateEndpointConnection]:
"""Approve or reject a private endpoint connection with a given name.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param account_name: Cosmos DB database account name. Required.
:type account_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection. Required.
:type private_endpoint_connection_name: str
:param parameters: Required.
:type parameters: IO
:keyword content_type: Body Parameter content-type. Content type parameter for binary body.
Default value is "application/json".
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either PrivateEndpointConnection or the result
of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
@distributed_trace
def begin_create_or_update(
self,
resource_group_name: str,
account_name: str,
private_endpoint_connection_name: str,
parameters: Union[_models.PrivateEndpointConnection, IO],
**kwargs: Any
) -> LROPoller[_models.PrivateEndpointConnection]:
"""Approve or reject a private endpoint connection with a given name.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param account_name: Cosmos DB database account name. Required.
:type account_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection. Required.
:type private_endpoint_connection_name: str
:param parameters: Is either a PrivateEndpointConnection type or a IO type. Required.
:type parameters: ~azure.mgmt.cosmosdb.models.PrivateEndpointConnection or IO
:keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
Default value is None.
:paramtype content_type: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either PrivateEndpointConnection or the result
of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.cosmosdb.models.PrivateEndpointConnection]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
content_type: Optional[str] = kwargs.pop("content_type", _headers.pop("Content-Type", None))
cls: ClsType[_models.PrivateEndpointConnection] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
account_name=account_name,
private_endpoint_connection_name=private_endpoint_connection_name,
parameters=parameters,
api_version=api_version,
content_type=content_type,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize("PrivateEndpointConnection", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_create_or_update.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}"
}
def _delete_initial( # pylint: disable=inconsistent-return-statements
self, resource_group_name: str, account_name: str, private_endpoint_connection_name: str, **kwargs: Any
) -> None:
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
request = build_delete_request(
resource_group_name=resource_group_name,
account_name=account_name,
private_endpoint_connection_name=private_endpoint_connection_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self._delete_initial.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url)
_stream = False
pipeline_response: PipelineResponse = self._client._pipeline.run( # pylint: disable=protected-access
request, stream=_stream, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorResponse, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}"
}
@distributed_trace
def begin_delete(
self, resource_group_name: str, account_name: str, private_endpoint_connection_name: str, **kwargs: Any
) -> LROPoller[None]:
"""Deletes a private endpoint connection with a given name.
:param resource_group_name: The name of the resource group. The name is case insensitive.
Required.
:type resource_group_name: str
:param account_name: Cosmos DB database account name. Required.
:type account_name: str
:param private_endpoint_connection_name: The name of the private endpoint connection. Required.
:type private_endpoint_connection_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling. Pass in False for this
operation to not poll, or pass in your own initialized polling object for a personal polling
strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no
Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version: str = kwargs.pop("api_version", _params.pop("api-version", self._config.api_version))
cls: ClsType[None] = kwargs.pop("cls", None)
polling: Union[bool, PollingMethod] = kwargs.pop("polling", True)
lro_delay = kwargs.pop("polling_interval", self._config.polling_interval)
cont_token: Optional[str] = kwargs.pop("continuation_token", None)
if cont_token is None:
raw_result = self._delete_initial( # type: ignore
resource_group_name=resource_group_name,
account_name=account_name,
private_endpoint_connection_name=private_endpoint_connection_name,
api_version=api_version,
cls=lambda x, y, z: x,
headers=_headers,
params=_params,
**kwargs
)
kwargs.pop("error_map", None)
def get_long_running_output(pipeline_response): # pylint: disable=inconsistent-return-statements
if cls:
return cls(pipeline_response, None, {})
if polling is True:
polling_method: PollingMethod = cast(PollingMethod, ARMPolling(lro_delay, **kwargs))
elif polling is False:
polling_method = cast(PollingMethod, NoPolling())
else:
polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output,
)
return LROPoller(self._client, raw_result, get_long_running_output, polling_method) # type: ignore
begin_delete.metadata = {
"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.DocumentDB/databaseAccounts/{accountName}/privateEndpointConnections/{privateEndpointConnectionName}"
}
```
authors: ["[email protected]"]

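As its own docstring notes, the operations class in this record is reached through the `private_endpoint_connections` attribute of `CosmosDBManagementClient`. Below is a hedged usage sketch; the subscription, resource group, account, and connection names are placeholders, and the `azure-identity` credential is an assumed but conventional choice.

```python
# Hedged sketch: list private endpoint connections and delete one.
from azure.identity import DefaultAzureCredential
from azure.mgmt.cosmosdb import CosmosDBManagementClient

client = CosmosDBManagementClient(DefaultAzureCredential(), "<subscription-id>")

# list_by_database_account() returns an ItemPaged iterator.
for conn in client.private_endpoint_connections.list_by_database_account(
    "<resource-group>", "<cosmos-account>"
):
    print(conn.name)

# begin_delete() returns an LROPoller; result() blocks until the LRO finishes.
client.private_endpoint_connections.begin_delete(
    "<resource-group>", "<cosmos-account>", "<connection-name>"
).result()
```
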
Example 5
path: /.PyCharmCE2019.2/system/python_stubs/cache/2b64a136264952b5fc77d4c27a781542cf8f659109a9e46ce1b22875cea43541/_cython_0_29_2.py
repo_name: Karishma00/AnsiblePractice | branch_name: refs/heads/master
blob_id: 5a68c04f899f47c89d7192b8ebdfe56a6a2f17e4 | directory_id: 057d662a83ed85897e9906d72ea90fe5903dccc5 | content_id: f3272e9cfb67ae70598f0930f6534bae4d67a51e
snapshot_id: 19a4980b1f6cca7b251f2cbea3acf9803db6e016 | revision_id: 932558d48869560a42ba5ba3fb72688696e1868a
detected_licenses: [] | license_type: no_license | gha_license_id: null
visit_date: 2020-08-05T00:05:31.679220 | revision_date: 2019-10-04T13:07:29 | committer_date: 2019-10-04T13:07:29
github_id: 212,324,468 | star_events_count: 0 | fork_events_count: 0
gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 4,255 | extension: py
content:
```python
# encoding: utf-8
# module _cython_0_29_2
# from /usr/lib/python3/dist-packages/brlapi.cpython-37m-x86_64-linux-gnu.so
# by generator 1.147
# no doc
# no imports
# Variables with simple values
__loader__ = None
__spec__ = None
# no functions
# classes
class cython_function_or_method(object):
def __call__(self, *args, **kwargs): # real signature unknown
""" Call self as a function. """
pass
def __get__(self, *args, **kwargs): # real signature unknown
""" Return an attribute of instance, which is of type owner. """
pass
def __init__(self, *args, **kwargs): # real signature unknown
pass
def __reduce__(self, *args, **kwargs): # real signature unknown
pass
def __repr__(self, *args, **kwargs): # real signature unknown
""" Return repr(self). """
pass
func_closure = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
func_code = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
func_defaults = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
func_dict = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
func_doc = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
func_globals = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
func_name = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__annotations__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__closure__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__code__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__defaults__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__globals__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__kwdefaults__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__self__ = property(lambda self: object(), lambda self, v: None, lambda self: None) # default
__dict__ = None # (!) real value is "mappingproxy({'__repr__': <slot wrapper '__repr__' of 'cython_function_or_method' objects>, '__call__': <slot wrapper '__call__' of 'cython_function_or_method' objects>, '__get__': <slot wrapper '__get__' of 'cython_function_or_method' objects>, '__reduce__': <method '__reduce__' of 'cython_function_or_method' objects>, '__module__': <member '__module__' of 'cython_function_or_method' objects>, 'func_doc': <attribute 'func_doc' of 'cython_function_or_method' objects>, '__doc__': <attribute '__doc__' of 'cython_function_or_method' objects>, 'func_name': <attribute 'func_name' of 'cython_function_or_method' objects>, '__name__': <attribute '__name__' of 'cython_function_or_method' objects>, '__qualname__': <attribute '__qualname__' of 'cython_function_or_method' objects>, '__self__': <attribute '__self__' of 'cython_function_or_method' objects>, 'func_dict': <attribute 'func_dict' of 'cython_function_or_method' objects>, '__dict__': <attribute '__dict__' of 'cython_function_or_method' objects>, 'func_globals': <attribute 'func_globals' of 'cython_function_or_method' objects>, '__globals__': <attribute '__globals__' of 'cython_function_or_method' objects>, 'func_closure': <attribute 'func_closure' of 'cython_function_or_method' objects>, '__closure__': <attribute '__closure__' of 'cython_function_or_method' objects>, 'func_code': <attribute 'func_code' of 'cython_function_or_method' objects>, '__code__': <attribute '__code__' of 'cython_function_or_method' objects>, 'func_defaults': <attribute 'func_defaults' of 'cython_function_or_method' objects>, '__defaults__': <attribute '__defaults__' of 'cython_function_or_method' objects>, '__kwdefaults__': <attribute '__kwdefaults__' of 'cython_function_or_method' objects>, '__annotations__': <attribute '__annotations__' of 'cython_function_or_method' objects>})"
__name__ = 'cython_function_or_method'
__qualname__ = 'cython_function_or_method'
```
authors: ["[email protected]"]

Example 6
path: /LAB02/04-CloudAlbum-XRAY/cloudalbum/model/models.py
repo_name: liks79/aws-chalice-migration-workshop | branch_name: refs/heads/master
blob_id: 8fbc50489eff50ed424d41fd9e73da22a933f129 | directory_id: 4b17b361017740a4113ba358460293e55c9bee49 | content_id: f21c3b69306956a5492bd0f320264da3289f4e9d
snapshot_id: aa01fa5a585a548c8408ba7448d731deefbbbd18 | revision_id: 5115117504a3e2b897dc8444be58de0e4e12586a
detected_licenses: ["MIT"] | license_type: permissive | gha_license_id: MIT
visit_date: 2022-12-25T09:50:44.821495 | revision_date: 2018-11-20T03:05:52 | committer_date: 2018-11-20T03:05:52
github_id: 139,544,736 | star_events_count: 6 | fork_events_count: 8
gha_event_created_at: 2022-12-08T02:17:36 | gha_created_at: 2018-07-03T07:26:11 | gha_language: JavaScript
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 3,505 | extension: py
content:
```python
"""
model.models.py
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
CloudAlbum is a sample application for TechSummit 2018 workshop.
:copyright: © 2018 by Sungshik Jou.
:license: BSD, see LICENSE for more details.
"""
from sqlalchemy import Float, DateTime, ForeignKey, Integer, String
from flask_login import UserMixin
from flask_sqlalchemy import SQLAlchemy
from cloudalbum import login
db = SQLAlchemy()
class User(UserMixin, db.Model):
"""
Database Model class for User table
"""
__tablename__ = 'User'
id = db.Column(Integer, primary_key=True)
username = db.Column(String(50), unique=False)
email = db.Column(String(50), unique=True)
password = db.Column(String(100), unique=False)
photos = db.relationship('Photo',
backref='user',
cascade='all, delete, delete-orphan')
def __init__(self, name, email, password):
self.username = name
self.email = email
self.password = password
def __repr__(self):
return '<%r %r %r>' % (self.__tablename__, self.username, self.email)
class Photo(db.Model):
"""
Database Model class for Photo table
"""
__tablename__ = 'Photo'
id = db.Column(Integer, primary_key=True)
user_id = db.Column(Integer, ForeignKey(User.id))
tags = db.Column(String(400), unique=False)
desc = db.Column(String(400), unique=False)
filename_orig = db.Column(String(400), unique=False)
filename = db.Column(String(400), unique=False)
filesize = db.Column(Integer, unique=False)
geotag_lat = db.Column(Float, unique=False)
geotag_lng = db.Column(Float, unique=False)
upload_date = db.Column(DateTime, unique=False)
taken_date = db.Column(DateTime, unique=False)
make = db.Column(String(400), unique=False)
model = db.Column(String(400), unique=False)
width = db.Column(String(400), unique=False)
height = db.Column(String(400), unique=False)
city = db.Column(String(400), unique=False)
nation = db.Column(String(400), unique=False)
address = db.Column(String(400), unique=False)
def __init__(self, user_id, tags, desc, filename_orig, filename, filesize, geotag_lat, geotag_lng, upload_date,
taken_date, make, model, width, height, city, nation, address):
"""Initialize"""
self.user_id = user_id
self.tags = tags
self.desc = desc
self.filename_orig = filename_orig
self.filename = filename
self.filesize = filesize
self.geotag_lat = geotag_lat
self.geotag_lng = geotag_lng
self.upload_date = upload_date
self.taken_date = taken_date
self.make = make
self.model = model
self.width = width
self.height = height
self.city = city
self.nation = nation
self.address = address
def __repr__(self):
"""print information"""
return '<%r %r %r>' % (self.__tablename__, self.user_id, self.upload_date)
@login.user_loader
def load_user(id):
"""
User information loader for authenticated user
:param id: user id
:return: user record from User table
"""
# user = User.query.get(int(id))
#
# minutes = conf['SESSION_TIMEOUT']
#
# if user.last_seen < (datetime.utcnow() - datetime.timedelta(minutes=minutes)):
# # Session has timed out
# return None
#
# return User.query.get(user)
    return User.query.get(int(id))
```
authors: ["[email protected]"]

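A hedged sketch of exercising these Flask-SQLAlchemy models follows. It assumes the `cloudalbum` package (and its `login` manager, imported at module load) is importable, and it uses an in-memory SQLite database; none of this setup appears in the record itself.

```python
# Hedged sketch: bind the models to a throwaway Flask app and insert a user.
from flask import Flask
from cloudalbum.model.models import db, User

app = Flask(__name__)
app.config["SQLALCHEMY_DATABASE_URI"] = "sqlite:///:memory:"
db.init_app(app)

with app.app_context():
    db.create_all()
    db.session.add(User("alice", "alice@example.com", "<hashed-password>"))
    db.session.commit()
    # User.__repr__ prints the table name, username, and email:
    print(User.query.filter_by(email="alice@example.com").first())
```
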
Example 7
path: /scripts/alphas_archive/zw_contfut/alpha_ichimokucloud_long_bullish_feb02_.py
repo_name: trendmanagement/tmqrexo_alexveden | branch_name: refs/heads/master
blob_id: 2e12d79fa9ad4afdc9d45903736aa325321b8bdf | directory_id: 780b01976dad99c7c2ed948b8473aa4e2d0404ba | content_id: c5b50c84a27561cd42e497c41900c80a6f77b56c
snapshot_id: a8ad699c2c3df4ce283346d287aff4364059a351 | revision_id: 4d92e2ee2bc97ea2fcf075382d4a5f80ce3d72e4
detected_licenses: [] | license_type: no_license | gha_license_id: null
visit_date: 2021-03-16T08:38:00.518593 | revision_date: 2019-01-23T08:30:18 | committer_date: 2019-01-23T08:30:18
github_id: 56,336,692 | star_events_count: 1 | fork_events_count: 1
gha_event_created_at: 2019-01-22T14:21:03 | gha_created_at: 2016-04-15T17:05:53 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1,430 | extension: py
content:
```python
#
#
# Automatically generated file
# Created at: 2017-02-09 10:10:05.686710
#
from strategies.strategy_ichimokucloud import StrategyIchimokuCloud
from backtester.strategy import OptParam
from backtester.swarms.rebalancing import SwarmRebalance
from backtester.costs import CostsManagerEXOFixed
from backtester.swarms.rankingclasses import RankerBestWithCorrel
from backtester.strategy import OptParamArray
STRATEGY_NAME = StrategyIchimokuCloud.name
STRATEGY_SUFFIX = "_Bullish_Feb02_"
STRATEGY_CONTEXT = {
'swarm': {
'members_count': 1,
'ranking_class': RankerBestWithCorrel(window_size=-1, correl_threshold=-0.5),
'rebalance_time_function': SwarmRebalance.every_friday,
},
'strategy': {
'exo_name': 'ZW_ContFut',
'class': StrategyIchimokuCloud,
'opt_params': [
OptParamArray('Direction', [1]),
OptParam('conversion_line_period', 9, 2, 22, 5),
OptParam('base_line_period', 26, 13, 13, 2),
OptParam('leading_spans_lookahead_period', 26, 26, 26, 13),
OptParam('leading_span_b_period', 52, 2, 106, 30),
OptParamArray('RulesIndex', [14, 6, 13]),
OptParam('MedianPeriod', 5, 14, 26, 12),
],
},
'costs': {
'context': {
'costs_options': 3.0,
'costs_futures': 3.0,
},
'manager': CostsManagerEXOFixed,
},
}
```
authors: ["[email protected]"]

Example 8
path: /protein_lm/evaluation.py
repo_name: Jimmy-INL/google-research | branch_name: refs/heads/master
blob_id: d2f0d4f6c0fbf446e71d0d8932ea484f5254a496 | directory_id: e10a6d844a286db26ef56469e31dc8488a8c6f0e | content_id: 65526ef1936021cd9841bd15b70a9acb1961835f
snapshot_id: 54ad5551f97977f01297abddbfc8a99a7900b791 | revision_id: 5573d9c5822f4e866b6692769963ae819cb3f10d
detected_licenses: ["Apache-2.0", "CC-BY-4.0"] | license_type: permissive | gha_license_id: Apache-2.0
visit_date: 2023-04-07T19:43:54.483068 | revision_date: 2023-03-24T16:27:28 | committer_date: 2023-03-24T16:32:17
github_id: 282,682,170 | star_events_count: 1 | fork_events_count: 0
gha_event_created_at: 2020-07-26T15:50:32 | gha_created_at: 2020-07-26T15:50:31 | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 4,563 | extension: py
content:
```python
# coding=utf-8
# Copyright 2022 The Google Research Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Baselines and evaluation metrics for Jax language models."""
import itertools
from flax.training import common_utils
import jax
import jax.numpy as jnp
import numpy as np
from protein_lm import utils
class EmpiricalBaseline():
"""Empirical baseline as described in the ProGen paper.
References:
[ProGen](https://www.biorxiv.org/content/10.1101/2020.03.07.982272v1)
"""
def __init__(self, domain, train_ds, alpha=1.):
"""Creates an instance of this class.
# TODO(gandreea): It's unclear how to handle the length (EOS token). The
# fact that the uniform baseline is reported as (perplexity=25,
# accuracy=0.04) suggests that the EOS prediction step is not included.
Args:
domain: An instance of domains.Domain.
train_ds: A tf.data.Dataset containing the data to be used for computing
the empirical distribution.
alpha: A float indicating the Laplace smoothing constant.
"""
self._vocab_size = domain.vocab_size
self._token_indices = [
idx for idx in range(len(domain.vocab.tokens))
if idx != domain.vocab.bos and idx != domain.vocab.eos]
self._mask_token = domain.vocab.bos
self._empirical_dist = np.zeros((len(self._token_indices),))
for batch in train_ds:
batch = np.atleast_2d(batch)
batch_one_hot = np.eye(self._vocab_size)[batch]
batch_one_hot = np.take(batch_one_hot, self._token_indices, axis=-1)
self._empirical_dist += np.sum(np.sum(batch_one_hot, axis=0), axis=0)
self._empirical_dist += alpha # Laplace smoothing.
self._empirical_dist /= np.sum(self._empirical_dist)
def evaluate_batch(self, batch):
"""Computes all metrics on the given batch."""
labels = np.atleast_2d(batch)
logits = np.log(self._empirical_dist)
logits = np.tile(logits, list(labels.shape) + [1])
weights = np.where(labels != self._mask_token, 1, 0)
metrics = utils.compute_metrics(logits, labels, weights)
for key, value in metrics.items():
metrics[key] = jnp.atleast_1d(value)
return metrics
def combine_metrics(step_metrics):
"""Given a list of metric dicts, combine to a single summary metrics dict.
Args:
step_metrics: A dict with (metric name, metric value) items. Contains summed
metrics and the corresponding denominator (the number of next-token
prediction instances). Each metric value have at least one dimension.
Returns:
A dict with (metric name, metric value) items containing combined metrics.
"""
metrics_all = common_utils.get_metrics(step_metrics)
lr = None
if 'learning_rate' in metrics_all:
lr = metrics_all.pop('learning_rate').mean()
metrics_sums = jax.tree_map(jnp.sum, metrics_all)
denominator = metrics_sums.pop('denominator')
summary = jax.tree_map(lambda x: x / denominator, metrics_sums) # pylint: disable=cell-var-from-loop
if lr is not None:
summary['learning_rate'] = lr
# Calculate (clipped) perplexity after averaging log-perplexities:
if 'loss' in summary:
summary['perplexity'] = jnp.clip(jnp.exp(summary['loss']), a_max=1.0e4)
return summary
def evaluate(model, eval_ds, num_eval_steps=None):
"""Evaluates model on eval_ds for num_eval_steps.
Args:
model: A model to use for evaluation. Must have an evaluate_batch() method.
eval_ds: A tensorflow dataset containing the data to be used for evaluation.
num_eval_steps: If given, evaluate for this many steps, otherwise use the
entire dataset.
Returns:
A dictionary with (metric name, metric value) items.
"""
eval_metrics = []
eval_iter = iter(eval_ds)
if num_eval_steps is None:
num_iter = itertools.repeat(1)
else:
num_iter = range(num_eval_steps)
for _, eval_batch in zip(num_iter, eval_iter):
eval_batch = np.asarray(eval_batch)
metrics = model.evaluate_batch(eval_batch)
eval_metrics.append(metrics)
eval_summary = combine_metrics(eval_metrics)
return eval_summary
```
authors: ["[email protected]"]

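The core of `EmpiricalBaseline` above is a Laplace-smoothed empirical token distribution. Here is a standalone numpy illustration of that smoothing step, using a toy count vector rather than data from the class:

```python
# Laplace smoothing as used in EmpiricalBaseline.__init__ (toy illustration).
import numpy as np

counts = np.array([5.0, 0.0, 3.0])  # per-token counts from a toy corpus
alpha = 1.0                         # smoothing constant

dist = counts + alpha               # every token receives pseudo-count alpha...
dist /= dist.sum()                  # ...then the counts are normalized
print(dist)                         # [0.5455 0.0909 0.3636]

log_probs = np.log(dist)            # used as constant logits at evaluation time
```
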
fe3182900da8d8bb4dbc2094bba70c61c293ed2a | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /fyyJRDHcTe9REs4Ni_2.py | fa8d5a9f92273ccb98b5f5ce47ca0d2a51943ab1 | []
| no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,033 | py | """
Create a function that takes three arguments (first dictionary, second
dictionary, key) in order to:
1. Return the boolean `True` if both dictionaries have the same values for the same keys.
2. If the dictionaries don't match, return the string `"Not the same"`, or the string `"One's empty"` if only one of the dictionaries contains the given key.
### Examples
dict_first = { "sky": "temple", "horde": "orcs", "people": 12, "story": "fine", "sun": "bright" }
dict_second = { "people": 12, "sun": "star", "book": "bad" }
check(dict_first, dict_second, "horde") ➞ "One's empty"
check(dict_first, dict_second, "people") ➞ True
check(dict_first, dict_second, "sun") ➞ "Not the same"
### Notes
* Dictionaries are an unordered data type.
* Double quotes may be helpful.
* `KeyError` can occur when trying to access a dictionary key that doesn't exist.
"""
def check(d1, d2, k):
try: return ["Not the same", True][d1[k] == d2[k]]
except KeyError: return "One's empty"
```
authors: ["[email protected]"]

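Running the `check` function above on the docstring's own examples reproduces the documented results:

```python
dict_first = {"sky": "temple", "horde": "orcs", "people": 12, "story": "fine", "sun": "bright"}
dict_second = {"people": 12, "sun": "star", "book": "bad"}

print(check(dict_first, dict_second, "horde"))   # "One's empty"
print(check(dict_first, dict_second, "people"))  # True
print(check(dict_first, dict_second, "sun"))     # "Not the same"
```
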
Example 10
path: /python/pytest-labs/mock_lab_1.py
repo_name: marcosptf/fedora | branch_name: refs/heads/master
blob_id: 74b45ad1bf32de78395b3bc6704098a560d4dad1 | directory_id: ce083128fa87ca86c65059893aa8882d088461f5 | content_id: 205cbdd5b5cb3ab89040e1c723b30e8800b68fc2
snapshot_id: 581a446e7f81d8ae9a260eafb92814bc486ee077 | revision_id: 359db63ff1fa79696b7bc803bcfa0042bff8ab44
detected_licenses: [] | license_type: no_license | gha_license_id: null
visit_date: 2023-04-06T14:53:40.378260 | revision_date: 2023-03-26T00:47:52 | committer_date: 2023-03-26T00:47:52
github_id: 26,059,824 | star_events_count: 6 | fork_events_count: 5
gha_event_created_at: 2022-12-08T00:43:21 | gha_created_at: 2014-11-01T18:48:56 | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 413 | extension: py
content:
```python
#!/usr/bin/env python2
#encoding: UTF-8
import mock
class Target(object):
def apply(value, are_you_sure):
if are_you_sure:
return value
else:
return None
def method(target, value):
return target.apply(value)
#pytest using mock.Mock() instance
def test_method():
target = mock.Mock()
method(target, "value")
target.apply.assert_called_with("value")
```
authors: ["[email protected]"]

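Note that `Target.apply` as written omits `self` (and the test never supplies `are_you_sure`); that never matters here because the test hands `method` a `mock.Mock()`, so the real class is never exercised. The assertion style can be seen by forcing a mismatch; this sketch assumes the same external `mock` package the file imports is installed:

```python
# Sketch: assert_called_with raises AssertionError on a mismatched call.
import mock

target = mock.Mock()
target.apply("other")
try:
    target.apply.assert_called_with("value")
except AssertionError as exc:
    print("assertion failed as expected:", exc)
```
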
Example 11
path: /02_oop/tr/src/21_str/str_tr1.py
repo_name: murayama333/python2020 | branch_name: refs/heads/master
blob_id: bfe5cbc0d0982f816c0f439ccfe312343bd3a6b6 | directory_id: 5178f5aa20a857f8744fb959e8b246079c800c65 | content_id: 28f1d95b840592e11cb603addacf54d232aacff2
snapshot_id: 4c3f35a0d78426c96f0fbaed335f9a63227205da | revision_id: 8afe367b8b42fcf9489fff1da1866e88f3af3b33
detected_licenses: [] | license_type: no_license | gha_license_id: null
visit_date: 2021-05-19T04:03:46.295906 | revision_date: 2021-03-09T22:23:58 | committer_date: 2021-03-09T22:23:58
github_id: 251,520,131 | star_events_count: 0 | fork_events_count: 3
gha_event_created_at: 2020-10-26T01:20:09 | gha_created_at: 2020-03-31T06:35:18 | gha_language: Python
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 113 | extension: py
content:
```python
language = "python"
print(language.upper())
print(language.capitalize())
print(language.capitalize().swapcase())
```
authors: ["[email protected]"]

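For reference, the three string methods in this exercise behave as follows:

```python
>>> "python".upper()
'PYTHON'
>>> "python".capitalize()
'Python'
>>> "python".capitalize().swapcase()
'pYTHON'
```
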
Example 12
path: /all_data/exercism_data/python/meetup/17857a6adde04647acc54269ab6ef4be.py
repo_name: itsolutionscorp/AutoStyle-Clustering | branch_name: refs/heads/master
blob_id: be7503afcbfae63f32619f134faf846ec33a132d | directory_id: 781e2692049e87a4256320c76e82a19be257a05d | content_id: 84c22eae0a99b1a54a20f6aa508a6a593c16744c
snapshot_id: 54bde86fe6dbad35b568b38cfcb14c5ffaab51b0 | revision_id: be0e2f635a7558f56c61bc0b36c6146b01d1e6e6
detected_licenses: [] | license_type: no_license | gha_license_id: null
visit_date: 2020-12-11T07:27:19.291038 | revision_date: 2016-03-16T03:18:00 | committer_date: 2016-03-16T03:18:42
github_id: 59,454,921 | star_events_count: 4 | fork_events_count: 0
gha_event_created_at: 2016-05-23T05:40:56 | gha_created_at: 2016-05-23T05:40:56 | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 750 | extension: py
content:
```python
from collections import defaultdict
from datetime import date, timedelta
def meetup_day(year, month, day_name, date_type):
weekdays = ['Monday', 'Tuesday', 'Wednesday', 'Thursday',
'Friday', 'Saturday', 'Sunday']
operators = {
'1st': lambda x: x[0],
'2nd': lambda x: x[1],
'3rd': lambda x: x[2],
'4th': lambda x: x[3],
'last': lambda x: x[-1],
'teenth': lambda x: [d for d in x if 13 <= d <= 19][0],
}
data = defaultdict(list)
day = date(year=year, month=month, day=1)
while day.month == month:
data[weekdays[day.weekday()]].append(day.day)
day += timedelta(1)
return date(year=year, month=month, day=operators[date_type](data[day_name]))
```
authors: ["[email protected]"]

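Example calls for `meetup_day`, with results checked against the 2013 and 2015 calendars:

```python
print(meetup_day(2013, 5, "Monday", "teenth"))  # 2013-05-13
print(meetup_day(2015, 8, "Friday", "1st"))     # 2015-08-07
print(meetup_day(2013, 5, "Tuesday", "last"))   # 2013-05-28
```
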
Example 13
path: /_0681_Next_Closest_Time.py
repo_name: mingweihe/leetcode | branch_name: refs/heads/master
blob_id: 47d8e02e074f1b33228838b15e10ea23d3c6ee86 | directory_id: 4fca17a3dbc3e74ba7e46bd7869eb6d138e4c422 | content_id: 6c11bbad0971087cef6f5ea28aee8b0e18f175eb
snapshot_id: a2cfee0e004627b817a3c0321bb9c74128f8c1a7 | revision_id: edff905f63ab95cdd40447b27a9c449c9cefec37
detected_licenses: [] | license_type: no_license | gha_license_id: null
visit_date: 2021-06-19T07:46:46.897952 | revision_date: 2021-05-02T05:13:17 | committer_date: 2021-05-02T05:13:17
github_id: 205,740,338 | star_events_count: 3 | fork_events_count: 0
gha_event_created_at: null | gha_created_at: null | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 938 | extension: py
content:
```python
class Solution(object):
def nextClosestTime(self, time):
"""
:type time: str
:rtype: str
"""
def helper(start, cur, pool):
if start == 4:
left, right = cur[:2], cur[2:]
hour, minute = int(left), int(right)
if hour > 23 or minute > 59: return
cur_digit = int(left + right)
if cur_digit <= self.original_digit: return
cur_diff = cur_digit - self.original_digit
if cur_diff < self.diff:
self.diff = cur_diff
self.res = left + ':' + right
return
for c in pool: helper(start + 1, cur + c, pool)
self.res = min(time) * 2 + ':' + min(time) * 2
self.original_digit = int(time.replace(':', ''))
self.diff = float('inf')
helper(0, '', set(time) - {':'})
return self.res
```
authors: ["[email protected]"]

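For example, from "19:34" the digits {1, 9, 3, 4} can form "19:39", five minutes later; when no later same-day time exists, the solution falls back to the smallest digit repeated:

```python
print(Solution().nextClosestTime("19:34"))  # 19:39
print(Solution().nextClosestTime("23:59"))  # 22:22 (wraps to the smallest digit)
```
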
Example 14
path: /traits/observation/_i_notifier.py
repo_name: odidev/traits | branch_name: refs/heads/master
blob_id: c5b50c84a27561cd42e497c41900c80a6f77b56c | directory_id: 30de452d89eacf48f61ceddfaff86aa62d505507 | content_id: 2b28ed8a676667350285c54a1f6916280271f97f
snapshot_id: 92224376b5444a7a5f805b474f0aa53ac1ca7cd2 | revision_id: 52f4d00248cec5dbf0826de4e846b4ad83cf072e
detected_licenses: ["BSD-3-Clause", "CC-BY-3.0", "LicenseRef-scancode-unknown-license-reference"] | license_type: permissive | gha_license_id: NOASSERTION
visit_date: 2023-06-18T16:53:43.850534 | revision_date: 2021-07-14T05:48:46 | committer_date: 2021-07-14T05:48:46
github_id: 388,075,888 | star_events_count: 0 | fork_events_count: 0
gha_event_created_at: 2021-07-21T10:31:06 | gha_created_at: 2021-07-21T10:11:23 | gha_language: null
src_encoding: UTF-8 | language: Python | is_vendor: false | is_generated: false | length_bytes: 1,620 | extension: py
content:
```python
# (C) Copyright 2005-2021 Enthought, Inc., Austin, TX
# All rights reserved.
#
# This software is provided without warranty under the terms of the BSD
# license included in LICENSE.txt and may be redistributed only under
# the conditions described in the aforementioned license. The license
# is also available online at http://www.enthought.com/licenses/BSD.txt
#
# Thanks for using Enthought open source!
import abc
class INotifier(abc.ABC):
""" Interface for all notifiers.
An instance of notifier must be a callable, i.e. ``__call__`` must be
implemented and cannot be None. The signature of that callable should be
compatible with the observables the notifier will be given to. This
interface does not define what that signature should be.
"""
def __call__(self, *args, **kwargs):
""" Called by an observable.
The signature is not restricted by the interface.
"""
raise NotImplementedError("__call__ must be implemented.")
def add_to(self, observable):
""" Add this notifier to the observable.
Parameters
----------
observable : IObservable
"""
raise NotImplementedError("add_to must be implemented.")
def remove_from(self, observable):
""" Remove this notifier or a notifier equivalent to this one
from the observable.
Parameters
----------
observable : IObservable
Raises
------
NotifierNotFound
If the notifier cannot be found.
"""
raise NotImplementedError("remove_from must be implemented.")
| [
"[email protected]"
]
| |
5984781a7bf3e925d29c995794955234adfb0a95 | 1ee3dc4fa096d12e409af3a298ba01f5558c62b5 | /ixnetwork_restpy/testplatform/sessions/ixnetwork/topology/simrouter.py | 02b3d8c591855440623aaa468cf1e02a2f4d36be | [
"MIT"
]
| permissive | parthpower/ixnetwork_restpy | 321e64a87be0a4d990276d26f43aca9cf4d43cc9 | 73fa29796a5178c707ee4e21d90ff4dad31cc1ed | refs/heads/master | 2020-07-04T13:34:42.162458 | 2019-08-13T20:33:17 | 2019-08-13T20:33:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,972 | py | # MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class SimRouter(Base):
"""The SimRouter class encapsulates a system managed simRouter node in the ixnetwork hierarchy.
An instance of the class can be obtained by accessing the SimRouter property from a parent instance.
The internal properties list will be empty when the property is accessed and is populated from the server by using the find method.
"""
_SDM_NAME = 'simRouter'
def __init__(self, parent):
super(SimRouter, self).__init__(parent)
@property
def Connector(self):
"""An instance of the Connector class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.connector.Connector)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.connector import Connector
return Connector(self)
@property
def IsisL3PseudoRouter(self):
"""An instance of the IsisL3PseudoRouter class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.isisl3pseudorouter.IsisL3PseudoRouter)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.isisl3pseudorouter import IsisL3PseudoRouter
return IsisL3PseudoRouter(self)
@property
def LdpPseudoRouter(self):
"""An instance of the LdpPseudoRouter class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.ldppseudorouter.LdpPseudoRouter)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.ldppseudorouter import LdpPseudoRouter
return LdpPseudoRouter(self)
@property
def OspfPseudoRouter(self):
"""An instance of the OspfPseudoRouter class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.ospfpseudorouter.OspfPseudoRouter)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.ospfpseudorouter import OspfPseudoRouter
return OspfPseudoRouter(self)
@property
def Ospfv3PseudoRouter(self):
"""An instance of the Ospfv3PseudoRouter class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.ospfv3pseudorouter.Ospfv3PseudoRouter)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.ospfv3pseudorouter import Ospfv3PseudoRouter
return Ospfv3PseudoRouter(self)
@property
def Tag(self):
"""An instance of the Tag class.
Returns:
obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tag.Tag)
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.topology.tag import Tag
return Tag(self)
@property
def Count(self):
"""Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
Returns:
number
"""
return self._get_attribute('count')
@property
def DescriptiveName(self):
"""Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offers more context
Returns:
str
"""
return self._get_attribute('descriptiveName')
@property
def Name(self):
"""Name of NGPF element, guaranteed to be unique in Scenario
Returns:
str
"""
return self._get_attribute('name')
@Name.setter
def Name(self, value):
self._set_attribute('name', value)
@property
def RouterId(self):
"""4 Byte Router Id in dotted decimal format.
Returns:
obj(ixnetwork_restpy.multivalue.Multivalue)
"""
return self._get_attribute('routerId')
@property
def SystemId(self):
"""6 Byte System Id in hex format.
Returns:
obj(ixnetwork_restpy.multivalue.Multivalue)
"""
return self._get_attribute('systemId')
def update(self, Name=None):
"""Updates a child instance of simRouter on the server.
This method has some named parameters with a type: obj (Multivalue).
The Multivalue class has the associated documentation that details the possible values for those named parameters.
Args:
Name (str): Name of NGPF element, guaranteed to be unique in Scenario
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
self._update(locals())
def find(self, Count=None, DescriptiveName=None, Name=None):
"""Finds and retrieves simRouter data from the server.
All named parameters support regex and can be used to selectively retrieve simRouter data from the server.
By default the find method takes no parameters and will retrieve all simRouter data from the server.
Args:
Count (number): Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
            DescriptiveName (str): Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offer more context
Name (str): Name of NGPF element, guaranteed to be unique in Scenario
Returns:
self: This instance with matching simRouter data retrieved from the server available through an iterator or index
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
return self._select(locals())
def read(self, href):
"""Retrieves a single instance of simRouter data from the server.
Args:
href (str): An href to the instance to be retrieved
Returns:
self: This instance with the simRouter data from the server available through an iterator or index
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def get_device_ids(self, PortNames=None, RouterId=None, SystemId=None):
"""Base class infrastructure that gets a list of simRouter device ids encapsulated by this object.
Use the optional regex parameters in the method to refine the list of device ids encapsulated by this object.
Args:
PortNames (str): optional regex of port names
RouterId (str): optional regex of routerId
SystemId (str): optional regex of systemId
Returns:
list(int): A list of device ids that meets the regex criteria provided in the method parameters
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
return self._get_ngpf_device_ids(locals())
def Start(self):
"""Executes the start operation on the server.
        Start CPF control plane (equivalent to promoting to the negotiated state).
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('start', payload=payload, response_object=None)
def Stop(self):
"""Executes the stop operation on the server.
        Stop CPF control plane (equivalent to demoting to the PreValidated-DoDDone state).
Raises:
NotFoundError: The requested resource does not exist on the server
ServerError: The server has encountered an uncategorized error condition
"""
payload = { "Arg1": self }
return self._execute('stop', payload=payload, response_object=None)
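# Usage sketch (``parent`` stands in for whichever ixnetwork_restpy node
# exposes the SimRouter property; SimRouter is system-managed, so instances
# are retrieved with find() -- which accepts regexes -- rather than created):
#
# routers = parent.SimRouter.find(Name='^SimRouter')
# for router in routers:
#     print(router.DescriptiveName, router.RouterId)
# routers.Start()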
| [
"[email protected]"
]
| |
10e9fdf6a6b34922bef66358b5ff457a52a28977 | 1d0e36f710ed936d9bec3d88b69edd8a26b62823 | /examples/ble_uart_echo_client.py | ce36ac80691af1a7338da4730e0fbc2c9a45008c | [
"MIT"
]
| permissive | dglaude/Adafruit_CircuitPython_BLE | c0336787bbf739ddad9d078eab4edc2a80530bd4 | e8f72b053af8cfcde0c07040a5f2feecd4ca585b | refs/heads/master | 2020-12-12T17:54:52.632742 | 2020-01-09T03:09:22 | 2020-01-14T23:18:46 | 234,190,805 | 0 | 0 | MIT | 2020-01-15T23:05:42 | 2020-01-15T23:05:41 | null | UTF-8 | Python | false | false | 1,086 | py | """
Used with ble_uart_echo_test.py. Transmits "echo" to the UARTService and receives it back.
"""
import time
from adafruit_ble import BLERadio
from adafruit_ble.advertising.standard import ProvideServicesAdvertisement
from adafruit_ble.services.nordic import UARTService
ble = BLERadio()
while True:
while ble.connected and any(UARTService in connection for connection in ble.connections):
for connection in ble.connections:
if UARTService not in connection:
continue
print("echo")
uart = connection[UARTService]
uart.write(b"echo")
# Returns b'' if nothing was read.
one_byte = uart.read(4)
if one_byte:
print(one_byte)
print()
time.sleep(1)
print("disconnected, scanning")
for advertisement in ble.start_scan(ProvideServicesAdvertisement, timeout=1):
if UARTService not in advertisement.services:
continue
ble.connect(advertisement)
print("connected")
break
ble.stop_scan()
| [
"[email protected]"
]
| |
5759a3994045c73ec308fd6d0a0209db7f485d10 | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/380/usersdata/328/74024/submittedfiles/testes.py | d9f0fad052a8e893b10ab6e61accf13576dfb507 | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 183 | py | # -*- coding: utf-8 -*-
#COMECE AQUI ABAIXO
a=5
print('%.2f'%a)
a=30
b=5
c=10
if a<b<c:
print('comando 1')
else:
if a<c<b:
print('comando 2')
    else:
        print('comando 3')  # assumed fallback branch (missing from the submitted fragment)
| [
"[email protected]"
]
| |
9bf5f51bb15906ebe54e8158ffa8d0e1abcdfd05 | 2f6817fc8f6ddb48f5f88c913d8e40b672fc3dbf | /MLP/quiz11-3.py | 331fe654ca3d83ef9ffd1dcf2ebdbcc9879d2b59 | []
| no_license | cutz-j/TodayILearned | 320b5774de68a0f4f68fda28a6a8b980097d6ada | 429b24e063283a0d752ccdfbff455abd30ba3859 | refs/heads/master | 2020-03-23T17:34:51.389065 | 2018-11-24T08:49:41 | 2018-11-24T08:49:41 | 141,865,899 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,157 | py | ## Q13: K-means ##
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from sklearn.cluster import KMeans
from sklearn.metrics import silhouette_score
from scipy.spatial.distance import cdist, pdist
## 파일 전처리 ##
def file_open(file_name):
## file_open --> np.array ##
file_open = open(file_name, 'r')
all_data = []
for i in file_open.readlines():
all_data.append(i.strip('\n').split(','))
all_data = np.array(all_data) # shape(9835, None)
return all_data
all_data = file_open("d:/data/prac/groceries.csv")
def numbering(all_data):
## product를 dict에 넣으면서 numbering ##
global all_item_num
k = 0
all_dict = {}
for buy in all_data:
for product in buy:
if product in all_dict:
continue
else:
all_dict[product] = k
k += 1
all_item_num = k
for i in all_data:
for k in range(len(i)):
i[k] = all_dict[i[k]]
return all_data, all_dict
all_transaction = len(all_data) # total number of transactions: 9835
all_item_num = 0 # total number of distinct items: 169
all_data, all_dict = numbering(all_data) # total item count: 169
## one-hot ##
def one_hot(data):
    ## one-hot encode every shopper as an item vector ## --> X: shape(9835, 169)
    one_hot = np.zeros([all_transaction, all_item_num], dtype=np.int32)
    for i in range(len(data)):  # use the parameter, not the global
        for j in data[i]:
            one_hot[i, j] = 1
    return one_hot
x_one_hot = one_hot(all_data) # one-hot
## split ##
x_train, x_test = x_one_hot[:9800, :], x_one_hot[9800:, :]
## Kmeans ##
# n_cluster = 10, max_iter=3000 #
k_means = KMeans(n_clusters=10, max_iter=3000, random_state=77)
k_means.fit(x_train)
k_cluster = k_means.predict(x_test)
ss = silhouette_score(x_train, k_means.labels_, metric='euclidean')
print("테스트 데이터 35명의 클러스터: \n", k_cluster)
print("\nsilhouette_score: ", ss)
| [
"[email protected]"
]
| |
509baa5595f18af9b6609d75c3eb70cd7f7b8834 | 1593d6393efb987b24e9e008c275725ff489bc95 | /dxm/lib/DxRuleset/DxRulesetList.py | 7281428ff08bc501d5ed1a98cab152eccb258957 | [
"Apache-2.0"
]
| permissive | SLEEP1NG/dxm-toolkit | fedab63ac21305652f047ab51af7de090fdd8035 | 1c2aae49701da47c5932e3b7e67844bf8c3d0ccf | refs/heads/master | 2020-12-07T14:41:59.080025 | 2019-11-04T12:10:36 | 2019-11-04T12:10:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,579 | py | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Copyright (c) 2018 by Delphix. All rights reserved.
#
# Author : Marcin Przepiorowski
# Date : April 2018
import logging
from dxm.lib.DxRuleset.DxDatabaseRuleset import DxDatabaseRuleset
from dxm.lib.DxRuleset.DxFileRuleset import DxFileRuleset
from dxm.lib.DxTools.DxTools import get_objref_by_val_and_attribute
from dxm.lib.DxTools.DxTools import paginator
from dxm.lib.DxEngine.DxMaskingEngine import DxMaskingEngine
from dxm.lib.DxEnvironment.DxEnvironmentList import DxEnvironmentList
from masking_apis.apis.database_ruleset_api import DatabaseRulesetApi
from masking_apis.apis.file_ruleset_api import FileRulesetApi
from masking_apis.rest import ApiException
from dxm.lib.DxLogging import print_error
from dxm.lib.DxConnector.DxConnectorsList import DxConnectorsList
class DxRulesetList(object):
__rulesetList = {}
__engine = None
__logger = None
@classmethod
def __init__(self, environment_name=None):
"""
Constructor
:param engine: DxMaskingEngine object
"""
self.__engine = DxMaskingEngine
self.__logger = logging.getLogger()
self.__logger.debug("creating DxRulesetList object")
self.LoadRulesets(environment_name)
@classmethod
def LoadRulesets(self, environment_name):
"""
Load list of rule sets
Return None if OK
"""
return self.LoadRulesets_worker(environment_name, None)
@classmethod
def LoadRulesetsbyId(self, env_id):
"""
Load list of rule sets for env_id
Return None if OK
"""
return self.LoadRulesets_worker(None, env_id)
@classmethod
def LoadRulesets_worker(self, environment_name, env_id):
"""
Load list of rule sets
Return None if OK
"""
DxConnectorsList(environment_name)
self.__rulesetList = {}
try:
api_instance = DatabaseRulesetApi(self.__engine.api_client)
if environment_name:
environment_id = DxEnvironmentList.get_environmentId_by_name(
environment_name)
if environment_id:
database_rulesets = paginator(
api_instance,
"get_all_database_rulesets",
environment_id=environment_id,
_request_timeout=self.__engine.get_timeout())
else:
return 1
else:
if env_id:
environment_id = env_id
database_rulesets = paginator(
api_instance,
"get_all_database_rulesets",
environment_id=environment_id,
_request_timeout=self.__engine.get_timeout())
else:
environment_id = None
database_rulesets = paginator(
api_instance,
"get_all_database_rulesets")
if database_rulesets.response_list:
for c in database_rulesets.response_list:
ruleset = DxDatabaseRuleset(self.__engine)
ruleset.from_ruleset(c)
self.__rulesetList[c.database_ruleset_id] = ruleset
else:
if environment_id:
self.__logger.error("No database ruleset found for "
"environment name %s"
% environment_name)
else:
self.__logger.error("No database ruleset found")
api_instance = FileRulesetApi(self.__engine.api_client)
if environment_id:
file_rulesets = paginator(
api_instance,
"get_all_file_rulesets",
environment_id=environment_id)
else:
file_rulesets = paginator(
api_instance,
"get_all_file_rulesets")
if file_rulesets.response_list:
for c in file_rulesets.response_list:
ruleset = DxFileRuleset(self.__engine)
ruleset.from_ruleset(c)
self.__rulesetList[c.file_ruleset_id] = ruleset
else:
if environment_id:
self.__logger.error("No file ruleset found for "
"environment name %s"
% environment_name)
else:
self.__logger.error("No file ruleset found")
except ApiException as e:
print_error("Can't load ruleset %s" % e.body)
return 1
@classmethod
def get_by_ref(self, reference):
"""
        return a Ruleset object by reference
return None if not found
"""
try:
self.__logger.debug("reference %s" % reference)
return self.__rulesetList[reference]
except KeyError as e:
self.__logger.debug("can't find Ruleset object"
" for reference %s" % reference)
self.__logger.debug(e)
return None
@classmethod
def get_allref(self):
"""
return a list of all references
"""
return self.__rulesetList.keys()
@classmethod
def get_rulesetId_by_name(self, name):
"""
Return ruleset id by name.
:param1 name: name of ruleset
return ref if OK
return None if ruleset not found or not unique
"""
reflist = self.get_rulesetId_by_name_worker(name)
# convert list to single value
# as there will be only one element in list
if reflist:
return reflist[0]
else:
return None
@classmethod
def get_all_rulesetId_by_name(self, name):
"""
        Return all ruleset ids matching a name.
:param1 name: name of ruleset
return list of references if OK
return None if ruleset not found
"""
return self.get_rulesetId_by_name_worker(name, None)
@classmethod
def get_rulesetId_by_name_worker(self, name, check_uniqueness=1):
"""
:param1 name: name of ruleset
:param2 check_uniqueness: check uniqueness put None if skip this check
return list of rulesets
"""
reflist = get_objref_by_val_and_attribute(name, self, 'ruleset_name')
if len(reflist) == 0:
self.__logger.error('Ruleset %s not found' % name)
print_error('Ruleset %s not found' % name)
return None
if check_uniqueness:
if len(reflist) > 1:
self.__logger.error('Ruleset name %s is not unique' % name)
print_error('Ruleset name %s is not unique' % name)
return None
return reflist
@classmethod
def get_all_database_rulesetIds(self):
"""
Return list of database ruleset ids.
return list of references if OK
return None if ruleset not found
"""
return get_objref_by_val_and_attribute('Database', self, 'type')
@classmethod
def get_all_file_rulesetIds(self):
"""
        Return list of file ruleset ids.
return list of references if OK
return None if ruleset not found
"""
return get_objref_by_val_and_attribute('File', self, 'type')
@classmethod
def add(self, ruleset):
"""
        Add a Ruleset to a list and Engine
:param ruleset: Ruleset object to add to Engine and list
return None if OK
"""
if (ruleset.add() is None):
self.__logger.debug("Adding ruleset %s to list" % ruleset)
self.__rulesetList[ruleset.ruleset_id] = ruleset
return None
else:
return 1
@classmethod
def delete(self, RulesetId):
"""
Delete a ruleset from a list and Engine
:param RulesetId: Ruleset id to delete from Engine and list
return None if OK
"""
ruleset = self.get_by_ref(RulesetId)
if ruleset is not None:
if ruleset.delete() is None:
return None
else:
return 1
else:
print "Ruleset with id %s not found" % RulesetId
return 1
@classmethod
def copy(self, ruleset_id, newname):
"""
        Copy an existing Ruleset under a new name and add it to the list and Engine
        :param ruleset_id: Ruleset id of the existing ruleset
:param newname: Name of the new ruleset
return new ruleset_id if OK, None if failure
"""
ruleset = self.get_by_ref(ruleset_id)
if ruleset.type == 'Database':
newruleset = DxDatabaseRuleset(self.__engine)
newruleset.from_ruleset(ruleset)
newruleset.ruleset_name = newname
elif ruleset.type == 'File':
newruleset = DxFileRuleset(self.__engine)
newruleset.from_ruleset(ruleset)
newruleset.ruleset_name = newname
        else:
            self.__logger.error("Unsupported ruleset type %s" % ruleset.type)
            return None
        if (newruleset.add() is None):
self.__logger.debug("Adding ruleset %s to list" % newruleset)
self.__rulesetList[newruleset.ruleset_id] = newruleset
return newruleset.ruleset_id
else:
return None
@classmethod
def refresh(self, RulesetId):
"""
Refresh a ruleset on the Engine
:param RulesetId: Ruleset id to delete from Engine and list
return None if OK
"""
ruleset = self.get_by_ref(RulesetId)
if ruleset is not None:
if ruleset.refresh() is None:
return None
else:
return 1
else:
print "Ruleset with id %s not found" % RulesetId
return 1 | [
"[email protected]"
]
| |
90e081344e37878f7f20b3dfb85f48791ce8604c | 1fe4f9eb9b1d756ad17e1ff6585e8ee7af23903c | /saleor/store/migrations/0003_specialpage.py | 5f0354beda8b80442f5c4eb27b7a679dbb897729 | [
"BSD-3-Clause"
]
| permissive | Chaoslecion123/Diver | ab762e7e6c8d235fdb89f6c958488cd9b7667fdf | 8c5c493701422eada49cbf95b0b0add08f1ea561 | refs/heads/master | 2022-02-23T10:43:03.946299 | 2019-10-19T23:39:47 | 2019-10-19T23:39:47 | 216,283,489 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,245 | py | # Generated by Django 2.2 on 2019-04-17 02:38
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('site', '0022_auto_20190413_2016'),
('page', '0007_auto_20190225_0252'),
('store', '0002_socialnetwork'),
]
operations = [
migrations.CreateModel(
name='SpecialPage',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type', models.CharField(choices=[('about', 'About'), ('faq', 'FAQ'), ('legal', 'Terms and Conditions'), ('privacy', 'Privacy and Cookies'), ('accessibility', 'Accessibility')], max_length=32)),
('page', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='site_settings', related_query_name='site_setting', to='page.Page')),
('site_settings', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='special_pages', related_query_name='special_page', to='site.SiteSettings')),
],
options={
'unique_together': {('site_settings', 'type')},
},
),
]
| [
"[email protected]"
]
| |
1e1eaa7a694586422bdc9da3b230971d98ace025 | 8da91c26d423bacbeee1163ac7e969904c7e4338 | /pyvisdk/do/host_wwn_conflict_event.py | faa99ccf0a169abcf46f9c22e5db93ed38d7722e | []
| no_license | pexip/os-python-infi-pyvisdk | 5d8f3a3858cdd61fb76485574e74ae525cdc7e25 | 1aadea0afbc306d09f6ecb9af0e683dbbf961d20 | refs/heads/master | 2023-08-28T02:40:28.789786 | 2020-07-16T04:00:53 | 2020-07-16T04:00:53 | 10,032,240 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,209 | py |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def HostWwnConflictEvent(vim, *args, **kwargs):
'''This event records a conflict of host WWNs (World Wide Name).'''
obj = vim.client.factory.create('{urn:vim25}HostWwnConflictEvent')
# do some validation checking...
if (len(args) + len(kwargs)) < 5:
        raise IndexError('Expected at least 5 arguments, got: %d' % (len(args) + len(kwargs)))
required = [ 'wwn', 'chainId', 'createdTime', 'key', 'userName' ]
optional = [ 'conflictedHosts', 'conflictedVms', 'changeTag', 'computeResource',
'datacenter', 'ds', 'dvs', 'fullFormattedMessage', 'host', 'net', 'vm',
'dynamicProperty', 'dynamicType' ]
for name, arg in zip(required+optional, args):
setattr(obj, name, arg)
for name, value in kwargs.items():
if name in required + optional:
setattr(obj, name, value)
else:
raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
return obj
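# Usage sketch (``vim`` is an already-connected pyvisdk instance; the argument
# values are illustrative and follow the ``required`` order above):
#
# import datetime
# event = HostWwnConflictEvent(vim, '20:00:00:25:b5:00:00:0f', 1,
#                              datetime.datetime.now(), 1001, 'root')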
| [
"[email protected]"
]
| |
d1d3fffdb132de5d0a3663618f087eeb3caf28f7 | 6437a3a4a31ab9ad233d6b2d985beb50ed50de23 | /PythonistaAppTemplate/PythonistaKit.framework/pylib/site-packages/simpy/test/test_timeout.py | 91e83c4c66e014e0b5c6321a306dd2e9a7bc7ae8 | []
| no_license | sreyemnayr/jss-lost-mode-app | 03ddc472decde3c17a11294d8ee48b02f83b71e7 | 3ff4ba6fb13f4f3a4a98bfc824eace137f6aabaa | refs/heads/master | 2021-05-02T08:50:10.580091 | 2018-02-08T20:32:29 | 2018-02-08T20:32:29 | 120,813,623 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,821 | py | #\input texinfo
Tests for ``simpy.events.Timeout``.
"""
# Pytest gets the parameters "env" and "log" from the *conftest.py* file
import pytest
def test_discrete_time_steps(env, log):
"""envple envulation with discrete time steps."""
def pem(env, log):
while True:
log.append(env.now)
yield env.timeout(delay=1)
env.process(pem(env, log))
env.run(until=3)
assert log == [0, 1, 2]
def test_negative_timeout(env):
"""Don't allow negative timeout times."""
def pem(env):
yield env.timeout(-1)
env.process(pem(env))
pytest.raises(ValueError, env.run)
def test_timeout_value(env):
"""You can pass an additional *value* to *timeout* which will be
directly yielded back into the PEM. This is useful to implement some
kinds of resources or other additions.
    See :class:`simpy.resources.Store` for an example.
"""
def pem(env):
val = yield env.timeout(1, 'ohai')
assert val == 'ohai'
env.process(pem(env))
env.run()
def test_shared_timeout(env, log):
def child(env, timeout, id, log):
yield timeout
log.append((id, env.now))
timeout = env.timeout(1)
for i in range(3):
env.process(child(env, timeout, i, log))
env.run()
assert log == [(0, 1), (1, 1), (2, 1)]
def test_triggered_timeout(env):
def process(env):
def child(env, event):
value = yield event
env.exit(value)
event = env.timeout(1, 'i was already done')
# Start the child after the timeout has already happened.
yield env.timeout(2)
value = yield env.process(child(env, event))
assert value == 'i was already done'
env.run(env.process(process(env)))
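# Sketch of the conftest.py fixtures these tests expect (the real conftest is
# not shown here; a plain simpy.Environment and an empty list are the natural
# implementations):
#
# import pytest
# import simpy
#
# @pytest.fixture
# def env():
#     return simpy.Environment()
#
# @pytest.fixture
# def log():
#     return []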
| [
"[email protected]"
]
| |
d4a34ee20df7bcfc81d4f08d997084a701fe6793 | 6ea84a1ee3f08cc0e2c50b452ccda0469dda0b6c | /projectLimat/manage.py | 5ce2506a4a3d51b3e6927ce569073b28003bf100 | []
| no_license | frestea09/django_note | b818d9d95f2f1e43ba47f8f2168bc5980d5da1f7 | b8d1e41a450f5c452afd36319779740bed874caa | refs/heads/master | 2020-11-24T03:54:00.000949 | 2020-01-01T06:50:12 | 2020-01-01T06:50:12 | 227,950,347 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 632 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'projectLimat.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
4a47fffa44259b959487191994bc1233b3491c11 | 4f75cc33b4d65d5e4b054fc35b831a388a46c896 | /.history/app_20210903124401.py | 2579fc4075f93959be51f7bd4b7d23610331e820 | []
| no_license | Lr-2002/newpage | c3fe2acc451e24f6408996ea1271c61c321de702 | c589ad974e7100aa9b1c2ccc095a959ff68069b6 | refs/heads/main | 2023-09-03T06:13:53.428236 | 2021-11-23T10:41:21 | 2021-11-23T10:41:21 | 402,606,000 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 869 | py | from flask import Flask ,render_template,url_for
from flask_sqlalchemy import
app = Flask(__name__)
name = 'Grey Li'
movies = [
{'title': 'My Neighbor Totoro', 'year': '1988'},
{'title': 'Dead Poets Society', 'year': '1989'},
{'title': 'A Perfect World', 'year': '1993'},
{'title': 'Leon', 'year': '1994'},
{'title': 'Mahjong', 'year': '1996'},
{'title': 'Swallowtail Butterfly', 'year': '1996'},
{'title': 'King of Comedy', 'year': '1999'},
{'title': 'Devils on the Doorstep', 'year': '1999'},
{'title': 'WALL-E', 'year': '2008'},
{'title': 'The Pork of Music', 'year': '2012'},
]
# @app.route('/static/<name>')
# def static(name):
# # url_for('static')
# return name
@app.route('/')
def hello():
return render_template('index.html',name=name,movies = movies)
# if __name__ == '__main__':
# app.run()
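# Sketch of the database wiring the flask_sqlalchemy import suggests (the URI
# and usage below are assumptions, not part of this snapshot):
# app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite:///data.db'
# db = SQLAlchemy(app)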
| [
"[email protected]"
]
| |
f4e6f2a11be9b1c9b26e680848c56ec23e147339 | e78154abbb8bacf5afccda9da371684cbeabad36 | /popego/popserver/build/lib/popserver/tests/agents/test_lastfm.py | 96e4e5360546f9480b42ef1450462b3d3a5daae1 | [
"BSD-3-Clause"
]
| permissive | enterstudio/popego | 1a196fabc374c0f45764e5c74bd7752236424040 | 2d09e793d9d2f297139edb325b8a70ddda9b2705 | refs/heads/master | 2021-04-09T16:39:40.781634 | 2016-10-14T16:53:47 | 2016-10-14T16:53:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,723 | py | # -*- coding: utf-8 -*-
__docformat__='restructuredtext'
from popserver.tests.nodb_model import *
from popserver.tests import *
from fixture import DataTestCase
from popserver.tests import popfixtures
from popserver.agents.lastfm_agent import LastFmAgent
from popserver.agents.lastfm_client import LastFMClient
import popserver.agents
import types
import unittest
class TestLastFmClient(unittest.TestCase):
def setUp(self):
self._restoreMethod = popserver.agents.lastfm_client.LastFMClient._getFeed
LastFMClient._getFeed = types.MethodType(mock_lastfm_getFeed, None, LastFMClient)
self.client = LastFMClient()
def tearDown(self):
LastFMClient._getFeed = types.MethodType(self._restoreMethod, None, LastFMClient)
def testRecentTracks(self):
t = self.client.getRecentTracksForUser('maristaran')
assert type(t) == type([])
assert len(t) == 1
assert type(t[0]) == type({})
assert t[0]['artist'] == 'Willie Bobo'
def testTopTracks(self):
t = self.client.getTopTracksForUser('maristaran')
assert type(t) == type([])
assert len(t) == 1
assert type(t[0]) == type({})
assert t[0]['artist'] == 'Brian Wilson'
assert t[0]['name'] == 'Our Prayer Gee'
def testTopArtists(self):
t = self.client.getTopArtistsForUser('maristaran')
assert type(t) == type([])
assert len(t) == 1
assert type(t[0]) == type({})
assert t[0]['name'] == 'The Beatles'
def testUserTagsForTrack(self):
t = self.client.getUserTagsForTrack('maristaran', 'Brian Wilson', 'Our Prayer Gee')
assert type(t) == type([])
assert len(t) == 1
assert t == ['bombastic']
def testTopArtistsForUser(self):
t = self.client.getTopArtistsForUser('maristaran')
assert type(t) == type([])
assert len(t) == 1
assert t[0]['name'] == 'The Beatles'
def testTopTagsForTrack(self):
t = self.client.getTopTagsForTrack('Willie Bobo', 'Funky Sneakers')
assert type(t) == type([])
assert len(t) == 0
def testGetArtistData(self):
t = self.client.getArtistData('Brian Wilson')
assert type(t) == type({})
assert t['name'] == 'Brian Wilson'
# TODO: tests para el agente
# class TestLastFmAgent(TestModel, DataTestCase):
# fixture = dbfixture
# datasets = [popfixtures.UserData, popfixtures.ServiceTypeData, popfixtures.ServiceData, popfixtures.AccountData]
# def setUp(self):
# TestModel.setUp(self)
# DataTestCase.setUp(self)
# self._restoreMethod = popserver.agents.lastfm_client.LastFMClient._getFeed
# LastFMClient._getFeed = types.MethodType(mock_lastfm_getFeed, None, LastFMClient)
# self.agent = LastFmAgent()
# self.user = self.data.UserData.dartagnan
# self.lastfm_svc = self.data.ServiceData.lastfm
# self.account = Account.get_by(user_id=self.user.id, service_id=self.lastfm_svc.id)
# def tearDown(self):
# dbsession.clear()
# DataTestCase.tearDown(self)
# TestModel.tearDown(self)
# LastFMClient._getFeed = types.MethodType(self._restoreMethod, None, LastFMClient)
# def test_getUserGraph(self):
# r = self.agent.getUserGraph(self.account)
# assert len(r) == 3 # grupos: top artists, top tracks y recently_listened
# assert map(type, r) == [popserver.model.ItemGroup, popserver.model.ItemGroup, popserver.model.ItemGroup]
# assert map(lambda g: type(g.items[0]), r) == [popserver.model.UserItem, popserver.model.UserItem,popserver.model.UserItem]
# assert map(lambda g: len(g.items), r) == [1, 1, 1]
# top_artists = r[0]
# assert type(top_artists.items[0].item) == popserver.model.Artist
# assert top_artists.items[0].item.title == 'The Beatles'
# top_tracks = r[1]
# assert type(top_tracks.items[0].item) == popserver.model.Song
# assert top_tracks.items[0].item.title == 'Our Prayer Gee'
# assert top_tracks.items[0].item.artist.title == 'Brian Wilson'
# recently_listened = r[2]
# assert type(recently_listened.items[0].item) == popserver.model.Song
# assert recently_listened.items[0].item.title == 'Funky Sneakers'
# assert recently_listened.items[0].item.artist.title == 'Willie Bobo'
# assert True
def mock_lastfm_getFeed(self, url):
samples = {
'http://ws.audioscrobbler.com/1.0/user/maristaran/recenttracks.xml' : 'recenttracks.xml',
'http://ws.audioscrobbler.com/1.0/artist/Willie%2BBobo/similar.xml' : 'willie-bobo-similar.xml',
'http://ws.audioscrobbler.com/1.0/track/Willie%2BBobo/Funky%2BSneakers/toptags.xml' : 'funky-sneakers-toptags.xml',
'http://ws.audioscrobbler.com/1.0/user/maristaran/tracktags.xml?artist=Willie+Bobo&track=Funky+Sneakers' : 'funky-sneakers-tracktags.xml',
'http://ws.audioscrobbler.com/1.0/user/maristaran/toptracks.xml' : 'toptracks.xml',
'http://ws.audioscrobbler.com/1.0/artist/Brian%2BWilson/similar.xml' : 'brian-wilson-similar.xml',
'http://ws.audioscrobbler.com/1.0/track/Brian%2BWilson/Our%2BPrayer%2BGee/toptags.xml' : 'our-prayer-gee-toptags.xml',
'http://ws.audioscrobbler.com/1.0/user/maristaran/tracktags.xml?artist=Brian+Wilson&track=Our+Prayer+Gee' : 'maristaran-our-prayer-gee-toptags.xml',
'http://ws.audioscrobbler.com/1.0/user/maristaran/topartists.xml' : 'topartists.xml',
'http://ws.audioscrobbler.com/1.0/artist/The%2BBeatles/similar.xml' : 'beatles-similar.xml',
'http://ws.audioscrobbler.com/1.0/user/maristaran/artisttags.xml?artist=The+Beatles' : 'maristaran-beatles-tags.xml'
}
import xml.dom.minidom
if samples[url] == 404:
import urllib2
        raise urllib2.HTTPError(url, 404, 'Not Found', None, None)
else:
return xml.dom.minidom.parse(popserver.tests.__path__[0] + '/samples/lastfm/' + samples[url])
# class TestLastfmAgent(DataTestCase, TestModel):
# fixture = dbfixture
# datasets = [popfixtures.UserData, popfixtures.ServiceTypeData, popfixtures.ServiceData, popfixtures.AccountData]
# def setUp(self):
# TestModel.setUp(self)
# DataTestCase.setUp(self)
# self.user = User.get_by(username='darty')
# self.lastfm_svc = Service.get_by(name='Last.FM')
# self.account = Account.get_by(user=self.user, service=self.lastfm_svc)
# self.agent = self.lastfm_svc.getAgent()
# def tearDown(self):
# DataTestCase.tearDown(self)
# TestModel.tearDown(self)
# LastFmAgent._getFeed = orig_getFeed
| [
"[email protected]"
]
| |
6c0d1cac4f7d4207631446b5ea39072ab40066dd | 18319a52cce2b3f3a3607a18f45cbd5933ad8e31 | /venv/Lib/site-packages/bottle_sqlite.py | f568b58a6389cfdd11c1b2072cadb07f55fc79a2 | []
| no_license | AmithRajMP/Web-Tech-Assignment--2- | 8d9e56ef6bd302661654e32182964b9fe5644801 | ec7f410868f8936199bec19f01fce4ad6e081e79 | refs/heads/master | 2020-03-18T01:54:46.662732 | 2018-09-16T11:06:04 | 2018-09-16T11:06:04 | 134,165,641 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,833 | py | '''
Bottle-sqlite is a plugin that integrates SQLite3 with your Bottle
application. It automatically connects to a database at the beginning of a
request, passes the database handle to the route callback and closes the
connection afterwards.
To automatically detect routes that need a database connection, the plugin
searches for route callbacks that require a `db` keyword argument
(configurable) and skips routes that do not. This removes any overhead for
routes that don't need a database connection.
Usage Example::
import bottle
from bottle.ext import sqlite
app = bottle.Bottle()
plugin = sqlite.Plugin(dbfile='/tmp/test.db')
app.install(plugin)
@app.route('/show/:item')
def show(item, db):
row = db.execute('SELECT * from items where name=?', item).fetchone()
if row:
return template('showitem', page=row)
return HTTPError(404, "Page not found")
'''
__author__ = "Marcel Hellkamp"
__version__ = '0.1.3'
__license__ = 'MIT'
### CUT HERE (see setup.py)
import sqlite3
import inspect
import bottle
# PluginError is defined to bottle >= 0.10
if not hasattr(bottle, 'PluginError'):
class PluginError(bottle.BottleException):
pass
bottle.PluginError = PluginError
class SQLitePlugin(object):
''' This plugin passes an sqlite3 database handle to route callbacks
that accept a `db` keyword argument. If a callback does not expect
such a parameter, no connection is made. You can override the database
settings on a per-route basis. '''
name = 'sqlite'
api = 2
    # On Python 3 the old ``unicode`` text factory is simply ``str``, so no
    # compatibility shim is needed here.
def __init__(self, dbfile=':memory:', autocommit=True, dictrows=True,
keyword='db', text_factory=str):
self.dbfile = dbfile
self.autocommit = autocommit
self.dictrows = dictrows
self.keyword = keyword
self.text_factory = text_factory
def setup(self, app):
''' Make sure that other installed plugins don't affect the same
keyword argument.'''
for other in app.plugins:
if not isinstance(other, SQLitePlugin):
continue
if other.keyword == self.keyword:
raise PluginError("Found another sqlite plugin with "
"conflicting settings (non-unique keyword).")
elif other.name == self.name:
self.name += '_%s' % self.keyword
def apply(self, callback, route):
# hack to support bottle v0.9.x
if bottle.__version__.startswith('0.9'):
config = route['config']
_callback = route['callback']
else:
config = route.config
_callback = route.callback
# Override global configuration with route-specific values.
if "sqlite" in config:
# support for configuration before `ConfigDict` namespaces
g = lambda key, default: config.get('sqlite', {}).get(key, default)
else:
g = lambda key, default: config.get('sqlite.' + key, default)
dbfile = g('dbfile', self.dbfile)
autocommit = g('autocommit', self.autocommit)
dictrows = g('dictrows', self.dictrows)
keyword = g('keyword', self.keyword)
        text_factory = g('text_factory', self.text_factory)
# Test if the original callback accepts a 'db' keyword.
# Ignore it if it does not need a database handle.
        argspec = inspect.getfullargspec(_callback)  # getargspec was removed in Python 3.11
if keyword not in argspec.args:
return callback
def wrapper(*args, **kwargs):
# Connect to the database
db = sqlite3.connect(dbfile)
# set text factory
db.text_factory = text_factory
# This enables column access by name: row['column_name']
if dictrows:
db.row_factory = sqlite3.Row
# Add the connection handle as a keyword argument.
kwargs[keyword] = db
try:
rv = callback(*args, **kwargs)
if autocommit:
db.commit()
except sqlite3.IntegrityError as e:
db.rollback()
raise bottle.HTTPError(500, "Database Error", e)
except bottle.HTTPError as e:
raise
except bottle.HTTPResponse as e:
if autocommit:
db.commit()
raise
finally:
db.close()
return rv
# Replace the route callback with the wrapped one.
return wrapper
Plugin = SQLitePlugin
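# Per-route override sketch: extra keyword arguments passed to route() land in
# the route config that apply() reads above (the values here are illustrative):
#
# @app.route('/admin', sqlite={'dbfile': '/tmp/admin.db', 'autocommit': False})
# def admin(db):
#     return str(db.execute('SELECT count(*) FROM items').fetchone()[0])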
| [
"[email protected]"
]
| |
dee8a6fa242eaa17b94d0e6b419c56fdcdd8f742 | 0fa00ecf2dd671515dc001d4b14049ec6a0c1f1c | /custom_components/powercalc/power_profile/power_profile.py | 666c4af626a6097f47d1c1015b83eeac17df2eda | [
"Unlicense"
]
| permissive | bacco007/HomeAssistantConfig | d91a5368344f50abbea881bd1e6dfc57a0e456ca | 8548d9999ddd54f13d6a307e013abcb8c897a74e | refs/heads/master | 2023-08-30T07:07:33.571959 | 2023-08-29T20:00:00 | 2023-08-29T20:00:00 | 230,585,631 | 98 | 16 | Unlicense | 2023-09-09T08:28:39 | 2019-12-28T09:05:02 | Python | UTF-8 | Python | false | false | 13,870 | py | from __future__ import annotations
import json
import logging
import os
import re
from typing import NamedTuple, Protocol
from awesomeversion.awesomeversion import AwesomeVersion
from homeassistant.const import __version__ as HA_VERSION # noqa
if AwesomeVersion(HA_VERSION) >= AwesomeVersion("2023.8.0"):
from enum import StrEnum
else:
from homeassistant.backports.enum import StrEnum # pragma: no cover
from homeassistant.components.binary_sensor import DOMAIN as BINARY_SENSOR_DOMAIN
from homeassistant.components.camera import DOMAIN as CAMERA_DOMAIN
from homeassistant.components.light import DOMAIN as LIGHT_DOMAIN
from homeassistant.components.media_player import DOMAIN as MEDIA_PLAYER_DOMAIN
from homeassistant.components.switch import DOMAIN as SWITCH_DOMAIN
from homeassistant.core import HomeAssistant, State
from homeassistant.helpers.typing import ConfigType
from custom_components.powercalc.common import SourceEntity
from custom_components.powercalc.const import CONF_POWER, CalculationStrategy
from custom_components.powercalc.errors import (
ModelNotSupportedError,
PowercalcSetupError,
UnsupportedStrategyError,
)
_LOGGER = logging.getLogger(__name__)
class DeviceType(StrEnum):
CAMERA = "camera"
LIGHT = "light"
SMART_SWITCH = "smart_switch"
SMART_SPEAKER = "smart_speaker"
NETWORK = "network"
class SubProfileMatcherType(StrEnum):
ATTRIBUTE = "attribute"
ENTITY_ID = "entity_id"
ENTITY_STATE = "entity_state"
INTEGRATION = "integration"
DEVICE_DOMAINS = {
DeviceType.CAMERA: CAMERA_DOMAIN,
DeviceType.LIGHT: LIGHT_DOMAIN,
DeviceType.SMART_SWITCH: SWITCH_DOMAIN,
DeviceType.SMART_SPEAKER: MEDIA_PLAYER_DOMAIN,
DeviceType.NETWORK: BINARY_SENSOR_DOMAIN,
}
class PowerProfile:
def __init__(
self,
hass: HomeAssistant,
manufacturer: str,
model: str,
directory: str,
json_data: ConfigType,
) -> None:
self._manufacturer = manufacturer
self._model = model.replace("#slash#", "/")
self._hass = hass
self._directory = directory
self._json_data = json_data
self.sub_profile: str | None = None
self._sub_profile_dir: str | None = None
def get_model_directory(self, root_only: bool = False) -> str:
"""Get the model directory containing the data files."""
if self.linked_lut:
return os.path.join(os.path.dirname(__file__), "../data", self.linked_lut)
if root_only:
return self._directory
return self._sub_profile_dir or self._directory
def supports(self, model: str) -> bool:
"""Check whether this power profile supports a given model ID.
Also looks at possible aliases.
"""
model = model.lower().replace("#slash#", "/")
if self._model.lower() == model:
return True
# @todo implement Regex/Json path
for alias in self.aliases:
if alias.lower() == model:
return True
# Also try to match model ID between parentheses.
if match := re.search(r"\(([^\(\)]+)\)$", model):
return self.supports(match.group(1))
return False
@property
def manufacturer(self) -> str:
return self._manufacturer
@property
def model(self) -> str:
return self._model
@property
def name(self) -> str:
return self._json_data.get("name") or ""
@property
def standby_power(self) -> float:
return self._json_data.get("standby_power") or 0
@property
def standby_power_on(self) -> float:
return self._json_data.get("standby_power_on") or 0
@property
def calculation_strategy(self) -> CalculationStrategy:
"""Get the calculation strategy this profile provides.
        The legacy "supported modes" wording is kept for backwards-compatibility purposes.
"""
if "calculation_strategy" in self._json_data:
return CalculationStrategy(str(self._json_data.get("calculation_strategy")))
return CalculationStrategy.LUT
@property
def linked_lut(self) -> str | None:
return self._json_data.get("linked_lut")
@property
def calculation_enabled_condition(self) -> str | None:
return self._json_data.get("calculation_enabled_condition")
@property
def aliases(self) -> list[str]:
return self._json_data.get("aliases") or []
@property
def linear_mode_config(self) -> ConfigType | None:
"""Get configuration to setup linear strategy."""
if not self.is_strategy_supported(CalculationStrategy.LINEAR):
raise UnsupportedStrategyError(
f"Strategy linear is not supported by model: {self._model}",
)
return self._json_data.get("linear_config")
@property
def fixed_mode_config(self) -> ConfigType | None:
"""Get configuration to setup fixed strategy."""
if not self.is_strategy_supported(CalculationStrategy.FIXED):
raise UnsupportedStrategyError(
f"Strategy fixed is not supported by model: {self._model}",
)
fixed_config = self._json_data.get("fixed_config")
if fixed_config is None and self.standby_power_on:
fixed_config = {CONF_POWER: 0}
return fixed_config
@property
def sensor_config(self) -> ConfigType:
"""Additional sensor configuration."""
return self._json_data.get("sensor_config") or {}
def is_strategy_supported(self, mode: CalculationStrategy) -> bool:
"""Whether a certain calculation strategy is supported by this profile."""
return mode == self.calculation_strategy
@property
def needs_fixed_config(self) -> bool:
"""Used for smart switches which only provides standby power values.
This indicates the user must supply the power values in the config flow.
"""
return self.is_strategy_supported(
CalculationStrategy.FIXED,
) and not self._json_data.get("fixed_config")
@property
def device_type(self) -> DeviceType:
device_type = self._json_data.get("device_type")
if not device_type:
return DeviceType.LIGHT
return DeviceType(device_type)
@property
def config_flow_discovery_remarks(self) -> str | None:
return self._json_data.get("config_flow_discovery_remarks")
def get_sub_profiles(self) -> list[str]:
"""Get listing of possible sub profiles."""
return sorted(next(os.walk(self.get_model_directory(True)))[1])
@property
def has_sub_profiles(self) -> bool:
return len(self.get_sub_profiles()) > 0
@property
def sub_profile_select(self) -> SubProfileSelectConfig | None:
"""Get the configuration for automatic sub profile switching."""
select_dict = self._json_data.get("sub_profile_select")
if not select_dict:
return None
return SubProfileSelectConfig(**select_dict)
def select_sub_profile(self, sub_profile: str) -> None:
"""Select a sub profile. Only applicable when to profile actually supports sub profiles."""
if not self.has_sub_profiles:
return
# Sub profile already selected, no need to load it again
if self.sub_profile == sub_profile:
return
self._sub_profile_dir = os.path.join(self._directory, sub_profile)
_LOGGER.debug(f"Loading sub profile directory {sub_profile}")
if not os.path.exists(self._sub_profile_dir):
raise ModelNotSupportedError(
f"Sub profile not found (manufacturer: {self._manufacturer}, model: {self._model}, "
f"sub_profile: {sub_profile})",
)
# When the sub LUT directory also has a model.json (not required),
# merge this json into the main model.json data.
file_path = os.path.join(self._sub_profile_dir, "model.json")
if os.path.exists(file_path):
with open(file_path) as json_file:
self._json_data = {**self._json_data, **json.load(json_file)}
self.sub_profile = sub_profile
def is_entity_domain_supported(self, source_entity: SourceEntity) -> bool:
"""Check whether this power profile supports a given entity domain."""
entity_entry = source_entity.entity_entry
if (
self.device_type == DeviceType.SMART_SWITCH
and entity_entry
and entity_entry.platform in ["hue"]
and source_entity.domain == LIGHT_DOMAIN
): # see https://github.com/bramstroker/homeassistant-powercalc/issues/1491
return True
return DEVICE_DOMAINS[self.device_type] == source_entity.domain
class SubProfileSelector:
def __init__(
self,
hass: HomeAssistant,
config: SubProfileSelectConfig,
source_entity: SourceEntity,
) -> None:
self._hass = hass
self._config = config
self._source_entity = source_entity
self._matchers: list[SubProfileMatcher] = self._build_matchers()
def _build_matchers(self) -> list[SubProfileMatcher]:
matchers: list[SubProfileMatcher] = []
for matcher_config in self._config.matchers:
matchers.append(self._create_matcher(matcher_config))
return matchers
def select_sub_profile(self, entity_state: State) -> str:
"""Dynamically tries to select a sub profile depending on the entity state.
        This method always needs to return a sub profile; when nothing is matched it returns the default.
"""
for matcher in self._matchers:
sub_profile = matcher.match(entity_state, self._source_entity)
if sub_profile:
return sub_profile
return self._config.default
def get_tracking_entities(self) -> list[str]:
"""Get additional list of entities to track for state changes."""
return [
entity_id
for matcher in self._matchers
for entity_id in matcher.get_tracking_entities()
]
def _create_matcher(self, matcher_config: dict) -> SubProfileMatcher:
"""Create a matcher from json config. Can be extended for more matchers in the future."""
matcher_type: SubProfileMatcherType = matcher_config["type"]
if matcher_type == SubProfileMatcherType.ATTRIBUTE:
return AttributeMatcher(matcher_config["attribute"], matcher_config["map"])
if matcher_type == SubProfileMatcherType.ENTITY_STATE:
return EntityStateMatcher(
self._hass,
self._source_entity,
matcher_config["entity_id"],
matcher_config["map"],
)
if matcher_type == SubProfileMatcherType.ENTITY_ID:
return EntityIdMatcher(matcher_config["pattern"], matcher_config["profile"])
if matcher_type == SubProfileMatcherType.INTEGRATION:
return IntegrationMatcher(
matcher_config["integration"],
matcher_config["profile"],
)
raise PowercalcSetupError(f"Unknown sub profile matcher type: {matcher_type}")
class SubProfileSelectConfig(NamedTuple):
default: str
matchers: list[dict]
class SubProfileMatcher(Protocol):
def match(self, entity_state: State, source_entity: SourceEntity) -> str | None:
"""Returns a sub profile."""
def get_tracking_entities(self) -> list[str]:
"""Get extra entities to track for state changes."""
class EntityStateMatcher(SubProfileMatcher):
def __init__(
self,
hass: HomeAssistant,
source_entity: SourceEntity | None,
entity_id: str,
mapping: dict[str, str],
) -> None:
self._hass = hass
if source_entity:
entity_id = entity_id.replace(
"{{source_object_id}}",
source_entity.object_id,
)
self._entity_id = entity_id
self._mapping = mapping
def match(self, entity_state: State, source_entity: SourceEntity) -> str | None:
state = self._hass.states.get(self._entity_id)
if state is None:
return None
return self._mapping.get(state.state)
def get_tracking_entities(self) -> list[str]:
return [self._entity_id]
class AttributeMatcher(SubProfileMatcher):
def __init__(self, attribute: str, mapping: dict[str, str]) -> None:
self._attribute = attribute
self._mapping = mapping
def match(self, entity_state: State, source_entity: SourceEntity) -> str | None:
val = entity_state.attributes.get(self._attribute)
if val is None:
return None
return self._mapping.get(val)
def get_tracking_entities(self) -> list[str]:
return []
class EntityIdMatcher(SubProfileMatcher):
def __init__(self, pattern: str, profile: str) -> None:
self._pattern = pattern
self._profile = profile
def match(self, entity_state: State, source_entity: SourceEntity) -> str | None:
if re.search(self._pattern, entity_state.entity_id):
return self._profile
return None
def get_tracking_entities(self) -> list[str]:
return []
class IntegrationMatcher(SubProfileMatcher):
def __init__(self, integration: str, profile: str) -> None:
self._integration = integration
self._profile = profile
def match(self, entity_state: State, source_entity: SourceEntity) -> str | None:
registry_entry = source_entity.entity_entry
if not registry_entry:
return None
if registry_entry.platform == self._integration:
return self._profile
return None
def get_tracking_entities(self) -> list[str]:
return []
| [
"[email protected]"
]
| |
f06e381bb59634a56edccfa28b2c65ece1dd503f | 0cc075bb2b1c30c257dc5eafa1e309ee9f5bfadc | /tests/__init__.py | ebd6f8d11eadd944b39f2ea50e1f67c0fadf85b2 | []
| no_license | gcaaa31928/BusTrackerTaipeiAppiumTesting | d2f49e39db41b9507d8f05338defed7c72f3e456 | 1363ca72e10369affd4397223d150556b9172995 | refs/heads/master | 2021-01-21T14:48:32.558734 | 2016-06-21T19:24:39 | 2016-06-21T19:24:39 | 57,986,263 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 154 | py | from . import test_basic
from . import test_app
from . import test_nearby_page
from . import test_nearby_page_stations
from . import test_nearby_page_info
| [
"[email protected]"
]
| |
b78dfb29942edd235434f9bf380f779d177759f5 | 06ad345f69a2f91b5c5a730bb4943f04cba93a44 | /Pygame/08_functions_and_graphics.py | 17721ec3ef30096ed81712cd5443420abe7f5daf | []
| no_license | whyj107/Python | 61bd58d202350616a322b7b75086976f354bda9b | 3c3649ca8c6ac6908ac14d6af201d508cc4fbf30 | refs/heads/master | 2021-04-13T23:49:13.470657 | 2021-03-27T10:08:39 | 2021-03-27T10:08:39 | 249,195,717 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,593 | py | """
Sample Python/Pygame Programs
Simpson College Computer Science
http://programarcadegames.com/
http://simpson.edu/computer-science/
Explanation video: http://youtu.be/_XdrKSDmzqA
"""
# Import a library of functions called 'pygame'
import pygame
def draw_snowman(screen, x, y):
""" --- Function for a snowman ---
Define a function that will draw a snowman at a certain location.
"""
pygame.draw.ellipse(screen, WHITE, [35 + x, 0 + y, 25, 25])
pygame.draw.ellipse(screen, WHITE, [23 + x, 20 + y, 50, 50])
pygame.draw.ellipse(screen, WHITE, [0 + x, 65 + y, 100, 100])
# Initialize the game engine
pygame.init()
# Define the colors we will use in RGB format
BLACK = [0, 0, 0]
WHITE = [255, 255, 255]
# Set the height and width of the screen
size = [400, 500]
screen = pygame.display.set_mode(size)
# Loop until the user clicks the close button.
done = False
clock = pygame.time.Clock()
while not done:
for event in pygame.event.get():
if event.type == pygame.QUIT:
done = True
# Clear the screen and set the screen background
screen.fill(BLACK)
# Snowman in upper left
draw_snowman(screen, 10, 10)
# Snowman in upper right
draw_snowman(screen, 300, 10)
# Snowman in lower left
draw_snowman(screen, 10, 300)
# Go ahead and update the screen with what we've drawn.
# This MUST happen after all the other drawing commands.
pygame.display.flip()
# This limits the while loop to a max of 60 times per second.
# Leave this out and we will use all CPU we can.
clock.tick(60) | [
"[email protected]"
]
| |
0b90c23e619a3ae46e07bc5db9c5a8cdcf249e3e | 4a76ac7ad1aaeec44729ab6d5b121b1cae0d910c | /Week 9/pathSum3.py | 4ff31f3c3811e17e09b8c3e224c844bfd576a84c | []
| no_license | kalmad99/CompetitiveProgramming | 2d825e839faa9e13ef43dbb45498bd3eef6723ab | 6cbb1f12f7670d0016fa2af8f2dd597d9123070d | refs/heads/main | 2023-03-25T20:18:23.389396 | 2021-03-24T21:36:52 | 2021-03-24T21:36:52 | 325,816,614 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,225 | py | # Definition for a binary tree node.
class TreeNode:
def __init__(self, val=0, left=None, right=None):
self.val = val
self.left = left
self.right = right
class Solution:
    def pathSum(self, root: TreeNode, sum: int) -> int:
        # Prefix-sum DFS: `total` maps each running prefix sum to how many
        # times it occurs on the current root-to-node path, so every node can
        # count the downward paths ending at it in O(1).
        total, result, counter = {0: 1}, 0, [0]
if not root:
return 0
else:
self.dfs(root, total, result, counter, sum)
return counter[0]
def dfs(self, node, total, result, counter, k):
if not node.left and not node.right:
if result + node.val - k in total:
counter[0] += total[result + node.val - k]
return
else:
result += node.val
if result - k in total:
counter[0] += total[result - k]
if result in total:
total[result] += 1
else:
total[result] = 1
if node.left:
self.dfs(node.left, total, result, counter, k)
if node.right:
self.dfs(node.right, total, result, counter, k)
            # Backtrack: remove this node's prefix sum before returning up.
            if total[result] == 1:
total.pop(result)
else:
total[result] -= 1
result -= node.val | [
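
if __name__ == "__main__":
    # Minimal usage sketch (the tree and target below are illustrative only):
    #       10
    #      /  \
    #     5   -3
    # Exactly one downward path sums to 15: [10, 5].
    root = TreeNode(10, TreeNode(5), TreeNode(-3))
    print(Solution().pathSum(root, 15))  # 1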
"[email protected]"
]
| |
edc85322d46ee344788712c6051ad5c5a397f1bf | 03f9b8bdea312636afb4df3737b55cb0cc4b21ff | /AddTwoNumbers.py | 7895bccd673b9da3b411d758be1b46902a6bbfaa | []
| no_license | ellinx/LC-python | f29dd17bbe15407ba0d06ad68386efdc9a343b56 | 9190d3d178f1733aa226973757ee7e045b7bab00 | refs/heads/master | 2021-06-01T15:21:24.379811 | 2020-10-29T04:37:07 | 2020-10-29T04:37:07 | 132,704,788 | 1 | 1 | null | 2019-05-15T03:26:11 | 2018-05-09T05:13:26 | Python | UTF-8 | Python | false | false | 1,404 | py | """
You are given two non-empty linked lists representing two non-negative integers.
The digits are stored in reverse order and each of their nodes contain a single digit.
Add the two numbers and return it as a linked list.
You may assume the two numbers do not contain any leading zero, except the number 0 itself.
Example:
Input: (2 -> 4 -> 3) + (5 -> 6 -> 4)
Output: 7 -> 0 -> 8
Explanation: 342 + 465 = 807.
"""
# Definition for singly-linked list.
class ListNode:
    def __init__(self, x):
        self.val = x
        self.next = None
class Solution:
def addTwoNumbers(self, l1, l2):
"""
:type l1: ListNode
:type l2: ListNode
:rtype: ListNode
"""
dummy = ListNode(0)
cur = dummy
c = 0
while l1 is not None and l2 is not None:
s = l1.val+l2.val+c
c = s//10
s %= 10
cur.next = ListNode(s)
cur, l1, l2 = cur.next, l1.next, l2.next
while l1 is not None:
s = l1.val+c
c = s//10
s %= 10
cur.next = ListNode(s)
cur, l1 = cur.next, l1.next
while l2 is not None:
s = l2.val+c
c = s//10
s %= 10
cur.next = ListNode(s)
cur, l2 = cur.next, l2.next
if c>0:
cur.next = ListNode(c)
return dummy.next
| [
"[email protected]"
]
| |
900fbf0db76c5448cde1af59a49eaad0854051d5 | c7663b9ca3ccbef97e311408b18aca7630fd3d9d | /src/python/a.py | e630a6dcb60d8f696d2a104d790ec4bfd1d11931 | []
| no_license | majkl99/programming_examples | 8b5e1857491001129d944beabaa60442f84c95a0 | cca5971eb507485b47cf7e44b28e203a276184b3 | refs/heads/master | 2020-06-01T19:42:41.824314 | 2013-07-12T17:10:02 | 2013-07-12T17:10:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 27 | py | def f():
return 1
f()
| [
"[email protected]"
]
| |
a8b91d81f59792d3e00a09a34b0fe6942174e581 | a0a8fbda5657169ad180ae7008a505caff500182 | /courses/admin.py | 8e17ab02c432207b3c3e296d96d3896576f94f59 | []
| no_license | WellingtonIdeao/educa-learning | 4e32d88c741dd7e2fbe45a4194c0ee1970697ca0 | 393c4849e59ee7533e7048d75b42aea2e3e64121 | refs/heads/main | 2023-06-26T04:29:50.741232 | 2021-07-24T18:13:28 | 2021-07-24T18:13:28 | 386,041,014 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 656 | py | from django.contrib import admin
from .models import Subject, Course, Module
# use memcache admin index site
admin.site.index_template = 'memcache_status/admin_index.html'
@admin.register(Subject)
class SubjectAdmin(admin.ModelAdmin):
list_display = ('title', 'slug')
prepopulated_fields = {'slug': ('title',)}
class ModuleInline(admin.StackedInline):
model = Module
@admin.register(Course)
class CourseAdmin(admin.ModelAdmin):
list_display = ('title', 'subject', 'created')
list_filter = ('created', 'subject')
search_fields = ('title', 'overview')
prepopulated_fields = {'slug': ('title',)}
inlines = [ModuleInline] | [
"[email protected]"
]
| |
8b34bc1e8dd03cd34cb5b8d1adc629cdc9f628c6 | b7b2f80ab5e1ee0ea028576e3014b62b8d3a8d7e | /pyserv/pyserv-010/pycli_enc.py | 4a49b1d65c35c16af0fadd3f6430ecdc0b665737 | []
| no_license | pglen/pgpygtk | 4d1405478a714f003984cf3e3db04ff1f767470b | 33f58010e304f1a312f2356de453ecedb7aa21ef | refs/heads/master | 2021-01-22T01:18:52.238415 | 2019-01-01T01:37:24 | 2019-01-01T01:37:24 | 102,215,955 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,291 | py | #!/usr/bin/env python
# ------------------------------------------------------------------------
# Test client for the pyserv project. Encrypt test.
import os, sys, getopt, signal, select, socket, time, struct
import random, stat
import pyserv.pydata, pyserv.pyservsup, pyserv.pycrypt
from pyserv.pyclisup import *
# ------------------------------------------------------------------------
# Globals
version = 1.0
# ------------------------------------------------------------------------
# Functions from command line
def phelp():
print
print "Usage: " + os.path.basename(sys.argv[0]) + " [options]"
print
print "Options: -d level - Debug level 0-10"
print " -p port - Port to use (default: 9999)"
print " -v - Verbose"
print " -q - Quiet"
print " -h - Help"
print
sys.exit(0)
def pversion():
print os.path.basename(sys.argv[0]), "Version", version
sys.exit(0)
# option, var_name, initial_val, function
optarr = \
["d:", "pgdebug", 0, None], \
["p:", "port", 9999, None], \
["v", "verbose", 0, None], \
["q", "quiet", 0, None], \
["t", "test", "x", None], \
["V", None, None, pversion], \
["h", None, None, phelp] \
conf = Config(optarr)
# ------------------------------------------------------------------------
if __name__ == '__main__':
args = conf.comline(sys.argv[1:])
if len(args) == 0:
ip = '127.0.0.1'
else:
ip = args[0]
s1 = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
init_handler(s1)
try:
s1.connect((ip, conf.port))
except:
print "Cannot connect to:", ip + ":" + str(conf.port), sys.exc_info()[1]
sys.exit(1)
client(s1, "ver", conf.verbose)
client(s1, "user peter", conf.verbose)
client(s1, "pass 1234", conf.verbose)
xkey = "" #"1234"
#client(s1, "ekey " + xkey, conf.verbose)
client(s1, "ver ", conf.verbose, xkey)
client(s1, "ekey ", conf.verbose, xkey)
xkey = ""
client(s1, "ver ", conf.verbose, xkey)
client(s1, "quit", conf.verbose, xkey)
s1.close();
sys.exit(0)
| [
"[email protected]"
]
| |
43a7a2f7b8f4cd2cf1ef4e1d2914694c7c2d63f4 | 6a95b330e1beec08b917ff45eccfd6be3fd4629f | /kubernetes/test/test_v1beta2_replica_set_spec.py | 90125f05d99575fb7f67ab126e67908b79902a85 | [
"Apache-2.0"
]
| permissive | TokkoLabs/client-python | f4a83d6540e64861b59e322c951380a670578d7f | f1ad9c6889105d8510472606c98f8d3807f82020 | refs/heads/master | 2023-07-14T01:36:46.152341 | 2017-12-21T21:32:11 | 2017-12-21T21:32:11 | 115,042,671 | 0 | 0 | Apache-2.0 | 2021-08-06T03:29:17 | 2017-12-21T20:05:15 | Python | UTF-8 | Python | false | false | 1,003 | py | # coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.8.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import os
import sys
import unittest
import kubernetes.client
from kubernetes.client.rest import ApiException
from kubernetes.client.models.v1beta2_replica_set_spec import V1beta2ReplicaSetSpec
class TestV1beta2ReplicaSetSpec(unittest.TestCase):
""" V1beta2ReplicaSetSpec unit test stubs """
def setUp(self):
pass
def tearDown(self):
pass
def testV1beta2ReplicaSetSpec(self):
"""
Test V1beta2ReplicaSetSpec
"""
# FIXME: construct object with mandatory attributes with example values
#model = kubernetes.client.models.v1beta2_replica_set_spec.V1beta2ReplicaSetSpec()
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
d998e32f40906238b53714101a60abb3a69745ed | 550116c99a6b4572945324dddb7bd70d66f9a8e2 | /src/Lists_Basics_More_Exercises/04_Battle_Ships.py | 11666b8448ab02aa5f19bb88e2f831a38ac655d8 | []
| no_license | ivelinakaraivanova/SoftUniPythonFundamentals | 810cc92796b335f31bae662fa255de66418fb1fd | 31bb4bbe817a65400bc444dbc5b692bd2ef94432 | refs/heads/main | 2022-12-28T08:53:33.587999 | 2020-10-18T16:15:09 | 2020-10-18T16:15:09 | 305,124,614 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 384 | py | n = int(input())
field = []
for i in range(n):
split_row = list(map(int, input().split(" ")))
field.append(split_row)
attacked = input().split(" ")
ships = 0
for item in attacked:
row = int(item.split("-")[0])
col = int(item.split("-")[1])
if field[row][col] > 0:
field[row][col] -= 1
if field[row][col] == 0:
ships += 1
print(ships) | [
"[email protected]"
]
| |
9e33d666bbf4ced1f73f62d0f4f02a6ca92f334f | 938c55df0653b377318cd434f0fedb97036cfe7d | /day26/flask_web/app.py | a21e33c290a456cd124540302ff1aaf6fe620363 | []
| no_license | elliajen/pyworks | 6f754d0caaa4d110549f7704ade72f0002e63adb | a24a7c02f338fa8d7cfdab5a0d8bc005532dfa99 | refs/heads/master | 2023-08-26T17:27:11.893396 | 2021-10-22T04:28:43 | 2021-10-22T04:28:43 | 402,286,435 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 452 | py | # 웹서버(local 내 컴퓨터) 가동
from flask import Flask
app = Flask(__name__) #flask 클래스에서 app 객체 생성
@app.route('/') #라우트(경로 설정)
def index():
return "<h1>Hello~ Flask!</h1>" #인텍스페이지에서 문자 출력
@app.route('/login')
def login():
return "<h2>로그인 페이지입니다.</h2>"
@app.route('/member')
def member():
return "<h2>회원가입 페이지입니다.</h2>"
app.run() | [
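
# Usage note: by default app.run() serves on http://127.0.0.1:5000/; open
# /, /login, or /member in a browser to see each route's response.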
"[email protected]"
]
| |
d7c4ef7b34329b7553264d71940737cdf5fc1cdd | 29bd55d171733586f24f42151d44f4312b6a610e | /keras/keras102_lamda.py | 91b649ecbf888df8c63544cd18c8276d7f322857 | []
| no_license | votus777/AI_study | 66ab1da2b8e760d0c52b0ed2b2f74158e14f435b | f4e38d95690c8ee84d87c02dc20a1ea59c495f04 | refs/heads/master | 2022-12-04T15:52:14.855624 | 2020-08-20T06:12:52 | 2020-08-20T06:12:52 | 262,975,960 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 196 | py |
gradient = lambda x : 2*x - 4
def gradient2(x) :
temp = 2*x - 4
return temp
# These are two different ways of expressing the same function.
x = 3
print(gradient(x))
print(gradient2(x))
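
# Both definitions produce the same value for any input:
assert gradient(x) == gradient2(x)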
| [
"[email protected]"
]
| |
4ca23f52cc73ef2d7d1dca3de553acebb8354157 | e9ef3cd143478660d098668a10e67544a42b5878 | /Lib/corpuscrawler/crawl_vag.py | ad0c810fa086afc10452c3e9d5ab39025ec7411c | [
"Apache-2.0"
]
| permissive | google/corpuscrawler | a5c790c19b26e6397b768ce26cf12bbcb641eb90 | 10adaecf4ed5a7d0557c8e692c186023746eb001 | refs/heads/master | 2023-08-26T04:15:59.036883 | 2022-04-20T08:18:11 | 2022-04-20T08:18:11 | 102,909,145 | 119 | 40 | NOASSERTION | 2022-04-20T08:18:12 | 2017-09-08T22:21:03 | Python | UTF-8 | Python | false | false | 809 | py | # Copyright 2018 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, print_function, unicode_literals
from corpuscrawler.util import crawl_bibleis
def crawl(crawler):
out = crawler.get_output(language='vag')
crawl_bibleis(crawler, out, bible='VAGWBT')
| [
"[email protected]"
]
| |
039ccf93cec028a95a10e965719e2644dea90629 | f445450ac693b466ca20b42f1ac82071d32dd991 | /generated_tempdir_2019_09_15_163300/generated_part004501.py | ed45a8b978dd61582172fdc173ae6454577b7843 | []
| no_license | Upabjojr/rubi_generated | 76e43cbafe70b4e1516fb761cabd9e5257691374 | cd35e9e51722b04fb159ada3d5811d62a423e429 | refs/heads/master | 2020-07-25T17:26:19.227918 | 2019-09-15T15:41:48 | 2019-09-15T15:41:48 | 208,357,412 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,292 | py | from sympy.abc import *
from matchpy.matching.many_to_one import CommutativeMatcher
from matchpy import *
from matchpy.utils import VariableWithCount
from collections import deque
from multiset import Multiset
from sympy.integrals.rubi.constraints import *
from sympy.integrals.rubi.utility_function import *
from sympy.integrals.rubi.rules.miscellaneous_integration import *
from sympy import *
class CommutativeMatcher96697(CommutativeMatcher):
_instance = None
patterns = {
0: (0, Multiset({}), [
(VariableWithCount('i2.4.1.0', 1, 1, None), Mul),
(VariableWithCount('i2.4.1.0_1', 1, 1, S(1)), Mul)
])
}
subjects = {}
subjects_by_id = {}
bipartite = BipartiteGraph()
associative = Mul
max_optional_count = 1
anonymous_patterns = set()
def __init__(self):
self.add_subject(None)
@staticmethod
def get():
if CommutativeMatcher96697._instance is None:
CommutativeMatcher96697._instance = CommutativeMatcher96697()
return CommutativeMatcher96697._instance
@staticmethod
def get_match_iter(subject):
subjects = deque([subject]) if subject is not None else deque()
subst0 = Substitution()
# State 96696
return
yield
from collections import deque | [
"[email protected]"
]
| |
68e38d1f78e67803869ed7a572190babe8b2f9b7 | 41de4210af23a8a8a3ca7dd090bb51faecf4a0c8 | /lib/python3.5/site-packages/statsmodels/regression/recursive_ls.py | 9765f8474537019ac360f604b9cc30b332e0d99c | [
"Python-2.0"
]
| permissive | randybrown-github/ziplineMacOS | 42a0c2bfca2a54baa03d2803dc41317647811285 | eb5872c0903d653e19f259f0800fb7aecee0ee5c | refs/heads/master | 2022-11-07T15:51:39.808092 | 2020-06-18T20:06:42 | 2020-06-18T20:06:42 | 272,631,387 | 0 | 1 | null | 2022-11-02T03:21:45 | 2020-06-16T06:48:53 | Python | UTF-8 | Python | false | false | 26,190 | py | """
Recursive least squares model
Author: Chad Fulton
License: Simplified-BSD
"""
from __future__ import division, absolute_import, print_function
from warnings import warn
from statsmodels.compat.collections import OrderedDict
import numpy as np
import pandas as pd
from statsmodels.regression.linear_model import OLS
from statsmodels.tools.data import _is_using_pandas
from statsmodels.tsa.statespace.mlemodel import (
MLEModel, MLEResults, MLEResultsWrapper)
from statsmodels.tools.tools import Bunch
from statsmodels.tools.decorators import cache_readonly, resettable_cache
import statsmodels.base.wrapper as wrap
# Columns are alpha = 0.1, 0.05, 0.025, 0.01, 0.005
_cusum_squares_scalars = np.array([
[1.0729830, 1.2238734, 1.3581015, 1.5174271, 1.6276236],
[-0.6698868, -0.6700069, -0.6701218, -0.6702672, -0.6703724],
[-0.5816458, -0.7351697, -0.8858694, -1.0847745, -1.2365861]
])
class RecursiveLS(MLEModel):
r"""
Recursive least squares
Parameters
----------
endog : array_like
The observed time-series process :math:`y`
exog : array_like
Array of exogenous regressors, shaped nobs x k.
Notes
-----
Recursive least squares (RLS) corresponds to expanding window ordinary
least squares (OLS).
This model applies the Kalman filter to compute recursive estimates of the
coefficients and recursive residuals.
References
----------
.. [*] Durbin, James, and Siem Jan Koopman. 2012.
Time Series Analysis by State Space Methods: Second Edition.
Oxford University Press.
"""
def __init__(self, endog, exog, **kwargs):
# Standardize data
if not _is_using_pandas(endog, None):
endog = np.asanyarray(endog)
exog_is_using_pandas = _is_using_pandas(exog, None)
if not exog_is_using_pandas:
exog = np.asarray(exog)
# Make sure we have 2-dimensional array
if exog.ndim == 1:
if not exog_is_using_pandas:
exog = exog[:, None]
else:
exog = pd.DataFrame(exog)
self.k_exog = exog.shape[1]
# Handle coefficient initialization
# By default, do not calculate likelihood while it is controlled by
# diffuse initial conditions.
kwargs.setdefault('loglikelihood_burn', self.k_exog)
kwargs.setdefault('initialization', 'approximate_diffuse')
kwargs.setdefault('initial_variance', 1e9)
# Initialize the state space representation
super(RecursiveLS, self).__init__(
endog, k_states=self.k_exog, exog=exog, **kwargs
)
# Setup the state space representation
self['design'] = self.exog[:, :, None].T
self['transition'] = np.eye(self.k_states)
# Notice that the filter output does not depend on the measurement
# variance, so we set it here to 1
self['obs_cov', 0, 0] = 1.
@classmethod
def from_formula(cls, formula, data, subset=None):
"""
Not implemented for state space models
"""
return super(MLEModel, cls).from_formula(formula, data, subset)
def fit(self):
"""
Fits the model by application of the Kalman filter
Returns
-------
RecursiveLSResults
"""
# Get the smoother results with an arbitrary measurement variance
smoother_results = self.smooth(return_ssm=True)
# Compute the MLE of sigma2 (see Harvey, 1989 equation 4.2.5)
resid = smoother_results.standardized_forecasts_error[0]
sigma2 = (np.inner(resid, resid) /
(self.nobs - self.loglikelihood_burn))
# Now construct a results class, where the params are the final
# estimates of the regression coefficients
self['obs_cov', 0, 0] = sigma2
return self.smooth()
def filter(self, return_ssm=False, **kwargs):
# Get the state space output
result = super(RecursiveLS, self).filter([], transformed=True,
cov_type='none',
return_ssm=True, **kwargs)
# Wrap in a results object
if not return_ssm:
params = result.filtered_state[:, -1]
cov_kwds = {
'custom_cov_type': 'nonrobust',
'custom_cov_params': result.filtered_state_cov[:, :, -1],
'custom_description': ('Parameters and covariance matrix'
' estimates are RLS estimates'
' conditional on the entire sample.')
}
result = RecursiveLSResultsWrapper(
RecursiveLSResults(self, params, result, cov_type='custom',
cov_kwds=cov_kwds)
)
return result
def smooth(self, return_ssm=False, **kwargs):
# Get the state space output
result = super(RecursiveLS, self).smooth([], transformed=True,
cov_type='none',
return_ssm=True, **kwargs)
# Wrap in a results object
if not return_ssm:
params = result.filtered_state[:, -1]
cov_kwds = {
'custom_cov_type': 'nonrobust',
'custom_cov_params': result.filtered_state_cov[:, :, -1],
'custom_description': ('Parameters and covariance matrix'
' estimates are RLS estimates'
' conditional on the entire sample.')
}
result = RecursiveLSResultsWrapper(
RecursiveLSResults(self, params, result, cov_type='custom',
cov_kwds=cov_kwds)
)
return result
@property
def param_names(self):
return self.exog_names
@property
def start_params(self):
# Only parameter is the measurement disturbance standard deviation
return np.zeros(0)
def update(self, params, **kwargs):
"""
Update the parameters of the model
Updates the representation matrices to fill in the new parameter
values.
Parameters
----------
params : array_like
Array of new parameters.
transformed : boolean, optional
Whether or not `params` is already transformed. If set to False,
            `transform_params` is called. Default is True.
Returns
-------
params : array_like
Array of parameters.
"""
pass
class RecursiveLSResults(MLEResults):
"""
Class to hold results from fitting a recursive least squares model.
Parameters
----------
model : RecursiveLS instance
The fitted model instance
Attributes
----------
specification : dictionary
Dictionary including all attributes from the recursive least squares
model instance.
See Also
--------
statsmodels.tsa.statespace.kalman_filter.FilterResults
statsmodels.tsa.statespace.mlemodel.MLEResults
"""
def __init__(self, model, params, filter_results, cov_type='opg',
**kwargs):
super(RecursiveLSResults, self).__init__(
model, params, filter_results, cov_type, **kwargs)
self.df_resid = np.inf # attribute required for wald tests
# Save _init_kwds
self._init_kwds = self.model._get_init_kwds()
# Save the model specification
self.specification = Bunch(**{
'k_exog': self.model.k_exog})
@property
def recursive_coefficients(self):
"""
Estimates of regression coefficients, recursively estimated
Returns
-------
out: Bunch
Has the following attributes:
- `filtered`: a time series array with the filtered estimate of
the component
- `filtered_cov`: a time series array with the filtered estimate of
the variance/covariance of the component
- `smoothed`: a time series array with the smoothed estimate of
the component
- `smoothed_cov`: a time series array with the smoothed estimate of
the variance/covariance of the component
- `offset`: an integer giving the offset in the state vector where
this component begins
"""
out = None
spec = self.specification
start = offset = 0
end = offset + spec.k_exog
out = Bunch(
filtered=self.filtered_state[start:end],
filtered_cov=self.filtered_state_cov[start:end, start:end],
smoothed=None, smoothed_cov=None,
offset=offset
)
if self.smoothed_state is not None:
out.smoothed = self.smoothed_state[start:end]
if self.smoothed_state_cov is not None:
out.smoothed_cov = (
self.smoothed_state_cov[start:end, start:end])
return out
@cache_readonly
def resid_recursive(self):
"""
Recursive residuals
Returns
-------
resid_recursive : array_like
An array of length `nobs` holding the recursive
residuals.
Notes
-----
The first `k_exog` residuals are typically unreliable due to
initialization.
"""
# See Harvey (1989) section 5.4; he defines the standardized
# innovations in 5.4.1, but they have non-unit variance, whereas
# the standardized forecast errors assume unit variance. To convert
# to Harvey's definition, we need to multiply by the standard
# deviation.
return (self.filter_results.standardized_forecasts_error.squeeze() *
self.filter_results.obs_cov[0, 0]**0.5)
@cache_readonly
def cusum(self):
r"""
Cumulative sum of standardized recursive residuals statistics
Returns
-------
cusum : array_like
An array of length `nobs - k_exog` holding the
CUSUM statistics.
Notes
-----
The CUSUM statistic takes the form:
.. math::
W_t = \frac{1}{\hat \sigma} \sum_{j=k+1}^t w_j
where :math:`w_j` is the recursive residual at time :math:`j` and
:math:`\hat \sigma` is the estimate of the standard deviation
from the full sample.
Excludes the first `k_exog` datapoints.
Due to differences in the way :math:`\hat \sigma` is calculated, the
output of this function differs slightly from the output in the
R package strucchange and the Stata contributed .ado file cusum6. The
calculation in this package is consistent with the description of
Brown et al. (1975)
References
----------
.. [*] Brown, R. L., J. Durbin, and J. M. Evans. 1975.
"Techniques for Testing the Constancy of
Regression Relationships over Time."
Journal of the Royal Statistical Society.
Series B (Methodological) 37 (2): 149-92.
"""
llb = self.loglikelihood_burn
return (np.cumsum(self.resid_recursive[self.loglikelihood_burn:]) /
np.std(self.resid_recursive[llb:], ddof=1))
@cache_readonly
def cusum_squares(self):
r"""
Cumulative sum of squares of standardized recursive residuals
statistics
Returns
-------
cusum_squares : array_like
An array of length `nobs - k_exog` holding the
CUSUM of squares statistics.
Notes
-----
The CUSUM of squares statistic takes the form:
.. math::
s_t = \left ( \sum_{j=k+1}^t w_j^2 \right ) \Bigg /
\left ( \sum_{j=k+1}^T w_j^2 \right )
where :math:`w_j` is the recursive residual at time :math:`j`.
Excludes the first `k_exog` datapoints.
References
----------
.. [*] Brown, R. L., J. Durbin, and J. M. Evans. 1975.
"Techniques for Testing the Constancy of
Regression Relationships over Time."
Journal of the Royal Statistical Society.
Series B (Methodological) 37 (2): 149-92.
"""
numer = np.cumsum(self.resid_recursive[self.loglikelihood_burn:]**2)
denom = numer[-1]
return numer / denom
def plot_recursive_coefficient(self, variables=0, alpha=0.05,
legend_loc='upper left', fig=None,
figsize=None):
r"""
Plot the recursively estimated coefficients on a given variable
Parameters
----------
variables : int or str or iterable of int or string, optional
Integer index or string name of the variable whose coefficient will
be plotted. Can also be an iterable of integers or strings. Default
is the first variable.
alpha : float, optional
The confidence intervals for the coefficient are (1 - alpha) %
legend_loc : string, optional
The location of the legend in the plot. Default is upper left.
fig : Matplotlib Figure instance, optional
If given, subplots are created in this figure instead of in a new
figure. Note that the grid will be created in the provided
figure using `fig.add_subplot()`.
figsize : tuple, optional
If a figure is created, this argument allows specifying a size.
The tuple is (width, height).
Notes
-----
All plots contain (1 - `alpha`) % confidence intervals.
"""
# Get variables
if isinstance(variables, (int, str)):
variables = [variables]
k_variables = len(variables)
# If a string was given for `variable`, try to get it from exog names
exog_names = self.model.exog_names
for i in range(k_variables):
variable = variables[i]
if isinstance(variable, str):
variables[i] = exog_names.index(variable)
# Create the plot
from scipy.stats import norm
from statsmodels.graphics.utils import _import_mpl, create_mpl_fig
plt = _import_mpl()
fig = create_mpl_fig(fig, figsize)
for i in range(k_variables):
variable = variables[i]
ax = fig.add_subplot(k_variables, 1, i + 1)
# Get dates, if applicable
if hasattr(self.data, 'dates') and self.data.dates is not None:
dates = self.data.dates._mpl_repr()
else:
dates = np.arange(self.nobs)
llb = self.loglikelihood_burn
# Plot the coefficient
coef = self.recursive_coefficients
ax.plot(dates[llb:], coef.filtered[variable, llb:],
label='Recursive estimates: %s' % exog_names[variable])
# Legend
handles, labels = ax.get_legend_handles_labels()
# Get the critical value for confidence intervals
if alpha is not None:
critical_value = norm.ppf(1 - alpha / 2.)
# Plot confidence intervals
std_errors = np.sqrt(coef.filtered_cov[variable, variable, :])
ci_lower = (
coef.filtered[variable] - critical_value * std_errors)
ci_upper = (
coef.filtered[variable] + critical_value * std_errors)
ci_poly = ax.fill_between(
dates[llb:], ci_lower[llb:], ci_upper[llb:], alpha=0.2
)
ci_label = ('$%.3g \\%%$ confidence interval'
% ((1 - alpha)*100))
# Only add CI to legend for the first plot
if i == 0:
# Proxy artist for fill_between legend entry
# See http://matplotlib.org/1.3.1/users/legend_guide.html
p = plt.Rectangle((0, 0), 1, 1,
fc=ci_poly.get_facecolor()[0])
handles.append(p)
labels.append(ci_label)
ax.legend(handles, labels, loc=legend_loc)
# Remove xticks for all but the last plot
if i < k_variables - 1:
ax.xaxis.set_ticklabels([])
fig.tight_layout()
return fig
def _cusum_significance_bounds(self, alpha, ddof=0, points=None):
"""
Parameters
----------
alpha : float, optional
The significance bound is alpha %.
ddof : int, optional
The number of periods additional to `k_exog` to exclude in
constructing the bounds. Default is zero. This is usually used
only for testing purposes.
points : iterable, optional
The points at which to evaluate the significance bounds. Default is
two points, beginning and end of the sample.
Notes
-----
Comparing against the cusum6 package for Stata, this does not produce
exactly the same confidence bands (which are produced in cusum6 by
lw, uw) because they burn the first k_exog + 1 periods instead of the
first k_exog. If this change is performed
(so that `tmp = (self.nobs - llb - 1)**0.5`), then the output here
matches cusum6.
The cusum6 behavior does not seem to be consistent with
Brown et al. (1975); it is likely they did that because they needed
three initial observations to get the initial OLS estimates, whereas
we do not need to do that.
"""
# Get the constant associated with the significance level
if alpha == 0.01:
scalar = 1.143
elif alpha == 0.05:
scalar = 0.948
elif alpha == 0.10:
scalar = 0.950
else:
raise ValueError('Invalid significance level.')
# Get the points for the significance bound lines
llb = self.loglikelihood_burn
tmp = (self.nobs - llb - ddof)**0.5
upper_line = lambda x: scalar * tmp + 2 * scalar * (x - llb) / tmp
if points is None:
points = np.array([llb, self.nobs])
return -upper_line(points), upper_line(points)
def plot_cusum(self, alpha=0.05, legend_loc='upper left',
fig=None, figsize=None):
r"""
Plot the CUSUM statistic and significance bounds.
Parameters
----------
alpha : float, optional
The plotted significance bounds are alpha %.
legend_loc : string, optional
The location of the legend in the plot. Default is upper left.
fig : Matplotlib Figure instance, optional
If given, subplots are created in this figure instead of in a new
figure. Note that the grid will be created in the provided
figure using `fig.add_subplot()`.
figsize : tuple, optional
If a figure is created, this argument allows specifying a size.
The tuple is (width, height).
Notes
-----
Evidence of parameter instability may be found if the CUSUM statistic
moves out of the significance bounds.
References
----------
.. [*] Brown, R. L., J. Durbin, and J. M. Evans. 1975.
"Techniques for Testing the Constancy of
Regression Relationships over Time."
Journal of the Royal Statistical Society.
Series B (Methodological) 37 (2): 149-92.
"""
# Create the plot
from statsmodels.graphics.utils import _import_mpl, create_mpl_fig
plt = _import_mpl()
fig = create_mpl_fig(fig, figsize)
ax = fig.add_subplot(1, 1, 1)
# Get dates, if applicable
if hasattr(self.data, 'dates') and self.data.dates is not None:
dates = self.data.dates._mpl_repr()
else:
dates = np.arange(self.nobs)
llb = self.loglikelihood_burn
# Plot cusum series and reference line
ax.plot(dates[llb:], self.cusum, label='CUSUM')
ax.hlines(0, dates[llb], dates[-1], color='k', alpha=0.3)
# Plot significance bounds
lower_line, upper_line = self._cusum_significance_bounds(alpha)
ax.plot([dates[llb], dates[-1]], upper_line, 'k--',
label='%d%% significance' % (alpha * 100))
ax.plot([dates[llb], dates[-1]], lower_line, 'k--')
ax.legend(loc=legend_loc)
return fig
def _cusum_squares_significance_bounds(self, alpha, points=None):
"""
Notes
-----
Comparing against the cusum6 package for Stata, this does not produce
exactly the same confidence bands (which are produced in cusum6 by
lww, uww) because they use a different method for computing the
critical value; in particular, they use tabled values from
Table C, pp. 364-365 of "The Econometric Analysis of Time Series"
Harvey, (1990), and use the value given to 99 observations for any
larger number of observations. In contrast, we use the approximating
critical values suggested in Edgerton and Wells (1994) which allows
computing relatively good approximations for any number of
observations.
"""
# Get the approximate critical value associated with the significance
# level
llb = self.loglikelihood_burn
n = 0.5 * (self.nobs - llb) - 1
try:
ix = [0.1, 0.05, 0.025, 0.01, 0.005].index(alpha / 2)
except ValueError:
raise ValueError('Invalid significance level.')
scalars = _cusum_squares_scalars[:, ix]
crit = scalars[0] / n**0.5 + scalars[1] / n + scalars[2] / n**1.5
# Get the points for the significance bound lines
if points is None:
points = np.array([llb, self.nobs])
line = (points - llb) / (self.nobs - llb)
return line - crit, line + crit
def plot_cusum_squares(self, alpha=0.05, legend_loc='upper left',
fig=None, figsize=None):
r"""
Plot the CUSUM of squares statistic and significance bounds.
Parameters
----------
alpha : float, optional
The plotted significance bounds are alpha %.
legend_loc : string, optional
The location of the legend in the plot. Default is upper left.
fig : Matplotlib Figure instance, optional
If given, subplots are created in this figure instead of in a new
figure. Note that the grid will be created in the provided
figure using `fig.add_subplot()`.
figsize : tuple, optional
If a figure is created, this argument allows specifying a size.
The tuple is (width, height).
Notes
-----
Evidence of parameter instability may be found if the CUSUM of squares
statistic moves out of the significance bounds.
Critical values used in creating the significance bounds are computed
using the approximate formula of [1]_.
References
----------
.. [*] Brown, R. L., J. Durbin, and J. M. Evans. 1975.
"Techniques for Testing the Constancy of
Regression Relationships over Time."
Journal of the Royal Statistical Society.
Series B (Methodological) 37 (2): 149-92.
.. [1] Edgerton, David, and Curt Wells. 1994.
"Critical Values for the Cusumsq Statistic
in Medium and Large Sized Samples."
Oxford Bulletin of Economics and Statistics 56 (3): 355-65.
"""
# Create the plot
from statsmodels.graphics.utils import _import_mpl, create_mpl_fig
plt = _import_mpl()
fig = create_mpl_fig(fig, figsize)
ax = fig.add_subplot(1, 1, 1)
# Get dates, if applicable
if hasattr(self.data, 'dates') and self.data.dates is not None:
dates = self.data.dates._mpl_repr()
else:
dates = np.arange(self.nobs)
llb = self.loglikelihood_burn
# Plot cusum series and reference line
ax.plot(dates[llb:], self.cusum_squares, label='CUSUM of squares')
ref_line = (np.arange(llb, self.nobs) - llb) / (self.nobs - llb)
ax.plot(dates[llb:], ref_line, 'k', alpha=0.3)
# Plot significance bounds
lower_line, upper_line = self._cusum_squares_significance_bounds(alpha)
ax.plot([dates[llb], dates[-1]], upper_line, 'k--',
label='%d%% significance' % (alpha * 100))
ax.plot([dates[llb], dates[-1]], lower_line, 'k--')
ax.legend(loc=legend_loc)
return fig
class RecursiveLSResultsWrapper(MLEResultsWrapper):
_attrs = {}
_wrap_attrs = wrap.union_dicts(MLEResultsWrapper._wrap_attrs,
_attrs)
_methods = {}
_wrap_methods = wrap.union_dicts(MLEResultsWrapper._wrap_methods,
_methods)
wrap.populate_wrapper(RecursiveLSResultsWrapper, RecursiveLSResults)
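
if __name__ == "__main__":
    # Minimal usage sketch (synthetic data for illustration only; `np` is
    # already imported at the top of this module).
    np.random.seed(0)
    nobs = 100
    exog = np.column_stack((np.ones(nobs), np.random.normal(size=nobs)))
    endog = exog.dot(np.array([1.0, 0.5])) + np.random.normal(size=nobs)
    res = RecursiveLS(endog, exog).fit()
    print(res.params)      # final recursive coefficient estimates
    print(res.cusum[:5])   # CUSUM statistics (first k_exog points excluded)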
| [
"[email protected]"
]
| |
3e87a4ddd10506d80c44ca4532849331eefd3f4b | 1bebf42f17c558488fce0bea5c58b55fd049c286 | /django_table_filtering/wsgi.py | 6962142610ad722b2c01ceb12560021ec1f4c8a0 | [
"Unlicense"
]
| permissive | ckbelvedere/django_table_filtering | 60ab100fde25e9fbe4e38f7e7b5b4257f45d08fa | d82b89a871779b6a000945da14455c9acb8c47bb | refs/heads/master | 2021-01-02T22:30:01.663587 | 2016-11-22T11:22:44 | 2016-11-22T11:22:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 421 | py | """
WSGI config for django_table_filtering project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "django_table_filtering.settings")
application = get_wsgi_application()
| [
"[email protected]"
]
| |
51e8fae298d3258f1bc488df509e29b0599f907b | 5fe72bb13baf3649058ebe11aa86ad4fc56c69ed | /hard-gists/299905/snippet.py | 463be82ad5d7f9e9ed4f498a8c2cafdb1dd3dfbf | [
"Apache-2.0"
]
| permissive | dockerizeme/dockerizeme | 8825fed45ff0ce8fb1dbe34959237e8048900a29 | 408f3fa3d36542d8fc1236ba1cac804de6f14b0c | refs/heads/master | 2022-12-10T09:30:51.029846 | 2020-09-02T13:34:49 | 2020-09-02T13:34:49 | 144,501,661 | 24 | 20 | Apache-2.0 | 2022-11-21T12:34:29 | 2018-08-12T21:21:04 | Python | UTF-8 | Python | false | false | 1,239 | py | "Memcached cache backend"
from django.core.cache.backends import memcached
from django.utils.encoding import smart_unicode, smart_str
MIN_COMPRESS_LEN = 150000
class CacheClass(memcached.CacheClass):
def add(self, key, value, timeout=None, min_compress_len=MIN_COMPRESS_LEN):
if isinstance(value, unicode):
value = value.encode('utf-8')
if timeout is None:
timeout = self.default_timeout
return self._cache.add(smart_str(key), value, timeout, min_compress_len)
def set(self, key, value, timeout=None, min_compress_len=MIN_COMPRESS_LEN):
if isinstance(value, unicode):
value = value.encode('utf-8')
if timeout is None:
timeout = self.default_timeout
        # Pass min_compress_len through so large values get compressed,
        # matching add() and set_many().
        self._cache.set(smart_str(key), value, timeout, min_compress_len)
def set_many(self, data, timeout=None, min_compress_len=MIN_COMPRESS_LEN):
safe_data = {}
for key, value in data.items():
if isinstance(value, unicode):
value = value.encode('utf-8')
safe_data[smart_str(key)] = value
if timeout is None:
timeout = self.default_timeout
self._cache.set_multi(safe_data, timeout, min_compress_len=min_compress_len)
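
# A minimal configuration sketch for wiring this old-style backend into
# Django (the dotted module path below is an assumption -- point it at
# wherever this CacheClass lives in your project):
#
# CACHE_BACKEND = 'myproject.compressed_memcached://127.0.0.1:11211/'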
| [
"[email protected]"
]
| |
d23bdd30e8190210edd9107c2e3d8da1127f0046 | 24c5c46f1d281fc15de7f6b72a5148ae85f89fb4 | /SRC/unittest/case_interface_bak.py | b566f32234c2a6425ea57ff17e5e87a1788e972a | []
| no_license | enterpriseih/easyTest | 22d87c7ffe40fb10a07f7c5cdd505f63dd45adc0 | 43b8d294e898f25055c78313cfece2753352c250 | refs/heads/master | 2023-08-23T22:55:14.798341 | 2020-02-11T09:13:43 | 2020-02-11T09:13:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,127 | py | # coding=utf-8
import functools
import json
import unittest as UT
import requests
from SRC import settings
from SRC.common.const import RunStatus, RequestMethod, RequestDataType, RunResult
from SRC.common.decorator import assert_dec, codeException_dec
from SRC.common.exceptions import JsonLoadsException
from SRC.common.fileHelper import isNoneOrEmpty
from SRC.common.loga import putSystemLog
from SRC.common.param import Param
from SRC.common.utils import impClass, isAbsoluteUrl
from SRC.interface_info import projectClass
'''
Interface test case
'''
class TestCase(UT.TestCase):
def __init__(self, jsonParam):
super(TestCase, self).__init__('runTest')
self.__param = Param(jsonParam['paramsDict'])
self.logger = jsonParam['logger']
self.scriptId = jsonParam['scriptId']
        self.projectObj = None  # object holding product/project information
        self.requestData = None  # request parameters
        self.response = None  # response result
@property
def param(self):
'''
        Parameter-driven attribute name.
:return:
'''
return self.__param
def setUp(self):
        putSystemLog('Start running script %s' % (str(self.__class__)), self.logger)
        try:
            self.initProjectObj()  # dynamically initialize the object for the specific project
            self.initRequestData()  # initialize the request parameter data
        except JsonLoadsException as e:
            putSystemLog(e, self.logger, True, RunStatus.RUNNING, RunResult.ERROR, True, 'Exception')
        except Exception as e:
            putSystemLog('[ERROR-2007-0]: exception raised while initializing test case data. '
                         'Please check that the parameters are configured correctly %s' % e,
                         self.logger, True, RunStatus.RUNNING,
                         RunResult.ERROR,
                         True, 'Exception')
raise
def initRequestData(self):
        dataType = self.param.dataType  # request data type
        if dataType == RequestDataType.JSON:  # the request type is JSON
            self.requestData = self.getFullRequestJsonData()
    def initProjectObj(self):
        project = impClass(projectClass[self.param.projectClass])  # dynamically obtain the class
        self.projectObj = project(self.param.data.replace("'", "\""))  # initialize a project object
        self.setAbsoluteUrl()  # set the url
    def setAbsoluteUrl(self):
        if not isAbsoluteUrl(self.param.url):
            self.param.url = self.projectObj.getFullUrl(self.param.url)  # get the full url
def getFullRequestJsonData(self):
return self.projectObj.getFullRequestData()
@codeException_dec('3')
def runTest(self):
url = self.param.url
method = self.param.method
data = self.requestData
        putSystemLog('Project abbreviation: %s' % (self.param.projectClass), self.logger, True, RunStatus.RUNNING,
                     RunResult.PASS, False, 'Project abbreviation')
        putSystemLog('Interface under test: %s' % (url), self.logger, True, RunStatus.RUNNING, RunResult.PASS, False,
                     'Interface under test')
        putSystemLog('Request method: %s' % (method), self.logger, True, RunStatus.RUNNING, RunResult.PASS, False,
                     'Request method')
        putSystemLog('Request data type: %s' % (self.param.dataType), self.logger, True, RunStatus.RUNNING,
                     RunResult.PASS, False, 'Request data type')
        putSystemLog('Response data type: %s' % (self.param.expectType), self.logger, True, RunStatus.RUNNING,
                     RunResult.PASS, False, 'Response data type')
        putSystemLog('Request parameters: %s' % (json.dumps(self.requestData, indent=4)), self.logger, True,
                     RunStatus.RUNNING, RunResult.PASS, False, 'Request parameters')
if method == RequestMethod.GET:
self.response = requests.get(url, params=data)
elif method == RequestMethod.POST:
self.response = requests.post(url, data=data)
def compareResult(self):
param = self.param
r = self.response
expectType = param.expectType
        putSystemLog('Response status: %s' % (r.status_code), self.logger, True, RunStatus.RUNNING, RunResult.PASS, True, 'Response status')
if expectType == RequestDataType.JSON:
if isNoneOrEmpty(self.param.expect):
pass
else:
compareResult = self.compare()
                # Bug fix: both result values must be passed as a tuple to the
                # format string, otherwise this line raises a TypeError.
                putSystemLog('Json comparison result: %s, %s' % (compareResult[0], compareResult[1]), self.logger,
                             True, RunStatus.RUNNING, RunResult.PASS, True, 'Json comparison result')
elif expectType == RequestDataType.STRING:
putSystemLog(r.text, self.logger)
def tearDown(self):
self.compareResult()
        putSystemLog('Script run finished...', self.logger)
def compare(self):
'''
        Override this method in a subclass to customize the comparison.
:return:
'''
return self.__compareJson(keyCompare=True, formatCompare=True, valueCompare=True, equal=False)
def __compareJson(self, keyCompare=True, formatCompare=True, valueCompare=False, equal=False):
try:
expectJson = json.loads(self.param.expect.strip().replace("'", "\""))
resultJson = json.loads(self.response.text.strip().replace("'", "\""))
compareResult = (False, '')
if keyCompare:
compareResult = self.projectObj.compareKey(expectJson, resultJson, equal)
if not compareResult[0]:
return compareResult
if formatCompare:
compareResult = self.projectObj.compareFormat(expectJson, resultJson, equal)
if not compareResult[0]:
return compareResult
if valueCompare:
compareResult = self.projectObj.compareAllValue(expectJson, resultJson, equal)
if not compareResult[0]:
return compareResult
return compareResult
except:
raise
| [
"yaolihui0506"
]
| yaolihui0506 |
df7fd05f0a9a837110aabf93f811e9f0522e6c47 | 2bdb128188c40c670fd0a26ca8a447cae58a8848 | /tests/commands/test_command_sharer.py | 670f1b14dde21a3390b02c5eb845a98bb1b06c84 | []
| no_license | cl33per/chat_thief | 3de834cbb722a36f37c5e033e3f8c7672c325ec4 | afb7660eacb3b5f476367eb388b4e6981a49e54a | refs/heads/master | 2022-11-14T13:07:49.213617 | 2020-07-01T16:27:45 | 2020-07-01T16:27:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,401 | py | import pytest
from chat_thief.commands.command_sharer import CommandSharer
from chat_thief.models.user import User
from chat_thief.models.command import Command
from tests.support.database_setup import DatabaseConfig
class TestCommandSharer(DatabaseConfig):
def test_share(self):
user = User("fake_user")
user.update_cool_points(1)
command = Command("damn")
friend = User("bizmarkie")
command.allow_user(user.name)
assert user.name in command.users()
assert friend.name not in command.users()
subject = CommandSharer(
user=user.name, command=command.name, friend=friend.name,
)
subject.share()
assert user.name in command.users()
assert friend.name in command.users()
assert user.cool_points() == 0
assert command.cost() == 3
def test_broke_boi_share(self):
user = User("fake_user")
command = Command("damn")
friend = User("bizmarkie")
command.allow_user(user.name)
assert user.name in command.users()
assert friend.name not in command.users()
subject = CommandSharer(
user=user.name, command=command.name, friend=friend.name,
)
subject.share()
assert user.name in command.users()
assert friend.name not in command.users()
assert command.cost() == 1
| [
"[email protected]"
]
| |
66a91e069e433fe6749b553ace1b1cfe2a703151 | bad62c2b0dfad33197db55b44efeec0bab405634 | /sdk/sql/azure-mgmt-sql/azure/mgmt/sql/aio/operations/_restorable_dropped_databases_operations.py | c80d04d117ea91e660881b8a916c7bb7cfcefcdf | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
]
| permissive | test-repo-billy/azure-sdk-for-python | 20c5a2486456e02456de17515704cb064ff19833 | cece86a8548cb5f575e5419864d631673be0a244 | refs/heads/master | 2022-10-25T02:28:39.022559 | 2022-10-18T06:05:46 | 2022-10-18T06:05:46 | 182,325,031 | 0 | 0 | MIT | 2019-07-25T22:28:52 | 2019-04-19T20:59:15 | Python | UTF-8 | Python | false | false | 8,928 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Optional, TypeVar
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._restorable_dropped_databases_operations import build_get_request, build_list_by_server_request
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class RestorableDroppedDatabasesOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.sql.aio.SqlManagementClient`'s
:attr:`restorable_dropped_databases` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace
def list_by_server(
self, resource_group_name: str, server_name: str, **kwargs: Any
) -> AsyncIterable["_models.RestorableDroppedDatabase"]:
"""Gets a list of restorable dropped databases.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
:param server_name: The name of the server. Required.
:type server_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either RestorableDroppedDatabase or the result of
cls(response)
:rtype:
~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.sql.models.RestorableDroppedDatabase]
:raises ~azure.core.exceptions.HttpResponseError:
"""
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-05-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.RestorableDroppedDatabaseListResult]
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
def prepare_request(next_link=None):
if not next_link:
request = build_list_by_server_request(
resource_group_name=resource_group_name,
server_name=server_name,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.list_by_server.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
else:
request = HttpRequest("GET", next_link)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
request.method = "GET"
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize("RestorableDroppedDatabaseListResult", pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(get_next, extract_data)
list_by_server.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/restorableDroppedDatabases"} # type: ignore
@distributed_trace_async
async def get(
self, resource_group_name: str, server_name: str, restorable_dropped_database_id: str, **kwargs: Any
) -> _models.RestorableDroppedDatabase:
"""Gets a restorable dropped database.
:param resource_group_name: The name of the resource group that contains the resource. You can
obtain this value from the Azure Resource Manager API or the portal. Required.
:type resource_group_name: str
:param server_name: The name of the server. Required.
:type server_name: str
:param restorable_dropped_database_id: Required.
:type restorable_dropped_database_id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: RestorableDroppedDatabase or the result of cls(response)
:rtype: ~azure.mgmt.sql.models.RestorableDroppedDatabase
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-05-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[_models.RestorableDroppedDatabase]
request = build_get_request(
resource_group_name=resource_group_name,
server_name=server_name,
restorable_dropped_database_id=restorable_dropped_database_id,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.get.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize("RestorableDroppedDatabase", pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Sql/servers/{serverName}/restorableDroppedDatabases/{restorableDroppedDatabaseId}"} # type: ignore
| [
"[email protected]"
]
| |
9625b08110ec98c0f1151eaf9b340266f63fcddd | 579e19a7b861e0549874b6e263c24aa418f3fdfc | /samples/QueryChangeNotification.py | 992136bbfffae0d1ea7199297095a6391b650545 | [
"LicenseRef-scancode-unknown-license-reference",
"BSD-3-Clause"
]
| permissive | 33bunny/python-cx_Oracle | 3f724df9a19ffd3f68e168fe7c57e4e057c8a780 | 26c3a4c2439642f9598bc9b1ca0d6ddf73890428 | refs/heads/master | 2021-05-07T15:17:18.873197 | 2017-11-06T20:41:59 | 2017-11-06T20:41:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,702 | py | #------------------------------------------------------------------------------
# Copyright 2016, 2017, Oracle and/or its affiliates. All rights reserved.
#
# Portions Copyright 2007-2015, Anthony Tuininga. All rights reserved.
#
# Portions Copyright 2001-2007, Computronix (Canada) Ltd., Edmonton, Alberta,
# Canada. All rights reserved.
#------------------------------------------------------------------------------
#------------------------------------------------------------------------------
# QueryChangeNotification.py
# This script demonstrates using query change notification in Python, a
# feature that is available in Oracle 11g. Once this script is running, use
# another session to insert, update or delete rows from the table
# cx_Oracle.TestTempTable and you will see the notification of that change.
#
# This script requires cx_Oracle 5.3 or higher.
#------------------------------------------------------------------------------
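# For example, while this script is running you could execute statements like
# the following from a second session (the column values are illustrative and
# must match the actual definition of TestTempTable):
#
#     insert into TestTempTable values (1, 'test');
#     commit;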
from __future__ import print_function
import cx_Oracle
import SampleEnv
import threading
import time
registered = True
def callback(message):
global registered
print("Message type:", message.type)
if message.type == cx_Oracle.EVENT_DEREG:
print("Deregistration has taken place...")
registered = False
return
print("Message database name:", message.dbname)
print("Message queries:")
for query in message.queries:
print("--> Query ID:", query.id)
print("--> Query Operation:", query.operation)
for table in query.tables:
print("--> --> Table Name:", table.name)
print("--> --> Table Operation:", table.operation)
if table.rows is not None:
print("--> --> Table Rows:")
for row in table.rows:
print("--> --> --> Row RowId:", row.rowid)
print("--> --> --> Row Operation:", row.operation)
print("-" * 60)
print("=" * 60)
connection = cx_Oracle.Connection(SampleEnv.MAIN_CONNECT_STRING, events = True)
sub = connection.subscribe(callback = callback, timeout = 1800,
qos = cx_Oracle.SUBSCR_QOS_QUERY | cx_Oracle.SUBSCR_QOS_ROWIDS)
print("Subscription:", sub)
print("--> Connection:", sub.connection)
print("--> Callback:", sub.callback)
print("--> Namespace:", sub.namespace)
print("--> Protocol:", sub.protocol)
print("--> Timeout:", sub.timeout)
print("--> Operations:", sub.operations)
print("--> Rowids?:", bool(sub.qos & cx_Oracle.SUBSCR_QOS_ROWIDS))
queryId = sub.registerquery("select * from TestTempTable")
print("Registered query:", queryId)
while registered:
print("Waiting for notifications....")
time.sleep(5)
| [
"[email protected]"
]
| |
098d0ead2935f83e1c9bb6331d974b2bb2b08474 | f92c74b694ba08b272576fdfb8afcfe5beb43157 | /seleniumbase/console_scripts/run.py | 36c1579414708f1297a78acf0c4206d53fe6f77d | [
"MIT"
]
| permissive | priscilabertolazzi/SeleniumBase | e6842d4b048178a7cdcc9b4863b2fd26f9fab881 | 808ca141694939376d88ce3fac149e2e3f62942c | refs/heads/master | 2022-12-23T00:18:02.640208 | 2020-09-09T18:22:38 | 2020-09-09T18:22:38 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 31,260 | py | """
SeleniumBase console scripts runner
Usage:
seleniumbase [COMMAND] [PARAMETERS]
OR sbase [COMMAND] [PARAMETERS]
Examples:
sbase install chromedriver
sbase mkdir browser_tests
sbase mkfile new_test.py
sbase options
sbase convert old_webdriver_unittest.py
sbase print my_first_test.py -n
sbase translate my_first_test.py --zh -p
sbase extract-objects my_first_test.py
sbase inject-objects my_first_test.py
sbase objectify my_first_test.py
sbase revert-objects my_first_test.py
sbase encrypt
sbase decrypt
sbase download server
sbase grid-hub start
sbase grid-node start --hub=127.0.0.1
"""
import colorama
import sys
def show_usage():
show_basic_usage()
sc = ("")
sc += ('Type "sbase help [COMMAND]" for specific command info.\n')
sc += ('For info on all commands, type: "seleniumbase --help".\n')
sc += ('* (Use "pytest" for running tests) *\n')
c1 = colorama.Fore.BLUE + colorama.Back.LIGHTCYAN_EX
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
c4 = colorama.Fore.MAGENTA + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = sc.replace("seleniumbase", c1 + "selenium" + c2 + "base" + cr)
sc = sc.replace("sbase", c1 + "s" + c2 + "base" + cr)
sc = sc.replace("pytest", c3 + "pytest" + cr)
sc = sc.replace("--help", c4 + "--help" + cr)
sc = sc.replace("help", c4 + "help" + cr)
print(sc)
def show_basic_usage():
from seleniumbase.console_scripts import logo_helper
seleniumbase_logo = logo_helper.get_seleniumbase_logo()
print(seleniumbase_logo)
print("%s" % get_version()[0:1])
print("")
sc = ("")
sc += ('Usage: "seleniumbase [COMMAND] [PARAMETERS]"\n')
sc += ('(simplified): "sbase [COMMAND] [PARAMETERS]"\n')
sc += ("\n")
sc += ("Commands:\n")
sc += (" install [DRIVER_NAME] [OPTIONS]\n")
sc += (" mkdir [DIRECTORY_NAME]\n")
sc += (" mkfile [FILE_NAME.py]\n")
sc += (" options (display common pytest options)\n")
sc += (" convert [PYTHON_WEBDRIVER_UNITTEST_FILE]\n")
sc += (" print [FILE] [OPTIONS]\n")
sc += (" translate [SB_PYTHON_FILE] [LANGUAGE] [ACTION]\n")
sc += (" extract-objects [SB_PYTHON_FILE]\n")
sc += (" inject-objects [SB_PYTHON_FILE] [OPTIONS]\n")
sc += (" objectify [SB_PYTHON_FILE] [OPTIONS]\n")
sc += (" revert-objects [SB_PYTHON_FILE]\n")
sc += (" encrypt (OR: obfuscate)\n")
sc += (" decrypt (OR: unobfuscate)\n")
sc += (" download server (Selenium Server JAR file)\n")
sc += (" grid-hub [start|stop] [OPTIONS]\n")
sc += (" grid-node [start|stop] --hub=[HUB_IP] [OPTIONS]\n")
sc += (' * (EXAMPLE: "sbase install chromedriver latest") *\n')
sc += ("")
c1 = colorama.Fore.BLUE + colorama.Back.LIGHTCYAN_EX
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
cr = colorama.Style.RESET_ALL
sc = sc.replace("seleniumbase", c1 + "selenium" + c2 + "base" + cr)
sc = sc.replace("sbase", c1 + "s" + c2 + "base" + cr)
print(sc)
def show_install_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "install" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase install [DRIVER_NAME] [OPTIONS]")
print(" OR: sbase install [DRIVER_NAME] [OPTIONS]")
print(" (Drivers: chromedriver, geckodriver, edgedriver")
print(" iedriver, operadriver)")
print(" Options:")
print(" VERSION Specify the version.")
print(" (Default Chromedriver version = 2.44)")
print(' Use "latest" for the latest version.')
print(" -p OR --path Also copy the driver to /usr/local/bin")
print(" Example:")
print(" sbase install chromedriver")
print(" sbase install geckodriver")
print(" sbase install chromedriver 83.0.4103.39")
print(" sbase install chromedriver latest")
print(" sbase install chromedriver -p")
print(" sbase install chromedriver latest -p")
print(" Output:")
print(" Installs the chosen webdriver to seleniumbase/drivers/")
print(" (chromedriver is required for Chrome automation)")
print(" (geckodriver is required for Firefox automation)")
print(" (edgedriver is required for Microsoft Edge automation)")
print(" (iedriver is required for InternetExplorer automation)")
print(" (operadriver is required for Opera Browser automation)")
print("")
def show_mkdir_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "mkdir" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase mkdir [DIRECTORY_NAME]")
print(" OR: sbase mkdir [DIRECTORY_NAME]")
print(" Example:")
print(" sbase mkdir browser_tests")
print(" Output:")
print(" Creates a new folder for running SBase scripts.")
print(" The new folder contains default config files,")
print(" sample tests for helping new users get started,")
print(" and Python boilerplates for setting up customized")
print(" test frameworks.")
print("")
def show_mkfile_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "mkfile" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase mkfile [FILE_NAME.py]")
print(" OR: sbase mkfile [FILE_NAME.py]")
print(" Example:")
print(" sbase mkfile new_test.py")
print(" Options:")
print(" -b / --basic (Basic boilerplate / single-line test)")
print(" Language Options:")
print(" --en / --English | --zh / --Chinese")
print(" --nl / --Dutch | --fr / --French")
print(" --it / --Italian | --ja / --Japanese")
print(" --ko / --Korean | --pt / --Portuguese")
print(" --ru / --Russian | --es / --Spanish")
print(" Output:")
print(" Creates a new SBase test file with boilerplate code.")
print(" If the file already exists, an error is raised.")
print(" By default, uses English mode and creates a")
print(" boilerplate with the 5 most common SeleniumBase")
print(' methods, which are "open", "click", "update_text",')
print(' "assert_element", and "assert_text". If using the')
print(' basic boilerplate option, only the "open" method')
print(' is included.')
print("")
def show_convert_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "convert" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase convert [PYTHON_WEBDRIVER_UNITTEST_FILE]")
print(" OR: sbase convert [PYTHON_WEBDRIVER_UNITTEST_FILE]")
print(" Output:")
print(" Converts a Selenium IDE exported WebDriver unittest")
print(" file into a SeleniumBase file. Adds _SB to the new")
print(" file name while keeping the original file intact.")
print(" Works with Katalon Recorder scripts.")
print(" See: http://www.katalon.com/automation-recorder")
print("")
def show_print_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "print" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase print [FILE] [OPTIONS]")
print(" OR: sbase print [FILE] [OPTIONS]")
print(" Options:")
print(" -n (Add line Numbers to the rows)")
print(" Output:")
print(" Prints the code/text of any file")
print(" with syntax-highlighting.")
print("")
def show_translate_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "translate" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase translate [SB_FILE.py] [LANGUAGE] [ACTION]")
print(" OR: sbase translate [SB_FILE.py] [LANGUAGE] [ACTION]")
print(" Languages:")
print(" --en / --English | --zh / --Chinese")
print(" --nl / --Dutch | --fr / --French")
print(" --it / --Italian | --ja / --Japanese")
print(" --ko / --Korean | --pt / --Portuguese")
print(" --ru / --Russian | --es / --Spanish")
print(" Actions:")
print(" -p / --print (Print translation output to the screen)")
print(" -o / --overwrite (Overwrite the file being translated)")
print(" -c / --copy (Copy the translation to a new .py file)")
print(" Options:")
print(" -n (include line Numbers when using the Print action)")
print(" Output:")
print(" Translates a SeleniumBase Python file into the language")
print(' specified. Method calls and "import" lines get swapped.')
print(" Both a language and an action must be specified.")
print(' The "-p" action can be paired with one other action.')
print(' When running with "-c" (or "--copy"), the new file name')
    print('    will be the original name appended with an underscore')
print(" plus the 2-letter language code of the new language.")
print(' (Example: Translating "test_1.py" into Japanese with')
print(' "-c" will create a new file called "test_1_ja.py".)')
print("")
def show_extract_objects_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "extract-objects" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase extract-objects [SELENIUMBASE_PYTHON_FILE]")
print(" OR: sbase extract-objects [SELENIUMBASE_PYTHON_FILE]")
print(" Output:")
print(" Creates page objects based on selectors found in a")
print(" seleniumbase Python file and saves those objects to the")
print(' "page_objects.py" file in the same folder as the tests.')
print("")
def show_inject_objects_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "inject-objects" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase inject-objects [SELENIUMBASE_PYTHON_FILE]")
print(" OR: sbase inject-objects [SELENIUMBASE_PYTHON_FILE]")
print(" Options:")
print(" -c, --comments (Add object selectors to the comments.)")
print(" (Default: No added comments.)")
print(" Output:")
print(' Takes the page objects found in the "page_objects.py"')
print(' file and uses those to replace matching selectors in')
print(' the selected seleniumbase Python file.')
print("")
def show_objectify_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "objectify" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase objectify [SELENIUMBASE_PYTHON_FILE]")
print(" OR: sbase objectify [SELENIUMBASE_PYTHON_FILE]")
print(" Options:")
print(" -c, --comments (Add object selectors to the comments.)")
print(" (Default: No added comments.)")
print(" Output:")
print(' A modified version of the file where the selectors')
print(' have been replaced with variable names defined in')
print(' "page_objects.py", supporting the Page Object Pattern.')
print("")
print(' (seleniumbase "objectify" has the same outcome as')
print(' combining "extract-objects" with "inject-objects")')
print("")
def show_revert_objects_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "revert-objects" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase revert-objects [SELENIUMBASE_PYTHON_FILE]")
print(" OR: sbase revert-objects [SELENIUMBASE_PYTHON_FILE]")
print(" Options:")
print(" -c, --comments (Keep existing comments for the lines.)")
print(" (Default: No comments are kept.)")
print(" Output:")
print(' Reverts the changes made by "seleniumbase objectify" or')
print(' "seleniumbase inject-objects" when run against a')
print(' seleniumbase Python file. Objects will get replaced by')
print(' selectors stored in the "page_objects.py" file.')
print("")
def show_encrypt_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "encrypt OR obfuscate" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase encrypt || seleniumbase obfuscate")
print(" --OR--")
print(" sbase encrypt || sbase obfuscate")
print(" Output:")
print(" Runs the password encryption/obfuscation tool.")
print(" (Where you can enter a password to encrypt/obfuscate.)")
print("")
def show_decrypt_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "decrypt OR unobfuscate" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase decrypt || seleniumbase unobfuscate")
print(" --OR--")
print(" sbase decrypt || sbase unobfuscate")
print(" Output:")
print(" Runs the password decryption/unobfuscation tool.")
print(" (Where you can enter an encrypted password to decrypt.)")
print("")
def show_download_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "download" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase download server")
print(" OR: sbase download server")
print(" Output:")
print(" Downloads the Selenium Standalone Server.")
print(" (Server is required for using your own Selenium Grid.)")
print("")
def show_grid_hub_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "grid-hub" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase grid-hub {start|stop|restart} [OPTIONS]")
print(" OR: sbase grid-hub {start|stop|restart} [OPTIONS]")
print(" Options:")
print(" -v, --verbose (Increase verbosity of logging output.)")
print(" (Default: Quiet logging / not verbose.)")
print(" --timeout=TIMEOUT (Close idle browser after TIMEOUT.)")
print(" (The default TIMEOUT: 230 seconds.)")
print(" (Use --timeout=0 to skip timeouts.)")
print(" Example:")
print(" seleniumbase grid-hub start")
print(" Output:")
print(" Controls the Selenium Grid Hub Server, which allows")
print(" for running tests on multiple machines in parallel")
print(" to speed up test runs and reduce the total time")
print(" of test suite execution.")
print(' You can "start" or "stop" the Grid Hub server.')
print("")
def show_grid_node_usage():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = (" " + c2 + "** " + c3 + "grid-node" + c2 + " **" + cr)
print(sc)
print("")
print(" Usage:")
print(" seleniumbase grid-node {start|stop|restart} [OPTIONS]")
print(" OR: sbase grid-node {start|stop|restart} [OPTIONS]")
print(" Options:")
print(" --hub=[HUB_IP] (The Grid Hub IP Address to connect to.)")
print(" (Default: 127.0.0.1 if not set)")
print(" -v, --verbose (Increase verbosity of logging output.)")
print(" (Default: Quiet logging / not verbose.)")
print(" Example:")
print(" seleniumbase grid-node start --hub=127.0.0.1")
print(" Output:")
print(" Controls the Selenium Grid node, which serves as a")
print(" worker machine for your Selenium Grid Hub server.")
print(' You can "start" or "stop" the Grid node.')
print("")
def get_version():
import pkg_resources
version_info = None
try:
version_info = pkg_resources.require("seleniumbase")[0:1]
except Exception:
version_info = ["ERROR: Cannot detect version! Please reinstall!"]
return version_info
def show_version_info():
version = get_version()
print('\n%s\n' % version)
def show_options():
c1 = colorama.Fore.BLUE + colorama.Back.LIGHTCYAN_EX
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
sc = ("\n " + c2 + " ** " + c3 + " pytest CMD Options " + c2 + " ** " + cr)
print(sc)
print("")
line = "Here are some common pytest options to use with SeleniumBase:"
line = c1 + line + cr
print(line)
print("")
print('--browser=BROWSER (The web browser to use. Default: "chrome".)')
print('--headless (Run tests headlessly. Default mode on Linux OS.)')
print('--demo (Slow down and visually see test actions as they occur.)')
print('--slow (Slow down the automation. Faster than using Demo Mode.)')
print('--reuse-session / --rs (Reuse the browser session between tests.)')
print('--crumbs (Delete all cookies between tests reusing a session.)')
print('--maximize (Start tests with the web browser window maximized.)')
print("--incognito (Enable Chrome's Incognito mode.)")
print("--guest (Enable Chrome's Guest mode.)")
print('-m MARKER (Run tests with the specified pytest marker.)')
print('-n NUM (Multithread the tests using that many threads.)')
print('-v (Verbose mode. Prints the full names of each test run.)')
print('--html=report.html (Create a detailed pytest-html report.)')
print('--collect-only / --co (Only show discovered tests. No run.)')
print('--co -q (Only show full names of discovered tests. No run.)')
print('--trace (Enter Debug Mode immediately after starting any test.')
print(' n: Next line of method. s: Step through. c: Continue.)')
print('--pdb (Enter Debug Mode if a test fails. h: Help. c: Continue.')
print(' where: Stacktrace location. u: Up stack. d: Down stack.')
print(' longlist: Stack code. dir(): List objects in namespace.)')
print('-x (Stop running the tests after the first failure is reached.)')
print('--archive-logs (Archive old log files instead of deleting them.)')
print('--save-screenshot (Save a screenshot at the end of each test.)')
print('--check-js (Check for JavaScript errors after page loads.)')
print('--start-page=URL (The browser start page when tests begin.)')
print("--agent=STRING (Modify the web browser's User-Agent string.)")
print('--mobile (Use the mobile device emulator while running tests.)')
print('--metrics=STRING (Set mobile "CSSWidth,CSSHeight,PixelRatio".)')
print('--ad-block (Block some types of display ads after page loads.)')
print('--settings-file=FILE (Override default SeleniumBase settings.)')
print('--env=ENV (Set the test env. Access with "self.env" in tests.)')
print('--data=DATA (Extra test data. Access with "self.data" in tests.)')
print('--disable-csp (Disable the Content Security Policy of websites.)')
print('--server=SERVER (The Selenium Grid server/IP used for tests.)')
print('--port=PORT (The Selenium Grid port used by the test server.)')
print('--proxy=SERVER:PORT (Connect to a proxy server:port for tests.)')
print('--proxy=USER:PASS@SERVER:PORT (Use authenticated proxy server.)')
print("")
line = 'For the full list of ' + c2 + 'command-line options' + cr
line += ', type: "' + c1 + 'pytest' + cr + ' ' + c3 + '--help' + cr + '".'
print(line)
print("")
def show_detailed_help():
c2 = colorama.Fore.BLUE + colorama.Back.LIGHTGREEN_EX
c3 = colorama.Fore.BLUE + colorama.Back.LIGHTYELLOW_EX
c6 = colorama.Back.CYAN
cr = colorama.Style.RESET_ALL
show_basic_usage()
print(c6 + " " + c2 + " Commands: " + c6 + " ")
print(cr)
show_install_usage()
show_mkdir_usage()
show_mkfile_usage()
show_convert_usage()
show_print_usage()
show_translate_usage()
show_extract_objects_usage()
show_inject_objects_usage()
show_objectify_usage()
show_revert_objects_usage()
show_encrypt_usage()
show_decrypt_usage()
show_download_usage()
show_grid_hub_usage()
show_grid_node_usage()
print('* (Use "' + c3 + 'pytest' + cr + '" for running tests) *\n')
def main():
command = None
command_args = None
num_args = len(sys.argv)
if num_args == 1:
show_usage()
return
elif num_args == 2:
command = sys.argv[1]
command_args = []
elif num_args > 2:
command = sys.argv[1]
command_args = sys.argv[2:]
command = command.lower()
if command == "install":
if len(command_args) >= 1:
from seleniumbase.console_scripts import sb_install
sb_install.main()
else:
show_basic_usage()
show_install_usage()
elif command == "mkdir":
if len(command_args) >= 1:
from seleniumbase.console_scripts import sb_mkdir
sb_mkdir.main()
else:
show_basic_usage()
show_mkdir_usage()
elif command == "mkfile":
if len(command_args) >= 1:
from seleniumbase.console_scripts import sb_mkfile
sb_mkfile.main()
else:
show_basic_usage()
show_mkfile_usage()
elif command == "convert":
if len(command_args) == 1:
from seleniumbase.utilities.selenium_ide import convert_ide
convert_ide.main()
else:
show_basic_usage()
show_convert_usage()
elif command == "print":
if len(command_args) >= 1:
if sys.version_info[0] == 2:
colorama.init(autoreset=True)
c5 = colorama.Fore.RED + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
msg = '"sbase print" does NOT support Python 2! '
msg += 'Try using the Unix "cat" command instead!'
message = "\n" + c5 + msg + cr + "\n"
print("")
raise Exception(message)
from seleniumbase.console_scripts import sb_print
sb_print.main()
else:
show_basic_usage()
show_print_usage()
elif command == "translate":
if len(command_args) >= 1:
if sys.version_info[0] == 2:
colorama.init(autoreset=True)
c5 = colorama.Fore.RED + colorama.Back.LIGHTYELLOW_EX
cr = colorama.Style.RESET_ALL
msg = "The SeleniumBase Translator does NOT support Python 2!"
message = "\n" + c5 + msg + cr + "\n"
print("")
raise Exception(message)
from seleniumbase.translate import translator
translator.main()
else:
show_basic_usage()
show_translate_usage()
elif command == "extract-objects" or command == "extract_objects":
if len(command_args) >= 1:
from seleniumbase.console_scripts import objectify
objectify.extract_objects()
else:
show_basic_usage()
show_extract_objects_usage()
elif command == "inject-objects" or command == "inject_objects":
if len(command_args) >= 1:
from seleniumbase.console_scripts import objectify
objectify.inject_objects()
else:
show_basic_usage()
show_inject_objects_usage()
elif command == "objectify":
if len(command_args) >= 1:
from seleniumbase.console_scripts import objectify
objectify.objectify()
else:
show_basic_usage()
show_objectify_usage()
elif command == "revert-objects" or command == "revert_objects":
if len(command_args) >= 1:
from seleniumbase.console_scripts import objectify
objectify.revert_objects()
else:
show_basic_usage()
show_revert_objects_usage()
elif command == "encrypt" or command == "obfuscate":
if len(command_args) >= 0:
from seleniumbase.common import obfuscate
obfuscate.main()
else:
show_basic_usage()
show_encrypt_usage()
elif command == "decrypt" or command == "unobfuscate":
if len(command_args) >= 0:
from seleniumbase.common import unobfuscate
unobfuscate.main()
else:
show_basic_usage()
show_decrypt_usage()
elif command == "download":
if len(command_args) >= 1 and command_args[0].lower() == "server":
from seleniumbase.utilities.selenium_grid import (
download_selenium_server)
download_selenium_server.main(force_download=True)
else:
show_basic_usage()
show_download_usage()
elif command == "grid-hub" or command == "grid_hub":
if len(command_args) >= 1:
from seleniumbase.utilities.selenium_grid import grid_hub
grid_hub.main()
else:
show_basic_usage()
show_grid_hub_usage()
elif command == "grid-node" or command == "grid_node":
if len(command_args) >= 1:
from seleniumbase.utilities.selenium_grid import grid_node
grid_node.main()
else:
show_basic_usage()
show_grid_node_usage()
elif command == "version" or command == "--version":
if len(command_args) == 0:
show_version_info()
else:
show_basic_usage()
elif command == "options" or command == "--options":
show_options()
elif command == "help" or command == "--help":
if len(command_args) >= 1:
if command_args[0] == "install":
print("")
show_install_usage()
return
elif command_args[0] == "mkdir":
print("")
show_mkdir_usage()
return
elif command_args[0] == "mkfile":
print("")
show_mkfile_usage()
return
elif command_args[0] == "convert":
print("")
show_convert_usage()
return
elif command_args[0] == "print":
print("")
show_print_usage()
return
elif command_args[0] == "translate":
print("")
show_translate_usage()
return
elif command_args[0] == "extract-objects":
print("")
show_extract_objects_usage()
return
elif command_args[0] == "inject-objects":
print("")
show_inject_objects_usage()
return
elif command_args[0] == "objectify":
print("")
show_objectify_usage()
return
elif command_args[0] == "revert-objects":
print("")
show_revert_objects_usage()
return
elif command_args[0] == "encrypt":
print("")
show_encrypt_usage()
return
elif command_args[0] == "obfuscate":
print("")
show_encrypt_usage()
return
elif command_args[0] == "decrypt":
print("")
show_decrypt_usage()
return
elif command_args[0] == "unobfuscate":
print("")
show_decrypt_usage()
return
elif command_args[0] == "download":
print("")
show_download_usage()
return
elif command_args[0] == "grid-hub":
print("")
show_grid_hub_usage()
return
elif command_args[0] == "grid-node":
print("")
show_grid_node_usage()
return
show_detailed_help()
else:
show_usage()
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
229cda51cb090287639fac9f73866fc0fc07e7f9 | 76742bf1c7dee6a01a0a41402fe734eeb0da3d74 | /tango_with_django_project/populate_rango.py | e7ae46306107f30b3103b2bbed482e688ef88477 | []
| no_license | Zacharilius/tangoProject | e5490c80af3caaabe2cf132a40387db2574713dc | 305fa20e344f8ad24514dff959be3e4e3632645e | refs/heads/master | 2021-01-22T23:26:51.921743 | 2015-03-17T17:52:22 | 2015-03-17T17:52:22 | 29,359,319 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,679 | py | import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'tango_with_django_project.settings')
import django
django.setup()
from rango.models import Category, Page
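# Seeds the rango app with sample Category and Page rows. Re-running the
# script is safe because add_cat()/add_page() use get_or_create().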
def populate():
python_cat = add_cat('Python', 128, 64)
add_page(cat=python_cat,
title = "Official Python Tutorial",
url = "http://docs.python.org/2/tutorial/")
add_page(cat = python_cat,
title = "How to think like a computer scientist",
url = "http://www.greenteapress.com/thinkpython/")
add_page(cat = python_cat,
title = "Learn Python in 10 minutes",
url = "http://www.korokithakis.net/tutorials/python/")
django_cat = add_cat("Django", 64, 32)
add_page(cat = django_cat,
title = "Official Django Tutorial",
url = "http://bottlepy.org/docs/dev/")
add_page(cat = django_cat,
title = "Django Rocks",
url = "http://www.djangorocks.com/")
add_page(cat = django_cat,
title = "How to Tango with Django",
url = "http://www.tangowithdjango.com/")
frame_cat = add_cat("Other Frameworks", 32, 16)
add_page(cat = frame_cat,
title = "Bottle",
url = "http://bottlepy.org/docs/dev")
add_page(cat = frame_cat,
title = "Flask",
url = "http://flask.pocoo.org")
for c in Category.objects.all():
for p in Page.objects.filter(category = c):
print "- {0} - {1}".format(str(c), str(p))
def add_page(cat, title, url, views = 0):
p = Page.objects.get_or_create(category=cat, title = title)[0]
p.url = url
p.views = views
p.save()
return p
def add_cat(name, views=0, likes=0):
c = Category.objects.get_or_create(name=name)[0]
c.views = views
c.likes = likes
c.save()
return c
if __name__ == '__main__':
print "Starting Rango population script..."
populate()
| [
"[email protected]"
]
| |
40f6ac93d8d20662057715e8f2cbaa97ea5555a2 | 850d778687e3692ab2a38d4d2227391d92c21e6b | /atcoder.jp/code-formula-2014-qualb/code_formula_2014_qualB_c/Main.py | 64274ab66ae68c3b570b2c37dd74d7c974e4130a | []
| no_license | Valkyrja3607/AtCoder | 77e2e5e66c0e8e12bb902c35f679119c6576fad7 | 9218a50b1eb83e4498845d15d9dda41fab90ed73 | refs/heads/master | 2023-07-15T20:38:52.911301 | 2018-05-30T17:56:22 | 2018-05-30T17:56:22 | 294,980,006 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 902 | py | a=input()
b=input()
n=len(a)
import collections
l1=collections.Counter(list(a))
l2=collections.Counter(list(b))
if l1!=l2:
print("NO")
exit()
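# Each of the 3 allowed swaps fixes at most 2 mismatched positions, so more
# than 6 mismatches is hopeless. Characters from matching positions are kept
# in `stock` so the mismatch lists can be padded up to length 6, letting swaps
# be "wasted"; every sequence of 3 transpositions is then brute-forced.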
cn=0
s1=[]
s2=[]
l=[]
for i,j in zip(a,b):
if i!=j:
s1.append(i)
s2.append(j)
cn+=1
else:
l.append(i)
c=collections.Counter(l)
ll=c.most_common()
stock=[list(tup) for tup in ll]
if cn>6:
print("NO")
else:
while len(s1)<6:
if len(s1)==n:
break
stock[0][1]-=1
s1.append(stock[0][0])
s2.append(stock[0][0])
if stock[0][1]==0:
del stock[0]
import itertools
cn=len(s1)
swaps=list(itertools.combinations(list(range(cn)),2))
for p in itertools.product(swaps,repeat=3):
s=s1.copy()
for i,j in p:
s[i],s[j]=s[j],s[i]
if s==s2:
print("YES")
exit()
print("NO")
| [
"[email protected]"
]
| |
fb21e4714b35a21708dd10bbf7e8713cdec95421 | ae71e532468e861e3a9fcb90f613eddca267ace6 | /routes/class_reports.py | 72d6d93574c1858ca22af86f2c905041302f7431 | [
"CC-BY-4.0"
]
| permissive | soon14/proms-4.0 | 0b4ed398125e529c13dc8f0d9b0c14e0348ae5c6 | 6c3a1fd62c9394761664e100fc1dde50fd79dc11 | refs/heads/master | 2020-09-23T20:33:56.716317 | 2019-06-09T04:01:29 | 2019-06-09T04:01:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,412 | py | from .class_incoming import IncomingClass
import io
import uuid
from rdflib import Graph, Namespace, URIRef, Literal, RDF, XSD
from . import api_functions
import modules.rulesets.reports as report_rulesets
import settings
from modules.ldapi import LDAPI
from datetime import datetime
class IncomingReport(IncomingClass):
def __init__(self, request):
IncomingClass.__init__(self, request)
self.type = None
self._generate_named_graph_uri()
def valid(self):
"""Validates an incoming Report using direct tests (can it be parsed?) and appropriate RuleSets"""
# try to parse the Report data
try:
#print([item[1] for item in LDAPI.MIMETYPES_PARSERS if item[0] == self.request.mimetype][0])
self.graph = Graph().parse(
#io.StringIO(self.request.data),
data=self.request.data.decode(encoding="utf-8"),
format=[item[1] for item in LDAPI.MIMETYPES_PARSERS if item[0] == self.request.mimetype][0]
)
except Exception as e:
self.error_messages = ['The serialised data cannot be parsed. Is it valid RDF?',
'Parser says: ' + str(e)]
return False
# try to determine Report type
result = self.graph.query('''
PREFIX proms: <http://promsns.org/def/proms#>
SELECT DISTINCT ?type WHERE {
?r a ?type .
FILTER (?type = proms:BasicReport || ?type = proms:ExternalReport || ?type = proms:InternalReport)
}
''')
if len(result) != 1:
self.error_messages = [
'Could not determine Report type. Must be one of proms:BasicReport, proms:ExternalReport or '
'proms:InternalReport'
]
return False
else:
for row in result:
self.type = str(row[0])
# choose RuleSet based on Report type
if self.type == 'http://promsns.org/def/proms#BasicReport':
conformant_report = report_rulesets.BasicReport(self.graph)
elif self.type == 'http://promsns.org/def/proms#ExternalReport':
conformant_report = report_rulesets.ExternalReport(self.graph)
else: # self.report_type == 'InternalReport':
conformant_report = report_rulesets.InternalReport(self.graph)
if not conformant_report.passed:
self.error_messages = conformant_report.fail_reasons
return False
        # if the Report parsed, its type was determined, and it passed the relevant RuleSet, then it is valid
return True
def determine_uri(self):
"""Determines the URI for this Report"""
# TODO: replace these two SPARQL queries with one, use the inverse of the "placeholder" find
# if this Report has a placeholder URI, generate a new one
q = '''
SELECT ?uri
WHERE {
{ ?uri a <http://promsns.org/def/proms#BasicReport> . }
UNION
{ ?uri a <http://promsns.org/def/proms#ExternalReport> . }
UNION
{ ?uri a <http://promsns.org/def/proms#InternalReport> . }
FILTER regex(str(?uri), "placeholder")
}
'''
uri = None
for r in self.graph.query(q):
uri = r['uri']
if uri is not None:
self._generate_new_uri(uri)
else:
# since it has an existing URI, not a placeholder one, use the existing one
q = '''
SELECT ?uri
WHERE {
{ ?uri a <http://promsns.org/def/proms#BasicReport> . }
UNION
{ ?uri a <http://promsns.org/def/proms#ExternalReport> . }
UNION
{ ?uri a <http://promsns.org/def/proms#InternalReport> . }
}
'''
for r in self.graph.query(q):
self.uri = r['uri']
return True
def _generate_new_uri(self, old_uri):
# ask PROMS Server for a new Report URI
new_uri = settings.REPORT_BASE_URI + str(uuid.uuid4())
self.uri = new_uri
# add that new URI to the in-memory graph
api_functions.replace_uri(self.graph, old_uri, new_uri)
def _generate_named_graph_uri(self):
self.named_graph_uri = settings.REPORT_NAMED_GRAPH_BASE_URI + str(uuid.uuid4())
def generate_named_graph_metadata(self):
PROV = Namespace('http://www.w3.org/ns/prov#')
self.graph.bind('prov', PROV)
PROMS = Namespace('http://promsns.org/def/proms#')
self.graph.bind('proms', PROMS)
DCT = Namespace('http://purl.org/dc/terms/')
self.graph.bind('dct', DCT)
self.graph.add((
URIRef(self.named_graph_uri),
RDF.type,
PROMS.ReportNamedGraph
))
# ... the date this Report was sent to this PROMS Server
self.graph.add((
URIRef(self.named_graph_uri),
DCT.dateSubmitted,
Literal(datetime.now().isoformat(), datatype=XSD.dateTime)
))
# ... who contributed this Report
self.graph.add((
URIRef(self.named_graph_uri),
DCT.contributor,
URIRef(self.request.remote_addr)
))
| [
"[email protected]"
]
| |
9535709abf88b3bbdab94e25bd9aad683cadde85 | 63b0fed007d152fe5e96640b844081c07ca20a11 | /ARC/ARC122/a.py | 9e9961e0f2d5cbd1eda85e6f6dbb01fe302c0653 | []
| no_license | Nikkuniku/AtcoderProgramming | 8ff54541c8e65d0c93ce42f3a98aec061adf2f05 | fbaf7b40084c52e35c803b6b03346f2a06fb5367 | refs/heads/master | 2023-08-21T10:20:43.520468 | 2023-08-12T09:53:07 | 2023-08-12T09:53:07 | 254,373,698 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 241 | py | n=int(input())
a=list(map(int,input().split()))
dp0=[0]
dp1=[0]
mod=10**9+7
x=0
y=1
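# ARC 122 A: sum the value of a1±a2±...±an over every sign pattern in which
# '-' never immediately follows '-'. As I read the recurrence, dp0[i]/dp1[i]
# hold the totals over patterns where a[i] takes '+'/'-', and x, y are the
# Fibonacci-growing counts of those two pattern classes.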
for i in range(n):
p=a[i]
q=dp0[-1]
r=dp1[-1]
dp0.append(q+r+y*p)
dp1.append(q-x*p)
x,y=y,x+y
ans=(dp0[n]+dp1[n])%mod
print(ans)
| [
"[email protected]"
]
| |
953361aa2a76f53cdaddda8221489ba1aab88156 | 9b6f36f544af5a2c1c042b18dda920c78fd11331 | /omsBackend/apps/zbmanager/serializers.py | dcfe3133e33a0c61dbd9aaed1ab434d86e103d18 | []
| no_license | Nikita-stels/MyOms | a946f08b4ba7abfa8392e98c579320b501a7ca2a | fdaf9d5a2a29b5386c1a86fcf89a2c0d5527687a | refs/heads/master | 2022-09-17T20:40:45.228067 | 2020-01-08T14:41:04 | 2020-01-08T14:41:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 668 | py | # -*- coding: utf-8 -*-
# author: huashaw
from rest_framework import serializers
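# Read-only serializers mirroring host, host-group and template objects as
# returned by the Zabbix API; nested structures are passed through as JSON.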
class ZbHostSerializer(serializers.Serializer):
hostid = serializers.IntegerField()
host = serializers.CharField()
status = serializers.CharField()
groups = serializers.JSONField()
parentTemplates = serializers.JSONField()
interfaces = serializers.JSONField()
class ZbHostGroupSerializer(serializers.Serializer):
groupid = serializers.IntegerField()
name = serializers.CharField()
hosts = serializers.JSONField()
class ZbTemplateSerializer(serializers.Serializer):
templateid = serializers.IntegerField()
host = serializers.CharField()
| [
"[email protected]"
]
| |
9d3d87b1db818f478f4aa85b0c257eee39b0700b | c609730a43596a2d3303f072fc97d9cf681fac7b | /cagey/usedcar/main_haoche99.py | 386a9e6ee701ee754cd28189f895ff6701bf3b18 | []
| no_license | sinnettluo/ChenProject | 5403311c0c7b78c484145e16d692abff00d2a110 | 0e33ecf1683afb22f1deb4bd54294c41aed8a46b | refs/heads/master | 2023-03-22T23:48:08.430178 | 2020-09-02T15:05:02 | 2020-09-02T15:05:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 191 | py | from scrapy.cmdline import execute
import sys
import os
website = "haoche99"
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
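# Equivalent to running "scrapy crawl haoche99" from the command line, after
# adding this file's directory to sys.path.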
execute(["scrapy", "crawl", website])
| [
"[email protected]"
]
| |
d3550d7689399933bc52ca671f322510fc34bf23 | d94c5849e6308901f9af8a4edf8c8369d46576d1 | /BOJ/14499_주사위 굴리기.py | 22ad001dcfef81e9fc7a3e7aee0a5e29963d830e | []
| no_license | AhnDogeon/algorithm_study | b4c961b934b5e27afccdf2713a2ccb0174d9a698 | b8de39fff92cc98281ba7e94df82bcc9b1503243 | refs/heads/master | 2022-06-05T11:33:27.392131 | 2022-05-23T06:37:50 | 2022-05-23T06:37:50 | 188,783,176 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,095 | py | import sys
from copy import deepcopy
sys.stdin = open('14499_주사위 굴리기.txt', 'r')
N, M, x, y, K = map(int, input().split())
board = []
for _ in range(N):
board_list = list(map(int, input().split()))
board.append(board_list)
move = list(map(int, input().split()))
# print(move)
#
# print('===========debugging=====================')
# for i in range(N):
# for j in range(M):
# print(board[i][j], end=' ')
# print()
# print('=====================================')
up = 0
middle = 0
left = 0
right = 0
down = 0
bottom = 0
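# Die state: `middle` is the top face (printed after every in-bounds move),
# `bottom` is the face touching the board, and up/down/left/right are the four
# side faces. Each roll permutes these six variables; the board cell and
# `bottom` then exchange values (the die writes its bottom onto a zero cell,
# otherwise it copies the cell's value and the cell is reset to zero).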
def RIGHT(a, b):
global board, up, middle, left, right, down, bottom
copy_up = deepcopy(up)
copy_middle = deepcopy(middle)
copy_left = deepcopy(left)
copy_right = deepcopy(right)
copy_down = deepcopy(down)
copy_bottom = deepcopy(bottom)
if board[a][b] == 0:
up = copy_up
middle = copy_left
left = copy_bottom
right = copy_middle
down = copy_down
bottom = copy_right
board[a][b] = bottom
else:
up = copy_up
middle = copy_left
left = copy_bottom
right = copy_middle
down = copy_down
bottom = board[a][b]
board[a][b] = 0
print(middle)
def LEFT(a, b):
global board, up, middle, left, right, down, bottom
copy_up = deepcopy(up)
copy_middle = deepcopy(middle)
copy_left = deepcopy(left)
copy_right = deepcopy(right)
copy_down = deepcopy(down)
copy_bottom = deepcopy(bottom)
if board[a][b] == 0:
up = copy_up
middle = copy_right
left = copy_middle
right = copy_bottom
down = copy_down
bottom = copy_left
board[a][b] = bottom
else:
up = copy_up
middle = copy_right
left = copy_middle
right = copy_bottom
down = copy_down
bottom = board[a][b]
board[a][b] = 0
print(middle)
def UP(a, b):
global board, up, middle, left, right, down, bottom
copy_up = deepcopy(up)
copy_middle = deepcopy(middle)
copy_left = deepcopy(left)
copy_right = deepcopy(right)
copy_down = deepcopy(down)
copy_bottom = deepcopy(bottom)
if board[a][b] == 0:
up = copy_middle
middle = copy_down
left = copy_left
right = copy_right
down = copy_bottom
bottom = copy_up
board[a][b] = bottom
else:
up = copy_middle
middle = copy_down
left = copy_left
right = copy_right
down = copy_bottom
bottom = board[a][b]
board[a][b] = 0
print(middle)
def DOWN(a, b):
global board, up, middle, left, right, down, bottom
copy_up = deepcopy(up)
copy_middle = deepcopy(middle)
copy_left = deepcopy(left)
copy_right = deepcopy(right)
copy_down = deepcopy(down)
copy_bottom = deepcopy(bottom)
if board[a][b] == 0:
up = copy_bottom
middle = copy_up
left = copy_left
right = copy_right
down = copy_middle
bottom = copy_down
board[a][b] = bottom
else:
up = copy_bottom
middle = copy_up
left = copy_left
right = copy_right
down = copy_middle
bottom = board[a][b]
board[a][b] = 0
print(middle)
for i in move:
if i == 1:
dx, dy = x, y + 1
if 0 <= dx < N and 0 <= dy < M:
RIGHT(dx, dy)
x, y = dx, dy
elif i == 2:
dx, dy = x, y - 1
if 0 <= dx < N and 0 <= dy < M:
LEFT(dx, dy)
x, y = dx, dy
elif i == 3:
dx, dy = x - 1, y
if 0 <= dx < N and 0 <= dy < M:
UP(dx, dy)
x, y = dx, dy
elif i == 4:
dx, dy = x + 1, y
if 0 <= dx < N and 0 <= dy < M:
DOWN(dx, dy)
x, y = dx, dy
#
# print('===========debugging=====================')
# for i in range(N):
# for j in range(M):
# print(board[i][j], end=' ')
# print()
# print('=====================================')
#
| [
"[email protected]"
]
| |
0d5757a1a9ed5bcbb7dbb9f0d6480b75d12b5efe | 4d1f1e188a4db8e909430b55bddf0d8113a28fcf | /reinforcement_learning/0x00-q_learning/2-epsilon_greedy.py | 5b5895e3aafe8d93a6fc7131ffb272cf3044f4a9 | []
| no_license | paurbano/holbertonschool-machine_learning | b0184a71733a1f51633ba7c7f4d3a82b8d50e94f | ff1af62484620b599cc3813068770db03b37036d | refs/heads/master | 2023-07-02T16:20:13.668083 | 2023-06-18T06:25:26 | 2023-06-18T06:25:26 | 279,967,511 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 921 | py | #!/usr/bin/env python3
'''Epsilon Greedy
https://github.com/simoninithomas/Deep_reinforcement_learning_Course/blob/
master/Q%20learning/FrozenLake/Q%20Learning%20with%20FrozenLake.ipynb
'''
import numpy as np
def epsilon_greedy(Q, state, epsilon):
'''uses epsilon-greedy to determine the next action:
Args:
Q is a numpy.ndarray containing the q-table
state is the current state
epsilon is the epsilon to use for the calculation
Returns: the next action index
'''
# First we randomize a number
p = np.random.uniform(0, 1)
    # If this number is greater than epsilon -->
# exploitation (taking the biggest Q value for this state)
if p > epsilon:
action = np.argmax(Q[state, :])
# Else doing a random choice --> exploration
else:
# action = env.action_space.sample()
action = np.random.randint(0, int(Q.shape[1]))
return action
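# Example usage (illustrative shapes only, e.g. FrozenLake's 16 states and 4
# actions):
#     Q = np.zeros((16, 4))
#     action = epsilon_greedy(Q, state=0, epsilon=0.1)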
| [
"[email protected]"
]
| |
0eed1e43e88e22d5e74f9010387e7ad031989714 | 472baa2414822520f7cb8d491d4bf5608f765ad8 | /zqxt4396/tools/views.py | 3d5f7f76bda31af965d9c812557cadea6c386f1e | []
| no_license | Umi101108/django-projects | cdcf0c9bb8bd272e04a4b7a702f09adb16c28404 | 50edfdc3511e1de5b4a5a3e92fe9ddad932b5396 | refs/heads/master | 2021-01-12T08:20:48.113696 | 2017-06-11T14:45:20 | 2017-06-11T14:45:20 | 76,545,822 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 272 | py | from django.http import HttpResponse
from django.shortcuts import render
def index(request):
return render(request, 'index.html')
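# Example (assuming a urlconf routes /add/ to this view):
#     GET /add/?a=4&b=5  ->  responds with "9"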
def add(request):
a = request.GET['a']
b = request.GET['b']
a = int(a)
b = int(b)
return HttpResponse(str(a+b))
| [
"[email protected]"
]
| |
6434ee69271aa8ef76600a1a8e6d60014f9b18f6 | ba1a1e90406230eeb0a86ef22a3a94a7b227b7b8 | /taskmanager/tcp_protocol/message_templates.py | b8b520e92459b14aa099bebaebb9efa8afc3f62b | [
"MIT"
]
| permissive | spanickroon/Task-Management-Tools | 6e47ac05a1ff9ddf21a988cf6fc63670bf921d63 | ab8ddba79830fe46bf8e0280832f94ece97a3edb | refs/heads/master | 2022-06-10T18:51:15.235038 | 2020-05-05T23:47:18 | 2020-05-05T23:47:18 | 259,631,581 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 109 | py | START_APP = '!START!'
STOP_APP = '!STOP!'
SEND_MSG = '!SENDMSG!'
UPD_RPOCESS = '!UPD!'
CONNECT = '!CONNECT!'
| [
"[email protected]"
]
| |
a5680836916c2ce43cd2b4b36b019cde8f18cee4 | 1adf769cf9234f9b6c619f808d2723b99451d679 | /rusentrel/classic/mi/pcnn.py | 825d23c100525d15bf520d848194da8230315155 | [
"MIT"
]
| permissive | DAVMARROS/attitude-extraction-with-attention-and-ds | 4e85fa154ead0cd9499aaedf5d752ac565f37b92 | fb8e9d0d9488363738a88c4c447c7a8cb3e2ec1d | refs/heads/master | 2023-02-09T04:56:24.090380 | 2020-12-30T10:09:34 | 2020-12-30T10:09:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,447 | py | #!/usr/bin/python
import sys
sys.path.append('../../../')
from io_utils import RuSentRelBasedExperimentsIOUtils
from arekit.contrib.experiments.callback import CustomCallback
from arekit.contrib.networks.multi.configurations.max_pooling import MaxPoolingOverSentencesConfig
from arekit.contrib.networks.multi.architectures.max_pooling import MaxPoolingOverSentences
from arekit.common.evaluation.evaluators.two_class import TwoClassEvaluator
from arekit.contrib.networks.context.configurations.cnn import CNNConfig
from arekit.contrib.networks.context.architectures.pcnn import PiecewiseCNN
from arekit.contrib.experiments.multi.model import MultiInstanceTensorflowModel
from arekit.contrib.experiments.nn_io.rusentrel import RuSentRelBasedNeuralNetworkIO
from arekit.contrib.experiments.engine import run_testing
from rusentrel.mi_names import MaxPoolingModelNames
from rusentrel.classic.ctx.pcnn import ctx_pcnn_custom_config
from rusentrel.classic.common import \
classic_common_callback_modification_func, \
classic_mi_common_config_settings
def mi_pcnn_custom_config(config):
ctx_pcnn_custom_config(config.ContextConfig)
config.fix_context_parameters()
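# Builds a max-pooling multi-instance model around a PiecewiseCNN context
# network and hands it to the experiment engine together with the
# RuSentRel-based IO, a two-class evaluator and the shared callback/config
# tweaks.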
def run_testing_pcnn(name_prefix=u'',
cv_count=1,
model_names_classtype=MaxPoolingModelNames,
network_classtype=MaxPoolingOverSentences,
config_classtype=MaxPoolingOverSentencesConfig,
custom_config_func=mi_pcnn_custom_config,
custom_callback_func=classic_common_callback_modification_func):
run_testing(full_model_name=name_prefix + model_names_classtype().PCNN,
create_network=lambda: network_classtype(context_network=PiecewiseCNN()),
create_config=lambda: config_classtype(context_config=CNNConfig()),
create_nn_io=RuSentRelBasedNeuralNetworkIO,
cv_count=cv_count,
create_model=MultiInstanceTensorflowModel,
evaluator_class=TwoClassEvaluator,
create_callback=CustomCallback,
experiments_io=RuSentRelBasedExperimentsIOUtils(),
common_callback_modification_func=custom_callback_func,
custom_config_modification_func=custom_config_func,
common_config_modification_func=classic_mi_common_config_settings)
if __name__ == "__main__":
run_testing_pcnn()
| [
"[email protected]"
]
| |
ae9f47dcd6973ca4c8e603f1503be4d5ca8b26ce | a9063fd669162d4ce0e1d6cd2e35974274851547 | /test/test_role_members_add.py | ed565058c42a11f8a5eb9894159405db3ff757a7 | []
| no_license | rootalley/py-zoom-api | 9d29a8c750e110f7bd9b65ff7301af27e8518a3d | bfebf3aa7b714dcac78be7c0affb9050bbce8641 | refs/heads/master | 2022-11-07T14:09:59.134600 | 2020-06-20T18:13:50 | 2020-06-20T18:13:50 | 273,760,906 | 1 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,402 | py | # coding: utf-8
"""
Zoom API
The Zoom API allows developers to safely and securely access information from Zoom. You can use this API to build private services or public applications on the [Zoom App Marketplace](http://marketplace.zoom.us). To learn how to get your credentials and create private/public applications, read our [Authorization Guide](https://marketplace.zoom.us/docs/guides/authorization/credentials). All endpoints are available via `https` and are located at `api.zoom.us/v2/`. For instance you can list all users on an account via `https://api.zoom.us/v2/users/`. # noqa: E501
OpenAPI spec version: 2.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.role_members_add import RoleMembersAdd  # noqa: E501
from swagger_client.rest import ApiException
class TestRoleMembersAdd(unittest.TestCase):
"""RoleMembersAdd unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testRoleMembersAdd(self):
"""Test RoleMembersAdd"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.role_members_add.RoleMembersAdd() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
]
| |
cad08c6af20f321507af6bc050e428731b67a33f | 7dc240e587213e4b420676c60aa1b24905b1b2e4 | /src/app/tests/mailchimp/conftest.py | d5af1f4a3624389007aae35e1b133692b303f6ce | [
"MIT"
]
| permissive | denokenya/education-backend | 834d22280717f15f93407108846e2eea767421c8 | 3b43ba0cc54c6a2fc2f1716170393f943323a29b | refs/heads/master | 2023-08-27T09:07:48.257108 | 2021-11-03T00:19:04 | 2021-11-03T00:19:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 821 | py | import pytest
import requests_mock
from app.integrations.mailchimp import AppMailchimp, MailchimpMember
pytestmark = [pytest.mark.django_db]
@pytest.fixture(autouse=True)
def _set_mailchimp_credentials(settings):
settings.MAILCHIMP_API_KEY = 'key-us05'
settings.MAILCHIMP_CONTACT_LIST_ID = '123cba'
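# The mailchimp fixture attaches a requests_mock.Mocker to the client so tests
# can stub Mailchimp HTTP traffic instead of hitting the network.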
@pytest.fixture
def mailchimp():
client = AppMailchimp()
with requests_mock.Mocker() as http_mock:
client.http_mock = http_mock
yield client
@pytest.fixture
def mailchimp_member(user):
return MailchimpMember.from_django_user(user)
@pytest.fixture
def post(mocker):
return mocker.patch('app.integrations.mailchimp.http.MailchimpHTTP.post')
@pytest.fixture
def user(mixer):
return mixer.blend('users.User', email='[email protected]', first_name='Rulon', last_name='Oboev')
| [
"[email protected]"
]
| |
09a5dcf778c742d075bd8decf005f393a6b3b6e6 | e6d1bbac91b97ee7a9d028c3aafa5d85a0ee593c | /Python04Month/chapter/chapter3/demo/code/3-1_abnormal_check.py | bd08daf230d7e50525b8458610580eb8e1138662 | []
| no_license | LiuJingGitLJ/PythonSuanFa_2 | 82159043523d6fe69beef7f86421cd4be2242919 | 0afba93c4c29231bc6c2aaf6e4663beee2b5cbbb | refs/heads/master | 2021-09-20T13:49:08.521080 | 2018-08-10T06:13:22 | 2018-08-10T06:13:22 | 124,337,675 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,135 | py | #-*- coding: utf-8 -*-
import pandas as pd
catering_sale = '../data/catering_sale.xls' # restaurant sales data
data = pd.read_excel(catering_sale, index_col = u'日期') # read the data, indexing on the u'日期' (date) column
print(data)
import matplotlib.pyplot as plt # import the plotting library
plt.rcParams['font.sans-serif'] = ['SimHei'] # needed so Chinese labels render correctly
plt.rcParams['axes.unicode_minus'] = False # needed so minus signs render correctly
plt.figure() # create the figure
p = data.boxplot(return_type='dict') # draw a box plot directly via the DataFrame method
x = p['fliers'][0].get_xdata() # 'fliers' holds the outlier markers
y = p['fliers'][0].get_ydata()
y.sort() # sort ascending; this sorts the array in place
# use annotate() to label the outliers
# some of the points are close together, so their labels would overlap and be
# hard to read; the offsets below were tuned by hand and should be re-tuned
# for other data sets
for i in range(len(x)):
    if i>0:
        plt.annotate(y[i], xy = (x[i],y[i]), xytext=(x[i]+0.05 -0.8/(y[i]-y[i-1]),y[i]))
    else:
        plt.annotate(y[i], xy = (x[i],y[i]), xytext=(x[i]+0.08,y[i]))
plt.show() # show the box plot
| [
"[email protected]"
]
| |
5b3165a574457eeb1f369cd70b0259bd520aec67 | 8e2404c7bcfd28329bed789839192b2c4e85ea1b | /LeetCode/Linked_List_Cycle_II.py | ca97be57324afaacc01727943d36debb9971ccae | []
| no_license | Pabitra-26/Problem-Solved | 408bd51bbffc69f8c5e1def92797c2e6f027f91d | c27de1dd6c4ad14444fa5ee911a16186c200a7f9 | refs/heads/master | 2023-07-30T16:51:28.062349 | 2021-09-27T06:06:54 | 2021-09-27T06:06:54 | 269,935,039 | 2 | 0 | null | 2021-09-27T06:06:55 | 2020-06-06T09:39:33 | Python | UTF-8 | Python | false | false | 886 | py | # Problem name: Linked List Cycle II
# Description: Given a linked list, return the node where the cycle begins. If there is no cycle, return null.
# To represent a cycle in the given linked list, we use an integer pos which represents the position (0-indexed) in the linked list where tail connects to.
# If pos is -1, then there is no cycle in the linked list.
# Definition for singly-linked list.
# class ListNode:
# def __init__(self, x):
# self.val = x
# self.next = None
class Solution:
def detectCycle(self, head: ListNode) -> ListNode:
        # Hash-table approach: remember every visited node; the first node
        # seen a second time is where the cycle begins.
        table = {}
        curr = head
        m = 0
        while curr is not None:
            if curr in table:
                m = 1
                return curr
            table[curr] = 1
            curr = curr.next
        if m == 0:
return None | [
"[email protected]"
]
| |
2809b47d249d56790cb08fb8a0c7d5f1fbdd146e | d53baf0a3aaa10521cfc28a7be8f2c498bc9e741 | /examples/CaffeModels/load-vgg16.py | 96780e85eac94a3b1709a479d22cf2e3faa232fd | [
"Apache-2.0"
]
| permissive | qianlinjun/tensorpack | 8f6e99ba17095334de1163d6412e740642343752 | 7f505225cd41aaeee3a0b0688fe67afc0af8fb30 | refs/heads/master | 2020-03-29T22:38:22.269889 | 2018-09-25T07:20:48 | 2018-09-25T07:20:48 | 150,432,021 | 1 | 0 | Apache-2.0 | 2018-09-26T13:35:19 | 2018-09-26T13:35:18 | null | UTF-8 | Python | false | false | 3,493 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: load-vgg16.py
from __future__ import print_function
import cv2
import tensorflow as tf
import numpy as np
import os
import six
import argparse
from tensorpack import *
from tensorpack.dataflow.dataset import ILSVRCMeta
enable_argscope_for_module(tf.layers)
def tower_func(image):
is_training = get_current_tower_context().is_training
with argscope([tf.layers.conv2d], kernel_size=3, activation=tf.nn.relu, padding='same'):
x = image
x = tf.layers.conv2d(x, 64, name='conv1_1')
x = tf.layers.conv2d(x, 64, name='conv1_2')
x = tf.layers.max_pooling2d(x, 2, 2, name='pool1')
x = tf.layers.conv2d(x, 128, name='conv2_1')
x = tf.layers.conv2d(x, 128, name='conv2_2')
x = tf.layers.max_pooling2d(x, 2, 2, name='pool2')
x = tf.layers.conv2d(x, 256, name='conv3_1')
x = tf.layers.conv2d(x, 256, name='conv3_2')
x = tf.layers.conv2d(x, 256, name='conv3_3')
x = tf.layers.max_pooling2d(x, 2, 2, name='pool3')
x = tf.layers.conv2d(x, 512, name='conv4_1')
x = tf.layers.conv2d(x, 512, name='conv4_2')
x = tf.layers.conv2d(x, 512, name='conv4_3')
x = tf.layers.max_pooling2d(x, 2, 2, name='pool4')
x = tf.layers.conv2d(x, 512, name='conv5_1')
x = tf.layers.conv2d(x, 512, name='conv5_2')
x = tf.layers.conv2d(x, 512, name='conv5_3')
x = tf.layers.max_pooling2d(x, 2, 2, name='pool5')
x = tf.layers.flatten(x, name='flatten')
x = tf.layers.dense(x, 4096, activation=tf.nn.relu, name='fc6')
x = tf.layers.dropout(x, rate=0.5, name='drop0', training=is_training)
x = tf.layers.dense(x, 4096, activation=tf.nn.relu, name='fc7')
x = tf.layers.dropout(x, rate=0.5, name='drop1', training=is_training)
logits = tf.layers.dense(x, 1000, activation=tf.identity, name='fc8')
tf.nn.softmax(logits, name='prob')
def run_test(path, input):
param_dict = dict(np.load(path))
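    # The converted Caffe weights use '/W' and '/b' suffixes, while tf.layers
    # names its variables '/kernel' and '/bias', so remap the keys first.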
param_dict = {k.replace('/W', '/kernel').replace('/b', '/bias'): v for k, v in six.iteritems(param_dict)}
predict_func = OfflinePredictor(PredictConfig(
inputs_desc=[InputDesc(tf.float32, (None, 224, 224, 3), 'input')],
tower_func=tower_func,
session_init=DictRestore(param_dict),
input_names=['input'],
output_names=['prob'] # prob:0 is the probability distribution
))
im = cv2.imread(input)
assert im is not None, input
im = cv2.cvtColor(im, cv2.COLOR_BGR2RGB)
im = cv2.resize(im, (224, 224)).reshape((1, 224, 224, 3)).astype('float32')
    # VGG16 requires channelwise mean subtraction (BGR means, reversed to match the RGB image)
VGG_MEAN = [103.939, 116.779, 123.68]
im -= VGG_MEAN[::-1]
outputs = predict_func(im)[0]
prob = outputs[0]
ret = prob.argsort()[-10:][::-1]
print("Top10 predictions:", ret)
meta = ILSVRCMeta().get_synset_words_1000()
print("Top10 class names:", [meta[k] for k in ret])
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--gpu', help='comma separated list of GPU(s) to use.')
parser.add_argument('--load', required=True,
help='.npz model file generated by tensorpack.utils.loadcaffe')
parser.add_argument('--input', help='an input image', required=True)
args = parser.parse_args()
if args.gpu:
os.environ['CUDA_VISIBLE_DEVICES'] = args.gpu
run_test(args.load, args.input)
| [
"[email protected]"
]
| |
b6a4a9e47571cdc8e1f355c4ff97f2f25ce41edb | ee7e42417d9d1e76b0e84e44dc6eb037adc3ebad | /.history/pet/api_20190703151654.py | 3b027aed09213348242bbcfd996055000b31003a | []
| no_license | web3-qa/pets-api | 4632127ee84a299f207d95754f409fc1e4c0013d | ee4a04e7291740ac8eb6147c305b41d27d5be29c | refs/heads/master | 2023-05-12T09:09:47.509063 | 2019-07-18T15:07:13 | 2019-07-18T15:07:13 | 197,611,701 | 0 | 0 | null | 2023-05-01T19:42:17 | 2019-07-18T15:19:59 | Python | UTF-8 | Python | false | false | 7 | py | from fl | [
"[email protected]"
]
| |
50ddae41737c1856fdea70885af523908cdebab0 | d83fa072a084642ebaa40317dda61f7a2f660284 | /cleancoderscom/gateways/codecast_gateway.py | 89edbc42cf18f413af36449ce9f5bf8e0749df70 | []
| no_license | xstrengthofonex/CleanCodeCaseStudy | 479ca1f0c028f3f481635b23bf44363fd50dec18 | 312aeef9f2127033f2b9e0b4a2c41baf4e6cc01e | refs/heads/master | 2021-01-02T22:55:50.471384 | 2017-08-06T14:36:17 | 2017-08-06T14:36:17 | 99,425,289 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 352 | py | from abc import ABCMeta, abstractmethod
from typing import List, Optional
from cleancoderscom.entities.codecast import Codecast
class CodecastGateway(metaclass=ABCMeta):
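    # Abstract gateway: concrete implementations (e.g. an in-memory or
    # database-backed gateway) supply these codecast queries.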
@abstractmethod
def find_all_codecasts_ordered_by_date(self) -> List[Codecast]:
pass
@abstractmethod
def find_codecast_by_title(self, title) -> Optional[Codecast]:
pass
| [
"[email protected]"
]
| |
48035def9dc27ef8655ec0557839d1a7558ed009 | 08bfc8a1f8e44adc624d1f1c6250a3d9635f99de | /SDKs/Qt/5.12.3_python_37/msvc2017_64/PySide/PySide2/scripts/uic.py | 1471f24152ba72980656c2caa300f5e965452b38 | []
| no_license | Personwithhat/CE_SDKs | cd998a2181fcbc9e3de8c58c7cc7b2156ca21d02 | 7afbd2f7767c9c5e95912a1af42b37c24d57f0d4 | refs/heads/master | 2020-04-09T22:14:56.917176 | 2019-07-04T00:19:11 | 2019-07-04T00:19:11 | 160,623,495 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 129 | py | version https://git-lfs.github.com/spec/v1
oid sha256:7342dc46431b086d9ffeed1ae7e528d3b0e53a3dc1ccd79003825db7ec8dad8e
size 2880
| [
"[email protected]"
]
| |
fca8833ff2ffcf10a7e5395e8b705cd0a33fad29 | cd4bbecc3f713b0c25508d0c5674d9e103db5df4 | /toontown/building/DistributedAnimDoor.py | 37bb7065eba4aa04a774aaff39c4ee732815e3bb | []
| no_license | peppythegod/ToontownOnline | dce0351cfa1ad8c476e035aa3947fdf53de916a6 | 2e5a106f3027714d301f284721382cb956cd87a0 | refs/heads/master | 2020-04-20T05:05:22.934339 | 2020-01-02T18:05:28 | 2020-01-02T18:05:28 | 168,646,608 | 11 | 2 | null | null | null | null | UTF-8 | Python | false | false | 7,294 | py | from pandac.PandaModules import NodePath, VBase3
from direct.directnotify import DirectNotifyGlobal
from direct.interval.IntervalGlobal import Parallel, Sequence, Wait, HprInterval, LerpHprInterval, SoundInterval
from toontown.building import DistributedDoor
from toontown.building import DoorTypes
if __debug__:
import pdb
class DistributedAnimDoor(DistributedDoor.DistributedDoor):
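    # Door attached to an animated building: instead of moving static door
    # geometry, it rotates the building Actor's def_left_door/def_right_door
    # joints with lerp intervals.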
def __init__(self, cr):
DistributedDoor.DistributedDoor.__init__(self, cr)
base.animDoor = self
def getBuilding(self):
if 'building' not in self.__dict__:
if self.doorType == DoorTypes.EXT_ANIM_STANDARD:
searchStr = '**/??' + \
str(self.block) + ':animated_building_*_DNARoot;+s'
self.notify.debug('searchStr=%s' % searchStr)
self.building = self.cr.playGame.hood.loader.geom.find(
searchStr)
else:
self.notify.error(
'DistributedAnimDoor.getBuiding with doorType=%s' %
self.doorType)
return self.building
def getDoorNodePath(self):
if self.doorType == DoorTypes.EXT_ANIM_STANDARD:
if hasattr(self, 'tempDoorNodePath'):
return self.tempDoorNodePath
else:
building = self.getBuilding()
doorNP = building.find('**/door_origin')
self.notify.debug('creating doorOrigin at %s %s' % (str(
doorNP.getPos()), str(doorNP.getHpr())))
otherNP = NodePath('doorOrigin')
otherNP.setPos(doorNP.getPos())
otherNP.setHpr(doorNP.getHpr())
otherNP.reparentTo(doorNP.getParent())
self.tempDoorNodePath = otherNP
else:
self.notify.error(
'DistributedAnimDoor.getDoorNodePath with doorType=%s' %
self.doorType)
return otherNP
def setTriggerName(self):
if self.doorType == DoorTypes.EXT_ANIM_STANDARD:
building = self.getBuilding()
if not building.isEmpty():
doorTrigger = building.find('**/door_0_door_trigger')
if not doorTrigger.isEmpty():
doorTrigger.node().setName(self.getTriggerName())
else:
self.notify.warning('setTriggerName failed no building')
else:
self.notify.error('setTriggerName doorTYpe=%s' % self.doorType)
def getAnimBuilding(self):
if 'animBuilding' not in self.__dict__:
if self.doorType == DoorTypes.EXT_ANIM_STANDARD:
bldg = self.getBuilding()
key = bldg.getParent().getParent()
animPropList = self.cr.playGame.hood.loader.animPropDict.get(
key)
if animPropList:
for prop in animPropList:
if bldg == prop.getActor().getParent():
self.animBuilding = prop
break
                else:
                    self.notify.error('could not find ' + str(key))
else:
self.notify.error('No such door type as ' + str(self.doorType))
return self.animBuilding
def getBuildingActor(self):
result = self.getAnimBuilding().getActor()
return result
def enterOpening(self, ts):
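        # Swing the right door open by rotating its actor joint; the hinge
        # side (rightSwing) decides the rotation direction.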
bldgActor = self.getBuildingActor()
rightDoor = bldgActor.controlJoint(None, 'modelRoot', 'def_right_door')
if rightDoor.isEmpty():
self.notify.warning('enterOpening(): did not find rightDoor')
return None
otherNP = self.getDoorNodePath()
trackName = 'doorOpen-%d' % self.doId
if self.rightSwing:
h = 100
else:
h = -100
self.finishDoorTrack()
self.doorTrack = Parallel(
SoundInterval(self.openSfx, node=rightDoor),
Sequence(
HprInterval(rightDoor, VBase3(0, 0, 0)),
                Wait(0.4),
LerpHprInterval(
nodePath=rightDoor,
                    duration=0.6,
hpr=VBase3(h, 0, 0),
startHpr=VBase3(0, 0, 0),
blendType='easeInOut')),
name=trackName)
self.doorTrack.start(ts)
def enterClosing(self, ts):
bldgActor = self.getBuildingActor()
rightDoor = bldgActor.controlJoint(None, 'modelRoot', 'def_right_door')
if rightDoor.isEmpty():
self.notify.warning('enterClosing(): did not find rightDoor')
return None
otherNP = self.getDoorNodePath()
trackName = 'doorClose-%d' % self.doId
if self.rightSwing:
h = 100
else:
h = -100
self.finishDoorTrack()
self.doorTrack = Sequence(
LerpHprInterval(
nodePath=rightDoor,
duration=1.0,
hpr=VBase3(0, 0, 0),
startHpr=VBase3(h, 0, 0),
blendType='easeInOut'),
SoundInterval(self.closeSfx, node=rightDoor),
name=trackName)
self.doorTrack.start(ts)
if hasattr(self, 'done'):
request = self.getRequestStatus()
messenger.send('doorDoneEvent', [request])
def exitDoorEnterOpening(self, ts):
bldgActor = self.getBuildingActor()
leftDoor = bldgActor.controlJoint(None, 'modelRoot', 'def_left_door')
if self.leftSwing:
h = -100
else:
h = 100
if not leftDoor.isEmpty():
otherNP = self.getDoorNodePath()
trackName = 'doorDoorExitTrack-%d' % self.doId
self.finishDoorExitTrack()
self.doorExitTrack = Parallel(
SoundInterval(self.openSfx, node=leftDoor),
Sequence(
LerpHprInterval(
nodePath=leftDoor,
                    duration=0.6,
hpr=VBase3(h, 0, 0),
startHpr=VBase3(0, 0, 0),
blendType='easeInOut')),
name=trackName)
self.doorExitTrack.start(ts)
else:
self.notify.warning(
'exitDoorEnterOpening(): did not find leftDoor')
def exitDoorEnterClosing(self, ts):
bldgActor = self.getBuildingActor()
leftDoor = bldgActor.controlJoint(None, 'modelRoot', 'def_left_door')
if self.leftSwing:
h = -100
else:
h = 100
if not leftDoor.isEmpty():
otherNP = self.getDoorNodePath()
trackName = 'doorExitTrack-%d' % self.doId
self.finishDoorExitTrack()
self.doorExitTrack = Sequence(
LerpHprInterval(
nodePath=leftDoor,
duration=1.0,
hpr=VBase3(0, 0, 0),
startHpr=VBase3(h, 0, 0),
blendType='easeInOut'),
SoundInterval(self.closeSfx, node=leftDoor),
name=trackName)
self.doorExitTrack.start(ts)
| [
"[email protected]"
]
| |
aa718ed8354abdea50f56b54e171775a136dd57a | dd116fe1e94191749ab7a9b00be25bfd88641d82 | /cairis/cairis/SearchDialog.py | c128364ca182e31bbb94073ecd249cd1315fc760 | [
"Apache-2.0"
]
| permissive | RobinQuetin/CAIRIS-web | fbad99327707ea3b995bdfb4841a83695989e011 | 4a6822db654fecb05a09689c8ba59a4b1255c0fc | HEAD | 2018-12-28T10:53:00.595152 | 2015-06-20T16:53:39 | 2015-06-20T16:53:39 | 33,935,403 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 2,369 | py | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import wx
import armid
import ARM
from SearchPanel import SearchPanel
from Borg import Borg
class SearchDialog(wx.Dialog):
def __init__(self,parent):
wx.Dialog.__init__(self,parent,armid.SEARCHMODEL_ID,'Search model',style=wx.DEFAULT_DIALOG_STYLE|wx.MAXIMIZE_BOX|wx.THICK_FRAME|wx.RESIZE_BORDER,size=(700,500))
b = Borg()
self.dbProxy = b.dbProxy
mainSizer = wx.BoxSizer(wx.VERTICAL)
self.panel = SearchPanel(self)
mainSizer.Add(self.panel,1,wx.EXPAND)
self.SetSizer(mainSizer)
wx.EVT_BUTTON(self,armid.SEARCHMODEL_BUTTONFIND_ID,self.onFind)
def onFind(self,evt):
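    # Validate the search string, query the model through the database proxy,
    # and fill the three columns of each search result into the list control.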
ssCtrl = self.FindWindowById(armid.SEARCHMODEL_TEXTSEARCHSTRING_ID)
ssValue = ssCtrl.GetValue()
if (len(ssValue) == 0) or (ssValue == ' '):
dlg = wx.MessageDialog(self,'Search string empty','Search model',wx.OK)
dlg.ShowModal()
dlg.Destroy()
return
listCtrl = self.FindWindowById(armid.SEARCHMODEL_LISTRESULTS_ID)
listCtrl.DeleteAllItems()
searchOptionsCtrl = self.FindWindowById(armid.SEARCHOPTIONSPANEL_ID)
searchOptions = searchOptionsCtrl.optionFlags()
try:
searchResults = self.dbProxy.searchModel(ssValue,searchOptions)
for idx,result in enumerate(searchResults):
listCtrl.InsertStringItem(idx,result[0])
listCtrl.SetStringItem(idx,1,result[1])
listCtrl.SetStringItem(idx,2,result[2])
except ARM.ARMException,errorText:
dlg = wx.MessageDialog(self,str(errorText),'Search model',wx.OK | wx.ICON_ERROR)
dlg.ShowModal()
dlg.Destroy()
return
| [
"[email protected]"
]
| |
aa0d2e6554684c54501f6f150d32cf14d1cc827e | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/40/usersdata/136/21959/submittedfiles/funcoes.py | efca9f8ab430ae8fca7e83512158b118f168e4d3 | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,338 | py | #FILE WITH YOUR FUNCTIONS
from __future__ import division
def calcula_valor_absoluto(x):
if x < 0:
x = x*(-1)
return x
def calcula_pi(m):
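    # Approximates pi with the Nilakantha series: 3 + 4/(2*3*4) - 4/(4*5*6) + 4/(6*7*8) - ...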
expr = 0
i = 1
x = 2
while i<=m:
        if 1<=m<=2000: #for m greater than or equal to 1 and less than or equal to 2000
            if i%2==0: #if i is even
                expr = expr - (4/(x*(x+1)*(x+2)))
            else: #otherwise
                expr = expr + (4/(x*(x+1)*(x+2)))
            x = x +2
            i = i +1
    calcula_pi = 3 + expr #pi equals 3 plus the final expression
    return calcula_pi #the function returns the value of pi
def fatorial(n):
fatorial = 1
    for i in range(2, n + 1): # start at 2: the original range(0, n, 1) multiplied by zero, so fatorial always returned 0
        fatorial = fatorial * i
return fatorial
def calcula_co_seno(z, epsilon):
soma = 0
i = 1
expoente = 2
    fracao = (z**expoente)/fatorial(expoente) # note that the fatorial function is called here with the exponent as its argument
while fracao>epsilon:
fracao = (z**expoente)/fatorial(expoente)
if i%2==1:
soma = soma - fracao
else:
soma = soma + fracao
expoente = expoente + 2
i = i + 1
calcula_co_seno = soma + 1
return calcula_co_seno
def calcula_razao_aurea(m, epsilon):
fi = 2 * calcula_co_seno(calcula_pi(m)/5, epsilon)
return fi
| [
"[email protected]"
]
| |
1def8bfa91528ad23d33f5f84710747a8dc3cf57 | c0f86b926fc82baa633862896096c149dd9913cf | /Python/Numpy/Mean-Var-and-Std/Python2/solution.py | 74b8d96a55af697e4421abd696b485c3a4ebf3f7 | []
| no_license | qxzsilver1/HackerRank | 8df74dd0cd4a9dedd778cdecea395f4234eda767 | bcb1b74711a625d8ad329a3f9fdd9f49b1bebc54 | refs/heads/master | 2021-09-09T15:45:35.681284 | 2021-09-07T00:11:16 | 2021-09-07T00:11:16 | 75,671,896 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 195 | py | import numpy
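# Row-wise mean (axis=1), column-wise variance (axis=0), and the standard
# deviation over the flattened array (axis=None).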
n, m = map(int, raw_input().split())
a = numpy.array([raw_input().split() for _ in xrange(n)], int)
print numpy.mean(a, axis=1)
print numpy.var(a, axis=0)
print numpy.std(a, None)
| [
"[email protected]"
]
| |
98447ab158842379f6445b580543c5b19f094a29 | e6c65e2e354336a4bea5b6a4ccbccd3682915fe2 | /out-bin/py/google/fhir/models/run_locally.runfiles/com_google_fhir/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/keras/engine/training_arrays.py | 5de18f2e9cb1d7dd00b968bd7ddef3a828ccaf01 | [
"Apache-2.0"
]
| permissive | rasalt/fhir-datalab | c30ab773d84983dd04a37e9d0ddec8bf2824b8a4 | 3e329fc8b4226d3e3a4a7c23c306a86e7a9ea0de | refs/heads/master | 2021-10-09T05:51:04.593416 | 2018-12-21T18:11:03 | 2018-12-22T05:38:32 | 162,744,237 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 190 | py | /home/rkharwar/.cache/bazel/_bazel_rkharwar/0ddaa3627472ad9d1367a008236ce2f5/external/pypi__tensorflow_1_12_0/tensorflow-1.12.0.data/purelib/tensorflow/python/keras/engine/training_arrays.py | [
"[email protected]"
]
| |
15585c539acb0e4546ebbccb70364de39847516c | e57d7785276053332c633b57f6925c90ad660580 | /sdk/synapse/azure-mgmt-synapse/azure/mgmt/synapse/operations/_workspace_managed_sql_server_extended_blob_auditing_policies_operations.py | 516ad87a451dfc9680c799edc043f4ea896578f1 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
]
| permissive | adriananeci/azure-sdk-for-python | 0d560308497616a563b6afecbb494a88535da4c5 | b2bdfe659210998d6d479e73b133b6c51eb2c009 | refs/heads/main | 2023-08-18T11:12:21.271042 | 2021-09-10T18:48:44 | 2021-09-10T18:48:44 | 405,684,423 | 1 | 0 | MIT | 2021-09-12T15:51:51 | 2021-09-12T15:51:50 | null | UTF-8 | Python | false | false | 17,241 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class WorkspaceManagedSqlServerExtendedBlobAuditingPoliciesOperations(object):
"""WorkspaceManagedSqlServerExtendedBlobAuditingPoliciesOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.synapse.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def get(
self,
resource_group_name, # type: str
workspace_name, # type: str
blob_auditing_policy_name, # type: Union[str, "_models.BlobAuditingPolicyName"]
**kwargs # type: Any
):
# type: (...) -> "_models.ExtendedServerBlobAuditingPolicy"
"""Get server's extended blob auditing policy.
Get a workspace SQL server's extended blob auditing policy.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param blob_auditing_policy_name: The name of the blob auditing policy.
:type blob_auditing_policy_name: str or ~azure.mgmt.synapse.models.BlobAuditingPolicyName
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ExtendedServerBlobAuditingPolicy, or the result of cls(response)
:rtype: ~azure.mgmt.synapse.models.ExtendedServerBlobAuditingPolicy
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExtendedServerBlobAuditingPolicy"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-06-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'blobAuditingPolicyName': self._serialize.url("blob_auditing_policy_name", blob_auditing_policy_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ExtendedServerBlobAuditingPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/extendedAuditingSettings/{blobAuditingPolicyName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
workspace_name, # type: str
blob_auditing_policy_name, # type: Union[str, "_models.BlobAuditingPolicyName"]
parameters, # type: "_models.ExtendedServerBlobAuditingPolicy"
**kwargs # type: Any
):
# type: (...) -> Optional["_models.ExtendedServerBlobAuditingPolicy"]
cls = kwargs.pop('cls', None) # type: ClsType[Optional["_models.ExtendedServerBlobAuditingPolicy"]]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-06-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'blobAuditingPolicyName': self._serialize.url("blob_auditing_policy_name", blob_auditing_policy_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'ExtendedServerBlobAuditingPolicy')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = None
if response.status_code == 200:
deserialized = self._deserialize('ExtendedServerBlobAuditingPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/extendedAuditingSettings/{blobAuditingPolicyName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
workspace_name, # type: str
blob_auditing_policy_name, # type: Union[str, "_models.BlobAuditingPolicyName"]
parameters, # type: "_models.ExtendedServerBlobAuditingPolicy"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.ExtendedServerBlobAuditingPolicy"]
"""Create or Update server's extended blob auditing policy.
Create or Update a workspace managed sql server's extended blob auditing policy.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:param blob_auditing_policy_name: The name of the blob auditing policy.
:type blob_auditing_policy_name: str or ~azure.mgmt.synapse.models.BlobAuditingPolicyName
:param parameters: Properties of extended blob auditing policy.
:type parameters: ~azure.mgmt.synapse.models.ExtendedServerBlobAuditingPolicy
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either ExtendedServerBlobAuditingPolicy or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.synapse.models.ExtendedServerBlobAuditingPolicy]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExtendedServerBlobAuditingPolicy"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
workspace_name=workspace_name,
blob_auditing_policy_name=blob_auditing_policy_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('ExtendedServerBlobAuditingPolicy', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
'blobAuditingPolicyName': self._serialize.url("blob_auditing_policy_name", blob_auditing_policy_name, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/extendedAuditingSettings/{blobAuditingPolicyName}'} # type: ignore
def list_by_workspace(
self,
resource_group_name, # type: str
workspace_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.ExtendedServerBlobAuditingPolicyListResult"]
"""List server's extended blob auditing policies.
List workspace managed sql server's extended blob auditing policies.
:param resource_group_name: The name of the resource group. The name is case insensitive.
:type resource_group_name: str
:param workspace_name: The name of the workspace.
:type workspace_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ExtendedServerBlobAuditingPolicyListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.synapse.models.ExtendedServerBlobAuditingPolicyListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ExtendedServerBlobAuditingPolicyListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2021-06-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_by_workspace.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str', min_length=1),
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str', max_length=90, min_length=1),
'workspaceName': self._serialize.url("workspace_name", workspace_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('ExtendedServerBlobAuditingPolicyListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_by_workspace.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Synapse/workspaces/{workspaceName}/extendedAuditingSettings'} # type: ignore
| [
"[email protected]"
]
| |
cfbdf7c3da7f8b2699eaf24f527932d1c674b6d1 | 4e44c4bbe274b0a8ccca274f29c4140dfad16d5e | /Push2_MIDI_Scripts/decompiled 10.1.2b5 scripts/pushbase/touch_encoder_element.py | f9f76e3eeae43809b8f5db8daf6b10d1825bf8fa | []
| no_license | intergalacticfm/Push2_MIDI_Scripts | b48841e46b7a322f2673259d1b4131d2216f7db6 | a074e2337b2e5d2e5d2128777dd1424f35580ae1 | refs/heads/master | 2021-06-24T15:54:28.660376 | 2020-10-27T11:53:57 | 2020-10-27T11:53:57 | 137,673,221 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,161 | py | # uncompyle6 version 3.0.1
# Python bytecode 2.7 (62211)
# Decompiled from: Python 2.7.13 (default, Jan 19 2017, 14:48:08)
# [GCC 6.3.0 20170118]
# Embedded file name: c:\Jenkins\live\output\win_64_static\Release\python-bundle\MIDI Remote Scripts\pushbase\touch_encoder_element.py
# Compiled at: 2018-11-27 11:59:28
from __future__ import absolute_import, print_function, unicode_literals
from ableton.v2.control_surface.elements import TouchEncoderElement as TouchEncoderElementBase
class TouchEncoderObserver(object):
u""" Interface for observing the state of one or more TouchEncoderElements """
def on_encoder_touch(self, encoder):
pass
def on_encoder_parameter(self, encoder):
pass
class TouchEncoderElement(TouchEncoderElementBase):
u""" Class representing an encoder that is touch sensitive """
def __init__(self, undo_step_handler=None, delete_handler=None, *a, **k):
super(TouchEncoderElement, self).__init__(*a, **k)
self._trigger_undo_step = False
self._undo_step_open = False
self._undo_step_handler = undo_step_handler
self._delete_handler = delete_handler
self.set_observer(None)
return
def set_observer(self, observer):
if observer is None:
observer = TouchEncoderObserver()
self._observer = observer
return
def on_nested_control_element_value(self, value, control):
self._trigger_undo_step = value
if value:
param = self.mapped_parameter()
if self._delete_handler and self._delete_handler.is_deleting and param:
self._delete_handler.delete_clip_envelope(param)
else:
self.begin_gesture()
self._begin_undo_step()
self._observer.on_encoder_touch(self)
self.notify_touch_value(value)
else:
self._end_undo_step()
self._observer.on_encoder_touch(self)
self.notify_touch_value(value)
self.end_gesture()
def connect_to(self, parameter):
if parameter != self.mapped_parameter():
self.last_mapped_parameter = parameter
super(TouchEncoderElement, self).connect_to(parameter)
self._observer.on_encoder_parameter(self)
def release_parameter(self):
if self.mapped_parameter() != None:
super(TouchEncoderElement, self).release_parameter()
self._observer.on_encoder_parameter(self)
return
def receive_value(self, value):
self._begin_undo_step()
super(TouchEncoderElement, self).receive_value(value)
def disconnect(self):
super(TouchEncoderElement, self).disconnect()
self._undo_step_handler = None
return
def _begin_undo_step(self):
if self._undo_step_handler and self._trigger_undo_step:
self._undo_step_handler.begin_undo_step()
self._trigger_undo_step = False
self._undo_step_open = True
def _end_undo_step(self):
if self._undo_step_handler and self._undo_step_open:
self._undo_step_handler.end_undo_step() | [
"[email protected]"
]
| |
acd9a985926faad6a4fcbdf4d441313cd62cd668 | b0741867b842fe177205c2fd714cabd34652ced4 | /crawling/mmtaobao/sexpic.py | dd4edbee55c824bc1e1e6a92158773afc91f5084 | []
| no_license | zdYng/python | 6737ea43b041f57e0d23598cfa2e5e23d5bd11ff | fd074f5700ec9733958e8640eb63af83aac3001f | refs/heads/master | 2021-07-22T13:50:24.745405 | 2020-04-02T02:15:29 | 2020-04-02T02:15:29 | 93,690,795 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,062 | py | # -*- coding: utf-8 -*-
import requests
import urllib2,re
import os
from mmtaobao.cons import headers
from lxml import etree
from parsel import Selector
import datetime
html =requests.get("http://cl.j4q.pw/htm_data/2/1709/2664044.html")
html.encoding = 'utf-8'
# req = urllib2.Request('http://cl.j4q.pw/htm_data/2/1709/2664044.html')
# req.add_header('user-agent', headers())
# html = urllib2.urlopen(req).read()
print html.content
# select = Selector(html.text)
# content =select.xpath('//div//img/@src')
regt = r'<img src="(.*?)" onclick="(?#...)" style="cursor:pointer">'
# findall needs the decoded page text, not the requests Response object
hh = re.findall(regt, html.text)
print hh
# for imgurl in content:
#
# x=datetime.datetime.now()
#
# name = imgurl[-7:-1]
# os.chdir(r"D://pic")
# req = urllib2.Request(imgurl)
# req.add_header('User-agent', headers())
# #html = urllib2.urlopen(req).read().decode('gbk').encode('utf-8')
# response =urllib2.urlopen(req)
# f = open(name,'wb')
# f.write(response.read())
# f.close()
# y=datetime.datetime.now()
#
# print imgurl,(y-x).seconds
| [
"[email protected]"
]
| |
0e3aebd5a6b8e7490e4f7f478497e0a2e46b2f3d | 61f9553eedc2ec936ea87f06da5b986091e3b8ff | /workspace/buildout-cache/eggs/plone.app.upgrade-1.3.4-py2.7.egg/plone/app/upgrade/v40/tests.py | 5d20ec6119c77470822fbbc82a2aec777d5bd649 | []
| no_license | gruhter/gso | 47880b055455cc99d63eec72498048c857e7831b | c0eb949f8a06aab6b97329d51a6d046e2fc0a653 | refs/heads/master | 2016-09-01T18:28:05.589620 | 2015-05-14T19:38:18 | 2015-05-14T19:38:18 | 35,579,298 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 26,186 | py | import time
from zope.component import getSiteManager, queryUtility
from zope.ramcache.interfaces.ram import IRAMCache
from Products.CMFCore.ActionInformation import Action
from Products.CMFCore.Expression import Expression
from Products.CMFCore.utils import getToolByName
from Products.MailHost.interfaces import IMailHost
from plone.app.upgrade.utils import loadMigrationProfile
from plone.app.upgrade.v40.alphas import _KNOWN_ACTION_ICONS
from plone.app.upgrade.v40.alphas import migrateActionIcons
from plone.app.upgrade.v40.alphas import migrateTypeIcons
from plone.app.upgrade.v40.alphas import addOrReplaceRamCache
from plone.app.upgrade.v40.alphas import changeWorkflowActorVariableExpression
from plone.app.upgrade.v40.alphas import changeAuthenticatedResourcesCondition
from plone.app.upgrade.v40.alphas import setupReferencebrowser
from plone.app.upgrade.v40.alphas import migrateMailHost
from plone.app.upgrade.v40.alphas import migrateFolders
from plone.app.upgrade.v40.alphas import renameJoinFormFields
from plone.app.upgrade.v40.alphas import updateLargeFolderType
from plone.app.upgrade.v40.alphas import addRecursiveGroupsPlugin
from plone.app.upgrade.v40.alphas import cleanUpClassicThemeResources
from plone.app.upgrade.v40.betas import repositionRecursiveGroupsPlugin
from plone.app.upgrade.v40.betas import updateIconMetadata
from plone.app.upgrade.v40.betas import removeLargePloneFolder
from plone.app.upgrade.tests.base import MigrationTest
class FakeSecureMailHost(object):
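    # Minimal stub with the attributes of a Secure Mail Host instance, used by
    # testReplaceSecureMailHost to exercise migrateMailHost without the real product.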
meta_type = 'Secure Mail Host'
id = 'MailHost'
title = 'Fake MailHost'
smtp_host = 'smtp.example.com'
smtp_port = 587
smtp_userid='me'
smtp_pass='secret'
smtp_notls=False
def manage_fixupOwnershipAfterAdd(self):
pass
class TestMigrations_v4_0alpha1(MigrationTest):
profile = "profile-plone.app.upgrade.v40:3-4alpha1"
def afterSetUp(self):
self.atool = getToolByName(self.portal, 'portal_actions')
self.aitool = getToolByName(self.portal, 'portal_actionicons')
self.cptool = getToolByName(self.portal, 'portal_controlpanel')
self.wftool = getToolByName(self.portal, 'portal_workflow')
self.csstool = getToolByName(self.portal, 'portal_css')
self.jstool = getToolByName(self.portal, 'portal_javascripts')
def testProfile(self):
# This tests the whole upgrade profile can be loaded
self.setRoles(['Manager'])
loadMigrationProfile(self.portal, self.profile)
self.failUnless(True)
def testMigrateActionIcons(self):
_KNOWN_ACTION_ICONS['object_buttons'].extend(['test_id', 'test2_id'])
self.aitool.addActionIcon(
category='object_buttons',
action_id='test_id',
icon_expr='test.gif',
title='Test my icon',
)
self.aitool.addActionIcon(
category='object_buttons',
action_id='test2_id',
icon_expr='python:context.getIcon()',
title='Test my second icon',
)
test_action = Action('test_id',
title='Test me',
description='',
url_expr='',
icon_expr='',
available_expr='',
permissions=('View', ),
visible = True)
test2_action = Action('test2_id',
title='Test me too',
description='',
url_expr='',
icon_expr='',
available_expr='',
permissions=('View', ),
visible = True)
object_buttons = self.atool.object_buttons
if getattr(object_buttons, 'test_id', None) is None:
object_buttons._setObject('test_id', test_action)
if getattr(object_buttons, 'test2_id', None) is None:
object_buttons._setObject('test2_id', test2_action)
self.assertEqual(object_buttons.test_id.icon_expr, '')
self.assertEqual(object_buttons.test2_id.icon_expr, '')
self.assertEqual(
self.aitool.getActionIcon('object_buttons', 'test_id'),
'test.gif')
# Test it twice
for i in range(2):
migrateActionIcons(self.portal)
icons = [ic.getActionId() for ic in self.aitool.listActionIcons()]
self.failIf('test_id' in icons)
self.failIf('test2_id' in icons)
self.assertEqual(object_buttons.test_id.icon_expr,
'string:$portal_url/test.gif')
self.assertEqual(object_buttons.test2_id.icon_expr,
'python:context.getIcon()')
def testMigrateControlPanelActionIcons(self):
_KNOWN_ACTION_ICONS['controlpanel'].extend(['test_id'])
self.aitool.addActionIcon(
category='controlpanel',
action_id='test_id',
icon_expr='test.gif',
title='Test my icon',
)
self.cptool.registerConfiglet(
id='test_id',
name='Test Configlet',
action='string:${portal_url}/test',
permission='Manage portal',
category='Plone',
visible=True,
appId='',
icon_expr='',
)
action = self.cptool.getActionObject('Plone/test_id')
self.assertEqual(action.getIconExpression(), '')
self.assertEqual(self.aitool.getActionIcon('controlpanel', 'test_id'),
'test.gif')
# Test it twice
for i in range(2):
migrateActionIcons(self.portal)
icons = [ic.getActionId() for ic in self.aitool.listActionIcons()]
self.failIf('test_id' in icons)
self.assertEqual(action.getIconExpression(),
'string:$portal_url/test.gif')
def testContentTypeIconExpressions(self):
"""
FTIs should now be using icon_expr instead of content_icon.
(The former caches the expression object.)
"""
tt = getToolByName(self.portal, "portal_types")
tt.Document.icon_expr = None
loadMigrationProfile(self.portal, self.profile, ('typeinfo', ))
self.assertEqual(tt.Document.icon_expr,
"string:${portal_url}/document_icon.png")
def testMigrateTypeIcons(self):
"""
FTIs having content_icon should be upgraded to icon_expr.
"""
tt = getToolByName(self.portal, "portal_types")
del tt.Document.icon_expr
tt.Document.content_icon = 'document_icon.gif'
migrateTypeIcons(self.portal)
self.assertEqual(tt.Document.icon_expr,
"string:${portal_url}/document_icon.gif")
self.assertTrue(hasattr(tt.Document, 'icon_expr_object'))
#Don't upgrade if there is already an icon_expr.
tt.Document.icon_expr = "string:${portal_url}/document_icon.png"
tt.Document.content_icon = 'document_icon.gif'
migrateTypeIcons(self.portal)
self.assertEqual(tt.Document.icon_expr,
"string:${portal_url}/document_icon.png")
def testPngContentIcons(self):
tt = getToolByName(self.portal, "portal_types")
tt.Document.icon_expr = "string:${portal_url}/document_icon.gif"
loadMigrationProfile(self.portal, self.profile, ('typeinfo', ))
self.assertEqual(tt.Document.icon_expr,
"string:${portal_url}/document_icon.png")
def testAddRAMCache(self):
# Test it twice
for i in range(2):
sm = getSiteManager()
sm.unregisterUtility(provided=IRAMCache)
util = queryUtility(IRAMCache)
self.assertEqual(util.maxAge, 86400)
addOrReplaceRamCache(self.portal)
util = queryUtility(IRAMCache)
self.assertEqual(util.maxAge, 3600)
def testReplaceOldRamCache(self):
sm = getSiteManager()
# Test it twice
for i in range(2):
sm.unregisterUtility(provided=IRAMCache)
from zope.app.cache.interfaces.ram import IRAMCache as OldIRAMCache
from zope.app.cache.ram import RAMCache as OldRAMCache
sm.registerUtility(factory=OldRAMCache, provided=OldIRAMCache)
addOrReplaceRamCache(self.portal)
util = queryUtility(IRAMCache)
self.assertEqual(util.maxAge, 3600)
def testChangeWorkflowActorVariableExpression(self):
self.wftool.intranet_folder_workflow.variables.actor.setProperties('')
for i in range(2):
changeWorkflowActorVariableExpression(self.portal)
wf = self.wftool.intranet_folder_workflow
self.assertEqual(wf.variables.actor.getDefaultExprText(),
'user/getId')
wf = self.wftool.one_state_workflow
self.assertEqual(wf.variables.actor.getDefaultExprText(),
'user/getId')
wf = self.wftool.simple_publication_workflow
self.assertEqual(wf.variables.actor.getDefaultExprText(),
'user/getId')
# make sure it doesn't break if the workflow is missing
wf = self.wftool.intranet_folder_workflow
self.wftool._delOb('intranet_folder_workflow')
changeWorkflowActorVariableExpression(self.portal)
self.wftool._setOb('intranet_folder_workflow', wf)
def testChangeAuthenticatedResourcesCondition(self):
# make sure CSS resource is updated
res = self.csstool.getResource('member.css')
res.setAuthenticated(False)
res.setExpression('not: portal/portal_membership/isAnonymousUser')
# test it twice
for i in range(2):
changeAuthenticatedResourcesCondition(self.portal)
self.assertEqual(res.getExpression(), '')
self.failUnless(res.getAuthenticated())
# make sure it doesn't update it if the expression has been
# customized
res.setExpression('python:False')
changeAuthenticatedResourcesCondition(self.portal)
self.assertEqual(res.getExpression(), 'python:False')
def testAddedUseEmailProperty(self):
tool = getToolByName(self.portal, 'portal_properties')
sheet = getattr(tool, 'site_properties')
#self.assertEqual(sheet.getProperty('use_email_as_login'), False)
self.removeSiteProperty('use_email_as_login')
loadMigrationProfile(self.portal, self.profile, ('propertiestool', ))
self.assertEqual(sheet.getProperty('use_email_as_login'), False)
def testReplaceReferencebrowser(self):
self.setRoles(['Manager'])
skins_tool = getToolByName(self.portal, 'portal_skins')
sels = skins_tool._getSelections()
for skinname, layer in sels.items():
layers = layer.split(',')
self.failIf('ATReferenceBrowserWidget' in layers)
layers.remove('referencebrowser')
new_layers = ','.join(layers)
sels[skinname] = new_layers
loadMigrationProfile(self.portal, self.profile)
setupReferencebrowser(self.portal)
sels = skins_tool._getSelections()
for skinname, layer in sels.items():
layers = layer.split(',')
self.failUnless('referencebrowser' in layers)
def testInstallNewDependencies(self):
self.setRoles(['Manager'])
# test for running the TinyMCE profile by checking for the skin layer
# it installs (the profile is marked as noninstallable, so we can't
# ask the quick installer)
skins_tool = getToolByName(self.portal, 'portal_skins')
del skins_tool['tinymce']
for i in range(2):
loadMigrationProfile(self.portal, self.profile)
self.failUnless('tinymce' in skins_tool)
# sleep to avoid a GS log filename collision :-o
time.sleep(1)
def testNewJSIsInstalled(self):
installedScriptIds = self.jstool.getResourceIds()
expected = [
# js resources that are part of plone.app.jquerytools
'++resource++plone.app.jquerytools.js',
'++resource++plone.app.jquerytools.overlayhelpers.js',
# js resource that is new in CMFPlone
'popupforms.js']
for e in expected:
self.failUnless(e in installedScriptIds, e)
def testReplaceSecureMailHost(self):
portal = self.portal
sm = getSiteManager(context=portal)
# try it with an unmodified site to ensure it doesn't give any errors
migrateMailHost(portal.portal_setup)
portal._delObject('MailHost')
# Run it with our MailHost replaced
portal._setObject('MailHost', FakeSecureMailHost())
self.assertEqual(portal.MailHost.meta_type, 'Secure Mail Host')
sm.unregisterUtility(provided=IMailHost)
sm.registerUtility(portal.MailHost, provided=IMailHost)
migrateMailHost(portal)
new_mh = portal.MailHost
self.failUnlessEqual(new_mh.meta_type, 'Mail Host')
self.failUnlessEqual(new_mh.title, 'Fake MailHost')
self.failUnlessEqual(new_mh.smtp_host, 'smtp.example.com')
self.failUnlessEqual(new_mh.smtp_port, 587)
self.failUnlessEqual(new_mh.smtp_uid, 'me')
self.failUnlessEqual(new_mh.smtp_pwd, 'secret')
#Force TLS is always false, because SMH has no equivalent option
self.failUnlessEqual(new_mh.force_tls, False)
def testFolderMigration(self):
from plone.app.folder.tests.content import create
from plone.app.folder.tests.test_migration import reverseMigrate
from plone.app.folder.tests.test_migration import isSaneBTreeFolder
# create a folder in an unmigrated state & check it's broken...
folder = create('Folder', self.portal, 'foo', title='Foo')
reverseMigrate(self.portal)
self.failIf(isSaneBTreeFolder(self.portal.foo))
# now run the migration step...
migrateFolders(self.portal)
folder = self.portal.foo
self.failUnless(isSaneBTreeFolder(folder))
self.assertEqual(folder.getId(), 'foo')
self.assertEqual(folder.Title(), 'Foo')
class TestMigrations_v4_0alpha2(MigrationTest):
def testMigrateJoinFormFields(self):
ptool = getToolByName(self.portal, 'portal_properties')
sheet = getattr(ptool, 'site_properties')
self.removeSiteProperty('user_registration_fields')
self.addSiteProperty('join_form_fields')
sheet.join_form_fields = (
'username', 'password', 'email', 'mail_me', 'groups')
renameJoinFormFields(self)
self.assertEqual(sheet.hasProperty('join_form_fields'), False)
self.assertEqual(sheet.hasProperty('user_registration_fields'), True)
self.assertEqual(sheet.getProperty('user_registration_fields'),
('username', 'password', 'email', 'mail_me'))
class TestMigrations_v4_0alpha3(MigrationTest):
profile = "profile-plone.app.upgrade.v40:4alpha2-4alpha3"
def testProfile(self):
# This tests the whole upgrade profile can be loaded
loadMigrationProfile(self.portal, self.profile)
self.failUnless(True)
def testJoinActionURL(self):
self.portal.portal_actions.user.join.url_expr = 'foo'
loadMigrationProfile(self.portal, self.profile, ('actions', ))
self.assertEqual(self.portal.portal_actions.user.join.url_expr,
'string:${globals_view/navigationRootUrl}/@@register')
class TestMigrations_v4_0alpha5(MigrationTest):
profile = "profile-plone.app.upgrade.v40:4alpha4-4alpha5"
def testProfile(self):
# This tests the whole upgrade profile can be loaded
loadMigrationProfile(self.portal, self.profile)
self.failUnless(True)
def testMigrateLargeFolderType(self):
portal = self.portal
catalog = getToolByName(portal, 'portal_catalog')
# set things up in the old way...
ids = 'news', 'events', 'Members'
for id in ids:
obj = portal[id]
obj._setPortalTypeName('Large Plone Folder')
obj.reindexObject()
self.assertEquals(obj.portal_type, 'Large Plone Folder')
# Type falls back to meta_type since there's no
# Large Plone Folder FTI
self.assertEquals(obj.Type(), 'ATFolder')
brain, = catalog(getId=id)
self.assertEquals(brain.portal_type, 'Large Plone Folder')
self.assertEquals(brain.Type, 'ATFolder')
# migrate & check again...
updateLargeFolderType(self.portal)
for id in ids:
obj = portal[id]
self.assertEquals(obj.portal_type, 'Folder')
self.assertEquals(obj.Type(), 'Folder')
brain, = catalog(getId=id)
self.assertEquals(brain.portal_type, 'Folder')
self.assertEquals(brain.Type, 'Folder')
def testAddRecursiveGroupsPlugin(self):
acl = getToolByName(self.portal, 'acl_users')
addRecursiveGroupsPlugin(self.portal)
self.failUnless('recursive_groups' in acl)
# Now that we have an existing one, let's make sure it's handled
# properly if this migration is run again.
addRecursiveGroupsPlugin(self.portal)
self.failUnless('recursive_groups' in acl)
def testClassicThemeResourcesCleanUp(self):
"""Test that the plonetheme.classic product doesn't have any
registered CSS resource in its metadata after migration.
"""
portal = self.portal
qi = getToolByName(portal, 'portal_quickinstaller')
qi.installProduct('plonetheme.classic')
classictheme = qi['plonetheme.classic']
classictheme.resources_css = ['something'] # add a random resource
cleanUpClassicThemeResources(portal)
self.failUnlessEqual(classictheme.resources_css, [])
def testGetObjPositionInParentIndex(self):
from plone.app.folder.nogopip import GopipIndex
catalog = self.portal.portal_catalog
catalog.delIndex('getObjPositionInParent')
catalog.addIndex('getObjPositionInParent', 'FieldIndex')
self.failIf(isinstance(catalog.Indexes['getObjPositionInParent'],
GopipIndex))
loadMigrationProfile(self.portal, self.profile)
self.failUnless('getObjPositionInParent' in catalog.indexes())
self.failUnless(isinstance(catalog.Indexes['getObjPositionInParent'],
GopipIndex))
def testGetEventTypeIndex(self):
catalog = self.portal.portal_catalog
catalog.addIndex('getEventType', 'KeywordIndex')
self.failUnless('getEventType' in catalog.indexes())
loadMigrationProfile(self.portal, self.profile)
self.failIf('getEventType' in catalog.indexes())
class TestMigrations_v4_0beta1(MigrationTest):
profile = "profile-plone.app.upgrade.v40:4alpha5-4beta1"
def testProfile(self):
# This tests the whole upgrade profile can be loaded
loadMigrationProfile(self.portal, self.profile)
self.failUnless(True)
def testRepositionRecursiveGroupsPlugin(self):
# Ensure that the recursive groups plugin is moved to the bottom
# of the IGroups plugins list, if active.
addRecursiveGroupsPlugin(self.portal)
# Plugin is installed, but not active, run against this state.
from Products.PluggableAuthService.interfaces.plugins import \
IGroupsPlugin
acl = getToolByName(self.portal, 'acl_users')
plugins = acl.plugins
# The plugin was originally moved to the top of the list of
# IGroupsPlugin plugins by p.a.controlpanel. Recreate that state.
while (plugins.getAllPlugins('IGroupsPlugin')['active'].index(
'recursive_groups') > 0):
plugins.movePluginsUp(IGroupsPlugin, ['recursive_groups'])
active_groups = plugins.getAllPlugins('IGroupsPlugin')['active']
self.assertEqual(active_groups[0], 'recursive_groups')
# Rerun the migration, making sure that it's now the last item in the
# list of IGroupsPlugin plugins.
repositionRecursiveGroupsPlugin(self.portal)
active_groups = plugins.getAllPlugins('IGroupsPlugin')['active']
self.assertEqual(active_groups[-1], 'recursive_groups')
class TestMigrations_v4_0beta2(MigrationTest):
profile = "profile-plone.app.upgrade.v40:4beta1-4beta2"
def testProfile(self):
# This tests the whole upgrade profile can be loaded
loadMigrationProfile(self.portal, self.profile)
self.failUnless(True)
def testCoreContentIconExprCleared(self):
types = getToolByName(self.portal, 'portal_types')
catalog = getToolByName(self.portal, 'portal_catalog')
# Reinstate the now-empty icon expression for the Document type
doc_icon_expr = Expression('string:${portal_url}/document_icon.png')
types['Document'].icon_expr_object = doc_icon_expr
front = self.portal['front-page']
catalog.reindexObject(front)
old_modified = front.modified()
# Make sure the getIcon metadata column shows the "original" value
brains = catalog(id='front-page')
self.assertEqual(brains[0].getIcon, 'document_icon.png')
# Run the migration
loadMigrationProfile(self.portal, self.profile)
updateIconMetadata(self.portal)
# The getIcon column should now be empty
self.assertEqual(catalog(id='front-page')[0].getIcon, '')
self.assertEquals(front.modified(), old_modified)
class TestMigrations_v4_0beta4(MigrationTest):
profile = 'profile-plone.app.upgrade.v40:4beta3-4beta4'
def testProfile(self):
# This tests the whole upgrade profile can be loaded
loadMigrationProfile(self.portal, self.profile)
self.failUnless(True)
def testRemoveLargePloneFolder(self):
# re-create pre-migration settings
ptool = self.portal.portal_properties
nav_props = ptool.navtree_properties
l = list(nav_props.parentMetaTypesNotToQuery)
nav_props.parentMetaTypesNotToQuery = l + ['Large Plone Folder']
site_props = ptool.site_properties
l = list(site_props.typesLinkToFolderContentsInFC)
site_props.typesLinkToFolderContentsInFC = l + ['Large Plone Folder']
temp_folder_fti = self.portal.portal_types['TempFolder']
l = list(temp_folder_fti.allowed_content_types)
temp_folder_fti.allowed_content_types = l + ['Large Plone Folder']
l = set(self.portal.portal_factory.getFactoryTypes())
l.add('Large Plone Folder')
ftool = self.portal.portal_factory
ftool.manage_setPortalFactoryTypes(listOfTypeIds=list(l))
for i in xrange(2):
loadMigrationProfile(self.portal, self.profile)
removeLargePloneFolder(self.portal)
self.failIf('Large Plone Folder' in self.portal.portal_types)
self.failIf('Large Plone Folder' in
temp_folder_fti.allowed_content_types)
self.failUnless('Folder' in temp_folder_fti.allowed_content_types)
self.failIf('Large Plone Folder' in ftool.getFactoryTypes())
self.failUnless('Folder' in ftool.getFactoryTypes())
self.failIf('Large Plone Folder' in
nav_props.parentMetaTypesNotToQuery)
self.failUnless('TempFolder' in
nav_props.parentMetaTypesNotToQuery)
self.failIf('Large Plone Folder' in
site_props.typesLinkToFolderContentsInFC)
self.failUnless('Folder' in
site_props.typesLinkToFolderContentsInFC)
# sleep to avoid a GS log filename collision :-o
time.sleep(1)
class TestMigrations_v4_0beta5(MigrationTest):
profile = 'profile-plone.app.upgrade.v40:4beta4-4beta5'
def testProfile(self):
# This tests the whole upgrade profile can be loaded
loadMigrationProfile(self.portal, self.profile)
self.failUnless(True)
class TestMigrations_v4_0rc1(MigrationTest):
profile = 'profile-plone.app.upgrade.v40:4beta5-4rc1'
def testProfile(self):
# This tests the whole upgrade profile can be loaded
loadMigrationProfile(self.portal, self.profile)
self.failUnless(True)
class TestMigrations_v4_0(MigrationTest):
profile = 'profile-plone.app.upgrade.v40:4rc1-4final'
def testProfile(self):
# This tests the whole upgrade profile can be loaded
loadMigrationProfile(self.portal, self.profile)
self.failUnless(True)
class TestMigrations_v4_0_1(MigrationTest):
profile = 'profile-plone.app.upgrade.v40:4.0-4.0.1'
def testProfile(self):
# This tests the whole upgrade profile can be loaded
loadMigrationProfile(self.portal, self.profile)
self.failUnless(True)
class TestMigrations_v4_0_2(MigrationTest):
profile = 'profile-plone.app.upgrade.v40:4.0.1-4.0.2'
def testProfile(self):
# This tests the whole upgrade profile can be loaded
loadMigrationProfile(self.portal, self.profile)
self.failUnless(True)
class TestMigrations_v4_0_3(MigrationTest):
profile = 'profile-plone.app.upgrade.v40:4.0.2-4.0.3'
def testProfile(self):
# This tests the whole upgrade profile can be loaded
loadMigrationProfile(self.portal, self.profile)
self.failUnless(True)
class TestMigrations_v4_0_4(MigrationTest):
profile = 'profile-plone.app.upgrade.v40:4.0.3-4.0.4'
def testProfile(self):
# This tests the whole upgrade profile can be loaded
loadMigrationProfile(self.portal, self.profile)
self.failUnless(True)
class TestMigrations_v4_0_5(MigrationTest):
profile = 'profile-plone.app.upgrade.v40:4.0.4-4.0.5'
def testProfile(self):
# This tests the whole upgrade profile can be loaded
loadMigrationProfile(self.portal, self.profile)
self.failUnless(True)
def test_suite():
from unittest import defaultTestLoader
return defaultTestLoader.loadTestsFromName(__name__)
# /weechat/python/wee_slack.py (repo: gicmo/dot-files)
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from collections import OrderedDict
from functools import wraps
from itertools import islice
import textwrap
import time
import json
import pickle
import sha
import os
import re
import urllib
import sys
import traceback
import collections
import ssl
import random
import string
try:
from cStringIO import StringIO
except:
from StringIO import StringIO
from websocket import create_connection, WebSocketConnectionClosedException
# hack to make tests possible.. better way?
try:
import weechat
except:
pass
SCRIPT_NAME = "slack"
SCRIPT_AUTHOR = "Ryan Huber <[email protected]>"
SCRIPT_VERSION = "2.0.0"
SCRIPT_LICENSE = "MIT"
SCRIPT_DESC = "Extends weechat for typing notification/search/etc on slack.com"
BACKLOG_SIZE = 200
SCROLLBACK_SIZE = 500
RECORD_DIR = "/tmp/weeslack-debug"
SLACK_API_TRANSLATOR = {
"channel": {
"history": "channels.history",
"join": "conversations.join",
"leave": "conversations.leave",
"mark": "channels.mark",
"info": "channels.info",
},
"im": {
"history": "im.history",
"join": "conversations.open",
"leave": "conversations.close",
"mark": "im.mark",
},
"mpim": {
"history": "mpim.history",
"join": "mpim.open", # conversations.open lacks unread_count_display
"leave": "conversations.close",
"mark": "mpim.mark",
"info": "groups.info",
},
"group": {
"history": "groups.history",
"join": "conversations.join",
"leave": "conversations.leave",
"mark": "groups.mark",
"info": "groups.info"
},
"thread": {
"history": None,
"join": None,
"leave": None,
"mark": None,
}
}
###### Decorators have to be up here
def slack_buffer_or_ignore(f):
"""
Only run this function if we're in a slack buffer, else ignore
"""
@wraps(f)
def wrapper(data, current_buffer, *args, **kwargs):
if current_buffer not in EVENTROUTER.weechat_controller.buffers:
return w.WEECHAT_RC_OK
return f(data, current_buffer, *args, **kwargs)
return wrapper
def slack_buffer_required(f):
"""
Only run this function if we're in a slack buffer, else print error
"""
@wraps(f)
def wrapper(data, current_buffer, *args, **kwargs):
if current_buffer not in EVENTROUTER.weechat_controller.buffers:
return w.WEECHAT_RC_ERROR
return f(data, current_buffer, *args, **kwargs)
return wrapper
def utf8_decode(f):
"""
Decode all arguments from byte strings to unicode strings. Use this for
functions called from outside of this script, e.g. callbacks from weechat.
"""
@wraps(f)
def wrapper(*args, **kwargs):
return f(*decode_from_utf8(args), **decode_from_utf8(kwargs))
return wrapper
NICK_GROUP_HERE = "0|Here"
NICK_GROUP_AWAY = "1|Away"
sslopt_ca_certs = {}
if hasattr(ssl, "get_default_verify_paths") and callable(ssl.get_default_verify_paths):
ssl_defaults = ssl.get_default_verify_paths()
if ssl_defaults.cafile is not None:
sslopt_ca_certs = {'ca_certs': ssl_defaults.cafile}
EMOJI = []
###### Unicode handling
def encode_to_utf8(data):
if isinstance(data, unicode):
return data.encode('utf-8')
if isinstance(data, bytes):
return data
elif isinstance(data, collections.Mapping):
return type(data)(map(encode_to_utf8, data.iteritems()))
elif isinstance(data, collections.Iterable):
return type(data)(map(encode_to_utf8, data))
else:
return data
def decode_from_utf8(data):
if isinstance(data, bytes):
return data.decode('utf-8')
if isinstance(data, unicode):
return data
elif isinstance(data, collections.Mapping):
return type(data)(map(decode_from_utf8, data.iteritems()))
elif isinstance(data, collections.Iterable):
return type(data)(map(decode_from_utf8, data))
else:
return data
class WeechatWrapper(object):
def __init__(self, wrapped_class):
self.wrapped_class = wrapped_class
# Helper method used to encode/decode method calls.
def wrap_for_utf8(self, method):
def hooked(*args, **kwargs):
result = method(*encode_to_utf8(args), **encode_to_utf8(kwargs))
# Prevent wrapped_class from becoming unwrapped
if result == self.wrapped_class:
return self
return decode_from_utf8(result)
return hooked
# Encode and decode everything sent to/received from weechat. We use the
# unicode type internally in wee-slack, but has to send utf8 to weechat.
def __getattr__(self, attr):
orig_attr = self.wrapped_class.__getattribute__(attr)
if callable(orig_attr):
return self.wrap_for_utf8(orig_attr)
else:
return decode_from_utf8(orig_attr)
# Ensure all lines sent to weechat specifies a prefix. For lines after the
# first, we want to disable the prefix, which is done by specifying a space.
def prnt_date_tags(self, buffer, date, tags, message):
message = message.replace("\n", "\n \t")
return self.wrap_for_utf8(self.wrapped_class.prnt_date_tags)(buffer, date, tags, message)
##### Helpers
def get_nick_color_name(nick):
info_name_prefix = "irc_" if int(weechat_version) < 0x1050000 else ""
return w.info_get(info_name_prefix + "nick_color_name", nick)
##### BEGIN NEW
IGNORED_EVENTS = [
# "pref_change",
# "reconnect_url",
]
###### New central Event router
class EventRouter(object):
def __init__(self):
"""
complete
Eventrouter is the central hub we use to route:
1) incoming websocket data
2) outgoing http requests and incoming replies
3) local requests
It has a recorder that, when enabled, logs most events
to the location specified in RECORD_DIR.
"""
self.queue = []
self.slow_queue = []
self.slow_queue_timer = 0
self.teams = {}
self.context = {}
self.weechat_controller = WeechatController(self)
self.previous_buffer = ""
self.reply_buffer = {}
self.cmds = {k[8:]: v for k, v in globals().items() if k.startswith("command_")}
self.proc = {k[8:]: v for k, v in globals().items() if k.startswith("process_")}
self.handlers = {k[7:]: v for k, v in globals().items() if k.startswith("handle_")}
self.local_proc = {k[14:]: v for k, v in globals().items() if k.startswith("local_process_")}
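        # Dispatch tables: module-level functions are registered by name
        # prefix, e.g. a function named process_message would end up as
        # self.proc["message"] (the slices above strip the prefixes).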
self.shutting_down = False
self.recording = False
self.recording_path = "/tmp"
def record(self):
"""
complete
Toggles the event recorder and creates a directory for data if enabled.
"""
self.recording = not self.recording
if self.recording:
if not os.path.exists(RECORD_DIR):
os.makedirs(RECORD_DIR)
def record_event(self, message_json, file_name_field, subdir=None):
"""
complete
Called each time you want to record an event.
        message_json is a json-decoded event (a dict)
file_name_field is the json key whose value you want to be part of the file name
"""
now = time.time()
if subdir:
directory = "{}/{}".format(RECORD_DIR, subdir)
else:
directory = RECORD_DIR
if not os.path.exists(directory):
os.makedirs(directory)
mtype = message_json.get(file_name_field, 'unknown')
        with open('{}/{}-{}.json'.format(directory, now, mtype), 'w') as f:
            f.write(json.dumps(message_json))
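        # For example, recording a websocket "message" event at t=1520000000.0
        # would produce /tmp/weeslack-debug/websocket/1520000000.0-message.json.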
def store_context(self, data):
"""
A place to store data and vars needed by callback returns. We need this because
weechat's "callback_data" has a limited size and weechat will crash if you exceed
this size.
"""
identifier = ''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(40))
self.context[identifier] = data
dbg("stored context {} {} ".format(identifier, data.url))
return identifier
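    # Illustrative round trip: the identifier returned by store_context() is
    # handed to weechat as the callback data string, and the original request
    # object is recovered later via retrieve_context(identifier).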
def retrieve_context(self, identifier):
"""
A place to retrieve data and vars needed by callback returns. We need this because
weechat's "callback_data" has a limited size and weechat will crash if you exceed
this size.
"""
data = self.context.get(identifier, None)
if data:
# dbg("retrieved context {} ".format(identifier))
return data
def delete_context(self, identifier):
"""
        A request can span multiple http calls, so we only delete its context
        as a last step.
"""
if identifier in self.context:
# dbg("deleted eontext {} ".format(identifier))
del self.context[identifier]
def shutdown(self):
"""
complete
This toggles shutdown mode. Shutdown mode tells us not to
talk to Slack anymore. Without this, typing /quit will trigger
a race with the buffer close callback and may result in you
leaving every slack channel.
"""
self.shutting_down = not self.shutting_down
def register_team(self, team):
"""
complete
Adds a team to the list of known teams for this EventRouter.
"""
if isinstance(team, SlackTeam):
self.teams[team.get_team_hash()] = team
else:
raise InvalidType(type(team))
def reconnect_if_disconnected(self):
for team_id, team in self.teams.iteritems():
if not team.connected:
team.connect()
dbg("reconnecting {}".format(team))
def receive_ws_callback(self, team_hash):
"""
incomplete (reconnect)
This is called by the global method of the same name.
It is triggered when we have incoming data on a websocket,
which needs to be read. Once it is read, we will ensure
the data is valid JSON, add metadata, and place it back
on the queue for processing as JSON.
"""
try:
# Read the data from the websocket associated with this team.
data = decode_from_utf8(self.teams[team_hash].ws.recv())
message_json = json.loads(data)
metadata = WeeSlackMetadata({
"team": team_hash,
}).jsonify()
message_json["wee_slack_metadata"] = metadata
if self.recording:
self.record_event(message_json, 'type', 'websocket')
self.receive_json(json.dumps(message_json))
except WebSocketConnectionClosedException:
# TODO: handle reconnect here
self.teams[team_hash].set_disconnected()
return w.WEECHAT_RC_OK
except ssl.SSLWantReadError:
# Expected to happen occasionally on SSL websockets.
return w.WEECHAT_RC_OK
except Exception:
dbg("socket issue: {}\n".format(traceback.format_exc()))
return w.WEECHAT_RC_OK
def receive_httprequest_callback(self, data, command, return_code, out, err):
"""
complete
Receives the result of an http request we previously handed
off to weechat (weechat bundles libcurl). Weechat can fragment
replies, so it buffers them until the reply is complete.
It is then populated with metadata here so we can identify
where the request originated and route properly.
"""
request_metadata = self.retrieve_context(data)
try:
dbg("RECEIVED CALLBACK with request of {} id of {} and code {} of length {}".format(request_metadata.request, request_metadata.response_id, return_code, len(out)))
except:
dbg(request_metadata)
return
if return_code == 0:
if len(out) > 0:
if request_metadata.response_id not in self.reply_buffer:
self.reply_buffer[request_metadata.response_id] = StringIO()
self.reply_buffer[request_metadata.response_id].write(out)
                try:
                    j = json.loads(self.reply_buffer[request_metadata.response_id].getvalue())
                except ValueError:
                    # Incomplete or invalid json; the except clause below
                    # will report the failure.
                    pass
try:
j["wee_slack_process_method"] = request_metadata.request_normalized
j["wee_slack_request_metadata"] = pickle.dumps(request_metadata)
self.reply_buffer.pop(request_metadata.response_id)
if self.recording:
self.record_event(j, 'wee_slack_process_method', 'http')
self.receive_json(json.dumps(j))
self.delete_context(data)
                except:
                    dbg("HTTP REQUEST CALLBACK FAILED", True)
# We got an empty reply and this is weird so just ditch it and retry
else:
dbg("length was zero, probably a bug..")
self.delete_context(data)
self.receive(request_metadata)
elif return_code != -1:
self.reply_buffer.pop(request_metadata.response_id, None)
self.delete_context(data)
else:
if request_metadata.response_id not in self.reply_buffer:
self.reply_buffer[request_metadata.response_id] = StringIO()
self.reply_buffer[request_metadata.response_id].write(out)
def receive_json(self, data):
"""
complete
Receives a raw JSON string from and unmarshals it
as dict, then places it back on the queue for processing.
"""
dbg("RECEIVED JSON of len {}".format(len(data)))
message_json = json.loads(data)
self.queue.append(message_json)
def receive(self, dataobj):
"""
complete
Receives a raw object and places it on the queue for
processing. Object must be known to handle_next or
be JSON.
"""
dbg("RECEIVED FROM QUEUE")
self.queue.append(dataobj)
def receive_slow(self, dataobj):
"""
complete
Receives a raw object and places it on the slow queue for
processing. Object must be known to handle_next or
be JSON.
"""
dbg("RECEIVED FROM QUEUE")
self.slow_queue.append(dataobj)
def handle_next(self):
"""
complete
Main handler of the EventRouter. This is called repeatedly
via callback to drain events from the queue. It also attaches
useful metadata and context to events as they are processed.
"""
if len(self.slow_queue) > 0 and ((self.slow_queue_timer + 1) < time.time()):
# for q in self.slow_queue[0]:
dbg("from slow queue", 0)
self.queue.append(self.slow_queue.pop())
# self.slow_queue = []
self.slow_queue_timer = time.time()
if len(self.queue) > 0:
j = self.queue.pop(0)
# Reply is a special case of a json reply from websocket.
kwargs = {}
if isinstance(j, SlackRequest):
if j.should_try():
if j.retry_ready():
local_process_async_slack_api_request(j, self)
else:
self.slow_queue.append(j)
else:
dbg("Max retries for Slackrequest")
else:
if "reply_to" in j:
dbg("SET FROM REPLY")
function_name = "reply"
elif "type" in j:
dbg("SET FROM type")
function_name = j["type"]
elif "wee_slack_process_method" in j:
dbg("SET FROM META")
function_name = j["wee_slack_process_method"]
else:
dbg("SET FROM NADA")
function_name = "unknown"
# Here we are passing the actual objects. No more lookups.
meta = j.get("wee_slack_metadata", None)
if meta:
try:
if isinstance(meta, basestring):
dbg("string of metadata")
team = meta.get("team", None)
if team:
kwargs["team"] = self.teams[team]
if "user" in j:
kwargs["user"] = self.teams[team].users[j["user"]]
if "channel" in j:
kwargs["channel"] = self.teams[team].channels[j["channel"]]
except:
dbg("metadata failure")
if function_name not in IGNORED_EVENTS:
dbg("running {}".format(function_name))
if function_name.startswith("local_") and function_name in self.local_proc:
self.local_proc[function_name](j, self, **kwargs)
elif function_name in self.proc:
self.proc[function_name](j, self, **kwargs)
elif function_name in self.handlers:
self.handlers[function_name](j, self, **kwargs)
else:
raise ProcessNotImplemented(function_name)
def handle_next(*args):
"""
complete
This is just a place to call the event router globally.
This is a dirty hack. There must be a better way.
"""
try:
EVENTROUTER.handle_next()
except:
if config.debug_mode:
traceback.print_exc()
else:
pass
return w.WEECHAT_RC_OK
class WeechatController(object):
"""
Encapsulates our interaction with weechat
"""
def __init__(self, eventrouter):
self.eventrouter = eventrouter
self.buffers = {}
self.previous_buffer = None
        self.buffer_list_stale = False
        # referenced by check_refresh_buffer_list()
        self.last_buffer_list_update = 0
def iter_buffers(self):
for b in self.buffers:
yield (b, self.buffers[b])
def register_buffer(self, buffer_ptr, channel):
"""
complete
Adds a weechat buffer to the list of handled buffers for this EventRouter
"""
if isinstance(buffer_ptr, basestring):
self.buffers[buffer_ptr] = channel
else:
raise InvalidType(type(buffer_ptr))
def unregister_buffer(self, buffer_ptr, update_remote=False, close_buffer=False):
"""
complete
        Removes a weechat buffer from the list of handled buffers for this EventRouter
"""
if isinstance(buffer_ptr, basestring):
try:
self.buffers[buffer_ptr].destroy_buffer(update_remote)
if close_buffer:
w.buffer_close(buffer_ptr)
del self.buffers[buffer_ptr]
except:
dbg("Tried to close unknown buffer")
else:
raise InvalidType(type(buffer_ptr))
def get_channel_from_buffer_ptr(self, buffer_ptr):
return self.buffers.get(buffer_ptr, None)
def get_all(self, buffer_ptr):
return self.buffers
def get_previous_buffer_ptr(self):
return self.previous_buffer
def set_previous_buffer(self, data):
self.previous_buffer = data
def check_refresh_buffer_list(self):
return self.buffer_list_stale and self.last_buffer_list_update + 1 < time.time()
def set_refresh_buffer_list(self, setting):
self.buffer_list_stale = setting
###### New Local Processors
def local_process_async_slack_api_request(request, event_router):
"""
complete
Sends an API request to Slack. You'll need to give this a well formed SlackRequest object.
DEBUGGING!!! The context here cannot be very large. Weechat will crash.
"""
if not event_router.shutting_down:
weechat_request = 'url:{}'.format(request.request_string())
weechat_request += '&nonce={}'.format(''.join(random.choice(string.ascii_uppercase + string.digits) for _ in range(4)))
params = {'useragent': 'wee_slack {}'.format(SCRIPT_VERSION)}
request.tried()
context = event_router.store_context(request)
# TODO: let flashcode know about this bug - i have to 'clear' the hashtable or retry requests fail
w.hook_process_hashtable('url:', params, config.slack_timeout, "", context)
w.hook_process_hashtable(weechat_request, params, config.slack_timeout, "receive_httprequest_callback", context)
###### New Callbacks
@utf8_decode
def receive_httprequest_callback(data, command, return_code, out, err):
"""
complete
This is a dirty hack. There must be a better way.
"""
# def url_processor_cb(data, command, return_code, out, err):
EVENTROUTER.receive_httprequest_callback(data, command, return_code, out, err)
return w.WEECHAT_RC_OK
@utf8_decode
def receive_ws_callback(*args):
"""
complete
The first arg is all we want here. It contains the team
hash which is set when we _hook the descriptor.
This is a dirty hack. There must be a better way.
"""
EVENTROUTER.receive_ws_callback(args[0])
return w.WEECHAT_RC_OK
@utf8_decode
def reconnect_callback(*args):
EVENTROUTER.reconnect_if_disconnected()
return w.WEECHAT_RC_OK
@utf8_decode
def buffer_closing_callback(signal, sig_type, data):
"""
complete
Receives a callback from weechat when a buffer is being closed.
We pass the eventrouter variable name in as a string, as
that is the only way we can do dependency injection via weechat
callback, hence the eval.
"""
eval(signal).weechat_controller.unregister_buffer(data, True, False)
return w.WEECHAT_RC_OK
@utf8_decode
def buffer_input_callback(signal, buffer_ptr, data):
"""
incomplete
Handles everything a user types in the input bar. In our case
this includes add/remove reactions, modifying messages, and
sending messages.
"""
eventrouter = eval(signal)
channel = eventrouter.weechat_controller.get_channel_from_buffer_ptr(buffer_ptr)
if not channel:
return w.WEECHAT_RC_ERROR
    reaction = re.match(r"^(\d*)(\+|-):(.*):\s*$", data)
    substitute = re.match(r"^(\d*)s/", data)
if reaction:
if reaction.group(2) == "+":
channel.send_add_reaction(int(reaction.group(1) or 1), reaction.group(3))
elif reaction.group(2) == "-":
channel.send_remove_reaction(int(reaction.group(1) or 1), reaction.group(3))
elif substitute:
msgno = int(substitute.group(1) or 1)
try:
old, new, flags = re.split(r'(?<!\\)/', data)[1:]
except ValueError:
pass
else:
# Replacement string in re.sub() is a string, not a regex, so get
# rid of escapes.
new = new.replace(r'\/', '/')
old = old.replace(r'\/', '/')
channel.edit_nth_previous_message(msgno, old, new, flags)
else:
if data.startswith(('//', ' ')):
data = data[1:]
channel.send_message(data)
# this is probably wrong channel.mark_read(update_remote=True, force=True)
return w.WEECHAT_RC_OK
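# Illustrative input syntax handled by buffer_input_callback:
#   "+:thumbsup:"   adds a thumbsup reaction to the most recent message
#   "3-:thumbsup:"  removes it from the 3rd most recent message
#   "2s/teh/the/"   rewrites your 2nd most recent message (a trailing "g"
#                   flag replaces all occurrences instead of just the first)
# Anything else is sent to the channel as a regular message.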
# Workaround for supporting multiline messages. It intercepts before the input
# callback is called, as this is called with the whole message, while it is
# normally split on newline before being sent to buffer_input_callback
def input_text_for_buffer_cb(data, modifier, current_buffer, string):
if current_buffer not in EVENTROUTER.weechat_controller.buffers:
return string
message = decode_from_utf8(string)
if not message.startswith("/") and "\n" in message:
buffer_input_callback("EVENTROUTER", current_buffer, message)
return ""
return string
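# For example, pasting "one\ntwo" results in a single two-line slack message
# rather than two separate messages; input starting with "/" (commands) is
# passed through to weechat untouched.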
@utf8_decode
def buffer_switch_callback(signal, sig_type, data):
"""
incomplete
Every time we change channels in weechat, we call this to:
1) set read marker 2) determine if we have already populated
channel history data
"""
eventrouter = eval(signal)
prev_buffer_ptr = eventrouter.weechat_controller.get_previous_buffer_ptr()
# this is to see if we need to gray out things in the buffer list
prev = eventrouter.weechat_controller.get_channel_from_buffer_ptr(prev_buffer_ptr)
if prev:
prev.mark_read()
new_channel = eventrouter.weechat_controller.get_channel_from_buffer_ptr(data)
if new_channel:
if not new_channel.got_history:
new_channel.get_history()
eventrouter.weechat_controller.set_previous_buffer(data)
return w.WEECHAT_RC_OK
@utf8_decode
def buffer_list_update_callback(data, somecount):
"""
incomplete
A simple timer-based callback that will update the buffer list
if needed. We only do this max 1x per second, as otherwise it
uses a lot of cpu for minimal changes. We use buffer short names
to indicate typing via "#channel" <-> ">channel" and
user presence via " name" <-> "+name".
"""
eventrouter = eval(data)
# global buffer_list_update
for b in eventrouter.weechat_controller.iter_buffers():
b[1].refresh()
# buffer_list_update = True
# if eventrouter.weechat_controller.check_refresh_buffer_list():
# # gray_check = False
# # if len(servers) > 1:
# # gray_check = True
# eventrouter.weechat_controller.set_refresh_buffer_list(False)
return w.WEECHAT_RC_OK
def quit_notification_callback(signal, sig_type, data):
stop_talking_to_slack()
return w.WEECHAT_RC_OK
@utf8_decode
def typing_notification_cb(signal, sig_type, data):
msg = w.buffer_get_string(data, "input")
if len(msg) > 8 and msg[:1] != "/":
global typing_timer
now = time.time()
if typing_timer + 4 < now:
current_buffer = w.current_buffer()
channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer, None)
if channel and channel.type != "thread":
identifier = channel.identifier
request = {"type": "typing", "channel": identifier}
channel.team.send_to_websocket(request, expect_reply=False)
typing_timer = now
return w.WEECHAT_RC_OK
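# Note: typing events are deliberately throttled above: at most one "typing"
# notification is sent over the websocket every 4 seconds, and only once the
# input is longer than 8 characters and is not a "/" command.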
@utf8_decode
def typing_update_cb(data, remaining_calls):
w.bar_item_update("slack_typing_notice")
return w.WEECHAT_RC_OK
@utf8_decode
def slack_never_away_cb(data, remaining_calls):
if config.never_away:
for t in EVENTROUTER.teams.values():
slackbot = t.get_channel_map()['slackbot']
channel = t.channels[slackbot]
request = {"type": "typing", "channel": channel.identifier}
channel.team.send_to_websocket(request, expect_reply=False)
return w.WEECHAT_RC_OK
@utf8_decode
def typing_bar_item_cb(data, current_buffer, args):
"""
    Provides a bar item indicating who is typing in the current channel AND
    who is typing a DM to you globally.
"""
typers = []
current_buffer = w.current_buffer()
current_channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer, None)
# first look for people typing in this channel
if current_channel:
        # this try is mostly because server buffers don't implement is_someone_typing
try:
if current_channel.type != 'im' and current_channel.is_someone_typing():
typers += current_channel.get_typing_list()
except:
pass
# here is where we notify you that someone is typing in DM
# regardless of which buffer you are in currently
for t in EVENTROUTER.teams.values():
for channel in t.channels.values():
if channel.type == "im":
if channel.is_someone_typing():
typers.append("D/" + channel.slack_name)
pass
typing = ", ".join(typers)
if typing != "":
typing = w.color('yellow') + "typing: " + typing
return typing
@utf8_decode
def nick_completion_cb(data, completion_item, current_buffer, completion):
"""
Adds all @-prefixed nicks to completion list
"""
current_buffer = w.current_buffer()
current_channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer, None)
if current_channel is None or current_channel.members is None:
return w.WEECHAT_RC_OK
for m in current_channel.members:
u = current_channel.team.users.get(m, None)
if u:
w.hook_completion_list_add(completion, "@" + u.name, 1, w.WEECHAT_LIST_POS_SORT)
return w.WEECHAT_RC_OK
@utf8_decode
def emoji_completion_cb(data, completion_item, current_buffer, completion):
"""
Adds all :-prefixed emoji to completion list
"""
current_buffer = w.current_buffer()
current_channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer, None)
if current_channel is None:
return w.WEECHAT_RC_OK
for e in current_channel.team.emoji_completions:
w.hook_completion_list_add(completion, ":" + e + ":", 0, w.WEECHAT_LIST_POS_SORT)
return w.WEECHAT_RC_OK
@utf8_decode
def complete_next_cb(data, current_buffer, command):
"""Extract current word, if it is equal to a nick, prefix it with @ and
rely on nick_completion_cb adding the @-prefixed versions to the
completion lists, then let Weechat's internal completion do its
thing
"""
current_buffer = w.current_buffer()
current_channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer, None)
# channel = channels.find(current_buffer)
    if current_channel is None or not hasattr(current_channel, 'members') or current_channel.members is None:
return w.WEECHAT_RC_OK
line_input = w.buffer_get_string(current_buffer, "input")
current_pos = w.buffer_get_integer(current_buffer, "input_pos") - 1
input_length = w.buffer_get_integer(current_buffer, "input_length")
word_start = 0
word_end = input_length
# If we're on a non-word, look left for something to complete
while current_pos >= 0 and line_input[current_pos] != '@' and not line_input[current_pos].isalnum():
current_pos = current_pos - 1
if current_pos < 0:
current_pos = 0
for l in range(current_pos, 0, -1):
if line_input[l] != '@' and not line_input[l].isalnum():
word_start = l + 1
break
for l in range(current_pos, input_length):
if not line_input[l].isalnum():
word_end = l
break
word = line_input[word_start:word_end]
for m in current_channel.members:
u = current_channel.team.users.get(m, None)
if u and u.name == word:
# Here, we cheat. Insert a @ in front and rely in the @
# nicks being in the completion list
w.buffer_set(current_buffer, "input", line_input[:word_start] + "@" + line_input[word_start:])
w.buffer_set(current_buffer, "input_pos", str(w.buffer_get_integer(current_buffer, "input_pos") + 1))
return w.WEECHAT_RC_OK_EAT
return w.WEECHAT_RC_OK
def script_unloaded():
stop_talking_to_slack()
return w.WEECHAT_RC_OK
def stop_talking_to_slack():
"""
complete
Prevents a race condition where quitting closes buffers
which triggers leaving the channel because of how close
buffer is handled
"""
EVENTROUTER.shutdown()
return w.WEECHAT_RC_OK
##### New Classes
class SlackRequest(object):
"""
complete
Encapsulates a Slack api request. Valuable as an object that we can add to the queue and/or retry.
    Makes a SHA of the request url and current time so we can re-tag this on the way back through.
"""
def __init__(self, token, request, post_data={}, **kwargs):
for key, value in kwargs.items():
setattr(self, key, value)
self.tries = 0
self.start_time = time.time()
self.domain = 'api.slack.com'
self.request = request
self.request_normalized = re.sub(r'\W+', '', request)
self.token = token
post_data["token"] = token
self.post_data = post_data
self.params = {'useragent': 'wee_slack {}'.format(SCRIPT_VERSION)}
self.url = 'https://{}/api/{}?{}'.format(self.domain, request, urllib.urlencode(encode_to_utf8(post_data)))
self.response_id = sha.sha("{}{}".format(self.url, self.start_time)).hexdigest()
self.retries = kwargs.get('retries', 3)
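    # Illustrative only: SlackRequest(token, "channels.history",
    # {"channel": "C012AB3CD", "count": 200}) builds the url
    # https://api.slack.com/api/channels.history?... (token included in the
    # query string) and hashes url + start time into response_id so replies
    # and retries can be matched back to this object.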
# def __repr__(self):
# return "URL: {} Tries: {} ID: {}".format(self.url, self.tries, self.response_id)
def request_string(self):
return "{}".format(self.url)
def tried(self):
self.tries += 1
self.response_id = sha.sha("{}{}".format(self.url, time.time())).hexdigest()
def should_try(self):
return self.tries < self.retries
def retry_ready(self):
return (self.start_time + (self.tries**2)) < time.time()
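    # Backoff is quadratic: a request becomes retry-ready tries**2 seconds
    # after it was created (1s after one attempt, 4s after two, 9s after
    # three), and should_try() gives up once tries reaches retries.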
class SlackTeam(object):
"""
incomplete
    Team object under which users and channels live; also owns the websocket
    connection and the team's server buffer.
"""
def __init__(self, eventrouter, token, websocket_url, subdomain, nick, myidentifier, users, bots, channels, **kwargs):
self.ws_url = websocket_url
self.connected = False
self.connecting = False
# self.ws = None
self.ws_counter = 0
self.ws_replies = {}
self.eventrouter = eventrouter
self.token = token
self.team = self
self.subdomain = subdomain
self.domain = subdomain + ".slack.com"
self.preferred_name = self.domain
self.nick = nick
self.myidentifier = myidentifier
        try:
            if self.channels:
                for c in channels.keys():
                    if not self.channels.get(c):
                        self.channels[c] = channels[c]
        except AttributeError:
            # first initialization: self.channels doesn't exist yet
            self.channels = channels
self.users = users
self.bots = bots
self.team_hash = SlackTeam.generate_team_hash(self.nick, self.subdomain)
self.name = self.domain
self.channel_buffer = None
self.got_history = True
self.create_buffer()
self.set_muted_channels(kwargs.get('muted_channels', ""))
        for c in self.channels.keys():
            self.channels[c].set_related_server(self)
            self.channels[c].check_should_open()
# self.channel_set_related_server(c)
# Last step is to make sure my nickname is the set color
self.users[self.myidentifier].force_color(w.config_string(w.config_get('weechat.color.chat_nick_self')))
# This highlight step must happen after we have set related server
self.set_highlight_words(kwargs.get('highlight_words', ""))
self.load_emoji_completions()
def __repr__(self):
return "domain={} nick={}".format(self.subdomain, self.nick)
    def __eq__(self, compare_str):
        return compare_str in (self.token, self.domain, self.subdomain)
def load_emoji_completions(self):
self.emoji_completions = list(EMOJI)
if self.emoji_completions:
s = SlackRequest(self.token, "emoji.list", {}, team_hash=self.team_hash)
self.eventrouter.receive(s)
def add_channel(self, channel):
self.channels[channel["id"]] = channel
channel.set_related_server(self)
# def connect_request_generate(self):
# return SlackRequest(self.token, 'rtm.start', {})
# def close_all_buffers(self):
# for channel in self.channels:
# self.eventrouter.weechat_controller.unregister_buffer(channel.channel_buffer, update_remote=False, close_buffer=True)
# #also close this server buffer
# self.eventrouter.weechat_controller.unregister_buffer(self.channel_buffer, update_remote=False, close_buffer=True)
def create_buffer(self):
if not self.channel_buffer:
if config.short_buffer_names:
self.preferred_name = self.subdomain
elif config.server_aliases not in ['', None]:
name = config.server_aliases.get(self.subdomain, None)
if name:
self.preferred_name = name
else:
self.preferred_name = self.domain
self.channel_buffer = w.buffer_new("{}".format(self.preferred_name), "buffer_input_callback", "EVENTROUTER", "", "")
self.eventrouter.weechat_controller.register_buffer(self.channel_buffer, self)
w.buffer_set(self.channel_buffer, "localvar_set_type", 'server')
w.buffer_set(self.channel_buffer, "localvar_set_nick", self.nick)
w.buffer_set(self.channel_buffer, "localvar_set_server", self.preferred_name)
if w.config_string(w.config_get('irc.look.server_buffer')) == 'merge_with_core':
w.buffer_merge(self.channel_buffer, w.buffer_search_main())
def set_muted_channels(self, muted_str):
self.muted_channels = {x for x in muted_str.split(',')}
def set_highlight_words(self, highlight_str):
self.highlight_words = {x for x in highlight_str.split(',')}
if len(self.highlight_words) > 0:
for v in self.channels.itervalues():
v.set_highlights()
def formatted_name(self, **kwargs):
return self.domain
def buffer_prnt(self, data):
w.prnt_date_tags(self.channel_buffer, SlackTS().major, tag("team"), data)
def find_channel_by_members(self, members, channel_type=None):
for channel in self.channels.itervalues():
if channel.get_members() == members and (
channel_type is None or channel.type == channel_type):
return channel
def get_channel_map(self):
return {v.slack_name: k for k, v in self.channels.iteritems()}
def get_username_map(self):
return {v.name: k for k, v in self.users.iteritems()}
def get_team_hash(self):
return self.team_hash
@staticmethod
def generate_team_hash(nick, subdomain):
return str(sha.sha("{}{}".format(nick, subdomain)).hexdigest())
def refresh(self):
self.rename()
def rename(self):
pass
# def attach_websocket(self, ws):
# self.ws = ws
    def is_user_present(self, user_id):
        user = self.users.get(user_id)
        return user is not None and user.presence == 'active'
def mark_read(self, ts=None, update_remote=True, force=False):
pass
def connect(self):
if not self.connected and not self.connecting:
self.connecting = True
if self.ws_url:
try:
ws = create_connection(self.ws_url, sslopt=sslopt_ca_certs)
self.hook = w.hook_fd(ws.sock._sock.fileno(), 1, 0, 0, "receive_ws_callback", self.get_team_hash())
ws.sock.setblocking(0)
self.ws = ws
# self.attach_websocket(ws)
self.set_connected()
self.connecting = False
except Exception as e:
dbg("websocket connection error: {}".format(decode_from_utf8(e)))
self.connecting = False
return False
else:
# The fast reconnect failed, so start over-ish
for chan in self.channels:
self.channels[chan].got_history = False
s = initiate_connection(self.token, retries=999)
self.eventrouter.receive(s)
self.connecting = False
# del self.eventrouter.teams[self.get_team_hash()]
self.set_reconnect_url(None)
def set_connected(self):
self.connected = True
def set_disconnected(self):
w.unhook(self.hook)
self.connected = False
def set_reconnect_url(self, url):
self.ws_url = url
def next_ws_transaction_id(self):
if self.ws_counter > 999:
self.ws_counter = 0
self.ws_counter += 1
return self.ws_counter
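    # Ids cycle through 1..1000, which also bounds how many pending entries
    # send_to_websocket() can keep in ws_replies at any one time.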
def send_to_websocket(self, data, expect_reply=True):
data["id"] = self.next_ws_transaction_id()
message = json.dumps(data)
try:
if expect_reply:
self.ws_replies[data["id"]] = data
self.ws.send(encode_to_utf8(message))
dbg("Sent {}...".format(message[:100]))
except:
print "WS ERROR"
dbg("Unexpected error: {}\nSent: {}".format(sys.exc_info()[0], data))
            self.set_disconnected()
def update_member_presence(self, user, presence):
user.presence = presence
for c in self.channels:
c = self.channels[c]
if user.id in c.members:
c.update_nicklist(user.id)
def subscribe_users_presence(self):
# FIXME: There is a limitation in the API to the size of the
# json we can send.
# We should try to be smarter to fetch the users whom we want to
# subscribe to.
users = self.users.keys()[0:750]
self.send_to_websocket({
"type": "presence_sub",
"ids": users,
}, expect_reply=False)
class SlackChannel(object):
"""
Represents an individual slack channel.
"""
def __init__(self, eventrouter, **kwargs):
# We require these two things for a valid object,
# the rest we can just learn from slack
self.active = False
for key, value in kwargs.items():
setattr(self, key, value)
self.eventrouter = eventrouter
self.slack_name = kwargs["name"]
self.slack_purpose = kwargs.get("purpose", {"value": ""})
self.topic = kwargs.get("topic", {}).get("value", "")
self.identifier = kwargs["id"]
self.last_read = SlackTS(kwargs.get("last_read", SlackTS()))
self.channel_buffer = None
self.team = kwargs.get('team', None)
self.got_history = False
self.messages = OrderedDict()
self.hashed_messages = {}
self.new_messages = False
self.typing = {}
self.type = 'channel'
self.set_name(self.slack_name)
# short name relates to the localvar we change for typing indication
self.current_short_name = self.name
self.set_members(kwargs.get('members', []))
self.unread_count_display = 0
    def __eq__(self, compare_str):
        return compare_str in (self.slack_name, self.formatted_name(),
                               self.formatted_name(style="long_default"))
def __repr__(self):
return "Name:{} Identifier:{}".format(self.name, self.identifier)
def set_name(self, slack_name):
self.name = "#" + slack_name
def refresh(self):
return self.rename()
def rename(self):
if self.channel_buffer:
new_name = self.formatted_name(typing=self.is_someone_typing(), style="sidebar")
if self.current_short_name != new_name:
self.current_short_name = new_name
w.buffer_set(self.channel_buffer, "short_name", new_name)
return True
return False
def set_members(self, members):
self.members = set(members)
self.update_nicklist()
def get_members(self):
return self.members
def set_unread_count_display(self, count):
self.unread_count_display = count
self.new_messages = bool(self.unread_count_display)
for c in range(self.unread_count_display):
if self.type == "im":
w.buffer_set(self.channel_buffer, "hotlist", "2")
else:
w.buffer_set(self.channel_buffer, "hotlist", "1")
def formatted_name(self, style="default", typing=False, **kwargs):
if typing and config.channel_name_typing_indicator:
prepend = ">"
elif self.type == "group":
prepend = config.group_name_prefix
else:
prepend = "#"
select = {
"default": prepend + self.slack_name,
"sidebar": prepend + self.slack_name,
"base": self.slack_name,
"long_default": "{}.{}{}".format(self.team.preferred_name, prepend, self.slack_name),
"long_base": "{}.{}".format(self.team.preferred_name, self.slack_name),
}
return select[style]
def render_topic(self):
if self.channel_buffer:
if self.topic != "":
topic = self.topic
else:
topic = self.slack_purpose['value']
w.buffer_set(self.channel_buffer, "title", topic)
def set_topic(self, value):
self.topic = value
self.render_topic()
def update_from_message_json(self, message_json):
for key, value in message_json.items():
setattr(self, key, value)
def open(self, update_remote=True):
if update_remote:
if "join" in SLACK_API_TRANSLATOR[self.type]:
s = SlackRequest(self.team.token, SLACK_API_TRANSLATOR[self.type]["join"], {"channel": self.identifier}, team_hash=self.team.team_hash, channel_identifier=self.identifier)
self.eventrouter.receive(s)
self.create_buffer()
self.active = True
self.get_history()
# self.create_buffer()
def check_should_open(self, force=False):
if hasattr(self, "is_archived") and self.is_archived:
return
if force:
self.create_buffer()
return
# Only check is_member if is_open is not set, because in some cases
# (e.g. group DMs), is_member should be ignored in favor of is_open.
is_open = self.is_open if hasattr(self, "is_open") else self.is_member
if is_open or self.unread_count_display:
self.create_buffer()
if config.background_load_all_history:
self.get_history(slow_queue=True)
def set_related_server(self, team):
self.team = team
def set_highlights(self):
# highlight my own name and any set highlights
if self.channel_buffer:
highlights = self.team.highlight_words.union({'@' + self.team.nick, self.team.myidentifier, "!here", "!channel", "!everyone"})
h_str = ",".join(highlights)
w.buffer_set(self.channel_buffer, "highlight_words", h_str)
def create_buffer(self):
"""
incomplete (muted doesn't work)
Creates the weechat buffer where the channel magic happens.
"""
if not self.channel_buffer:
self.active = True
self.channel_buffer = w.buffer_new(self.formatted_name(style="long_default"), "buffer_input_callback", "EVENTROUTER", "", "")
self.eventrouter.weechat_controller.register_buffer(self.channel_buffer, self)
if self.type == "im":
w.buffer_set(self.channel_buffer, "localvar_set_type", 'private')
else:
w.buffer_set(self.channel_buffer, "localvar_set_type", 'channel')
w.buffer_set(self.channel_buffer, "localvar_set_channel", self.formatted_name())
w.buffer_set(self.channel_buffer, "localvar_set_nick", self.team.nick)
w.buffer_set(self.channel_buffer, "short_name", self.formatted_name(style="sidebar", enable_color=True))
self.render_topic()
self.eventrouter.weechat_controller.set_refresh_buffer_list(True)
if self.channel_buffer:
# if self.team.server_alias:
# w.buffer_set(self.channel_buffer, "localvar_set_server", self.team.server_alias)
# else:
w.buffer_set(self.channel_buffer, "localvar_set_server", self.team.preferred_name)
# else:
# self.eventrouter.weechat_controller.register_buffer(self.channel_buffer, self)
self.update_nicklist()
if "info" in SLACK_API_TRANSLATOR[self.type]:
s = SlackRequest(self.team.token, SLACK_API_TRANSLATOR[self.type]["info"], {"channel": self.identifier}, team_hash=self.team.team_hash, channel_identifier=self.identifier)
self.eventrouter.receive(s)
if self.type == "im":
if "join" in SLACK_API_TRANSLATOR[self.type]:
s = SlackRequest(self.team.token, SLACK_API_TRANSLATOR[self.type]["join"], {"users": self.user, "return_im": True}, team_hash=self.team.team_hash, channel_identifier=self.identifier)
self.eventrouter.receive(s)
def destroy_buffer(self, update_remote):
if self.channel_buffer is not None:
self.channel_buffer = None
self.messages = OrderedDict()
self.hashed_messages = {}
self.got_history = False
# if update_remote and not eventrouter.shutting_down:
self.active = False
if update_remote and not self.eventrouter.shutting_down:
s = SlackRequest(self.team.token, SLACK_API_TRANSLATOR[self.type]["leave"], {"channel": self.identifier}, team_hash=self.team.team_hash, channel_identifier=self.identifier)
self.eventrouter.receive(s)
    def buffer_prnt(self, nick, text, timestamp=None, tagset=None, tag_nick=None, **kwargs):
        data = "{}\t{}".format(format_nick(nick), text)
        ts = SlackTS(timestamp) if timestamp else SlackTS()
last_read = SlackTS(self.last_read)
# without this, DMs won't open automatically
if not self.channel_buffer and ts > last_read:
self.open(update_remote=False)
if self.channel_buffer:
# backlog messages - we will update the read marker as we print these
            backlog = ts <= last_read
if tagset:
tags = tag(tagset, user=tag_nick)
self.new_messages = True
# we have to infer the tagset because we weren't told
elif ts <= last_read:
tags = tag("backlog", user=tag_nick)
elif self.type in ["im", "mpdm"]:
if tag_nick != self.team.nick:
tags = tag("dm", user=tag_nick)
self.new_messages = True
else:
tags = tag("dmfromme")
else:
tags = tag("default", user=tag_nick)
self.new_messages = True
try:
if config.unhide_buffers_with_activity and not self.is_visible() and (self.identifier not in self.team.muted_channels):
w.buffer_set(self.channel_buffer, "hidden", "0")
w.prnt_date_tags(self.channel_buffer, ts.major, tags, data)
modify_print_time(self.channel_buffer, ts.minorstr(), ts.major)
if backlog:
self.mark_read(ts, update_remote=False, force=True)
except:
dbg("Problem processing buffer_prnt")
def send_message(self, message, request_dict_ext={}):
# team = self.eventrouter.teams[self.team]
message = linkify_text(message, self.team, self)
dbg(message)
request = {"type": "message", "channel": self.identifier, "text": message, "_team": self.team.team_hash, "user": self.team.myidentifier}
request.update(request_dict_ext)
self.team.send_to_websocket(request)
self.mark_read(update_remote=False, force=True)
def store_message(self, message, team, from_me=False):
if not self.active:
return
if from_me:
message.message_json["user"] = team.myidentifier
self.messages[SlackTS(message.ts)] = message
sorted_messages = sorted(self.messages.items())
messages_to_delete = sorted_messages[:-SCROLLBACK_SIZE]
messages_to_keep = sorted_messages[-SCROLLBACK_SIZE:]
for message_hash in [m[1].hash for m in messages_to_delete]:
if message_hash in self.hashed_messages:
del self.hashed_messages[message_hash]
self.messages = OrderedDict(messages_to_keep)
def change_message(self, ts, text=None, suffix=None):
ts = SlackTS(ts)
if ts in self.messages:
m = self.messages[ts]
if text:
m.change_text(text)
if suffix:
m.change_suffix(suffix)
text = m.render(force=True)
modify_buffer_line(self.channel_buffer, text, ts.major, ts.minor)
return True
    def edit_nth_previous_message(self, n, old, new, flags):
        message = self.my_last_message(n)
        if message is None:
            return
        if new == "" and old == "":
s = SlackRequest(self.team.token, "chat.delete", {"channel": self.identifier, "ts": message['ts']}, team_hash=self.team.team_hash, channel_identifier=self.identifier)
self.eventrouter.receive(s)
else:
num_replace = 1
if 'g' in flags:
num_replace = 0
new_message = re.sub(old, new, message["text"], num_replace)
if new_message != message["text"]:
s = SlackRequest(self.team.token, "chat.update", {"channel": self.identifier, "ts": message['ts'], "text": new_message}, team_hash=self.team.team_hash, channel_identifier=self.identifier)
self.eventrouter.receive(s)
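    # Illustrative: "s///" (empty old and new) deletes your most recent
    # message via chat.delete; otherwise the n-th previous message you sent
    # is rewritten with re.sub and pushed back via chat.update.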
def my_last_message(self, msgno):
for key in self.main_message_keys_reversed():
m = self.messages[key]
if "user" in m.message_json and "text" in m.message_json and m.message_json["user"] == self.team.myidentifier:
msgno -= 1
if msgno == 0:
return m.message_json
def is_visible(self):
return w.buffer_get_integer(self.channel_buffer, "hidden") == 0
def get_history(self, slow_queue=False):
if not self.got_history:
# we have probably reconnected. flush the buffer
if self.team.connected:
w.buffer_clear(self.channel_buffer)
self.buffer_prnt('', 'getting channel history...', tagset='backlog')
s = SlackRequest(self.team.token, SLACK_API_TRANSLATOR[self.type]["history"], {"channel": self.identifier, "count": BACKLOG_SIZE}, team_hash=self.team.team_hash, channel_identifier=self.identifier, clear=True)
if not slow_queue:
self.eventrouter.receive(s)
else:
self.eventrouter.receive_slow(s)
self.got_history = True
def send_add_reaction(self, msg_number, reaction):
self.send_change_reaction("reactions.add", msg_number, reaction)
def send_remove_reaction(self, msg_number, reaction):
self.send_change_reaction("reactions.remove", msg_number, reaction)
def send_change_reaction(self, method, msg_number, reaction):
if 0 < msg_number < len(self.messages):
keys = self.main_message_keys_reversed()
timestamp = next(islice(keys, msg_number - 1, None))
data = {"channel": self.identifier, "timestamp": timestamp, "name": reaction}
s = SlackRequest(self.team.token, method, data)
self.eventrouter.receive(s)
def main_message_keys_reversed(self):
return (key for key in reversed(self.messages)
if type(self.messages[key]) == SlackMessage)
# Typing related
def set_typing(self, user):
if self.channel_buffer and self.is_visible():
self.typing[user] = time.time()
self.eventrouter.weechat_controller.set_refresh_buffer_list(True)
def unset_typing(self, user):
if self.channel_buffer and self.is_visible():
u = self.typing.get(user, None)
if u:
self.eventrouter.weechat_controller.set_refresh_buffer_list(True)
def is_someone_typing(self):
"""
Walks through dict of typing folks in a channel and fast
returns if any of them is actively typing. If none are,
nulls the dict and returns false.
"""
for user, timestamp in self.typing.iteritems():
if timestamp + 4 > time.time():
return True
if len(self.typing) > 0:
self.typing = {}
self.eventrouter.weechat_controller.set_refresh_buffer_list(True)
return False
def get_typing_list(self):
"""
Returns the names of everyone in the channel who is currently typing.
"""
typing = []
        # iterate over a copy (.items()) since stale entries are deleted
        # from the dict as we go
        for user, timestamp in self.typing.items():
            if timestamp + 4 > time.time():
                typing.append(user)
            else:
                del self.typing[user]
return typing
def mark_read(self, ts=None, update_remote=True, force=False):
if not ts:
ts = next(self.main_message_keys_reversed(), SlackTS())
if self.new_messages or force:
if self.channel_buffer:
w.buffer_set(self.channel_buffer, "unread", "")
w.buffer_set(self.channel_buffer, "hotlist", "-1")
if update_remote:
s = SlackRequest(self.team.token, SLACK_API_TRANSLATOR[self.type]["mark"], {"channel": self.identifier, "ts": ts}, team_hash=self.team.team_hash, channel_identifier=self.identifier)
self.eventrouter.receive(s)
self.new_messages = False
def user_joined(self, user_id):
# ugly hack - for some reason this gets turned into a list
self.members = set(self.members)
self.members.add(user_id)
self.update_nicklist(user_id)
def user_left(self, user_id):
self.members.discard(user_id)
self.update_nicklist(user_id)
def update_nicklist(self, user=None):
if not self.channel_buffer:
return
if self.type not in ["channel", "group", "mpim"]:
return
w.buffer_set(self.channel_buffer, "nicklist", "1")
# create nicklists for the current channel if they don't exist
# if they do, use the existing pointer
here = w.nicklist_search_group(self.channel_buffer, '', NICK_GROUP_HERE)
if not here:
here = w.nicklist_add_group(self.channel_buffer, '', NICK_GROUP_HERE, "weechat.color.nicklist_group", 1)
afk = w.nicklist_search_group(self.channel_buffer, '', NICK_GROUP_AWAY)
if not afk:
afk = w.nicklist_add_group(self.channel_buffer, '', NICK_GROUP_AWAY, "weechat.color.nicklist_group", 1)
if user and len(self.members) < 1000:
user = self.team.users[user]
if user.deleted:
return
nick = w.nicklist_search_nick(self.channel_buffer, "", user.name)
# since this is a change just remove it regardless of where it is
w.nicklist_remove_nick(self.channel_buffer, nick)
# now add it back in to whichever..
nick_group = afk
if self.team.is_user_present(user.identifier):
nick_group = here
if user.identifier in self.members:
w.nicklist_add_nick(self.channel_buffer, nick_group, user.name, user.color_name, "", "", 1)
# if we didn't get a user, build a complete list. this is expensive.
else:
if len(self.members) < 1000:
try:
for user in self.members:
user = self.team.users[user]
if user.deleted:
continue
nick_group = afk
if self.team.is_user_present(user.identifier):
nick_group = here
w.nicklist_add_nick(self.channel_buffer, nick_group, user.name, user.color_name, "", "", 1)
except Exception as e:
dbg("DEBUG: {} {} {}".format(self.identifier, self.name, decode_from_utf8(e)))
else:
w.nicklist_remove_all(self.channel_buffer)
for fn in ["1| too", "2| many", "3| users", "4| to", "5| show"]:
w.nicklist_add_group(self.channel_buffer, '', fn, w.color('white'), 1)
def hash_message(self, ts):
ts = SlackTS(ts)
def calc_hash(msg):
return sha.sha(str(msg.ts)).hexdigest()
if ts in self.messages and not self.messages[ts].hash:
message = self.messages[ts]
tshash = calc_hash(message)
hl = 3
shorthash = tshash[:hl]
while any(x.startswith(shorthash) for x in self.hashed_messages):
hl += 1
shorthash = tshash[:hl]
if shorthash[:-1] in self.hashed_messages:
col_msg = self.hashed_messages.pop(shorthash[:-1])
col_new_hash = calc_hash(col_msg)[:hl]
col_msg.hash = col_new_hash
self.hashed_messages[col_new_hash] = col_msg
self.change_message(str(col_msg.ts))
if col_msg.thread_channel:
col_msg.thread_channel.rename()
self.hashed_messages[shorthash] = message
message.hash = shorthash
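    # Example of the collision handling above: if "abc" is already taken and
    # a new message's sha starts with "abcd", the new message gets the 4-char
    # hash "abcd" and the old 3-char entry is re-hashed to 4 chars as well,
    # so both stay unambiguous.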
class SlackDMChannel(SlackChannel):
"""
Subclass of a normal channel for person-to-person communication, which
has some important differences.
"""
def __init__(self, eventrouter, users, **kwargs):
dmuser = kwargs["user"]
kwargs["name"] = users[dmuser].name
super(SlackDMChannel, self).__init__(eventrouter, **kwargs)
self.type = 'im'
self.update_color()
self.set_name(self.slack_name)
self.topic = create_user_status_string(users[dmuser].profile)
def set_name(self, slack_name):
self.name = slack_name
def get_members(self):
return {self.user}
def create_buffer(self):
if not self.channel_buffer:
super(SlackDMChannel, self).create_buffer()
w.buffer_set(self.channel_buffer, "localvar_set_type", 'private')
def update_color(self):
if config.colorize_private_chats:
self.color_name = get_nick_color_name(self.name)
self.color = w.color(self.color_name)
else:
self.color = ""
self.color_name = ""
def formatted_name(self, style="default", typing=False, present=True, enable_color=False, **kwargs):
if config.colorize_private_chats and enable_color:
print_color = self.color
else:
print_color = ""
if not present:
prepend = " "
else:
prepend = "+"
select = {
"default": self.slack_name,
"sidebar": prepend + self.slack_name,
"base": self.slack_name,
"long_default": "{}.{}".format(self.team.preferred_name, self.slack_name),
"long_base": "{}.{}".format(self.team.preferred_name, self.slack_name),
}
return print_color + select[style]
def open(self, update_remote=True):
self.create_buffer()
# self.active = True
self.get_history()
if "info" in SLACK_API_TRANSLATOR[self.type]:
s = SlackRequest(self.team.token, SLACK_API_TRANSLATOR[self.type]["info"], {"name": self.identifier}, team_hash=self.team.team_hash, channel_identifier=self.identifier)
self.eventrouter.receive(s)
if update_remote:
if "join" in SLACK_API_TRANSLATOR[self.type]:
s = SlackRequest(self.team.token, SLACK_API_TRANSLATOR[self.type]["join"], {"users": self.user, "return_im": True}, team_hash=self.team.team_hash, channel_identifier=self.identifier)
self.eventrouter.receive(s)
self.create_buffer()
def rename(self):
if self.channel_buffer:
new_name = self.formatted_name(style="sidebar", present=self.team.is_user_present(self.user), enable_color=config.colorize_private_chats)
if self.current_short_name != new_name:
self.current_short_name = new_name
w.buffer_set(self.channel_buffer, "short_name", new_name)
return True
return False
def refresh(self):
return self.rename()
class SlackGroupChannel(SlackChannel):
"""
A group channel is a private discussion group.
"""
def __init__(self, eventrouter, **kwargs):
super(SlackGroupChannel, self).__init__(eventrouter, **kwargs)
self.type = "group"
self.set_name(self.slack_name)
def set_name(self, slack_name):
self.name = config.group_name_prefix + slack_name
# def formatted_name(self, prepend="#", enable_color=True, basic=False):
# return prepend + self.slack_name
class SlackMPDMChannel(SlackChannel):
"""
An MPDM channel is a special instance of a 'group' channel.
We change the name to look less terrible in weechat.
"""
def __init__(self, eventrouter, **kwargs):
super(SlackMPDMChannel, self).__init__(eventrouter, **kwargs)
n = kwargs.get('name')
self.set_name(n)
self.type = "mpim"
def open(self, update_remote=True):
self.create_buffer()
self.active = True
self.get_history()
if "info" in SLACK_API_TRANSLATOR[self.type]:
s = SlackRequest(self.team.token, SLACK_API_TRANSLATOR[self.type]["info"], {"channel": self.identifier}, team_hash=self.team.team_hash, channel_identifier=self.identifier)
self.eventrouter.receive(s)
if update_remote and 'join' in SLACK_API_TRANSLATOR[self.type]:
s = SlackRequest(self.team.token, SLACK_API_TRANSLATOR[self.type]['join'], {'users': ','.join(self.members)}, team_hash=self.team.team_hash, channel_identifier=self.identifier)
self.eventrouter.receive(s)
# self.create_buffer()
@staticmethod
def adjust_name(n):
return "|".join("-".join(n.split("-")[1:-1]).split("--"))
def set_name(self, n):
self.name = self.adjust_name(n)
def formatted_name(self, style="default", typing=False, **kwargs):
adjusted_name = self.adjust_name(self.slack_name)
if typing and config.channel_name_typing_indicator:
prepend = ">"
else:
prepend = "@"
select = {
"default": adjusted_name,
"sidebar": prepend + adjusted_name,
"base": adjusted_name,
"long_default": "{}.{}".format(self.team.preferred_name, adjusted_name),
"long_base": "{}.{}".format(self.team.preferred_name, adjusted_name),
}
return select[style]
def rename(self):
pass
class SlackThreadChannel(object):
"""
A thread channel is a virtual channel. We don't inherit from
SlackChannel, because most of how it operates will be different.
"""
def __init__(self, eventrouter, parent_message):
self.eventrouter = eventrouter
self.parent_message = parent_message
self.channel_buffer = None
# self.identifier = ""
# self.name = "#" + kwargs['name']
self.type = "thread"
self.got_history = False
self.label = None
self.members = self.parent_message.channel.members
self.team = self.parent_message.team
# self.set_name(self.slack_name)
# def set_name(self, slack_name):
# self.name = "#" + slack_name
def formatted_name(self, style="default", **kwargs):
hash_or_ts = self.parent_message.hash or self.parent_message.ts
styles = {
"default": " +{}".format(hash_or_ts),
"long_default": "{}.{}".format(self.parent_message.channel.formatted_name(style="long_default"), hash_or_ts),
"sidebar": " +{}".format(hash_or_ts),
}
return styles[style]
def refresh(self):
self.rename()
def mark_read(self, ts=None, update_remote=True, force=False):
if self.channel_buffer:
w.buffer_set(self.channel_buffer, "unread", "")
w.buffer_set(self.channel_buffer, "hotlist", "-1")
def buffer_prnt(self, nick, text, timestamp, **kwargs):
data = "{}\t{}".format(format_nick(nick), text)
ts = SlackTS(timestamp)
if self.channel_buffer:
# backlog messages - we will update the read marker as we print these
# backlog = False
# if ts <= SlackTS(self.last_read):
# tags = tag("backlog")
# backlog = True
# elif self.type in ["im", "mpdm"]:
# tags = tag("dm")
# self.new_messages = True
# else:
tags = tag("default")
# self.new_messages = True
w.prnt_date_tags(self.channel_buffer, ts.major, tags, data)
modify_print_time(self.channel_buffer, ts.minorstr(), ts.major)
# if backlog:
# self.mark_read(ts, update_remote=False, force=True)
def get_history(self):
self.got_history = True
for message in self.parent_message.submessages:
# message = SlackMessage(message_json, team, channel)
text = message.render()
# print text
suffix = ''
if 'edited' in message.message_json:
suffix = ' (edited)'
# try:
# channel.unread_count += 1
# except:
# channel.unread_count = 1
self.buffer_prnt(message.sender, text + suffix, message.ts)
def send_message(self, message):
# team = self.eventrouter.teams[self.team]
message = linkify_text(message, self.team, self)
dbg(message)
request = {"type": "message", "channel": self.parent_message.channel.identifier, "text": message, "_team": self.team.team_hash, "user": self.team.myidentifier, "thread_ts": str(self.parent_message.ts)}
self.team.send_to_websocket(request)
self.mark_read(update_remote=False, force=True)
def open(self, update_remote=True):
self.create_buffer()
self.active = True
self.get_history()
# if "info" in SLACK_API_TRANSLATOR[self.type]:
# s = SlackRequest(self.team.token, SLACK_API_TRANSLATOR[self.type]["info"], {"name": self.identifier}, team_hash=self.team.team_hash, channel_identifier=self.identifier)
# self.eventrouter.receive(s)
# if update_remote:
# if "join" in SLACK_API_TRANSLATOR[self.type]:
# s = SlackRequest(self.team.token, SLACK_API_TRANSLATOR[self.type]["join"], {"name": self.name}, team_hash=self.team.team_hash, channel_identifier=self.identifier)
# self.eventrouter.receive(s)
self.create_buffer()
def rename(self):
if self.channel_buffer and not self.label:
w.buffer_set(self.channel_buffer, "short_name", self.formatted_name(style="sidebar", enable_color=True))
def create_buffer(self):
"""
incomplete (muted doesn't work)
Creates the weechat buffer where the thread magic happens.
"""
if not self.channel_buffer:
self.channel_buffer = w.buffer_new(self.formatted_name(style="long_default"), "buffer_input_callback", "EVENTROUTER", "", "")
self.eventrouter.weechat_controller.register_buffer(self.channel_buffer, self)
w.buffer_set(self.channel_buffer, "localvar_set_type", 'channel')
w.buffer_set(self.channel_buffer, "localvar_set_nick", self.team.nick)
w.buffer_set(self.channel_buffer, "localvar_set_channel", self.formatted_name())
w.buffer_set(self.channel_buffer, "short_name", self.formatted_name(style="sidebar", enable_color=True))
time_format = w.config_string(w.config_get("weechat.look.buffer_time_format"))
parent_time = time.localtime(SlackTS(self.parent_message.ts).major)
            topic = '{} {} | {}'.format(time.strftime(time_format, parent_time), self.parent_message.sender, self.parent_message.render())
w.buffer_set(self.channel_buffer, "title", topic)
# self.eventrouter.weechat_controller.set_refresh_buffer_list(True)
# try:
# if self.unread_count != 0:
# for c in range(1, self.unread_count):
# if self.type == "im":
# w.buffer_set(self.channel_buffer, "hotlist", "2")
# else:
# w.buffer_set(self.channel_buffer, "hotlist", "1")
# else:
# pass
# #dbg("no unread in {}".format(self.name))
# except:
# pass
# dbg("exception no unread count")
# if self.unread_count != 0 and not self.muted:
# w.buffer_set(self.channel_buffer, "hotlist", "1")
def destroy_buffer(self, update_remote):
if self.channel_buffer is not None:
self.channel_buffer = None
self.got_history = False
# if update_remote and not eventrouter.shutting_down:
self.active = False
class SlackUser(object):
"""
    Represents an individual Slack user. Also where their name formatting is set.
"""
def __init__(self, **kwargs):
# We require these two things for a valid object,
# the rest we can just learn from slack
self.identifier = kwargs["id"]
self.profile = {} # in case it's not in kwargs
for key, value in kwargs.items():
setattr(self, key, value)
if self.profile.get("display_name"):
self.slack_name = self.profile["display_name"]
self.name = self.profile["display_name"].replace(' ', '')
else:
# No display name set. Fall back to the deprecated username field.
self.slack_name = kwargs["name"]
self.name = self.slack_name
self.update_color()
def __repr__(self):
return "Name:{} Identifier:{}".format(self.name, self.identifier)
def force_color(self, color_name):
self.color_name = color_name
self.color = w.color(self.color_name)
def update_color(self):
# This will automatically be none/"" if the user has disabled nick
# colourization.
self.color_name = get_nick_color_name(self.name)
self.color = w.color(self.color_name)
def update_status(self, status_emoji, status_text):
self.profile["status_emoji"] = status_emoji
self.profile["status_text"] = status_text
def formatted_name(self, prepend="", enable_color=True):
if enable_color:
return self.color + prepend + self.name
else:
return prepend + self.name
class SlackBot(SlackUser):
"""
    Basically the same as a user, but split out so bots can be identified
    separately and handled differently in the future.
"""
def __init__(self, **kwargs):
super(SlackBot, self).__init__(**kwargs)
class SlackMessage(object):
"""
Represents a single slack message and associated context/metadata.
These are modifiable and can be rerendered to change a message,
delete a message, add a reaction, add a thread.
Note: these can't be tied to a SlackUser object because users
can be deleted, so we have to store sender in each one.
"""
def __init__(self, message_json, team, channel, override_sender=None):
self.team = team
self.channel = channel
self.message_json = message_json
self.submessages = []
self.thread_channel = None
self.hash = None
if override_sender:
self.sender = override_sender
self.sender_plain = override_sender
else:
senders = self.get_sender()
self.sender, self.sender_plain = senders[0], senders[1]
self.suffix = ''
self.ts = SlackTS(message_json['ts'])
text = self.message_json.get('text')
if text and text.startswith('_') and text.endswith('_') and 'subtype' not in message_json:
message_json['text'] = text[1:-1]
message_json['subtype'] = 'me_message'
if message_json.get('subtype') == 'me_message' and not message_json['text'].startswith(self.sender):
message_json['text'] = self.sender + ' ' + self.message_json['text']
def __hash__(self):
return hash(self.ts)
def render(self, force=False):
if len(self.submessages) > 0:
return "{} {} {}".format(render(self.message_json, self.team, self.channel, force), self.suffix, "{}[ Thread: {} Replies: {} ]".format(w.color(config.thread_suffix_color), self.hash or self.ts, len(self.submessages)))
return "{} {}".format(render(self.message_json, self.team, self.channel, force), self.suffix)
def change_text(self, new_text):
self.message_json["text"] = new_text
dbg(self.message_json)
def change_suffix(self, new_suffix):
self.suffix = new_suffix
dbg(self.message_json)
def get_sender(self):
name = ""
name_plain = ""
if 'user' in self.message_json:
if self.message_json['user'] == self.team.myidentifier:
u = self.team.users[self.team.myidentifier]
elif self.message_json['user'] in self.team.users:
u = self.team.users[self.message_json['user']]
name = "{}".format(u.formatted_name())
name_plain = "{}".format(u.formatted_name(enable_color=False))
elif 'username' in self.message_json:
u = self.message_json["username"]
if self.message_json.get("subtype") == "bot_message":
name = "{} :]".format(u)
name_plain = "{}".format(u)
else:
name = "-{}-".format(u)
name_plain = "{}".format(u)
elif 'service_name' in self.message_json:
name = "-{}-".format(self.message_json["service_name"])
name_plain = "{}".format(self.message_json["service_name"])
elif self.message_json.get('bot_id') in self.team.bots:
name = "{} :]".format(self.team.bots[self.message_json["bot_id"]].formatted_name())
name_plain = "{}".format(self.team.bots[self.message_json["bot_id"]].formatted_name(enable_color=False))
else:
name = ""
name_plain = ""
return (name, name_plain)
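    # Reactions live in the message JSON as a list of dicts shaped like
    # {"name": "<emoji name>", "users": [<user ids>]}; the two methods
    # below keep that structure in sync as reaction events arrive.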
def add_reaction(self, reaction, user):
m = self.message_json.get('reactions', None)
if m:
found = False
            for r in m:
                if r["name"] == reaction:
                    if user not in r["users"]:
                        r["users"].append(user)
                    # Mark found even when the user already reacted, so a
                    # duplicate entry isn't appended below.
                    found = True
if not found:
self.message_json["reactions"].append({"name": reaction, "users": [user]})
else:
self.message_json["reactions"] = [{"name": reaction, "users": [user]}]
def remove_reaction(self, reaction, user):
m = self.message_json.get('reactions', None)
if m:
for r in m:
if r["name"] == reaction and user in r["users"]:
r["users"].remove(user)
else:
pass
class SlackThreadMessage(SlackMessage):
def __init__(self, parent_id, *args):
super(SlackThreadMessage, self).__init__(*args)
self.parent_id = parent_id
class WeeSlackMetadata(object):
"""
A simple container that we pickle/unpickle to hold data.
"""
def __init__(self, meta):
self.meta = meta
def jsonify(self):
return self.meta
class SlackTS(object):
def __init__(self, ts=None):
if ts:
self.major, self.minor = [int(x) for x in ts.split('.', 1)]
else:
self.major = int(time.time())
self.minor = 0
def __cmp__(self, other):
if isinstance(other, SlackTS):
if self.major < other.major:
return -1
elif self.major > other.major:
return 1
elif self.major == other.major:
if self.minor < other.minor:
return -1
elif self.minor > other.minor:
return 1
else:
return 0
else:
s = self.__str__()
if s < other:
return -1
elif s > other:
return 1
elif s == other:
return 0
def __hash__(self):
return hash("{}.{}".format(self.major, self.minor))
def __repr__(self):
return str("{0}.{1:06d}".format(self.major, self.minor))
def split(self, *args, **kwargs):
return [self.major, self.minor]
def majorstr(self):
return str(self.major)
def minorstr(self):
return str(self.minor)
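# A few illustrative SlackTS properties (hypothetical timestamps), which
# follow directly from the methods above:
#   SlackTS("1485975421.000002") < SlackTS("1485975421.000003")  # minor breaks ties
#   SlackTS("1485975421.000002") == "1485975421.000002"          # compares as string
#   repr(SlackTS("1485975421.42")) == "1485975421.000042"        # minor is zero-padded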
###### New handlers
def handle_rtmstart(login_data, eventrouter):
"""
This handles the main entry call to slack, rtm.start
"""
metadata = pickle.loads(login_data["wee_slack_request_metadata"])
if not login_data["ok"]:
w.prnt("", "ERROR: Failed connecting to Slack with token starting with {}: {}"
.format(metadata.token[:15], login_data["error"]))
return
# Let's reuse a team if we have it already.
th = SlackTeam.generate_team_hash(login_data['self']['name'], login_data['team']['domain'])
if not eventrouter.teams.get(th):
users = {}
for item in login_data["users"]:
users[item["id"]] = SlackUser(**item)
bots = {}
for item in login_data["bots"]:
bots[item["id"]] = SlackBot(**item)
channels = {}
for item in login_data["channels"]:
channels[item["id"]] = SlackChannel(eventrouter, **item)
for item in login_data["ims"]:
channels[item["id"]] = SlackDMChannel(eventrouter, users, **item)
for item in login_data["groups"]:
if item["name"].startswith('mpdm-'):
channels[item["id"]] = SlackMPDMChannel(eventrouter, **item)
else:
channels[item["id"]] = SlackGroupChannel(eventrouter, **item)
t = SlackTeam(
eventrouter,
metadata.token,
login_data['url'],
login_data["team"]["domain"],
login_data["self"]["name"],
login_data["self"]["id"],
users,
bots,
channels,
muted_channels=login_data["self"]["prefs"]["muted_channels"],
highlight_words=login_data["self"]["prefs"]["highlight_words"],
)
eventrouter.register_team(t)
else:
t = eventrouter.teams.get(th)
t.set_reconnect_url(login_data['url'])
t.connect()
t.buffer_prnt('Connected to Slack')
t.buffer_prnt('{:<20} {}'.format("Websocket URL", login_data["url"]))
t.buffer_prnt('{:<20} {}'.format("User name", login_data["self"]["name"]))
t.buffer_prnt('{:<20} {}'.format("User ID", login_data["self"]["id"]))
t.buffer_prnt('{:<20} {}'.format("Team name", login_data["team"]["name"]))
t.buffer_prnt('{:<20} {}'.format("Team domain", login_data["team"]["domain"]))
t.buffer_prnt('{:<20} {}'.format("Team id", login_data["team"]["id"]))
dbg("connected to {}".format(t.domain))
def handle_emojilist(emoji_json, eventrouter, **kwargs):
if emoji_json["ok"]:
request_metadata = pickle.loads(emoji_json["wee_slack_request_metadata"])
team = eventrouter.teams[request_metadata.team_hash]
team.emoji_completions.extend(emoji_json["emoji"].keys())
def handle_channelsinfo(channel_json, eventrouter, **kwargs):
request_metadata = pickle.loads(channel_json["wee_slack_request_metadata"])
team = eventrouter.teams[request_metadata.team_hash]
channel = team.channels[request_metadata.channel_identifier]
channel.set_unread_count_display(channel_json['channel']['unread_count_display'])
channel.set_members(channel_json['channel']['members'])
def handle_groupsinfo(group_json, eventrouter, **kwargs):
request_metadata = pickle.loads(group_json["wee_slack_request_metadata"])
team = eventrouter.teams[request_metadata.team_hash]
group = team.channels[request_metadata.channel_identifier]
    group.set_unread_count_display(group_json['group']['unread_count_display'])
def handle_conversationsopen(conversation_json, eventrouter, object_name='channel', **kwargs):
request_metadata = pickle.loads(conversation_json["wee_slack_request_metadata"])
# Set unread count if the channel isn't new (channel_identifier exists)
if hasattr(request_metadata, 'channel_identifier'):
channel_id = request_metadata.channel_identifier
team = eventrouter.teams[request_metadata.team_hash]
conversation = team.channels[channel_id]
unread_count_display = conversation_json[object_name]['unread_count_display']
conversation.set_unread_count_display(unread_count_display)
def handle_mpimopen(mpim_json, eventrouter, object_name='group', **kwargs):
handle_conversationsopen(mpim_json, eventrouter, object_name, **kwargs)
def handle_groupshistory(message_json, eventrouter, **kwargs):
handle_history(message_json, eventrouter, **kwargs)
def handle_channelshistory(message_json, eventrouter, **kwargs):
handle_history(message_json, eventrouter, **kwargs)
def handle_imhistory(message_json, eventrouter, **kwargs):
handle_history(message_json, eventrouter, **kwargs)
def handle_mpimhistory(message_json, eventrouter, **kwargs):
handle_history(message_json, eventrouter, **kwargs)
def handle_history(message_json, eventrouter, **kwargs):
request_metadata = pickle.loads(message_json["wee_slack_request_metadata"])
kwargs['team'] = eventrouter.teams[request_metadata.team_hash]
kwargs['channel'] = kwargs['team'].channels[request_metadata.channel_identifier]
    try:
        clear = request_metadata.clear
    except AttributeError:
        clear = False
dbg(clear)
kwargs['output_type'] = "backlog"
if clear:
w.buffer_clear(kwargs['channel'].channel_buffer)
for message in reversed(message_json["messages"]):
process_message(message, eventrouter, **kwargs)
###### New/converted process_ and subprocess_ methods
def process_hello(message_json, eventrouter, **kwargs):
kwargs['team'].subscribe_users_presence()
def process_reconnect_url(message_json, eventrouter, **kwargs):
kwargs['team'].set_reconnect_url(message_json['url'])
def process_manual_presence_change(message_json, eventrouter, **kwargs):
process_presence_change(message_json, eventrouter, **kwargs)
def process_presence_change(message_json, eventrouter, **kwargs):
if "user" in kwargs:
# TODO: remove once it's stable
user = kwargs["user"]
team = kwargs["team"]
team.update_member_presence(user, message_json["presence"])
if "users" in message_json:
team = kwargs["team"]
for user_id in message_json["users"]:
user = team.users[user_id]
team.update_member_presence(user, message_json["presence"])
def process_pref_change(message_json, eventrouter, **kwargs):
team = kwargs["team"]
if message_json['name'] == 'muted_channels':
team.set_muted_channels(message_json['value'])
elif message_json['name'] == 'highlight_words':
team.set_highlight_words(message_json['value'])
else:
dbg("Preference change not implemented: {}\n".format(message_json['name']))
def process_user_change(message_json, eventrouter, **kwargs):
"""
Currently only used to update status, but lots here we could do.
"""
user = message_json['user']
profile = user.get("profile")
team = kwargs["team"]
team.users[user["id"]].update_status(profile.get("status_emoji"), profile.get("status_text"))
dmchannel = team.find_channel_by_members({user["id"]}, channel_type='im')
if dmchannel:
dmchannel.set_topic(create_user_status_string(profile))
def process_user_typing(message_json, eventrouter, **kwargs):
channel = kwargs["channel"]
team = kwargs["team"]
if channel:
channel.set_typing(team.users.get(message_json["user"]).name)
w.bar_item_update("slack_typing_notice")
def process_team_join(message_json, eventrouter, **kwargs):
user = message_json['user']
team = kwargs["team"]
team.users[user["id"]] = SlackUser(**user)
def process_pong(message_json, eventrouter, **kwargs):
pass
def process_message(message_json, eventrouter, store=True, **kwargs):
channel = kwargs["channel"]
team = kwargs["team"]
# try:
# send these subtype messages elsewhere
known_subtypes = [
'thread_message',
'message_replied',
'message_changed',
'message_deleted',
'channel_join',
'channel_leave',
'channel_topic',
# 'group_join',
# 'group_leave',
]
if "thread_ts" in message_json and "reply_count" not in message_json:
message_json["subtype"] = "thread_message"
subtype = message_json.get("subtype", None)
if subtype and subtype in known_subtypes:
        # Look up the module-level subprocess_<subtype> handler by name.
        f = globals()['subprocess_' + subtype]
f(message_json, eventrouter, channel, team)
else:
message = SlackMessage(message_json, team, channel)
text = message.render()
dbg("Rendered message: %s" % text)
dbg("Sender: %s (%s)" % (message.sender, message.sender_plain))
# Handle actions (/me).
# We don't use `subtype` here because creating the SlackMessage may
# have changed the subtype based on the detected message contents.
if message.message_json.get('subtype') == 'me_message':
            try:
                channel.unread_count_display += 1
            except AttributeError:
                channel.unread_count_display = 1
channel.buffer_prnt(w.prefix("action").rstrip(), text, message.ts, tag_nick=message.sender_plain, **kwargs)
else:
suffix = ''
if 'edited' in message_json:
suffix = ' (edited)'
            try:
                channel.unread_count_display += 1
            except AttributeError:
                channel.unread_count_display = 1
channel.buffer_prnt(message.sender, text + suffix, message.ts, tag_nick=message.sender_plain, **kwargs)
if store:
channel.store_message(message, team)
dbg("NORMAL REPLY {}".format(message_json))
# except:
# channel.buffer_prnt("WEE-SLACK-ERROR", json.dumps(message_json), message_json["ts"], **kwargs)
# traceback.print_exc()
def subprocess_thread_message(message_json, eventrouter, channel, team):
# print ("THREADED: " + str(message_json))
parent_ts = message_json.get('thread_ts', None)
if parent_ts:
parent_message = channel.messages.get(SlackTS(parent_ts), None)
if parent_message:
message = SlackThreadMessage(parent_ts, message_json, team, channel)
parent_message.submessages.append(message)
channel.hash_message(parent_ts)
channel.store_message(message, team)
channel.change_message(parent_ts)
text = message.render()
# channel.buffer_prnt(message.sender, text, message.ts, **kwargs)
if parent_message.thread_channel:
parent_message.thread_channel.buffer_prnt(message.sender, text, message.ts)
# channel = channels.find(message_json["channel"])
# server = channel.server
# #threadinfo = channel.get_message(message_json["thread_ts"])
# message = Message(message_json, server=server, channel=channel)
# dbg(message, main_buffer=True)
#
# orig = channel.get_message(message_json['thread_ts'])
# if orig[0]:
# channel.get_message(message_json['thread_ts'])[2].add_thread_message(message)
# else:
# dbg("COULDN'T find orig message {}".format(message_json['thread_ts']), main_buffer=True)
# if threadinfo[0]:
# channel.messages[threadinfo[1]].become_thread()
# message_json["item"]["ts"], message_json)
# channel.change_message(message_json["thread_ts"], None, message_json["text"])
# channel.become_thread(message_json["item"]["ts"], message_json)
def subprocess_channel_join(message_json, eventrouter, channel, team):
joinprefix = w.prefix("join")
message = SlackMessage(message_json, team, channel, override_sender=joinprefix)
channel.buffer_prnt(joinprefix, message.render(), message_json["ts"], tagset='joinleave')
channel.user_joined(message_json['user'])
def subprocess_channel_leave(message_json, eventrouter, channel, team):
leaveprefix = w.prefix("quit")
message = SlackMessage(message_json, team, channel, override_sender=leaveprefix)
channel.buffer_prnt(leaveprefix, message.render(), message_json["ts"], tagset='joinleave')
channel.user_left(message_json['user'])
# channel.update_nicklist(message_json['user'])
# channel.update_nicklist()
def subprocess_message_replied(message_json, eventrouter, channel, team):
pass
def subprocess_message_changed(message_json, eventrouter, channel, team):
m = message_json.get("message", None)
if m:
new_message = m
# message = SlackMessage(new_message, team, channel)
if "attachments" in m:
message_json["attachments"] = m["attachments"]
if "text" in m:
if "text" in message_json:
message_json["text"] += m["text"]
dbg("added text!")
else:
message_json["text"] = m["text"]
if "fallback" in m:
if "fallback" in message_json:
message_json["fallback"] += m["fallback"]
else:
message_json["fallback"] = m["fallback"]
new_message["text"] += unwrap_attachments(message_json, new_message["text"])
if "edited" in new_message:
channel.change_message(new_message["ts"], new_message["text"], ' (edited)')
else:
channel.change_message(new_message["ts"], new_message["text"])
def subprocess_message_deleted(message_json, eventrouter, channel, team):
channel.change_message(message_json["deleted_ts"], "(deleted)", '')
def subprocess_channel_topic(message_json, eventrouter, channel, team):
text = unhtmlescape(unfurl_refs(message_json["text"], ignore_alt_text=False))
channel.buffer_prnt(w.prefix("network").rstrip(), text, message_json["ts"], tagset="muted")
channel.set_topic(unhtmlescape(message_json["topic"]))
def process_reply(message_json, eventrouter, **kwargs):
dbg('processing reply')
team = kwargs["team"]
identifier = message_json["reply_to"]
try:
original_message_json = team.ws_replies[identifier]
del team.ws_replies[identifier]
if "ts" in message_json:
original_message_json["ts"] = message_json["ts"]
else:
dbg("no reply ts {}".format(message_json))
c = original_message_json.get('channel', None)
channel = team.channels[c]
m = SlackMessage(original_message_json, team, channel)
# if "type" in message_json:
# if message_json["type"] == "message" and "channel" in message_json.keys():
# message_json["ts"] = message_json["ts"]
# channels.find(message_json["channel"]).store_message(m, from_me=True)
# channels.find(message_json["channel"]).buffer_prnt(server.nick, m.render(), m.ts)
process_message(m.message_json, eventrouter, channel=channel, team=team)
channel.mark_read(update_remote=True, force=True)
dbg("REPLY {}".format(message_json))
except KeyError:
dbg("Unexpected reply {}".format(message_json))
def process_channel_marked(message_json, eventrouter, **kwargs):
"""
complete
"""
channel = kwargs["channel"]
ts = message_json.get("ts", None)
if ts:
channel.mark_read(ts=ts, force=True, update_remote=False)
else:
dbg("tried to mark something weird {}".format(message_json))
def process_group_marked(message_json, eventrouter, **kwargs):
process_channel_marked(message_json, eventrouter, **kwargs)
def process_im_marked(message_json, eventrouter, **kwargs):
process_channel_marked(message_json, eventrouter, **kwargs)
def process_mpim_marked(message_json, eventrouter, **kwargs):
process_channel_marked(message_json, eventrouter, **kwargs)
def process_channel_joined(message_json, eventrouter, **kwargs):
item = message_json["channel"]
kwargs['team'].channels[item["id"]].update_from_message_json(item)
kwargs['team'].channels[item["id"]].open()
def process_channel_created(message_json, eventrouter, **kwargs):
item = message_json["channel"]
c = SlackChannel(eventrouter, team=kwargs["team"], **item)
kwargs['team'].channels[item["id"]] = c
kwargs['team'].buffer_prnt('Channel created: {}'.format(c.slack_name))
def process_channel_rename(message_json, eventrouter, **kwargs):
item = message_json["channel"]
channel = kwargs['team'].channels[item["id"]]
channel.slack_name = message_json['channel']['name']
def process_im_created(message_json, eventrouter, **kwargs):
team = kwargs['team']
item = message_json["channel"]
c = SlackDMChannel(eventrouter, team=team, users=team.users, **item)
team.channels[item["id"]] = c
kwargs['team'].buffer_prnt('IM channel created: {}'.format(c.name))
def process_im_open(message_json, eventrouter, **kwargs):
channel = kwargs['channel']
item = message_json
kwargs['team'].channels[item["channel"]].check_should_open(True)
w.buffer_set(channel.channel_buffer, "hotlist", "2")
def process_im_close(message_json, eventrouter, **kwargs):
item = message_json
cbuf = kwargs['team'].channels[item["channel"]].channel_buffer
eventrouter.weechat_controller.unregister_buffer(cbuf, False, True)
def process_group_joined(message_json, eventrouter, **kwargs):
item = message_json["channel"]
if item["name"].startswith("mpdm-"):
c = SlackMPDMChannel(eventrouter, team=kwargs["team"], **item)
else:
c = SlackGroupChannel(eventrouter, team=kwargs["team"], **item)
kwargs['team'].channels[item["id"]] = c
kwargs['team'].channels[item["id"]].open()
def process_reaction_added(message_json, eventrouter, **kwargs):
channel = kwargs['team'].channels.get(message_json["item"].get("channel"))
if message_json["item"].get("type") == "message":
ts = SlackTS(message_json['item']["ts"])
message = channel.messages.get(ts, None)
if message:
message.add_reaction(message_json["reaction"], message_json["user"])
channel.change_message(ts)
else:
dbg("reaction to item type not supported: " + str(message_json))
def process_reaction_removed(message_json, eventrouter, **kwargs):
channel = kwargs['team'].channels.get(message_json["item"].get("channel"))
if message_json["item"].get("type") == "message":
ts = SlackTS(message_json['item']["ts"])
message = channel.messages.get(ts, None)
if message:
message.remove_reaction(message_json["reaction"], message_json["user"])
channel.change_message(ts)
else:
dbg("Reaction to item type not supported: " + str(message_json))
def process_emoji_changed(message_json, eventrouter, **kwargs):
team = kwargs['team']
team.load_emoji_completions()
###### New module/global methods
def render_formatting(text):
text = re.sub(r'(^| )\*([^*]+)\*([^a-zA-Z0-9_]|$)',
r'\1{}\2{}\3'.format(w.color(config.render_bold_as),
w.color('-' + config.render_bold_as)),
text)
text = re.sub(r'(^| )_([^_]+)_([^a-zA-Z0-9_]|$)',
r'\1{}\2{}\3'.format(w.color(config.render_italic_as),
w.color('-' + config.render_italic_as)),
text)
return text
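# Illustrative (assuming the default render_bold_as/render_italic_as config):
# render_formatting("a *bold* word") wraps "bold" in w.color("bold") /
# w.color("-bold") escapes. The regexes only fire when the opening marker
# follows a space or start-of-line and the closing marker is followed by a
# non-word character or end-of-line.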
def render(message_json, team, channel, force=False):
# If we already have a rendered version in the object, just return that.
if not force and message_json.get("_rendered_text", ""):
return message_json["_rendered_text"]
else:
# server = servers.find(message_json["_server"])
if "fallback" in message_json:
text = message_json["fallback"]
elif "text" in message_json:
if message_json['text'] is not None:
text = message_json["text"]
else:
text = ""
else:
text = ""
text = unfurl_refs(text)
text += unfurl_refs(unwrap_attachments(message_json, text))
text = text.lstrip()
text = unhtmlescape(text.replace("\t", " "))
if message_json.get('mrkdwn', True):
text = render_formatting(text)
# if self.threads:
# text += " [Replies: {} Thread ID: {} ] ".format(len(self.threads), self.thread_id)
# #for thread in self.threads:
text += create_reaction_string(message_json.get("reactions", ""))
message_json["_rendered_text"] = text
return text
def linkify_text(message, team, channel):
    # The get_username_map function is a bit heavy, but this whole
    # function is only called on message send.
usernames = team.get_username_map()
channels = team.get_channel_map()
message = (message
# Replace IRC formatting chars with Slack formatting chars.
.replace('\x02', '*')
.replace('\x1D', '_')
.replace('\x1F', config.map_underline_to)
# Escape chars that have special meaning to Slack. Note that we do not
# (and should not) perform full HTML entity-encoding here.
# See https://api.slack.com/docs/message-formatting for details.
.replace('&', '&')
.replace('<', '<')
.replace('>', '>')
.split(' '))
for item in enumerate(message):
        targets = re.match(r'^\s*([@#])([\w.-]+[\w. -])(\W*)', item[1])
if targets and targets.groups()[0] == '@':
named = targets.groups()
if named[1] in ["group", "channel", "here"]:
message[item[0]] = "<!{}>".format(named[1])
else:
try:
if usernames[named[1]]:
message[item[0]] = "<@{}>{}".format(usernames[named[1]], named[2])
            except KeyError:
message[item[0]] = "@{}{}".format(named[1], named[2])
if targets and targets.groups()[0] == '#':
named = targets.groups()
try:
if channels[named[1]]:
message[item[0]] = "<#{}|{}>{}".format(channels[named[1]], named[1], named[2])
        except KeyError:
message[item[0]] = "#{}{}".format(named[1], named[2])
# dbg(message)
return " ".join(message)
def unfurl_refs(text, ignore_alt_text=None, auto_link_display=None):
"""
input : <@U096Q7CQM|someuser> has joined the channel
    output: someuser has joined the channel
"""
# Find all strings enclosed by <>
# - <https://example.com|example with spaces>
# - <#C2147483705|#otherchannel>
# - <@U2147483697|@othernick>
    # Test patterns live in ./_pytest/test_unfurl.py
if ignore_alt_text is None:
ignore_alt_text = config.unfurl_ignore_alt_text
if auto_link_display is None:
auto_link_display = config.unfurl_auto_link_display
matches = re.findall(r"(<[@#]?(?:[^>]*)>)", text)
for m in matches:
# Replace them with human readable strings
text = text.replace(
m, unfurl_ref(m[1:-1], ignore_alt_text, auto_link_display))
return text
def unfurl_ref(ref, ignore_alt_text, auto_link_display):
    ref_id = ref.split('|')[0]
display_text = ref
if ref.find('|') > -1:
if ignore_alt_text:
            display_text = resolve_ref(ref_id)
else:
            if ref_id.startswith("#C"):
display_text = "#{}".format(ref.split('|')[1])
            elif ref_id.startswith("@U"):
display_text = ref.split('|')[1]
else:
url, desc = ref.split('|', 1)
match_url = r"^\w+:(//)?{}$".format(re.escape(desc))
url_matches_desc = re.match(match_url, url)
if url_matches_desc and auto_link_display == "text":
display_text = desc
elif url_matches_desc and auto_link_display == "url":
display_text = url
else:
display_text = "{} ({})".format(url, desc)
else:
display_text = resolve_ref(ref)
return display_text
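# Examples of the above (hypothetical refs): with alt text honored,
# unfurl_ref("https://example.com|example", False, "both") yields
# "https://example.com (example)"; when the description matches the bare
# url, unfurl_ref("https://example.com|example.com", False, "text")
# collapses to just "example.com".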
def unhtmlescape(text):
return text.replace("<", "<") \
.replace(">", ">") \
.replace("&", "&")
def unwrap_attachments(message_json, text_before):
text_before_unescaped = unhtmlescape(text_before)
attachment_texts = []
a = message_json.get("attachments", None)
if a:
if text_before:
attachment_texts.append('')
for attachment in a:
# Attachments should be rendered roughly like:
#
# $pretext
# $author: (if rest of line is non-empty) $title ($title_link) OR $from_url
# $author: (if no $author on previous line) $text
# $fields
t = []
prepend_title_text = ''
if 'author_name' in attachment:
prepend_title_text = attachment['author_name'] + ": "
if 'pretext' in attachment:
t.append(attachment['pretext'])
title = attachment.get('title', None)
title_link = attachment.get('title_link', '')
if title_link in text_before_unescaped:
title_link = ''
if title and title_link:
t.append('%s%s (%s)' % (prepend_title_text, title, title_link,))
prepend_title_text = ''
elif title and not title_link:
t.append('%s%s' % (prepend_title_text, title,))
prepend_title_text = ''
from_url = attachment.get('from_url', '')
if from_url not in text_before_unescaped and from_url != title_link:
t.append(from_url)
atext = attachment.get("text", None)
if atext:
tx = re.sub(r' *\n[\n ]+', '\n', atext)
t.append(prepend_title_text + tx)
prepend_title_text = ''
fields = attachment.get("fields", None)
if fields:
for f in fields:
if f['title'] != '':
t.append('%s %s' % (f['title'], f['value'],))
else:
t.append(f['value'])
fallback = attachment.get("fallback", None)
if t == [] and fallback:
t.append(fallback)
attachment_texts.append("\n".join([x.strip() for x in t if x]))
return "\n".join(attachment_texts)
def resolve_ref(ref):
# TODO: This hack to use eventrouter needs to go
# this resolver should probably move to the slackteam or eventrouter itself
# global EVENTROUTER
if 'EVENTROUTER' in globals():
e = EVENTROUTER
if ref.startswith('@U') or ref.startswith('@W'):
for t in e.teams.keys():
if ref[1:] in e.teams[t].users:
# try:
return "@{}".format(e.teams[t].users[ref[1:]].name)
# except:
# dbg("NAME: {}".format(ref))
elif ref.startswith('#C'):
for t in e.teams.keys():
if ref[1:] in e.teams[t].channels:
# try:
return "{}".format(e.teams[t].channels[ref[1:]].name)
# except:
# dbg("CHANNEL: {}".format(ref))
# Something else, just return as-is
return ref
def create_user_status_string(profile):
real_name = profile.get("real_name")
status_emoji = profile.get("status_emoji")
status_text = profile.get("status_text")
if status_emoji or status_text:
return "{} | {} {}".format(real_name, status_emoji, status_text)
else:
return real_name
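# e.g. a profile of {"real_name": "Jane Doe", "status_emoji": ":palm_tree:",
# "status_text": "On vacation"} renders as "Jane Doe | :palm_tree: On vacation".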
def create_reaction_string(reactions):
count = 0
if not isinstance(reactions, list):
reaction_string = " [{}]".format(reactions)
else:
reaction_string = ' ['
for r in reactions:
if len(r["users"]) > 0:
count += 1
if config.show_reaction_nicks:
nicks = [resolve_ref("@{}".format(user)) for user in r["users"]]
users = "({})".format(",".join(nicks))
else:
users = len(r["users"])
reaction_string += ":{}:{} ".format(r["name"], users)
reaction_string = reaction_string[:-1] + ']'
if count == 0:
reaction_string = ''
return reaction_string
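# e.g. with show_reaction_nicks off, [{"name": "thumbsup", "users": ["U1", "U2"]}]
# renders as " [:thumbsup:2]"; reactions whose "users" lists are empty are skipped.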
def modify_buffer_line(buffer, new_line, timestamp, time_id):
# get a pointer to this buffer's lines
own_lines = w.hdata_pointer(w.hdata_get('buffer'), buffer, 'own_lines')
if own_lines:
# get a pointer to the last line
line_pointer = w.hdata_pointer(w.hdata_get('lines'), own_lines, 'last_line')
# hold the structure of a line and of line data
struct_hdata_line = w.hdata_get('line')
struct_hdata_line_data = w.hdata_get('line_data')
# keep track of the number of lines with the matching time and id
number_of_matching_lines = 0
while line_pointer:
# get a pointer to the data in line_pointer via layout of struct_hdata_line
data = w.hdata_pointer(struct_hdata_line, line_pointer, 'data')
if data:
line_timestamp = w.hdata_time(struct_hdata_line_data, data, 'date')
line_time_id = w.hdata_integer(struct_hdata_line_data, data, 'date_printed')
# prefix = w.hdata_string(struct_hdata_line_data, data, 'prefix')
if timestamp == int(line_timestamp) and int(time_id) == line_time_id:
number_of_matching_lines += 1
elif number_of_matching_lines > 0:
# since number_of_matching_lines is non-zero, we have
# already reached the message and can stop traversing
break
else:
dbg(('Encountered line without any data while trying to modify '
'line. This is not handled, so aborting modification.'))
return w.WEECHAT_RC_ERROR
# move backwards one line and try again - exit the while if you hit the end
line_pointer = w.hdata_move(struct_hdata_line, line_pointer, -1)
# split the message into at most the number of existing lines
lines = new_line.split('\n', number_of_matching_lines - 1)
# updating a line with a string containing newlines causes the lines to
# be broken when viewed in bare display mode
lines = [line.replace('\n', ' | ') for line in lines]
# pad the list with empty strings until the number of elements equals
# number_of_matching_lines
lines += [''] * (number_of_matching_lines - len(lines))
if line_pointer:
for line in lines:
line_pointer = w.hdata_move(struct_hdata_line, line_pointer, 1)
data = w.hdata_pointer(struct_hdata_line, line_pointer, 'data')
w.hdata_update(struct_hdata_line_data, data, {"message": line})
return w.WEECHAT_RC_OK
def modify_print_time(buffer, new_id, time):
"""
    Overloads the date_printed field to store the Slack per-message unique
    id that comes after the "." in a Slack ts.
"""
# get a pointer to this buffer's lines
own_lines = w.hdata_pointer(w.hdata_get('buffer'), buffer, 'own_lines')
if own_lines:
# get a pointer to the last line
line_pointer = w.hdata_pointer(w.hdata_get('lines'), own_lines, 'last_line')
# hold the structure of a line and of line data
struct_hdata_line = w.hdata_get('line')
struct_hdata_line_data = w.hdata_get('line_data')
prefix = ''
while not prefix and line_pointer:
# get a pointer to the data in line_pointer via layout of struct_hdata_line
data = w.hdata_pointer(struct_hdata_line, line_pointer, 'data')
if data:
prefix = w.hdata_string(struct_hdata_line_data, data, 'prefix')
w.hdata_update(struct_hdata_line_data, data, {"date_printed": new_id})
else:
dbg('Encountered line without any data while setting message id.')
return w.WEECHAT_RC_ERROR
# move backwards one line and repeat, so all the lines of the message are set
# exit when you reach a prefix, which means you have reached the
# first line of the message, or if you hit the end
line_pointer = w.hdata_move(struct_hdata_line, line_pointer, -1)
return w.WEECHAT_RC_OK
def format_nick(nick):
nick_prefix = w.config_string(w.config_get('weechat.look.nick_prefix'))
nick_prefix_color_name = w.config_string(w.config_get('weechat.color.chat_nick_prefix'))
nick_prefix_color = w.color(nick_prefix_color_name)
nick_suffix = w.config_string(w.config_get('weechat.look.nick_suffix'))
    nick_suffix_color_name = w.config_string(w.config_get('weechat.color.chat_nick_suffix'))
nick_suffix_color = w.color(nick_suffix_color_name)
return nick_prefix_color + nick_prefix + w.color("reset") + nick + nick_suffix_color + nick_suffix + w.color("reset")
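# Illustrative (assuming a user has set nick_prefix "<" and nick_suffix ">"):
# format_nick("alice") yields a colorized "<alice>".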
def tag(tagset, user=None):
if user:
default_tag = "nick_" + user.replace(" ", "_")
else:
default_tag = 'nick_unknown'
tagsets = {
# messages in the team/server buffer, e.g. "new channel created"
"team": "no_highlight,log3",
# when replaying something old
"backlog": "irc_privmsg,no_highlight,notify_none,logger_backlog",
# when posting messages to a muted channel
"muted": "irc_privmsg,no_highlight,notify_none,log1",
# when receiving a direct message
"dm": "irc_privmsg,notify_private,log1",
"dmfromme": "irc_privmsg,no_highlight,notify_none,log1",
# when this is a join/leave, attach for smart filter ala:
# if user in [x.strip() for x in w.prefix("join"), w.prefix("quit")]
"joinleave": "irc_smart_filter,no_highlight,log4",
# catchall ?
"default": "irc_privmsg,notify_message,log1",
}
return "{},slack_{},{}".format(default_tag, tagset, tagsets[tagset])
###### New/converted command_ commands
@slack_buffer_or_ignore
@utf8_decode
def part_command_cb(data, current_buffer, args):
e = EVENTROUTER
args = args.split()
if len(args) > 1:
team = e.weechat_controller.buffers[current_buffer].team
cmap = team.get_channel_map()
channel = "".join(args[1:])
if channel in cmap:
buffer_ptr = team.channels[cmap[channel]].channel_buffer
e.weechat_controller.unregister_buffer(buffer_ptr, update_remote=True, close_buffer=True)
else:
e.weechat_controller.unregister_buffer(current_buffer, update_remote=True, close_buffer=True)
return w.WEECHAT_RC_OK_EAT
def parse_topic_command(command):
args = command.split()[1:]
channel_name = None
topic = None
if args:
if args[0].startswith('#'):
channel_name = args[0][1:]
topic = args[1:]
else:
topic = args
if topic == []:
topic = None
if topic:
topic = ' '.join(topic)
if topic == '-delete':
topic = ''
return channel_name, topic
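# Examples: parse_topic_command("/topic #general hello") -> ("general", "hello");
# parse_topic_command("/topic -delete") -> (None, ""); a bare
# parse_topic_command("/topic") -> (None, None), which makes the caller print
# the current topic instead of setting one.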
@slack_buffer_or_ignore
@utf8_decode
def topic_command_cb(data, current_buffer, command):
"""
Change the topic of a channel
/topic [<channel>] [<topic>|-delete]
"""
channel_name, topic = parse_topic_command(command)
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
if channel_name:
channel = team.channels.get(team.get_channel_map().get(channel_name))
else:
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
if not channel:
w.prnt(team.channel_buffer, "#{}: No such channel".format(channel_name))
return w.WEECHAT_RC_OK_EAT
if topic is None:
w.prnt(channel.channel_buffer, 'Topic for {} is "{}"'.format(channel.name, channel.topic))
else:
s = SlackRequest(team.token, "channels.setTopic", {"channel": channel.identifier, "topic": topic}, team_hash=team.team_hash)
EVENTROUTER.receive(s)
return w.WEECHAT_RC_OK_EAT
@slack_buffer_or_ignore
@utf8_decode
def whois_command_cb(data, current_buffer, command):
"""
Get real name of user
/whois <display_name>
"""
args = command.split()
if len(args) < 2:
w.prnt(current_buffer, "Not enough arguments")
return w.WEECHAT_RC_OK_EAT
user = args[1]
    if user.startswith('@'):
        user = user[1:]
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
u = team.users.get(team.get_username_map().get(user))
if u:
team.buffer_prnt("[{}]: {}".format(user, u.real_name))
if u.profile.get("status_text"):
team.buffer_prnt("[{}]: {} {}".format(user, u.profile.status_emoji, u.profile.status_text))
team.buffer_prnt("[{}]: Real name: {}".format(user, u.profile.get('real_name_normalized', '')))
team.buffer_prnt("[{}]: Title: {}".format(user, u.profile.get('title', '')))
team.buffer_prnt("[{}]: Email: {}".format(user, u.profile.get('email', '')))
team.buffer_prnt("[{}]: Phone: {}".format(user, u.profile.get('phone', '')))
else:
team.buffer_prnt("[{}]: No such user".format(user))
return w.WEECHAT_RC_OK_EAT
@slack_buffer_or_ignore
@utf8_decode
def me_command_cb(data, current_buffer, args):
message = "_{}_".format(args.split(' ', 1)[1])
buffer_input_callback("EVENTROUTER", current_buffer, message)
return w.WEECHAT_RC_OK_EAT
def command_register(data, current_buffer, args):
CLIENT_ID = "2468770254.51917335286"
CLIENT_SECRET = "dcb7fe380a000cba0cca3169a5fe8d70" # Not really a secret.
if args == 'register':
message = textwrap.dedent("""
#### Retrieving a Slack token via OAUTH ####
1) Paste this into a browser: https://slack.com/oauth/authorize?client_id=2468770254.51917335286&scope=client
2) Select the team you wish to access from wee-slack in your browser.
3) Click "Authorize" in the browser **IMPORTANT: the redirect will fail, this is expected**
4) Copy the "code" portion of the URL to your clipboard
5) Return to weechat and run `/slack register [code]`
""")
w.prnt("", message)
return
try:
_, oauth_code = args.split()
except ValueError:
w.prnt("",
"ERROR: wrong number of arguments given for register command")
return
uri = (
"https://slack.com/api/oauth.access?"
"client_id={}&client_secret={}&code={}"
).format(CLIENT_ID, CLIENT_SECRET, oauth_code)
ret = urllib.urlopen(uri).read()
d = json.loads(ret)
if not d["ok"]:
w.prnt("",
"ERROR: Couldn't get Slack OAuth token: {}".format(d['error']))
return
if config.is_default('slack_api_token'):
w.config_set_plugin('slack_api_token', d['access_token'])
else:
# Add new token to existing set, joined by comma.
tok = config.get_string('slack_api_token')
w.config_set_plugin('slack_api_token',
','.join([tok, d['access_token']]))
w.prnt("", "Success! Added team \"%s\"" % (d['team_name'],))
w.prnt("", "Please reload wee-slack with: /python reload slack")
@slack_buffer_or_ignore
@utf8_decode
def msg_command_cb(data, current_buffer, args):
dbg("msg_command_cb")
aargs = args.split(None, 2)
who = aargs[1]
if who == "*":
who = EVENTROUTER.weechat_controller.buffers[current_buffer].slack_name
else:
command_talk(data, current_buffer, "talk " + who)
if len(aargs) > 2:
message = aargs[2]
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
cmap = team.get_channel_map()
if who in cmap:
channel = team.channels[cmap[who]]
channel.send_message(message)
return w.WEECHAT_RC_OK_EAT
@slack_buffer_required
@utf8_decode
def command_channels(data, current_buffer, args):
e = EVENTROUTER
team = e.weechat_controller.buffers[current_buffer].team
team.buffer_prnt("Channels:")
for channel in team.get_channel_map():
team.buffer_prnt(" {}".format(channel))
return w.WEECHAT_RC_OK_EAT
@slack_buffer_required
@utf8_decode
def command_users(data, current_buffer, args):
e = EVENTROUTER
team = e.weechat_controller.buffers[current_buffer].team
team.buffer_prnt("Users:")
for user in team.users.values():
team.buffer_prnt(" {:<25}({})".format(user.name, user.presence))
return w.WEECHAT_RC_OK_EAT
@slack_buffer_or_ignore
@utf8_decode
def command_talk(data, current_buffer, args):
"""
Open a chat with the specified user(s)
/slack talk <user>[,<user2>[,<user3>...]]
"""
e = EVENTROUTER
team = e.weechat_controller.buffers[current_buffer].team
channel_name = args.split(' ')[1]
if channel_name.startswith('#'):
channel_name = channel_name[1:]
# Try finding the channel by name
chan = team.channels.get(team.get_channel_map().get(channel_name))
# If the channel doesn't exist, try finding a DM or MPDM instead
if not chan:
# Get the IDs of the users
u = team.get_username_map()
users = set()
for user in channel_name.split(','):
if user.startswith('@'):
user = user[1:]
if user in u:
users.add(u[user])
if users:
if len(users) > 1:
channel_type = 'mpim'
# Add the current user since MPDMs include them as a member
users.add(team.myidentifier)
else:
channel_type = 'im'
chan = team.find_channel_by_members(users, channel_type=channel_type)
# If the DM or MPDM doesn't exist, create it
if not chan:
s = SlackRequest(team.token, SLACK_API_TRANSLATOR[channel_type]['join'], {'users': ','.join(users)}, team_hash=team.team_hash)
EVENTROUTER.receive(s)
if chan:
chan.open()
if config.switch_buffer_on_join:
w.buffer_set(chan.channel_buffer, "display", "1")
return w.WEECHAT_RC_OK_EAT
return w.WEECHAT_RC_OK_EAT
def command_showmuted(data, current_buffer, args):
current = w.current_buffer()
w.prnt(EVENTROUTER.weechat_controller.buffers[current].team.channel_buffer, str(EVENTROUTER.weechat_controller.buffers[current].team.muted_channels))
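# The callback below implements both "/thread <id>" (open a thread buffer for
# the message with the given short hash or ts) and "/reply <n> <msg>" (reply
# to the nth most recent main-channel message, counting from 1).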
@utf8_decode
def thread_command_callback(data, current_buffer, args):
current = w.current_buffer()
channel = EVENTROUTER.weechat_controller.buffers.get(current)
if channel:
args = args.split()
if args[0] == '/thread':
if len(args) == 2:
                try:
                    pm = channel.messages[SlackTS(args[1])]
                except (KeyError, ValueError):
                    # Not a raw ts; look the message up by its short hash.
                    pm = channel.hashed_messages[args[1]]
tc = SlackThreadChannel(EVENTROUTER, pm)
pm.thread_channel = tc
tc.open()
# tc.create_buffer()
if config.switch_buffer_on_join:
w.buffer_set(tc.channel_buffer, "display", "1")
return w.WEECHAT_RC_OK_EAT
elif args[0] == '/reply':
count = int(args[1])
msg = " ".join(args[2:])
mkeys = channel.main_message_keys_reversed()
parent_id = str(next(islice(mkeys, count - 1, None)))
channel.send_message(msg, request_dict_ext={"thread_ts": parent_id})
return w.WEECHAT_RC_OK_EAT
w.prnt(current, "Invalid thread command.")
return w.WEECHAT_RC_OK_EAT
@utf8_decode
def rehistory_command_callback(data, current_buffer, args):
current = w.current_buffer()
channel = EVENTROUTER.weechat_controller.buffers.get(current)
channel.got_history = False
w.buffer_clear(channel.channel_buffer)
channel.get_history()
return w.WEECHAT_RC_OK_EAT
@slack_buffer_required
@utf8_decode
def hide_command_callback(data, current_buffer, args):
c = EVENTROUTER.weechat_controller.buffers.get(current_buffer, None)
if c:
name = c.formatted_name(style='long_default')
if name in config.distracting_channels:
w.buffer_set(c.channel_buffer, "hidden", "1")
return w.WEECHAT_RC_OK_EAT
@utf8_decode
def slack_command_cb(data, current_buffer, args):
    # The first word selects the command; the full args string (including
    # the command name) is passed through to the handler unchanged.
    function_name = args.split(' ', 1)[0]
try:
EVENTROUTER.cmds[function_name]("", current_buffer, args)
except KeyError:
w.prnt("", "Command not found: " + function_name)
return w.WEECHAT_RC_OK
@slack_buffer_required
def command_distracting(data, current_buffer, args):
channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer, None)
if channel:
fullname = channel.formatted_name(style="long_default")
        if fullname not in config.distracting_channels:
            config.distracting_channels.append(fullname)
        else:
            config.distracting_channels.remove(fullname)
save_distracting_channels()
def save_distracting_channels():
w.config_set_plugin('distracting_channels', ','.join(config.distracting_channels))
@slack_buffer_required
def command_slash(data, current_buffer, args):
"""
Support for custom slack commands
/slack slash /customcommand arg1 arg2 arg3
"""
e = EVENTROUTER
channel = e.weechat_controller.buffers.get(current_buffer, None)
if channel:
team = channel.team
if args == 'slash':
w.prnt("", "Usage: /slack slash /someslashcommand [arguments...].")
return
split_args = args.split(None, 2)
command = split_args[1]
text = split_args[2] if len(split_args) > 2 else ""
s = SlackRequest(team.token, "chat.command", {"command": command, "text": text, 'channel': channel.identifier}, team_hash=team.team_hash, channel_identifier=channel.identifier)
EVENTROUTER.receive(s)
@slack_buffer_required
def command_mute(data, current_buffer, args):
current = w.current_buffer()
channel_id = EVENTROUTER.weechat_controller.buffers[current].identifier
team = EVENTROUTER.weechat_controller.buffers[current].team
if channel_id not in team.muted_channels:
team.muted_channels.add(channel_id)
else:
team.muted_channels.discard(channel_id)
s = SlackRequest(team.token, "users.prefs.set", {"name": "muted_channels", "value": ",".join(team.muted_channels)}, team_hash=team.team_hash, channel_identifier=channel_id)
EVENTROUTER.receive(s)
@slack_buffer_required
def command_openweb(data, current_buffer, args):
# if done from server buffer, open slack for reals
channel = EVENTROUTER.weechat_controller.buffers[current_buffer]
if isinstance(channel, SlackTeam):
url = "https://{}".format(channel.team.domain)
else:
now = SlackTS()
url = "https://{}/archives/{}/p{}000000".format(channel.team.domain, channel.slack_name, now.majorstr())
w.prnt_date_tags(channel.team.channel_buffer, SlackTS().major, "openweb,logger_backlog_end,notify_none", url)
def command_nodistractions(data, current_buffer, args):
global hide_distractions
hide_distractions = not hide_distractions
if config.distracting_channels != ['']:
for channel in config.distracting_channels:
dbg('hiding channel {}'.format(channel))
# try:
for c in EVENTROUTER.weechat_controller.buffers.itervalues():
if c == channel:
dbg('found channel {} to hide'.format(channel))
w.buffer_set(c.channel_buffer, "hidden", str(int(hide_distractions)))
# except:
# dbg("Can't hide channel {} .. removing..".format(channel), main_buffer=True)
# config.distracting_channels.pop(config.distracting_channels.index(channel))
# save_distracting_channels()
@slack_buffer_required
def command_upload(data, current_buffer, args):
channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer)
url = 'https://slack.com/api/files.upload'
fname = args.split(' ', 1)
file_path = os.path.expanduser(fname[1])
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
if ' ' in file_path:
file_path = file_path.replace(' ', '\ ')
command = 'curl -F file=@{} -F channels={} -F token={} {}'.format(file_path, channel.identifier, team.token, url)
w.hook_process(command, config.slack_timeout, '', '')
@utf8_decode
def away_command_cb(data, current_buffer, args):
# TODO: reimplement all.. maybe
    (all_arg, message) = re.match(r"^/away(?:\s+(-all))?(?:\s+(.+))?", args).groups()
if message is None:
command_back(data, current_buffer, args)
else:
command_away(data, current_buffer, args)
return w.WEECHAT_RC_OK
@slack_buffer_required
def command_away(data, current_buffer, args):
"""
Sets your status as 'away'
/slack away
"""
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
s = SlackRequest(team.token, "users.setPresence", {"presence": "away"}, team_hash=team.team_hash)
EVENTROUTER.receive(s)
@slack_buffer_required
def command_status(data, current_buffer, args):
"""
Lets you set your Slack Status (not to be confused with away/here)
/slack status [emoji] [status_message]
"""
e = EVENTROUTER
channel = e.weechat_controller.buffers.get(current_buffer, None)
if channel:
team = channel.team
split_args = args.split(None, 2)
emoji = split_args[1] if len(split_args) > 1 else ""
text = split_args[2] if len(split_args) > 2 else ""
profile = {"status_text":text,"status_emoji":emoji}
s = SlackRequest(team.token, "users.profile.set", {"profile": profile}, team_hash=team.team_hash)
EVENTROUTER.receive(s)
@slack_buffer_required
def command_back(data, current_buffer, args):
"""
Sets your status as 'back'
/slack back
"""
team = EVENTROUTER.weechat_controller.buffers[current_buffer].team
s = SlackRequest(team.token, "users.setPresence", {"presence": "auto"}, team_hash=team.team_hash)
EVENTROUTER.receive(s)
@slack_buffer_required
@utf8_decode
def label_command_cb(data, current_buffer, args):
channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer)
if channel and channel.type == 'thread':
aargs = args.split(None, 2)
new_name = " +" + aargs[1]
channel.label = new_name
w.buffer_set(channel.channel_buffer, "short_name", new_name)
@utf8_decode
def set_unread_cb(data, current_buffer, command):
for channel in EVENTROUTER.weechat_controller.buffers.values():
channel.mark_read()
return w.WEECHAT_RC_OK
@slack_buffer_or_ignore
@utf8_decode
def set_unread_current_buffer_cb(data, current_buffer, command):
channel = EVENTROUTER.weechat_controller.buffers.get(current_buffer)
channel.mark_read()
return w.WEECHAT_RC_OK
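# Debug helper: "/slack p <expr>" evaluates <expr> as Python in this module's
# context and prints the result, e.g. "/slack p EVENTROUTER.teams".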
def command_p(data, current_buffer, args):
args = args.split(' ', 1)[1]
w.prnt("", "{}".format(eval(args)))
###### NEW EXCEPTIONS
class ProcessNotImplemented(Exception):
"""
Raised when we try to call process_(something), but
(something) has not been defined as a function.
"""
def __init__(self, function_name):
super(ProcessNotImplemented, self).__init__(function_name)
class InvalidType(Exception):
"""
Raised when we do type checking to ensure objects of the wrong
type are not used improperly.
"""
def __init__(self, type_str):
super(InvalidType, self).__init__(type_str)
###### New but probably old and need to migrate
def closed_slack_debug_buffer_cb(data, buffer):
global slack_debug
slack_debug = None
return w.WEECHAT_RC_OK
def create_slack_debug_buffer():
global slack_debug, debug_string
if slack_debug is not None:
w.buffer_set(slack_debug, "display", "1")
else:
debug_string = None
slack_debug = w.buffer_new("slack-debug", "", "", "closed_slack_debug_buffer_cb", "")
w.buffer_set(slack_debug, "notify", "0")
def load_emoji():
try:
DIR = w.info_get("weechat_dir", "")
with open('{}/weemoji.json'.format(DIR), 'r') as ef:
return json.loads(ef.read())["emoji"]
except Exception as e:
dbg("Couldn't load emoji list: {}".format(e), 5)
return []
def setup_hooks():
cmds = {k[8:]: v for k, v in globals().items() if k.startswith("command_")}
w.bar_item_new('slack_typing_notice', 'typing_bar_item_cb', '')
w.hook_timer(1000, 0, 0, "typing_update_cb", "")
w.hook_timer(1000, 0, 0, "buffer_list_update_callback", "EVENTROUTER")
w.hook_timer(3000, 0, 0, "reconnect_callback", "EVENTROUTER")
w.hook_timer(1000 * 60 * 5, 0, 0, "slack_never_away_cb", "")
w.hook_signal('buffer_closing', "buffer_closing_callback", "EVENTROUTER")
w.hook_signal('buffer_switch', "buffer_switch_callback", "EVENTROUTER")
w.hook_signal('window_switch', "buffer_switch_callback", "EVENTROUTER")
w.hook_signal('quit', "quit_notification_callback", "")
if config.send_typing_notice:
w.hook_signal('input_text_changed', "typing_notification_cb", "")
w.hook_command(
# Command name and description
'slack', 'Plugin to allow typing notification and sync of read markers for slack.com',
# Usage
'[command] [command options]',
# Description of arguments
'Commands:\n' +
'\n'.join(cmds.keys()) +
'\nUse /slack help [command] to find out more\n',
# Completions
'|'.join(cmds.keys()),
# Function name
'slack_command_cb', '')
# w.hook_command('me', '', 'stuff', 'stuff2', '', 'me_command_cb', '')
w.hook_command_run('/me', 'me_command_cb', '')
w.hook_command_run('/query', 'command_talk', '')
w.hook_command_run('/join', 'command_talk', '')
w.hook_command_run('/part', 'part_command_cb', '')
w.hook_command_run('/leave', 'part_command_cb', '')
w.hook_command_run('/topic', 'topic_command_cb', '')
w.hook_command_run('/thread', 'thread_command_callback', '')
w.hook_command_run('/reply', 'thread_command_callback', '')
w.hook_command_run('/rehistory', 'rehistory_command_callback', '')
w.hook_command_run('/hide', 'hide_command_callback', '')
w.hook_command_run('/msg', 'msg_command_cb', '')
w.hook_command_run('/label', 'label_command_cb', '')
w.hook_command_run("/input complete_next", "complete_next_cb", "")
w.hook_command_run("/input set_unread", "set_unread_cb", "")
w.hook_command_run("/input set_unread_current_buffer", "set_unread_current_buffer_cb", "")
w.hook_command_run('/away', 'away_command_cb', '')
w.hook_command_run('/whois', 'whois_command_cb', '')
w.hook_completion("nicks", "complete @-nicks for slack", "nick_completion_cb", "")
w.hook_completion("emoji", "complete :emoji: for slack", "emoji_completion_cb", "")
# Hooks to fix/implement
# w.hook_signal('buffer_opened', "buffer_opened_cb", "")
# w.hook_signal('window_scrolled', "scrolled_cb", "")
# w.hook_timer(3000, 0, 0, "slack_connection_persistence_cb", "")
##### END NEW
def dbg(message, level=0, main_buffer=False, fout=False):
"""
    Send debug output to the slack-debug buffer and optionally write to a file.
"""
# TODO: do this smarter
# return
if level >= config.debug_level:
global debug_string
message = "DEBUG: {}".format(message)
        if fout:
            with open('/tmp/debug.log', 'a+') as log_file:
                log_file.write(message + '\n')
if main_buffer:
# w.prnt("", "---------")
w.prnt("", "slack: " + message)
else:
if slack_debug and (not debug_string or debug_string in message):
# w.prnt(slack_debug, "---------")
w.prnt(slack_debug, message)
###### Config code
Setting = collections.namedtuple('Setting', ['default', 'desc'])
class PluginConfig(object):
# Default settings.
# These are, initially, each a (default, desc) tuple; the former is the
# default value of the setting, in the (string) format that weechat
# expects, and the latter is the user-friendly description of the setting.
# At __init__ time these values are extracted, the description is used to
# set or update the setting description for use with /help, and the default
# value is used to set the default for any settings not already defined.
# Following this procedure, the keys remain the same, but the values are
# the real (python) values of the settings.
default_settings = {
'background_load_all_history': Setting(
default='false',
desc='Load history for each channel in the background as soon as it'
' opens, rather than waiting for the user to look at it.'),
'channel_name_typing_indicator': Setting(
default='true',
desc='Change the prefix of a channel from # to > when someone is'
' typing in it. Note that this will (temporarily) affect the sort'
' order if you sort buffers by name rather than by number.'),
'colorize_private_chats': Setting(
default='false',
desc='Whether to use nick-colors in DM windows.'),
'debug_mode': Setting(
default='false',
desc='Open a dedicated buffer for debug messages and start logging'
            ' to it. How verbose the logging is depends on debug_level.'),
'debug_level': Setting(
default='3',
desc='Show only this level of debug info (or higher) when'
' debug_mode is on. Lower levels -> more messages.'),
'distracting_channels': Setting(
default='',
desc='List of channels to hide.'),
'group_name_prefix': Setting(
default='&',
desc='The prefix of buffer names for groups (private channels).'),
'map_underline_to': Setting(
default='_',
desc='When sending underlined text to slack, use this formatting'
' character for it. The default ("_") sends it as italics. Use'
' "*" to send bold instead.'),
'never_away': Setting(
default='false',
desc='Poke Slack every five minutes so that it never marks you "away".'),
'record_events': Setting(
default='false',
desc='Log all traffic from Slack to disk as JSON.'),
'render_bold_as': Setting(
default='bold',
desc='When receiving bold text from Slack, render it as this in weechat.'),
'render_italic_as': Setting(
default='italic',
            desc='When receiving italic text from Slack, render it as this in weechat.'
' If your terminal lacks italic support, consider using "underline" instead.'),
'send_typing_notice': Setting(
default='true',
desc='Alert Slack users when you are typing a message in the input bar '
'(Requires reload)'),
'server_aliases': Setting(
default='',
desc='A comma separated list of `subdomain:alias` pairs. The alias'
' will be used instead of the actual name of the slack (in buffer'
' names, logging, etc). E.g `work:no_fun_allowed` would make your'
' work slack show up as `no_fun_allowed` rather than `work.slack.com`.'),
'short_buffer_names': Setting(
default='false',
desc='Use `foo.#channel` rather than `foo.slack.com.#channel` as the'
' internal name for Slack buffers. Overrides server_aliases.'),
'show_reaction_nicks': Setting(
default='false',
desc='Display the name of the reacting user(s) alongside each reactji.'),
'slack_api_token': Setting(
default='INSERT VALID KEY HERE!',
desc='List of Slack API tokens, one per Slack instance you want to'
' connect to. See the README for details on how to get these.'),
'slack_timeout': Setting(
default='20000',
desc='How long (ms) to wait when communicating with Slack.'),
'switch_buffer_on_join': Setting(
default='true',
desc='When /joining a channel, automatically switch to it as well.'),
'thread_suffix_color': Setting(
default='lightcyan',
desc='Color to use for the [thread: XXX] suffix on messages that'
' have threads attached to them.'),
'unfurl_ignore_alt_text': Setting(
default='false',
desc='When displaying ("unfurling") links to channels/users/etc,'
' ignore the "alt text" present in the message and instead use the'
' canonical name of the thing being linked to.'),
'unfurl_auto_link_display': Setting(
default='both',
desc='When displaying ("unfurling") links to channels/users/etc,'
' determine what is displayed when the text matches the url'
' without the protocol. This happens when Slack automatically'
' creates links, e.g. from words separated by dots or email'
' addresses. Set it to "text" to only display the text written by'
' the user, "url" to only display the url or "both" (the default)'
' to display both.'),
'unhide_buffers_with_activity': Setting(
default='false',
desc='When activity occurs on a buffer, unhide it even if it was'
' previously hidden (whether by the user or by the'
' distracting_channels setting).'),
}
# Set missing settings to their defaults. Load non-missing settings from
# weechat configs.
def __init__(self):
self.settings = {}
# Set all descriptions, replace the values in the dict with the
# default setting value rather than the (setting,desc) tuple.
# Use items() rather than iteritems() so we don't need to worry about
# invalidating the iterator.
for key, (default, desc) in self.default_settings.items():
w.config_set_desc_plugin(key, desc)
self.settings[key] = default
# Migrate settings from old versions of Weeslack...
self.migrate()
# ...and then set anything left over from the defaults.
for key, default in self.settings.iteritems():
if not w.config_get_plugin(key):
w.config_set_plugin(key, default)
self.config_changed(None, None, None)
def __str__(self):
return "".join([x + "\t" + str(self.settings[x]) + "\n" for x in self.settings.keys()])
def config_changed(self, data, key, value):
for key in self.settings:
self.settings[key] = self.fetch_setting(key)
if self.debug_mode:
create_slack_debug_buffer()
return w.WEECHAT_RC_OK
def fetch_setting(self, key):
if hasattr(self, 'get_' + key):
try:
return getattr(self, 'get_' + key)(key)
except:
return self.settings[key]
else:
# Most settings are on/off, so make get_boolean the default
return self.get_boolean(key)
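    # e.g. fetch_setting('debug_level') dispatches to get_debug_level (an alias
    # of get_int below), while fetch_setting('never_away') has no getter of its
    # own and falls through to get_boolean.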
def __getattr__(self, key):
return self.settings[key]
def get_boolean(self, key):
return w.config_string_to_boolean(w.config_get_plugin(key))
def get_string(self, key):
return w.config_get_plugin(key)
def get_int(self, key):
return int(w.config_get_plugin(key))
def is_default(self, key):
default = self.default_settings.get(key).default
return w.config_get_plugin(key) == default
get_debug_level = get_int
get_group_name_prefix = get_string
get_map_underline_to = get_string
get_render_bold_as = get_string
get_render_italic_as = get_string
get_slack_timeout = get_int
get_thread_suffix_color = get_string
get_unfurl_auto_link_display = get_string
def get_distracting_channels(self, key):
return [x.strip() for x in w.config_get_plugin(key).split(',')]
def get_server_aliases(self, key):
alias_list = w.config_get_plugin(key)
if len(alias_list) > 0:
return dict(item.split(":") for item in alias_list.split(","))
def get_slack_api_token(self, key):
token = w.config_get_plugin("slack_api_token")
if token.startswith('${sec.data'):
return w.string_eval_expression(token, {}, {}, {})
else:
return token
def migrate(self):
"""
This is to migrate the extension name from slack_extension to slack
"""
if not w.config_get_plugin("migrated"):
for k in self.settings.keys():
if not w.config_is_set_plugin(k):
p = w.config_get("plugins.var.python.slack_extension.{}".format(k))
data = w.config_string(p)
if data != "":
w.config_set_plugin(k, data)
w.config_set_plugin("migrated", "true")
# To trace execution, call `setup_trace()` at startup and then
# `sys.settrace(trace_calls)` inside the function you want traced.
def setup_trace():
global f
now = time.time()
f = open('{}/{}-trace.json'.format(RECORD_DIR, now), 'w')
def trace_calls(frame, event, arg):
global f
if event != 'call':
return
co = frame.f_code
func_name = co.co_name
if func_name == 'write':
# Ignore write() calls from print statements
return
func_line_no = frame.f_lineno
func_filename = co.co_filename
caller = frame.f_back
caller_line_no = caller.f_lineno
caller_filename = caller.f_code.co_filename
print >> f, 'Call to %s on line %s of %s from line %s of %s' % \
(func_name, func_line_no, func_filename,
caller_line_no, caller_filename)
f.flush()
return
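# A sketch of wiring the tracer up (assumes RECORD_DIR is a writable directory):
#     setup_trace()
#     sys.settrace(trace_calls)
# After that, every function call is appended to RECORD_DIR/<timestamp>-trace.json
# until the trace function is removed with sys.settrace(None).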
def initiate_connection(token, retries=3):
return SlackRequest(token,
'rtm.start',
{"batch_presence_aware": 1 },
retries=retries)
# Main
if __name__ == "__main__":
w = WeechatWrapper(weechat)
if w.register(SCRIPT_NAME, SCRIPT_AUTHOR, SCRIPT_VERSION, SCRIPT_LICENSE,
SCRIPT_DESC, "script_unloaded", ""):
weechat_version = w.info_get("version_number", "") or 0
if int(weechat_version) < 0x1030000:
w.prnt("", "\nERROR: Weechat version 1.3+ is required to use {}.\n\n".format(SCRIPT_NAME))
else:
global EVENTROUTER
EVENTROUTER = EventRouter()
# setup_trace()
# WEECHAT_HOME = w.info_get("weechat_dir", "")
# Global var section
slack_debug = None
config = PluginConfig()
config_changed_cb = config.config_changed
typing_timer = time.time()
# domain = None
# previous_buffer = None
# slack_buffer = None
# never_away = False
hide_distractions = False
# hotlist = w.infolist_get("hotlist", "", "")
# main_weechat_buffer = w.info_get("irc_buffer", "{}.{}".format(domain, "DOESNOTEXIST!@#$"))
w.hook_config("plugins.var.python." + SCRIPT_NAME + ".*", "config_changed_cb", "")
w.hook_modifier("input_text_for_buffer", "input_text_for_buffer_cb", "")
EMOJI.extend(load_emoji())
setup_hooks()
# attach to the weechat hooks we need
tokens = config.slack_api_token.split(',')
for t in tokens:
s = initiate_connection(t)
EVENTROUTER.receive(s)
if config.record_events:
EVENTROUTER.record()
EVENTROUTER.handle_next()
w.hook_timer(10, 0, 0, "handle_next", "")
# END attach to the weechat hooks we need
| [
"[email protected]"
]
| |
0588e6013bc4ccd0a97c815853df716c9fa6e040 | c0ea89d58fd6f780a23f10a0b5535b3feada5a1a | /anchore_engine/services/policy_engine/api/models/image_selection_rule.py | e0f9abbea332fcca8e57209b3916beb1d02c3c34 | [
"Apache-2.0"
]
| permissive | longfeide2008/anchore-engine | b62acbab8c7ebbf7fa67a2503768c677942220e4 | 622786ec653531f4fb216cb33e11ffe31fe33a29 | refs/heads/master | 2022-11-08T10:02:51.988961 | 2020-06-15T18:00:37 | 2020-06-15T18:00:37 | 274,068,878 | 1 | 0 | Apache-2.0 | 2020-06-22T07:27:39 | 2020-06-22T07:27:38 | null | UTF-8 | Python | false | false | 4,764 | py | # coding: utf-8
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from anchore_engine.services.policy_engine.api.models.base_model_ import Model
from anchore_engine.services.policy_engine.api.models.image_ref import ImageRef # noqa: F401,E501
from anchore_engine.services.policy_engine.api import util
class ImageSelectionRule(Model):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self, id=None, name=None, registry=None, repository=None, image=None): # noqa: E501
"""ImageSelectionRule - a model defined in Swagger
:param id: The id of this ImageSelectionRule. # noqa: E501
:type id: str
:param name: The name of this ImageSelectionRule. # noqa: E501
:type name: str
:param registry: The registry of this ImageSelectionRule. # noqa: E501
:type registry: str
:param repository: The repository of this ImageSelectionRule. # noqa: E501
:type repository: str
:param image: The image of this ImageSelectionRule. # noqa: E501
:type image: ImageRef
"""
self.swagger_types = {
'id': str,
'name': str,
'registry': str,
'repository': str,
'image': ImageRef
}
self.attribute_map = {
'id': 'id',
'name': 'name',
'registry': 'registry',
'repository': 'repository',
'image': 'image'
}
self._id = id
self._name = name
self._registry = registry
self._repository = repository
self._image = image
@classmethod
def from_dict(cls, dikt):
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The ImageSelectionRule of this ImageSelectionRule. # noqa: E501
:rtype: ImageSelectionRule
"""
return util.deserialize_model(dikt, cls)
@property
def id(self):
"""Gets the id of this ImageSelectionRule.
:return: The id of this ImageSelectionRule.
:rtype: str
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this ImageSelectionRule.
:param id: The id of this ImageSelectionRule.
:type id: str
"""
self._id = id
@property
def name(self):
"""Gets the name of this ImageSelectionRule.
:return: The name of this ImageSelectionRule.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this ImageSelectionRule.
:param name: The name of this ImageSelectionRule.
:type name: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`") # noqa: E501
self._name = name
@property
def registry(self):
"""Gets the registry of this ImageSelectionRule.
:return: The registry of this ImageSelectionRule.
:rtype: str
"""
return self._registry
@registry.setter
def registry(self, registry):
"""Sets the registry of this ImageSelectionRule.
:param registry: The registry of this ImageSelectionRule.
:type registry: str
"""
if registry is None:
raise ValueError("Invalid value for `registry`, must not be `None`") # noqa: E501
self._registry = registry
@property
def repository(self):
"""Gets the repository of this ImageSelectionRule.
:return: The repository of this ImageSelectionRule.
:rtype: str
"""
return self._repository
@repository.setter
def repository(self, repository):
"""Sets the repository of this ImageSelectionRule.
:param repository: The repository of this ImageSelectionRule.
:type repository: str
"""
if repository is None:
raise ValueError("Invalid value for `repository`, must not be `None`") # noqa: E501
self._repository = repository
@property
def image(self):
"""Gets the image of this ImageSelectionRule.
:return: The image of this ImageSelectionRule.
:rtype: ImageRef
"""
return self._image
@image.setter
def image(self, image):
"""Sets the image of this ImageSelectionRule.
:param image: The image of this ImageSelectionRule.
:type image: ImageRef
"""
if image is None:
raise ValueError("Invalid value for `image`, must not be `None`") # noqa: E501
self._image = image
| [
"[email protected]"
]
| |
41f4b127bfbd6b75174719694a023c07f6cca470 | 673e829dda9583c8dd2ac8d958ba1dc304bffeaf | /data/multilingual/Latn.TZO/Sun-ExtA_16/pdf_to_json_test_Latn.TZO_Sun-ExtA_16.py | 1ec5da78381362fbe785a67e34d5996d974a7995 | [
"BSD-3-Clause"
]
| permissive | antoinecarme/pdf_to_json_tests | 58bab9f6ba263531e69f793233ddc4d33b783b7e | d57a024fde862e698d916a1178f285883d7a3b2f | refs/heads/master | 2021-01-26T08:41:47.327804 | 2020-02-27T15:54:48 | 2020-02-27T15:54:48 | 243,359,934 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 311 | py | import pdf_to_json as p2j
import json
url = "file:data/multilingual/Latn.TZO/Sun-ExtA_16/udhr_Latn.TZO_Sun-ExtA_16.pdf"
lConverter = p2j.pdf_to_json.pdf_to_json_converter()
lConverter.mImageHashOnly = True
lDict = lConverter.convert(url)
print(json.dumps(lDict, indent=4, ensure_ascii=False, sort_keys=True))
| [
"[email protected]"
]
| |
d7e882092e4b190087f4548e9372a44995255bcf | d3737731634ee3f6fa2b19f6806d42ecc27d21a5 | /wals3/scripts/initializedb.py | 273185799ef7e0f763aec0421b0141b6c83648e1 | []
| no_license | Maggi12/wals3 | 3ad2475714b2d0bd1a7e5bb52baac1070eb07a5f | e66f08766ef67f51cae3d9656bcd4da1a8cf63c8 | refs/heads/master | 2021-01-22T20:02:56.225183 | 2014-07-25T15:42:39 | 2014-07-25T15:42:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 28,628 | py | from __future__ import unicode_literals
import sys
import transaction
from itertools import groupby, cycle
import re
from datetime import date, datetime
from collections import defaultdict
from pytz import utc
from sqlalchemy import create_engine
from sqlalchemy.orm import joinedload_all
from path import path
from bs4 import BeautifulSoup
from clld.db.meta import DBSession, VersionedDBSession
from clld.db.models import common
from clld.db.util import compute_language_sources
from clld.scripts.util import initializedb, Data, gbs_func
from clld.lib.bibtex import EntryType
from clld.lib.dsv import reader
from clld.util import LGR_ABBRS
import wals3
from wals3 import models
from wals3.scripts import uncited
from wals3.scripts import issues
UNCITED_MAP = {}
for k, v in uncited.MAP.items():
UNCITED_MAP[k.lower()] = v
# start with what's online right now:
DB = create_engine('postgresql://robert@/wals-vm42')
REFDB = create_engine('postgresql://robert@/walsrefs')
GC = create_engine('postgresql://robert@/glottolog3')
ABBRS = {
"A": "agent-like argument",
"ACCOMP": "accompanied ",
"ACR": "actor",
"ACT": "actual",
"ADEL": "adelative",
"ADVZ": "adverbializer",
"AFF": "affirmative",
"AGT": "agent",
"ALL": "allative",
"AN": "action nominal",
"ANC": "action nominal construction",
"ANIM": "animate",
"ANTIP": "antipassive",
"APPL": "applicative",
"AS": "asseverative",
"ASSOC": "associative",
"ASY": "asymmetric",
"ATTR": "attributive",
"AUD": "auditory evidential",
"AUG": "augmented",
"C": "common gender",
"CL": "class (= noun class, gender)",
"CLF": "classifier",
"CMPL": "completive",
"CNTR": "contrary to expectation marker",
"COLL": "collective",
"COM": "comitative",
"COMPR": "comparative",
"CONN": "connective",
"CONNEG": "connegative",
"CONSTR": "construct",
"CONT": "continuative, continous",
"CONTEMP": "contemporative",
"COP": "copula",
"CPW": "categories per word",
"CRS": "currently relevant state",
"DECL": "declarative",
"DEG": "degree word",
"DEP": "dependent marker",
"DES": "desire",
"DESID": "desiderative",
"DIM": "diminutive",
"DIR": "direct",
"DIR.EVD": "direct evidential",
"DIRL": "directional",
"DIST.PST": "distant past",
"DOBJ": "direct object",
"DS": "different subject",
"EMPH": "emphatic",
"EPENTH": "epenthetic",
"EPV": "expletive verbal suffix",
"EVD": "evidential",
"FACT": "fact",
"FAM": "familiar",
"FIN": "finite",
"FIN.AOR": "finite aorist",
"FV": "verb-final vowel",
"HAB": "habitual",
"HEST": "hesternal past",
"HHON": "super honorific",
"HOD": "hodiernal past",
"HON": "honorific",
"HORT": "hortative",
"HUM": "human",
"IE": "Indo-European",
"ILL": "illative",
"IMM.PRET": "immediate preterite",
"IMM.PST": "immediate past",
"IMPERS": "impersonal",
"INAN": "inanimate",
"INCEP": "inceptive",
"INCOMPL": "incompletive",
"IND": "indicative",
"INDIR.EVD": "indirect evidential",
"INFER": "inferential evidential",
"INGR": "ingressive",
"INTENT": "intentional",
"INTER": "interrogative",
"INTF": "intensifier",
"INTGEN": "intended genitive",
"INV": "inverse",
"IO": "indirect object ",
"IRR": "irrealis",
"ITER": "iterative",
"LIG": "ligature",
"LOCUT": "locutor person marker",
"MED": "medial",
"NARR": "narrative",
"NC": "noun class",
"NEC": "necessity",
"NHON": "non-honorific",
"NOMIN": "nominalization",
"NON.F": "non-feminine ",
"NONFIN": "non-finite ",
"NONFIN.AOR": "non-finite aorist",
"NP": "noun phrase",
"NPST": "non-past",
"NSG": "non-singular",
"NUM": "numeral",
"O": "object pronominal marker",
"OBV": "obviative",
"OPT": "optative",
"P": "patient-like argument",
"PAT": "patient",
"PATH": "path locative",
"PCL": "particle",
"PERS": "personal",
"PHR.TERM": "phrase terminal marker",
"PLUPERF": "pluperfect",
"POS": "possibility",
"POSTP": "postposition",
"POT": "potential",
"PP": "prepositional/postpositional phrase",
"PRECONTEMP": "precontemporal",
"PRED": "predicative",
"PREF": "prefix",
"PREP": "preposition",
"PREV": "preverb",
"PROL": "prolative",
"PRON": "pronoun",
"PROP": "proper name",
"PRTV": "partitive",
"PST.CONT": "past continuous",
"PST.PUNCT": "past punctiliar",
"PSTBEFOREYEST": "past before yesterday (= prehesternal)",
"PUNCT": "punctual stem",
"Q": "question-marker",
"QUOT": "quotative",
"RDP": "reduplication",
"REAL": "realis",
"REC": "recent (past)",
"RECP": "reciprocal",
"REM.PST": "remote past",
"REMOTE": "remote",
"REPET": "repetitive",
"RLZ": "realized",
"RNR": "result nominalizer",
"S": "sole argument of the intransitive verb",
"SBJV": "subjunctive",
"SENS": "sensory evidential",
"SPEC": "specific",
"SR": "switch Reference",
"SS": "same subject",
"STAT": "stative",
"SUBORD": "subordination",
"SUFF": "suffix",
"SUP": "superessive",
"SYM": "symmetric",
"SymAsy": "symmetric and asymmetric",
"T/A": "tense/ aspect",
"TD": "time depth/ proximality marker",
"TELIC": "telic",
"TEMPRY": "temporary",
"TH": "thematic suffix",
"THM": "theme (i.e. the semantic role)",
"TOD.PST": "today past",
"TRASL": "traslative",
"TRI": "trial",
"UNSP": "unspecified",
"VBLZ": "verbalizer",
"VENT": "ventive",
"VIS": "visual evidential",
"VP": "verb phrase",
}
for k, v in LGR_ABBRS.items():
ABBRS.setdefault(k, v)
def get_source(id): # pragma: no cover
"""retrieve a source record from wals_refdb
"""
field_map = {
'onlineversion': 'url',
'gbs_id': 'google_book_search_id',
'doi': 'jsondata',
'cited': 'jsondata',
'conference': 'jsondata',
'iso_code': 'jsondata',
'olac_field': 'jsondata',
'wals_code': 'jsondata',
}
res = {'id': id, 'jsondata': {'iso_code': [], 'olac_field': [], 'wals_code': []}}
refdb_id = UNCITED_MAP.get(id.lower())
if not refdb_id:
for row in REFDB.execute("""\
select id, genre from ref_record, ref_recordofdocument
where id = id_r_ref and citekey = '%s'""" % id
):
res['bibtex_type'] = row['genre']
refdb_id = row['id']
break
if not refdb_id:
if id[-1] in ['a', 'b', 'c', 'd']:
refdb_id = UNCITED_MAP.get(id[:-1].lower())
if not refdb_id:
print 'missing ref', id
return {}
res['pk'] = int(refdb_id)
if 'bibtex_type' not in res:
for row in REFDB.execute("select genre from ref_record where id = %s" % refdb_id):
res['bibtex_type'] = row['genre']
break
for row in REFDB.execute(
"select * from ref_recfields where id_r_ref = %s" % refdb_id
):
field = field_map.get(row['id_name'], row['id_name'])
if field == 'jsondata':
if row['id_name'] in ['iso_code', 'olac_field', 'wals_code']:
res['jsondata'][row['id_name']].append(row['id_value'])
else:
res['jsondata'][row['id_name']] = row['id_value']
else:
res[field] = row['id_value']
    if res['bibtex_type'] == 'thesis':
        if res['format'] == 'phd':
            res['bibtex_type'] = 'phdthesis'
            del res['format']
        elif res['format'] == 'ma':
            res['bibtex_type'] = 'mastersthesis'
            del res['format']
        else:
            res['bibtex_type'] = 'misc'
if res['bibtex_type'] == 'online':
res['howpublished'] = 'online'
res['bibtex_type'] = getattr(EntryType, res['bibtex_type'], EntryType.misc)
if 'format' in res:
res['type'] = res['format']
del res['format']
authors = ''
for row in REFDB.execute(
"select * from ref_recauthors where id_r_ref = %s order by ord" % refdb_id
):
if row['type'] == 'etal':
authors += ' et al.'
else:
if authors:
authors += ' and '
authors += row['value']
res['author'] = authors
for row in REFDB.execute(
"select * from ref_recjournal where id_r_ref = %s" % refdb_id
):
res['journal'] = row['name']
break
return res
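# A sketch of the shape of the returned dict, for a hypothetical citekey:
#     get_source('Dryer-1992') -> {
#         'id': 'Dryer-1992', 'pk': 1234, 'bibtex_type': EntryType.article,
#         'author': 'Dryer, Matthew S.', 'journal': 'Language',
#         'jsondata': {'iso_code': [...], 'olac_field': [...], 'wals_code': [...]},
#         ...}
# or {} when the citekey cannot be resolved in wals_refdb.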
def parse_igt(html): # pragma: no cover
"""
<table class="IGT">
<caption>
<div class="translation">I want the white one.</div>
</caption>
<tbody>
<tr class="phrase">
<td class="morpheme"><i>Pojne-j-ben </i></td>
<td class="morpheme"><i>lew-din </i></td>
<td class="morpheme"><i>erd'-ije. </i></td>
</tr>
<tr class="gloss">
<td class="morpheme">white-PTCP-NMLZ</td>
<td class="morpheme">eat-INF</td>
<td class="morpheme">want-1SG.INTR</td>
</tr>
</tbody>
</table>
"""
def get_text(e):
if not isinstance(e, list):
e = [e]
return ' '.join(' '.join(ee.stripped_strings) for ee in e)
res = {}
soup = BeautifulSoup(html)
e = soup.find('caption')
if e:
res['description'] = get_text(e)
e = soup.find('tr', attrs={'class': 'phrase'})
if e:
morphemes = e.find_all('td', attrs={'class': 'morpheme'})
res['name'] = get_text(morphemes)
res['analyzed'] = '\t'.join(get_text(m) for m in morphemes)
res['markup_analyzed'] = '\t'.join(
''.join(unicode(c) for c in m.contents) for m in morphemes)
e = soup.find('tr', attrs={'class': 'gloss'})
if e:
morphemes = e.find_all('td', attrs={'class': 'morpheme'})
res['gloss'] = '\t'.join(get_text(m).replace('. ', '.') for m in morphemes)
res['markup_gloss'] = '\t'.join(
''.join(unicode(c) for c in m.contents) for m in morphemes)
assert len(res.get('gloss', '').split('\t')) == len(res.get('analyzed', '').split('\t'))
return res
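# For the IGT table shown in the docstring above, the result would look like:
#     {'description': "I want the white one.",
#      'name': "Pojne-j-ben lew-din erd'-ije.",
#      'analyzed': "Pojne-j-ben\tlew-din\terd'-ije.",
#      'gloss': "white-PTCP-NMLZ\teat-INF\twant-1SG.INTR",
#      'markup_analyzed': ..., 'markup_gloss': ...}  # same cells, inline HTML kept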
def teaser(html): # pragma: no cover
res = ''
for s in BeautifulSoup(html).stripped_strings:
res = '%s %s' % (res, s)
if len(res) > 100:
break
return res.strip()
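# e.g. teaser('<p>A <b>long</b> example sentence ...</p>') returns roughly the
# first 100 characters of the visible text, for use as a short display name.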
def get_vs2008(args): # pragma: no cover
vs2008 = {}
for row in reader(args.data_file('datapoints_2008.csv'), delimiter=','):
vs2008[(row[0], '%sA' % row[1])] = int(row[2])
return vs2008
E2008 = utc.localize(datetime(2008, 4, 21))
E2011 = utc.localize(datetime(2011, 4, 28))
E2013 = utc.localize(datetime(2013, 11, 15))
data = Data(created=E2008, updated=E2008)
def migrate(from_, to_, converter): # pragma: no cover
for row in DB.execute("select * from %s" % from_):
res = converter(row)
if not res:
continue
if isinstance(res, dict):
DBSession.add(to_(**res))
else:
data.add(to_, res[0], **res[1])
DBSession.flush()
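# e.g. migrate('family', models.Family, lambda r: (r['id'], dict(...))) walks the
# old `family` table and registers each new Family in `data` under its id;
# converters that return a plain dict (or None to skip a row) are added without
# registration.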
def main(args): # pragma: no cover
glottocodes = {}
for row in GC.execute('select ll.hid, l.id from language as l, languoid as ll where ll.pk = l.pk'):
if row[0] and len(row[0]) == 3:
glottocodes[row[0]] = row[1]
icons = issues.Icons()
old_db = DB
vs2008 = get_vs2008(args)
missing_sources = []
refdb_ids = {}
max_id = 7350
with open('/home/robert/venvs/clld/data/wals-data/missing_source.py', 'w') as fp:
for row in old_db.execute("select * from reference"):
try:
author, year = row['id'].split('-')
except:
author, year = None, None
bibdata = get_source(row['id'])
if not bibdata:
fp.write('"%s",\n' % row['id'])
missing_sources.append(row['id'])
bibdata['pk'] = max_id
max_id += 1
if bibdata['pk'] in refdb_ids:
print 'already seen:', row['id'], 'as', refdb_ids[bibdata['pk']]
data['Source'][row['id']] = data['Source'][refdb_ids[bibdata['pk']]]
continue
refdb_ids[bibdata['pk']] = row['id']
bibdata.update({
'id': row['id'],
'name': row['name'],
'description': bibdata.get('title', bibdata.get('booktitle')),
'google_book_search_id': row['gbs_id'] or None,
})
data.add(common.Source, row['id'], **bibdata)
#
# TODO: add additional bibdata as data items
#
print('sources missing for %s refs' % len(missing_sources))
for id, name in ABBRS.items():
DBSession.add(common.GlossAbbreviation(id=id, name=name))
migrate(
'country',
models.Country,
lambda r: (r['id'], dict(id=r['id'], name=r['name'], continent=r['continent'])))
migrate(
'family',
models.Family,
lambda r: (r['id'], dict(id=r['id'], name=r['name'], description=r['comment'])))
for row, icon in zip(
list(old_db.execute("select * from genus order by family_id")),
cycle(iter(icons))
):
genus = data.add(
models.Genus, row['id'],
id=row['id'], name=row['name'], icon=icon, subfamily=row['subfamily'])
genus.family = data['Family'][row['family_id']]
DBSession.flush()
migrate(
'altname',
common.Identifier,
lambda r: (
(r['name'], r['type']), dict(name=r['name'], type='name', description=r['type'])))
# names for isolanguages are not unique!
enames = {}
for r in DB.execute("select * from isolanguage"):
id_ = 'ethnologue-%s' % r['id']
if r['name'] in enames:
data['Identifier'][id_] = enames[r['name']]
else:
enames[r['name']] = data.add(
common.Identifier, id_,
id=id_,
name=r['name'],
type='name',
description='ethnologue')
DBSession.flush()
migrate(
'isolanguage',
common.Identifier,
lambda r: (
r['id'],
dict(
id=r['id'],
name=r['id'],
type=common.IdentifierType.iso.value,
description=r['name'])))
migrate(
'isolanguage',
common.Identifier,
lambda r: None if r['id'] not in glottocodes else (
'gc-%s' % r['id'],
dict(
id='gc-%s' % r['id'],
name=glottocodes[r['id']],
type=common.IdentifierType.glottolog.value,
description=r['name'])))
migrate(
'language',
models.WalsLanguage,
lambda r: (
r['id'],
dict(
id=r['id'],
name=r['name'],
latitude=r['latitude'],
longitude=r['longitude'],
ascii_name=r['ascii_name'],
genus=data['Genus'][r['genus_id']],
samples_100=r['samples_100'] != 0,
samples_200=r['samples_200'] != 0)))
migrate(
'author',
common.Contributor,
lambda r: (
r['id'],
dict(name=r['name'], url=r['www'], id=r['id'], description=r['note'])))
dataset = common.Dataset(
id='wals',
name='WALS Online',
description='The World Atlas of Language Structures Online',
domain='wals.info',
published=date(2013, 8, 15),
contact='[email protected]',
license='http://creativecommons.org/licenses/by-nc-nd/2.0/de/deed.en',
jsondata={
'license_icon': 'http://wals.info/static/images/cc_by_nc_nd.png',
'license_name': 'Creative Commons Attribution-NonCommercial-NoDerivs 2.0 Germany'})
DBSession.add(dataset)
for i, editor in enumerate(['dryerms', 'haspelmathm']):
common.Editor(dataset=dataset, contributor=data['Contributor'][editor], ord=i + 1)
migrate(
'country_language',
models.CountryLanguage,
lambda r: dict(
language_pk=data['WalsLanguage'][r['language_id']].pk,
country_pk=data['Country'][r['country_id']].pk))
migrate(
'altname_language',
common.LanguageIdentifier,
lambda r: dict(
language=data['WalsLanguage'][r['language_id']],
identifier=data['Identifier'][(r['altname_name'], r['altname_type'])],
description=r['relation']))
migrate(
'isolanguage_language',
common.LanguageIdentifier,
lambda r: dict(
language=data['WalsLanguage'][r['language_id']],
identifier=data['Identifier'][r['isolanguage_id']],
description=r['relation']))
migrate(
'isolanguage_language',
common.LanguageIdentifier,
lambda r: None if 'ethnologue-%s' % r['isolanguage_id'] not in data['Identifier'] else dict(
language=data['WalsLanguage'][r['language_id']],
identifier=data['Identifier']['ethnologue-%s' % r['isolanguage_id']],
description=r['relation']))
migrate(
'isolanguage_language',
common.LanguageIdentifier,
lambda r: None if 'gc-%s' % r['isolanguage_id'] not in data['Identifier'] else dict(
language=data['WalsLanguage'][r['language_id']],
identifier=data['Identifier']['gc-%s' % r['isolanguage_id']],
description=r['relation']))
migrate(
'area',
models.Area,
lambda r: (
r['id'],
dict(name=r['name'], dbpedia_url=r['dbpedia_url'], id=str(r['id']))))
def migrate_chapter(row):
kw = dict(
id=row['id'],
name=row['name'],
wp_slug=row['blog_title'],
sortkey=int(row['id']),
area=data['Area'][row['area_id']])
if int(row['id']) in [143, 144]:
kw['created'] = E2011
kw['updated'] = E2011
return row['id'], kw
migrate('chapter', models.Chapter, migrate_chapter)
def migrate_supplement(row):
if row['name'] not in ['Help', 'Abbreviations']:
sortkey = 990 + int(row['id']) if row['name'] != 'Introduction' else 0
id_ = 's%s' % row['id']
kw = dict(id=id_, name=row['name'], sortkey=sortkey)
return id_, kw
migrate('supplement', models.Chapter, migrate_supplement)
migrate(
'chapter_reference',
common.ContributionReference,
lambda r: dict(
contribution=data['Chapter'][r['chapter_id']],
source=data['Source'][r['reference_id']]))
migrate(
'reference_supplement',
common.ContributionReference,
lambda r: dict(
contribution=data['Chapter']['s%s' % r['supplement_id']],
source=data['Source'][r['reference_id']]))
def migrate_feature(row):
kw = dict(id=row['id'], name=row['name'], ordinal_qualifier=row['id'][-1])
if row['id'].startswith('143') or row['id'].startswith('144'):
kw['created'] = E2011
kw['updated'] = E2011
kw['chapter'] = data['Chapter'][row['chapter_id']]
return row['id'], kw
migrate('feature', models.Feature, migrate_feature)
def migrate_value(row):
desc = row['description']
if desc == 'SOV & NegV/VNeg':
if row['icon_id'] != 's9ff':
desc += ' (a)'
else:
desc += ' (b)'
kw = dict(
id='%s-%s' % (row['feature_id'], row['numeric']),
name=desc,
description=row['long_description'],
jsondata=dict(icon=issues.Icons.id(row['icon_id'])),
number=row['numeric'],
parameter=data['Feature'][row['feature_id']])
return (row['feature_id'], row['numeric']), kw
migrate('value', common.DomainElement, migrate_value)
same = 0
added = 0
for row in old_db.execute("select * from datapoint"):
parameter = data['Feature'][row['feature_id']]
language = data['WalsLanguage'][row['language_id']]
id_ = '%s-%s' % (parameter.id, language.id)
created = E2008
updated = E2008
value_numeric = row['value_numeric']
if (language.id, parameter.id) in vs2008:
if vs2008[(language.id, parameter.id)] != row['value_numeric']:
print '~~~', id_, vs2008[(language.id, parameter.id)], '-->', row['value_numeric']
value_numeric = vs2008[(language.id, parameter.id)]
else:
same += 1
else:
updated = E2011
created = E2011
if parameter.id[-1] == 'A' and not (parameter.id.startswith('143') or parameter.id.startswith('144')):
added += 1
kw = dict(id=id_, updated=updated, created=created)
valueset = data.add(
common.ValueSet, row['id'],
language=language,
parameter=parameter,
contribution=parameter.chapter,
**kw)
data.add(
common.Value, id_,
domainelement=data['DomainElement'][(row['feature_id'], value_numeric)],
valueset=valueset,
**kw)
print same, 'datapoints did not change'
print added, 'datapoints added to existing features'
DBSession.flush()
migrate(
'datapoint_reference',
common.ValueSetReference,
lambda r: dict(
valueset=data['ValueSet'][r['datapoint_id']],
source=data['Source'][r['reference_id']],
description=r['note']))
migrate(
'author_chapter',
common.ContributionContributor,
lambda r: dict(
ord=r['order'],
primary=r['primary'] != 0,
contributor_pk=data['Contributor'][r['author_id']].pk,
contribution_pk=data['Chapter'][r['chapter_id']].pk))
migrate(
'author_supplement',
common.ContributionContributor,
lambda r: dict(
ord=r['order'],
primary=r['primary'] != 0,
contributor_pk=data['Contributor'][r['author_id']].pk,
contribution_pk=data['Chapter']['s%s' % r['supplement_id']].pk))
igts = defaultdict(lambda: [])
for row in old_db.execute("select * from igt"):
d = {'id': 'igt-%s' % row['id']}
d.update(parse_igt(row['xhtml']))
igts[row['example_id']].append(d)
for row in old_db.execute("select * from example"):
if not row['language_id']:
print 'example without language:', row['id']
continue
_igts = igts[row['id']]
if _igts:
for igt in _igts:
data.add(
common.Sentence, igt['id'],
markup_comment=row['xhtml'],
language=data['WalsLanguage'][row['language_id']],
**igt)
else:
name = teaser(row['xhtml'])
if name:
data.add(
common.Sentence, row['id'],
id=str(row['id']),
name=name,
xhtml=row['xhtml'],
language=data['WalsLanguage'][row['language_id']])
missing = {}
for row in old_db.execute("select * from example_feature"):
_igts = igts[row['example_id']]
if _igts:
for igt in _igts:
try:
sentence = data['Sentence'][igt['id']]
except KeyError:
print 'missing sentence:', row['example_id']
continue
try:
value = data['Value']['%s-%s' % (row['feature_id'], sentence.language.id)]
DBSession.add(common.ValueSentence(sentence=sentence, value=value))
except KeyError:
missing[(row['feature_id'], sentence.language.id)] = 1
#print 'missing datapoint:', '%s-%s' % (row['feature_id'], sentence.language.id)
else:
try:
sentence = data['Sentence'][row['example_id']]
except KeyError:
print 'missing sentence:', row['example_id']
continue
try:
value = data['Value']['%s-%s' % (row['feature_id'], sentence.language.id)]
DBSession.add(common.ValueSentence(sentence=sentence, value=value))
except KeyError:
missing[(row['feature_id'], sentence.language.id)] = 1
#print 'missing datapoint:', '%s-%s' % (row['feature_id'], sentence.language.id)
print len(missing), 'missing datapoints for example_feature relations'
def prime_cache(args): # pragma: no cover
"""
we use a versioned session to insert the changes in value assignment
"""
#
# compute the changes from 2008 to 2011:
#
vs2008 = get_vs2008(args)
for row in DB.execute("select * from datapoint"):
key = (row['language_id'], row['feature_id'])
old_value = vs2008.get(key)
new_value = row['value_numeric']
if old_value and old_value != new_value:
valueset = VersionedDBSession.query(common.ValueSet)\
.join(common.Language)\
.join(common.Parameter)\
.filter(common.Parameter.id == row['feature_id'])\
.filter(common.Language.id == row['language_id'])\
.one()
value = valueset.values[0]
assert value.domainelement.number == old_value
for de in valueset.parameter.domain:
if de.number == new_value:
value.domainelement = de
break
assert value.domainelement.number == new_value
valueset.updated = E2011
value.updated = E2011
VersionedDBSession.flush()
for row in reader(args.data_file('corrections_2013.tab'), namedtuples=True, newline='\r'):
valueset = VersionedDBSession.query(common.ValueSet)\
.join(common.Language)\
.join(common.Parameter)\
.filter(common.Parameter.id == row.feature)\
.filter(common.Language.id == row.wals_code)\
.one()
value = valueset.values[0]
if value.domainelement.number == int(row.new):
print '**** old news', valueset.language.id, valueset.parameter.id
continue
if value.domainelement.number != int(row.old):
print '--->', valueset.language.id, valueset.parameter.id, value.domainelement.number
for de in valueset.parameter.domain:
if de.number == int(row.new):
value.domainelement = de
break
assert value.domainelement.number == int(row.new)
valueset.updated = E2013
value.updated = E2013
VersionedDBSession.flush()
print 'corrections 2013 done'
for issue in ['0', '9', '10', '11', '13', '14', '15', '16', '17', '19', '20', '24', '26', '27', '28']:
issue = getattr(issues, 'issue' + issue)
issue(VersionedDBSession, E2013)
VersionedDBSession.flush()
transaction.commit()
transaction.begin()
#
# TODO: these must be recomputed as well, after migrations!
#
# cache number of languages for a parameter:
for parameter, valuesets in groupby(
DBSession.query(common.ValueSet).order_by(common.ValueSet.parameter_pk),
lambda vs: vs.parameter):
parameter.representation = str(len(set(v.language_pk for v in valuesets)))
print 'recomputation of representation done'
transaction.commit()
transaction.begin()
# cache iso codes for languages:
for language in DBSession.query(common.Language).options(joinedload_all(
common.Language.languageidentifier, common.LanguageIdentifier.identifier
)):
iso_codes = []
for identifier in language.identifiers:
if identifier.type == common.IdentifierType.iso.value:
iso_codes.append(identifier.name)
language.iso_codes = ', '.join(sorted(set(iso_codes)))
print 'recomputation of iso codes done'
transaction.commit()
transaction.begin()
compute_language_sources()
transaction.commit()
transaction.begin()
gbs_func('update', args)
if __name__ == '__main__': # pragma: no cover
initializedb(create=main, prime_cache=prime_cache)
| [
"[email protected]"
]
| |
4b803dc11bc61c8e8bfaa692a0a6f248f40f8b06 | bf885e4a58ac5ab888890e297eafcfca907d7845 | /hello_world_project/hello_world_project/urls.py | d3bf32b3ef4875f4d09711b297f85325df8055ae | []
| no_license | manishbalyan/django-hello_world_app | c54e4875a9bb3dac7e58224f11e1cf6d60b70463 | bc53fa0a8d3e57bc085bc113c0d5640521c45e44 | refs/heads/master | 2021-01-23T16:28:18.954683 | 2019-02-13T05:55:24 | 2019-02-13T05:55:24 | 38,373,453 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 418 | py | from django.conf.urls import patterns, include, url
from django.contrib import admin
urlpatterns = patterns('',
# Examples:
# url(r'^$', 'hello_world_project.views.home', name='home'),
# url(r'^blog/', include('blog.urls')),
    # These link the project URLs to the app URLs. (Note: the catch-all r'^'
    # pattern will match before r'^about/' ever gets a chance.)
    url(r'^admin/', include(admin.site.urls)),
    url(r'^', include('hello_world.urls')),
    url(r'^about/', include('hello_world.urls'))
)
| [
"[email protected]"
]
| |
6f319e57426860fd21a49fcc0ff9ad5f63b64e02 | ed9d718007b5bc776f3405ad6bac3a64abdebf0b | /google/cloud/logging_v2/services/metrics_service_v2/transports/grpc.py | 6a7a2c6a61a74aea88c49021160e9d906ee686b5 | [
"Apache-2.0"
]
| permissive | renovate-bot/python-logging | 06b020e1aaae238b2693264bbad489567902481b | 28d141d0e8ed4560d2e33f8de0d43b0825a7f33f | refs/heads/master | 2023-08-31T09:01:49.829649 | 2021-08-30T20:37:20 | 2021-08-30T20:37:20 | 238,564,076 | 0 | 0 | Apache-2.0 | 2020-02-05T22:49:14 | 2020-02-05T22:49:13 | null | UTF-8 | Python | false | false | 15,816 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import warnings
from typing import Callable, Dict, Optional, Sequence, Tuple, Union
from google.api_core import grpc_helpers # type: ignore
from google.api_core import gapic_v1 # type: ignore
import google.auth # type: ignore
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
import grpc # type: ignore
from google.cloud.logging_v2.types import logging_metrics
from google.protobuf import empty_pb2 # type: ignore
from .base import MetricsServiceV2Transport, DEFAULT_CLIENT_INFO
class MetricsServiceV2GrpcTransport(MetricsServiceV2Transport):
"""gRPC backend transport for MetricsServiceV2.
Service for configuring logs-based metrics.
This class defines the same methods as the primary client, so the
primary client can load the underlying transport implementation
and call it.
It sends protocol buffers over the wire using gRPC (which is built on
top of HTTP/2); the ``grpcio`` package must be installed.
"""
_stubs: Dict[str, Callable]
def __init__(
self,
*,
host: str = "logging.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Sequence[str] = None,
channel: grpc.Channel = None,
api_mtls_endpoint: str = None,
client_cert_source: Callable[[], Tuple[bytes, bytes]] = None,
ssl_channel_credentials: grpc.ChannelCredentials = None,
client_cert_source_for_mtls: Callable[[], Tuple[bytes, bytes]] = None,
quota_project_id: Optional[str] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
always_use_jwt_access: Optional[bool] = False,
) -> None:
"""Instantiate the transport.
Args:
host (Optional[str]):
The hostname to connect to.
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
This argument is ignored if ``channel`` is provided.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is ignored if ``channel`` is provided.
scopes (Optional(Sequence[str])): A list of scopes. This argument is
ignored if ``channel`` is provided.
channel (Optional[grpc.Channel]): A ``Channel`` instance through
which to make calls.
api_mtls_endpoint (Optional[str]): Deprecated. The mutual TLS endpoint.
If provided, it overrides the ``host`` argument and tries to create
a mutual TLS channel with client SSL credentials from
                ``client_cert_source`` or application default SSL credentials.
client_cert_source (Optional[Callable[[], Tuple[bytes, bytes]]]):
Deprecated. A callback to provide client SSL certificate bytes and
private key bytes, both in PEM format. It is ignored if
``api_mtls_endpoint`` is None.
ssl_channel_credentials (grpc.ChannelCredentials): SSL credentials
for grpc channel. It is ignored if ``channel`` is provided.
client_cert_source_for_mtls (Optional[Callable[[], Tuple[bytes, bytes]]]):
A callback to provide client certificate bytes and private key bytes,
both in PEM format. It is used to configure mutual TLS channel. It is
ignored if ``channel`` or ``ssl_channel_credentials`` is provided.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
always_use_jwt_access (Optional[bool]): Whether self signed JWT should
be used for service account credentials.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
self._grpc_channel = None
self._ssl_channel_credentials = ssl_channel_credentials
self._stubs: Dict[str, Callable] = {}
if api_mtls_endpoint:
warnings.warn("api_mtls_endpoint is deprecated", DeprecationWarning)
if client_cert_source:
warnings.warn("client_cert_source is deprecated", DeprecationWarning)
if channel:
# Ignore credentials if a channel was passed.
credentials = False
# If a channel was explicitly provided, set it.
self._grpc_channel = channel
self._ssl_channel_credentials = None
else:
if api_mtls_endpoint:
host = api_mtls_endpoint
# Create SSL credentials with client_cert_source or application
# default SSL credentials.
if client_cert_source:
cert, key = client_cert_source()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
else:
self._ssl_channel_credentials = SslCredentials().ssl_credentials
else:
if client_cert_source_for_mtls and not ssl_channel_credentials:
cert, key = client_cert_source_for_mtls()
self._ssl_channel_credentials = grpc.ssl_channel_credentials(
certificate_chain=cert, private_key=key
)
# The base transport sets the host, credentials and scopes
super().__init__(
host=host,
credentials=credentials,
credentials_file=credentials_file,
scopes=scopes,
quota_project_id=quota_project_id,
client_info=client_info,
always_use_jwt_access=always_use_jwt_access,
)
if not self._grpc_channel:
self._grpc_channel = type(self).create_channel(
self._host,
credentials=self._credentials,
credentials_file=credentials_file,
scopes=self._scopes,
ssl_credentials=self._ssl_channel_credentials,
quota_project_id=quota_project_id,
options=[
("grpc.max_send_message_length", -1),
("grpc.max_receive_message_length", -1),
],
)
# Wrap messages. This must be done after self._grpc_channel exists
self._prep_wrapped_messages(client_info)
@classmethod
def create_channel(
cls,
host: str = "logging.googleapis.com",
credentials: ga_credentials.Credentials = None,
credentials_file: str = None,
scopes: Optional[Sequence[str]] = None,
quota_project_id: Optional[str] = None,
**kwargs,
) -> grpc.Channel:
"""Create and return a gRPC channel object.
Args:
host (Optional[str]): The host for the channel to use.
credentials (Optional[~.Credentials]): The
authorization credentials to attach to requests. These
credentials identify this application to the service. If
none are specified, the client will attempt to ascertain
the credentials from the environment.
credentials_file (Optional[str]): A file with credentials that can
be loaded with :func:`google.auth.load_credentials_from_file`.
This argument is mutually exclusive with credentials.
scopes (Optional[Sequence[str]]): A optional list of scopes needed for this
service. These are only used when credentials are not specified and
are passed to :func:`google.auth.default`.
quota_project_id (Optional[str]): An optional project to use for billing
and quota.
kwargs (Optional[dict]): Keyword arguments, which are passed to the
channel creation.
Returns:
grpc.Channel: A gRPC channel object.
Raises:
google.api_core.exceptions.DuplicateCredentialArgs: If both ``credentials``
and ``credentials_file`` are passed.
"""
return grpc_helpers.create_channel(
host,
credentials=credentials,
credentials_file=credentials_file,
quota_project_id=quota_project_id,
default_scopes=cls.AUTH_SCOPES,
scopes=scopes,
default_host=cls.DEFAULT_HOST,
**kwargs,
)
@property
def grpc_channel(self) -> grpc.Channel:
"""Return the channel designed to connect to this service.
"""
return self._grpc_channel
@property
def list_log_metrics(
self,
) -> Callable[
[logging_metrics.ListLogMetricsRequest], logging_metrics.ListLogMetricsResponse
]:
r"""Return a callable for the list log metrics method over gRPC.
Lists logs-based metrics.
Returns:
Callable[[~.ListLogMetricsRequest],
~.ListLogMetricsResponse]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "list_log_metrics" not in self._stubs:
self._stubs["list_log_metrics"] = self.grpc_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/ListLogMetrics",
request_serializer=logging_metrics.ListLogMetricsRequest.serialize,
response_deserializer=logging_metrics.ListLogMetricsResponse.deserialize,
)
return self._stubs["list_log_metrics"]
@property
def get_log_metric(
self,
) -> Callable[[logging_metrics.GetLogMetricRequest], logging_metrics.LogMetric]:
r"""Return a callable for the get log metric method over gRPC.
Gets a logs-based metric.
Returns:
Callable[[~.GetLogMetricRequest],
~.LogMetric]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "get_log_metric" not in self._stubs:
self._stubs["get_log_metric"] = self.grpc_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/GetLogMetric",
request_serializer=logging_metrics.GetLogMetricRequest.serialize,
response_deserializer=logging_metrics.LogMetric.deserialize,
)
return self._stubs["get_log_metric"]
@property
def create_log_metric(
self,
) -> Callable[[logging_metrics.CreateLogMetricRequest], logging_metrics.LogMetric]:
r"""Return a callable for the create log metric method over gRPC.
Creates a logs-based metric.
Returns:
Callable[[~.CreateLogMetricRequest],
~.LogMetric]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "create_log_metric" not in self._stubs:
self._stubs["create_log_metric"] = self.grpc_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/CreateLogMetric",
request_serializer=logging_metrics.CreateLogMetricRequest.serialize,
response_deserializer=logging_metrics.LogMetric.deserialize,
)
return self._stubs["create_log_metric"]
@property
def update_log_metric(
self,
) -> Callable[[logging_metrics.UpdateLogMetricRequest], logging_metrics.LogMetric]:
r"""Return a callable for the update log metric method over gRPC.
Creates or updates a logs-based metric.
Returns:
Callable[[~.UpdateLogMetricRequest],
~.LogMetric]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "update_log_metric" not in self._stubs:
self._stubs["update_log_metric"] = self.grpc_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/UpdateLogMetric",
request_serializer=logging_metrics.UpdateLogMetricRequest.serialize,
response_deserializer=logging_metrics.LogMetric.deserialize,
)
return self._stubs["update_log_metric"]
@property
def delete_log_metric(
self,
) -> Callable[[logging_metrics.DeleteLogMetricRequest], empty_pb2.Empty]:
r"""Return a callable for the delete log metric method over gRPC.
Deletes a logs-based metric.
Returns:
Callable[[~.DeleteLogMetricRequest],
~.Empty]:
A function that, when called, will call the underlying RPC
on the server.
"""
# Generate a "stub function" on-the-fly which will actually make
# the request.
# gRPC handles serialization and deserialization, so we just need
# to pass in the functions for each.
if "delete_log_metric" not in self._stubs:
self._stubs["delete_log_metric"] = self.grpc_channel.unary_unary(
"/google.logging.v2.MetricsServiceV2/DeleteLogMetric",
request_serializer=logging_metrics.DeleteLogMetricRequest.serialize,
response_deserializer=empty_pb2.Empty.FromString,
)
return self._stubs["delete_log_metric"]
__all__ = ("MetricsServiceV2GrpcTransport",)
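# Usage sketch (normally this transport is constructed for you by the generated
# client; direct use would look roughly like this, with application-default
# credentials available):
#
#     transport = MetricsServiceV2GrpcTransport()
#     request = logging_metrics.ListLogMetricsRequest(parent="projects/my-project")
#     response = transport.list_log_metrics(request)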
| [
"[email protected]"
]
| |
71b4c3192c59446446642f2dc38ac6eac594e87f | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_138/1273.py | 7807dd8fe021579a8ca3aa6fa4f8c90eff1cc487 | []
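# Google Code Jam "Deceitful War": two answers are computed per case --
#   * w (plain War): Ken plays optimally, greedily beating each of Naomi's
#     blocks with his smallest heavier block, so w counts the Naomi blocks
#     that no Ken block can beat.
#   * dw (Deceitful War): compare the heaviest blocks; if Naomi's wins she
#     takes the point, otherwise she sacrifices her lightest block against
#     Ken's heaviest.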
| no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 800 | py | t = int(raw_input())
for i in range(1,t+1):
n = int(raw_input())
line = raw_input().split()
naomi = []
for j in range(0,n):
naomi.append(float(line[j]))
line = raw_input().split()
ken = []
for j in range(0,n):
ken.append(float(line[j]))
naomi = sorted(naomi)
ken = sorted(ken)
ind_ken = 0
ind_naomi = 0
end = False
while ind_ken != n:
while ken[ind_ken] < naomi[ind_naomi]:
ind_ken += 1
if ind_ken == n:
end = True
break
if end:
break
ind_naomi += 1
ind_ken += 1
w = len(naomi) - ind_naomi
dw = 0
while len(ken) > 0:
if ken[len(ken) - 1] < naomi[len(naomi) - 1]:
dw += 1
ken.pop()
naomi.pop()
else:
ken.pop()
naomi.pop(0)
str = "Case #%d: %d %d" % (i, dw, w)
print str | [
"[email protected]"
]
| |
39abb1c58a1ae46d15c937d463dbc72c51ee8659 | b641319ea5164c1eb5db77c819abdd1f8136fce3 | /random_stream.py | 26e2a2c280f4736e7a6b65c58e3d223854009094 | []
| no_license | Anwesh43/theano-starter | 8d4b2a9e3023f10018f9005ef9a9e4583270fee0 | 87f2d987ce02a883889eac6543b82530d1b90989 | refs/heads/master | 2021-01-12T02:48:45.879958 | 2017-01-16T15:35:22 | 2017-01-16T15:35:22 | 78,109,827 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 693 | py | import theano.tensor as T
from theano import *
from theano.tensor.shared_randomstreams import RandomStreams
srng = RandomStreams(seed=1000)
r_uv = srng.uniform((2,2))
r_nd = srng.normal((2,2))
rng_val = r_uv.rng.get_value(borrow=True)
rng_val.seed(345)
r_uv.rng.set_value(rng_val,borrow=True)
r_uniform = function([],r_uv)
r_normal = function([],r_nd,no_default_updates=True)
print r_uniform()
print r_normal()
print r_normal()
rnd_val = r_uv.rng.get_value(borrow=True)
state = rnd_val.get_state()
v1 = r_uniform()
v2 = r_uniform()
rnd_val = r_uv.rng.get_value(borrow=True)
rnd_val.set_state(state)
r_nd.rng.set_value(rnd_val)
v3 = r_uniform()
print v1
print v2
print v3
print v1 == v3
| [
"[email protected]"
]
| |
1ea4f37d648dbba8cdb93a2e9036c0c97129ecf0 | 8c06beebdb5ee28f7292574fefd540f8c43a7acf | /Arctype_Dashboard/asgi.py | f843ce0156227c94479067214b7caa5e4e018782 | []
| no_license | progettazionemauro/ARCTYPE_DJANGO_DASHBOARD | 0c3baf93c6a3f8dd28d9459a21a273efbed1f4e3 | 60d1dab19c32b7a80d70de85e846fd6760be9a26 | refs/heads/master | 2023-04-12T01:37:57.317231 | 2021-05-03T01:48:41 | 2021-05-03T01:48:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 411 | py | """
ASGI config for Arctype_Dashboard project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/howto/deployment/asgi/
"""
import os
from django.core.asgi import get_asgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Arctype_Dashboard.settings')
application = get_asgi_application()
| [
"[email protected]"
]
| |
bb352a077de0a96d708f7bd908b1d4f2e9c8b720 | aa76391d5789b5082702d3f76d2b6e13488d30be | /programmers/Lev1/print_triangle.py | aeb50f3fd8f6008928c6bee577e7267406cb4451 | []
| no_license | B2SIC/python_playground | 118957fe4ca3dc9395bc78b56825b9a014ef95cb | 14cbc32affbeec57abbd8e8c4ff510aaa986874e | refs/heads/master | 2023-02-28T21:27:34.148351 | 2021-02-12T10:20:49 | 2021-02-12T10:20:49 | 104,154,645 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 132 | py | def printTriangle(num):
s = ''
for i in range(1, num + 1):
s += "*" * i + "\n"
return s
print(printTriangle(5)) | [
"[email protected]"
]
| |
baa7678b52fae8e25d141a1b921f8006e86a6d26 | 66adad393a638d3a4cc47ed3d8b45b208f155ebe | /bookmarks1/account/views.py | bdac6a72fc611c4ef5ecf8d9c87d1849eaffa17e | []
| no_license | Dyavathrocky/image_sharing | a5e265c65fde29c1f665c522230bd73dfbf16c23 | 0939240f9a96dd8c80de813939d79455e95782c7 | refs/heads/main | 2023-01-21T15:23:10.141362 | 2020-11-29T13:53:26 | 2020-11-29T13:53:26 | 316,220,540 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,172 | py | from django.http import HttpResponse
from django.shortcuts import render
from django.contrib.auth import authenticate, login
from django.contrib.auth.decorators import login_required
from .forms import LoginForm, UserRegistrationForm, \
UserEditForm, ProfileEditForm
from .models import Profile
from django.contrib import messages
# Create your views here.
@login_required
def dashboard(request):
return render(request,
'account/dashboard.html', {'section': 'dashboard'})
def user_login(request):
if request.method == 'POST':
form = LoginForm(request.POST)
if form.is_valid():
cd = form.cleaned_data
user = authenticate(request,
username=cd['username'],
password=cd['password'])
if user is not None:
if user.is_active:
login(request, user)
return HttpResponse('Authenticated '
'successfully')
else:
return HttpResponse('Disabled account')
else:
return HttpResponse('Invalid login')
else:
form = LoginForm()
return render(request, 'account/login.html', {'form': form})
def register(request):
if request.method == 'POST':
user_form = UserRegistrationForm(request.POST)
if user_form.is_valid():
# Create a new user object but avoid saving it yet
new_user = user_form.save(commit=False)
# Set the chosen password
new_user.set_password(
user_form.cleaned_data['password'])
# Save the User object
new_user.save()
# Create the user profile
Profile.objects.create(user=new_user)
return render(request,
'account/register_done.html',
{'new_user': new_user})
else:
user_form = UserRegistrationForm()
return render(request,
'account/register.html',
{'user_form': user_form})
@login_required
def edit(request):
if request.method == 'POST':
user_form = UserEditForm(instance=request.user,
data=request.POST)
profile_form = ProfileEditForm(
instance=request.user.profile,
data=request.POST,
files=request.FILES)
if user_form.is_valid() and profile_form.is_valid():
user_form.save()
profile_form.save()
messages.success(request, 'Profile updated successfully')
else:
messages.error(request, 'Error updating your profile')
else:
user_form = UserEditForm(instance=request.user)
profile_form = ProfileEditForm(
instance=request.user.profile)
return render(request,
'account/edit.html',
{'user_form': user_form,
'profile_form': profile_form}) | [
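# A minimal urls.py sketch wiring up the views above (hypothetical; the
# project's real URL configuration is not part of this file):
#   from django.urls import path
#   from . import views
#   urlpatterns = [
#       path('', views.dashboard, name='dashboard'),
#       path('login/', views.user_login, name='login'),
#       path('register/', views.register, name='register'),
#       path('edit/', views.edit, name='edit'),
#   ]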
"[email protected]"
]
| |
e7e0deac411c991076dc18e374867a07a253d989 | d89a482aaf3001bbc4515f39af9ba474e1ae6062 | /sip/sip_history.py | 7bdbef694f14c90a11c7df182424967f95a137dc | []
| no_license | hongtao510/u_tool | 2925e3694aba81714cf83018c3f8520a7b503228 | 98c962cfb1f53c4971fb2b9ae22c882c0fae6497 | refs/heads/master | 2021-01-10T20:40:24.793531 | 2014-03-14T22:57:37 | 2014-03-14T22:57:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,527 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Jan 03 13:30:41 2012
@author: jharston
"""
import webapp2 as webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext.webapp import template
import os
from uber import uber_lib
import history_tables
import rest_funcs
class SIPHistoryPage(webapp.RequestHandler):
def get(self):
templatepath = os.path.dirname(__file__) + '/../templates/'
ChkCookie = self.request.cookies.get("ubercookie")
html = uber_lib.SkinChk(ChkCookie, "SIP User History")
html = html + template.render(templatepath + '02uberintroblock_wmodellinks.html', {'model':'sip','page':'history'})
html = html + template.render(templatepath + '03ubertext_links_left.html', {})
html = html + template.render(templatepath + '04uberalgorithm_start.html', {
'model':'sip',
'model_attributes':'SIP User History'})
        html = html + template.render(templatepath + 'history_pagination.html', {})
hist_obj = rest_funcs.user_hist('admin', 'sip')
html = html + history_tables.table_all(hist_obj)
html = html + template.render(templatepath + '04ubertext_end.html', {})
html = html + template.render(templatepath + '06uberfooter.html', {'links': ''})
self.response.out.write(html)
app = webapp.WSGIApplication([('/.*', SIPHistoryPage)], debug=True)
def main():
run_wsgi_app(app)
if __name__ == '__main__':
main()
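# Deployment note (a sketch; the real app.yaml is not part of this file): on
# the App Engine python27 runtime this module would be mapped with something
# like the hypothetical entry below, and the '/.*' pattern above then catches
# every request routed to it:
#   - url: /sip/history.*
#     script: sip.sip_history.app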
| [
"[email protected]"
]
| |
d57dc6bd6e6ed40663cea90c3cb805e43497b4f9 | e0980f704a573894350e285f66f4cf390837238e | /.history/news/models_20201124144813.py | df865398f4c40cdf05ca57629f9dae3cd204713b | []
| no_license | rucpata/WagtailWebsite | 28008474ec779d12ef43bceb61827168274a8b61 | 5aa44f51592f49c9a708fc5515ad877c6a29dfd9 | refs/heads/main | 2023-02-09T15:30:02.133415 | 2021-01-05T14:55:45 | 2021-01-05T14:55:45 | 303,961,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,391 | py | from django.db import models
from wagtail.contrib.forms.models import AbstractEmailForm
# Create your models here.
class FormField(AbstractEmailForm):
page = ParentalKey(
'NewsPage',
on_delete=models.CASCADE,
related_name
)
class NewsPage(AbstractEmailForm):
tempalte ='news/news_page.html'
leanding_page_template = 'news/news_page_leading.html'
subpage_types = []
max_coun = 1
intro = RichTextField(blank=True, features=['bold', 'italic', 'ol', 'ul'])
thank_you_text = RichTextField(
blank=True,
features=['bold', 'italic', 'ol', 'ul'])
map_image = models.ForeignKey(
'wagtailimages.Image',
null=True,
blank=False,
on_delete=models.SET_NULL,
help_text='Obrazek będzie przycięty do rozmairu 588px na 355 px',
related_name='+',
)
map_url = models.URLField(
blank=True,
help_text='Opcjonalne. Jeśli podasz tutaj łączę, obraz stanie się łączem.'
)
content_panels = AbstractEmailForm.content_panel + [
FieldPanel('intro'),
ImageChooserPanel('map_iamge'),
FieldPanel('map_url'),
InlinePanel('form_fields', label="Form Fields"),
FieldPanel('thank_you_text'),
FieldPanel('from_address'),
FieldPanel('to_address'),
FieldPanel('subject'),
]
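# A quick way to inspect the resulting form definition from `python manage.py
# shell` (a sketch; assumes at least one NewsPage has been published):
#   from news.models import NewsPage
#   page = NewsPage.objects.live().first()
#   for field in page.form_fields.all():    # FormField rows via the ParentalKey
#       print(field.label, field.field_type)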
| [
"[email protected]"
]
| |
60e68556375c7be92b4a838b420b1e603730aca7 | 6f6997efe1a15d57688c12ff0197790fb2eac6bc | /histogram/wigets/waferdata_histogram.py | aa532f8c85f67b1d487d0d292c97a7194ae277d9 | []
| no_license | montanaviking/waferprobe | 29fa5f0eb07e60820162916e48059f63374902c5 | fb2786b376153f9b6e9495b6faf3ee5960f90a06 | refs/heads/master | 2022-11-06T10:57:01.539733 | 2020-06-19T23:47:59 | 2020-06-19T23:47:59 | 273,601,408 | 4 | 0 | null | null | null | null | UTF-8 | Python | false | false | 65,386 | py | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'waferdata_histogram.ui'
#
# Created by: PyQt5 UI code generator 5.11.2
#
# WARNING! All changes made in this file will be lost!
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_Histogram(object):
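    # Typical use of this pyuic5-generated class (a sketch; the host widget in
    # this project lives elsewhere and the class name below is hypothetical):
    #   class HistogramWindow(QtWidgets.QWidget):
    #       def __init__(self):
    #           super().__init__()
    #           self.ui = Ui_Histogram()
    #           self.ui.setupUi(self)   # builds every child widget onto self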
def setupUi(self, Histogram):
Histogram.setObjectName("Histogram")
Histogram.resize(763, 624)
Histogram.setFocusPolicy(QtCore.Qt.TabFocus)
self.verticalLayout_5 = QtWidgets.QVBoxLayout(Histogram)
self.verticalLayout_5.setObjectName("verticalLayout_5")
self.horizontalLayout_9 = QtWidgets.QHBoxLayout()
self.horizontalLayout_9.setObjectName("horizontalLayout_9")
self.verticalLayout_2 = QtWidgets.QVBoxLayout()
self.verticalLayout_2.setObjectName("verticalLayout_2")
self.horizontalLayout_13 = QtWidgets.QHBoxLayout()
self.horizontalLayout_13.setObjectName("horizontalLayout_13")
self.wafernamelabel = QtWidgets.QLabel(Histogram)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.wafernamelabel.sizePolicy().hasHeightForWidth())
self.wafernamelabel.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(8)
self.wafernamelabel.setFont(font)
self.wafernamelabel.setObjectName("wafernamelabel")
self.horizontalLayout_13.addWidget(self.wafernamelabel)
self.wafername = QtWidgets.QLineEdit(Histogram)
self.wafername.setEnabled(True)
font = QtGui.QFont()
font.setPointSize(8)
self.wafername.setFont(font)
self.wafername.setAcceptDrops(False)
self.wafername.setReadOnly(True)
self.wafername.setObjectName("wafername")
self.horizontalLayout_13.addWidget(self.wafername)
self.verticalLayout_2.addLayout(self.horizontalLayout_13)
self.horizontalLayout_12 = QtWidgets.QHBoxLayout()
self.horizontalLayout_12.setObjectName("horizontalLayout_12")
self.Vgs_label = QtWidgets.QLabel(Histogram)
self.Vgs_label.setEnabled(True)
font = QtGui.QFont()
font.setPointSize(8)
self.Vgs_label.setFont(font)
self.Vgs_label.setObjectName("Vgs_label")
self.horizontalLayout_12.addWidget(self.Vgs_label)
self.Vgs_comboBox = QtWidgets.QComboBox(Histogram)
self.Vgs_comboBox.setEnabled(True)
font = QtGui.QFont()
font.setPointSize(8)
self.Vgs_comboBox.setFont(font)
self.Vgs_comboBox.setObjectName("Vgs_comboBox")
self.horizontalLayout_12.addWidget(self.Vgs_comboBox)
self.Vds_FOC_label = QtWidgets.QLabel(Histogram)
self.Vds_FOC_label.setEnabled(True)
font = QtGui.QFont()
font.setPointSize(8)
self.Vds_FOC_label.setFont(font)
self.Vds_FOC_label.setObjectName("Vds_FOC_label")
self.horizontalLayout_12.addWidget(self.Vds_FOC_label)
self.Vds_FOC = QtWidgets.QLineEdit(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.Vds_FOC.setFont(font)
self.Vds_FOC.setInputMethodHints(QtCore.Qt.ImhFormattedNumbersOnly)
self.Vds_FOC.setReadOnly(False)
self.Vds_FOC.setObjectName("Vds_FOC")
self.horizontalLayout_12.addWidget(self.Vds_FOC)
spacerItem = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_12.addItem(spacerItem)
self.verticalLayout_2.addLayout(self.horizontalLayout_12)
self.horizontalLayout_8 = QtWidgets.QHBoxLayout()
self.horizontalLayout_8.setObjectName("horizontalLayout_8")
self.horizontalLayout_15 = QtWidgets.QHBoxLayout()
self.horizontalLayout_15.setObjectName("horizontalLayout_15")
self.horizontalLayout_8.addLayout(self.horizontalLayout_15)
self.Yf_checkBox = QtWidgets.QCheckBox(Histogram)
self.Yf_checkBox.setObjectName("Yf_checkBox")
self.horizontalLayout_8.addWidget(self.Yf_checkBox)
self.deltaVgs_thres_label = QtWidgets.QLabel(Histogram)
self.deltaVgs_thres_label.setEnabled(False)
font = QtGui.QFont()
font.setPointSize(8)
self.deltaVgs_thres_label.setFont(font)
self.deltaVgs_thres_label.setObjectName("deltaVgs_thres_label")
self.horizontalLayout_8.addWidget(self.deltaVgs_thres_label)
self.delta_Vgs_thres = QtWidgets.QLineEdit(Histogram)
self.delta_Vgs_thres.setEnabled(False)
font = QtGui.QFont()
font.setPointSize(8)
self.delta_Vgs_thres.setFont(font)
self.delta_Vgs_thres.setToolTip("")
self.delta_Vgs_thres.setInputMethodHints(QtCore.Qt.ImhFormattedNumbersOnly)
self.delta_Vgs_thres.setReadOnly(False)
self.delta_Vgs_thres.setObjectName("delta_Vgs_thres")
self.horizontalLayout_8.addWidget(self.delta_Vgs_thres)
self.Yf_Vgsfitrange_label = QtWidgets.QLabel(Histogram)
self.Yf_Vgsfitrange_label.setEnabled(False)
font = QtGui.QFont()
font.setPointSize(8)
self.Yf_Vgsfitrange_label.setFont(font)
self.Yf_Vgsfitrange_label.setObjectName("Yf_Vgsfitrange_label")
self.horizontalLayout_8.addWidget(self.Yf_Vgsfitrange_label)
self.Yf_Vgsfitrange_frac = QtWidgets.QLineEdit(Histogram)
self.Yf_Vgsfitrange_frac.setEnabled(False)
font = QtGui.QFont()
font.setPointSize(8)
self.Yf_Vgsfitrange_frac.setFont(font)
self.Yf_Vgsfitrange_frac.setToolTip("")
self.Yf_Vgsfitrange_frac.setInputMethodHints(QtCore.Qt.ImhFormattedNumbersOnly)
self.Yf_Vgsfitrange_frac.setReadOnly(False)
self.Yf_Vgsfitrange_frac.setObjectName("Yf_Vgsfitrange_frac")
self.horizontalLayout_8.addWidget(self.Yf_Vgsfitrange_frac)
spacerItem1 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.horizontalLayout_8.addItem(spacerItem1)
self.verticalLayout_2.addLayout(self.horizontalLayout_8)
self.horizontalLayout_11 = QtWidgets.QHBoxLayout()
self.horizontalLayout_11.setObjectName("horizontalLayout_11")
self.parameterlabel = QtWidgets.QLabel(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.parameterlabel.setFont(font)
self.parameterlabel.setObjectName("parameterlabel")
self.horizontalLayout_11.addWidget(self.parameterlabel)
self.measurementtype = QtWidgets.QComboBox(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.measurementtype.setFont(font)
self.measurementtype.setEditable(False)
self.measurementtype.setObjectName("measurementtype")
self.horizontalLayout_11.addWidget(self.measurementtype)
self.verticalLayout_2.addLayout(self.horizontalLayout_11)
spacerItem2 = QtWidgets.QSpacerItem(20, 40, QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Expanding)
self.verticalLayout_2.addItem(spacerItem2)
self.verticalLayout_4 = QtWidgets.QVBoxLayout()
self.verticalLayout_4.setObjectName("verticalLayout_4")
self.set_includes_label = QtWidgets.QLabel(Histogram)
self.set_includes_label.setEnabled(True)
font = QtGui.QFont()
font.setPointSize(8)
self.set_includes_label.setFont(font)
self.set_includes_label.setAlignment(QtCore.Qt.AlignCenter)
self.set_includes_label.setObjectName("set_includes_label")
self.verticalLayout_4.addWidget(self.set_includes_label)
self.set_includes = QtWidgets.QLineEdit(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.set_includes.setFont(font)
self.set_includes.setFocusPolicy(QtCore.Qt.ClickFocus)
self.set_includes.setAcceptDrops(False)
self.set_includes.setWhatsThis("")
self.set_includes.setReadOnly(False)
self.set_includes.setObjectName("set_includes")
self.verticalLayout_4.addWidget(self.set_includes)
self.verticalLayout_2.addLayout(self.verticalLayout_4)
self.horizontalLayout_9.addLayout(self.verticalLayout_2)
self.verticalLayout = QtWidgets.QVBoxLayout()
self.verticalLayout.setObjectName("verticalLayout")
self.horizontalLayout_3 = QtWidgets.QHBoxLayout()
self.horizontalLayout_3.setObjectName("horizontalLayout_3")
self.averagelabel = QtWidgets.QLabel(Histogram)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Minimum)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.averagelabel.sizePolicy().hasHeightForWidth())
self.averagelabel.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(8)
self.averagelabel.setFont(font)
self.averagelabel.setObjectName("averagelabel")
self.horizontalLayout_3.addWidget(self.averagelabel)
self.average = QtWidgets.QLineEdit(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.average.setFont(font)
self.average.setReadOnly(True)
self.average.setObjectName("average")
self.horizontalLayout_3.addWidget(self.average)
self.standarddeviation = QtWidgets.QLabel(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.standarddeviation.setFont(font)
self.standarddeviation.setObjectName("standarddeviation")
self.horizontalLayout_3.addWidget(self.standarddeviation)
self.standard_deviation = QtWidgets.QLineEdit(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.standard_deviation.setFont(font)
self.standard_deviation.setReadOnly(True)
self.standard_deviation.setObjectName("standard_deviation")
self.horizontalLayout_3.addWidget(self.standard_deviation)
self.verticalLayout.addLayout(self.horizontalLayout_3)
self.horizontalLayout_4 = QtWidgets.QHBoxLayout()
self.horizontalLayout_4.setObjectName("horizontalLayout_4")
self.log_linear_histogram_but = QtWidgets.QPushButton(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.log_linear_histogram_but.setFont(font)
self.log_linear_histogram_but.setFocusPolicy(QtCore.Qt.NoFocus)
self.log_linear_histogram_but.setStyleSheet("background-color: hsv(100, 200, 255);\n"
"color: rgb(0, 0,0);")
self.log_linear_histogram_but.setCheckable(True)
self.log_linear_histogram_but.setChecked(False)
self.log_linear_histogram_but.setAutoDefault(False)
self.log_linear_histogram_but.setObjectName("log_linear_histogram_but")
self.horizontalLayout_4.addWidget(self.log_linear_histogram_but)
self.label_numberofdevices = QtWidgets.QLabel(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.label_numberofdevices.setFont(font)
self.label_numberofdevices.setObjectName("label_numberofdevices")
self.horizontalLayout_4.addWidget(self.label_numberofdevices)
self.numberofdevices = QtWidgets.QLineEdit(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.numberofdevices.setFont(font)
self.numberofdevices.setReadOnly(True)
self.numberofdevices.setObjectName("numberofdevices")
self.horizontalLayout_4.addWidget(self.numberofdevices)
self.verticalLayout.addLayout(self.horizontalLayout_4)
self.horizontalLayout_5 = QtWidgets.QHBoxLayout()
self.horizontalLayout_5.setObjectName("horizontalLayout_5")
self.selectmintype = QtWidgets.QComboBox(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.selectmintype.setFont(font)
self.selectmintype.setObjectName("selectmintype")
self.selectmintype.addItem("")
self.selectmintype.addItem("")
self.horizontalLayout_5.addWidget(self.selectmintype)
self.minimum = QtWidgets.QLineEdit(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.minimum.setFont(font)
self.minimum.setObjectName("minimum")
self.horizontalLayout_5.addWidget(self.minimum)
self.verticalLayout.addLayout(self.horizontalLayout_5)
self.horizontalLayout_6 = QtWidgets.QHBoxLayout()
self.horizontalLayout_6.setObjectName("horizontalLayout_6")
self.selectmaxtype = QtWidgets.QComboBox(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.selectmaxtype.setFont(font)
self.selectmaxtype.setObjectName("selectmaxtype")
self.selectmaxtype.addItem("")
self.selectmaxtype.addItem("")
self.horizontalLayout_6.addWidget(self.selectmaxtype)
self.maximum = QtWidgets.QLineEdit(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.maximum.setFont(font)
self.maximum.setObjectName("maximum")
self.horizontalLayout_6.addWidget(self.maximum)
self.verticalLayout.addLayout(self.horizontalLayout_6)
self.horizontalLayout_7 = QtWidgets.QHBoxLayout()
self.horizontalLayout_7.setObjectName("horizontalLayout_7")
self.range_lin_fit_label = QtWidgets.QLabel(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.range_lin_fit_label.setFont(font)
self.range_lin_fit_label.setObjectName("range_lin_fit_label")
self.horizontalLayout_7.addWidget(self.range_lin_fit_label)
self.range_linearfit = QtWidgets.QLineEdit(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.range_linearfit.setFont(font)
self.range_linearfit.setFocusPolicy(QtCore.Qt.StrongFocus)
self.range_linearfit.setAcceptDrops(True)
self.range_linearfit.setObjectName("range_linearfit")
self.horizontalLayout_7.addWidget(self.range_linearfit)
self.transfer_curve_smoothing_factor_label = QtWidgets.QLabel(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.transfer_curve_smoothing_factor_label.setFont(font)
self.transfer_curve_smoothing_factor_label.setObjectName("transfer_curve_smoothing_factor_label")
self.horizontalLayout_7.addWidget(self.transfer_curve_smoothing_factor_label)
self.transfer_curve_smoothing_factor = QtWidgets.QLineEdit(Histogram)
self.transfer_curve_smoothing_factor.setEnabled(False)
font = QtGui.QFont()
font.setPointSize(8)
self.transfer_curve_smoothing_factor.setFont(font)
self.transfer_curve_smoothing_factor.setFocusPolicy(QtCore.Qt.ClickFocus)
self.transfer_curve_smoothing_factor.setAcceptDrops(True)
self.transfer_curve_smoothing_factor.setObjectName("transfer_curve_smoothing_factor")
self.horizontalLayout_7.addWidget(self.transfer_curve_smoothing_factor)
self.verticalLayout.addLayout(self.horizontalLayout_7)
self.horizontalLayout_16 = QtWidgets.QHBoxLayout()
self.horizontalLayout_16.setObjectName("horizontalLayout_16")
self.TLM_lin_fit_label = QtWidgets.QLabel(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.TLM_lin_fit_label.setFont(font)
self.TLM_lin_fit_label.setObjectName("TLM_lin_fit_label")
self.horizontalLayout_16.addWidget(self.TLM_lin_fit_label)
self.TLM_fit_quality = QtWidgets.QLineEdit(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.TLM_fit_quality.setFont(font)
self.TLM_fit_quality.setAcceptDrops(True)
self.TLM_fit_quality.setObjectName("TLM_fit_quality")
self.horizontalLayout_16.addWidget(self.TLM_fit_quality)
self.minTLMlength_label = QtWidgets.QLabel(Histogram)
self.minTLMlength_label.setEnabled(True)
font = QtGui.QFont()
font.setPointSize(8)
self.minTLMlength_label.setFont(font)
self.minTLMlength_label.setObjectName("minTLMlength_label")
self.horizontalLayout_16.addWidget(self.minTLMlength_label)
self.TLMlengthminimum = QtWidgets.QComboBox(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.TLMlengthminimum.setFont(font)
self.TLMlengthminimum.setEditable(False)
self.TLMlengthminimum.setObjectName("TLMlengthminimum")
self.horizontalLayout_16.addWidget(self.TLMlengthminimum)
self.maxTLMlength_label = QtWidgets.QLabel(Histogram)
self.maxTLMlength_label.setEnabled(True)
font = QtGui.QFont()
font.setPointSize(8)
self.maxTLMlength_label.setFont(font)
self.maxTLMlength_label.setObjectName("maxTLMlength_label")
self.horizontalLayout_16.addWidget(self.maxTLMlength_label)
self.TLMlengthmaximum = QtWidgets.QComboBox(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.TLMlengthmaximum.setFont(font)
self.TLMlengthmaximum.setEditable(False)
self.TLMlengthmaximum.setObjectName("TLMlengthmaximum")
self.horizontalLayout_16.addWidget(self.TLMlengthmaximum)
self.verticalLayout.addLayout(self.horizontalLayout_16)
self.horizontalLayout_2 = QtWidgets.QHBoxLayout()
self.horizontalLayout_2.setObjectName("horizontalLayout_2")
self.binsizepolicy_label = QtWidgets.QLabel(Histogram)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Minimum, QtWidgets.QSizePolicy.Preferred)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.binsizepolicy_label.sizePolicy().hasHeightForWidth())
self.binsizepolicy_label.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(8)
self.binsizepolicy_label.setFont(font)
self.binsizepolicy_label.setObjectName("binsizepolicy_label")
self.horizontalLayout_2.addWidget(self.binsizepolicy_label)
self.binsizepolicy = QtWidgets.QComboBox(Histogram)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Fixed)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.binsizepolicy.sizePolicy().hasHeightForWidth())
self.binsizepolicy.setSizePolicy(sizePolicy)
font = QtGui.QFont()
font.setPointSize(8)
self.binsizepolicy.setFont(font)
self.binsizepolicy.setObjectName("binsizepolicy")
self.horizontalLayout_2.addWidget(self.binsizepolicy)
self.label_binsize = QtWidgets.QLabel(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.label_binsize.setFont(font)
self.label_binsize.setObjectName("label_binsize")
self.horizontalLayout_2.addWidget(self.label_binsize)
self.binsize_stddev = QtWidgets.QLineEdit(Histogram)
font = QtGui.QFont()
font.setPointSize(8)
self.binsize_stddev.setFont(font)
self.binsize_stddev.setObjectName("binsize_stddev")
self.horizontalLayout_2.addWidget(self.binsize_stddev)
self.verticalLayout.addLayout(self.horizontalLayout_2)
self.horizontalLayout_9.addLayout(self.verticalLayout)
self.verticalLayout_5.addLayout(self.horizontalLayout_9)
self.plotframe = QtWidgets.QFrame(Histogram)
sizePolicy = QtWidgets.QSizePolicy(QtWidgets.QSizePolicy.Preferred, QtWidgets.QSizePolicy.Expanding)
sizePolicy.setHorizontalStretch(0)
sizePolicy.setVerticalStretch(0)
sizePolicy.setHeightForWidth(self.plotframe.sizePolicy().hasHeightForWidth())
self.plotframe.setSizePolicy(sizePolicy)
self.plotframe.setFrameShape(QtWidgets.QFrame.StyledPanel)
self.plotframe.setFrameShadow(QtWidgets.QFrame.Raised)
self.plotframe.setObjectName("plotframe")
self.verticalLayout_3 = QtWidgets.QVBoxLayout(self.plotframe)
self.verticalLayout_3.setObjectName("verticalLayout_3")
self.horizontalLayout = QtWidgets.QHBoxLayout()
self.horizontalLayout.setObjectName("horizontalLayout")
self.opendirbut = QtWidgets.QPushButton(self.plotframe)
font = QtGui.QFont()
font.setPointSize(8)
self.opendirbut.setFont(font)
self.opendirbut.setFocusPolicy(QtCore.Qt.TabFocus)
self.opendirbut.setAutoDefault(False)
self.opendirbut.setObjectName("opendirbut")
self.horizontalLayout.addWidget(self.opendirbut)
self.save_state_but = QtWidgets.QPushButton(self.plotframe)
font = QtGui.QFont()
font.setPointSize(8)
self.save_state_but.setFont(font)
self.save_state_but.setFocusPolicy(QtCore.Qt.NoFocus)
self.save_state_but.setObjectName("save_state_but")
self.horizontalLayout.addWidget(self.save_state_but)
self.pack_database_but = QtWidgets.QPushButton(self.plotframe)
font = QtGui.QFont()
font.setPointSize(8)
self.pack_database_but.setFont(font)
self.pack_database_but.setFocusPolicy(QtCore.Qt.NoFocus)
self.pack_database_but.setObjectName("pack_database_but")
self.horizontalLayout.addWidget(self.pack_database_but)
self.open_filter_but = QtWidgets.QPushButton(self.plotframe)
font = QtGui.QFont()
font.setPointSize(8)
self.open_filter_but.setFont(font)
self.open_filter_but.setFocusPolicy(QtCore.Qt.NoFocus)
self.open_filter_but.setObjectName("open_filter_but")
self.horizontalLayout.addWidget(self.open_filter_but)
self.export_but = QtWidgets.QPushButton(self.plotframe)
font = QtGui.QFont()
font.setPointSize(8)
self.export_but.setFont(font)
self.export_but.setFocusPolicy(QtCore.Qt.NoFocus)
self.export_but.setObjectName("export_but")
self.horizontalLayout.addWidget(self.export_but)
self.device_list_but = QtWidgets.QPushButton(self.plotframe)
font = QtGui.QFont()
font.setPointSize(8)
self.device_list_but.setFont(font)
self.device_list_but.setFocusPolicy(QtCore.Qt.NoFocus)
self.device_list_but.setObjectName("device_list_but")
self.horizontalLayout.addWidget(self.device_list_but)
self.verticalLayout_3.addLayout(self.horizontalLayout)
self.Device_Listing_Table = DevTable(self.plotframe)
self.Device_Listing_Table.setMaximumSize(QtCore.QSize(16777215, 200))
self.Device_Listing_Table.setStatusTip("")
self.Device_Listing_Table.setWhatsThis("")
self.Device_Listing_Table.setObjectName("Device_Listing_Table")
self.Device_Listing_Table.setColumnCount(0)
self.Device_Listing_Table.setRowCount(0)
self.Device_Listing_Table.horizontalHeader().setCascadingSectionResizes(True)
self.Device_Listing_Table.horizontalHeader().setStretchLastSection(True)
self.Device_Listing_Table.verticalHeader().setCascadingSectionResizes(True)
self.verticalLayout_3.addWidget(self.Device_Listing_Table)
self.chartcontrolHBOX = QtWidgets.QHBoxLayout()
self.chartcontrolHBOX.setObjectName("chartcontrolHBOX")
self.backview_but = QtWidgets.QPushButton(self.plotframe)
font = QtGui.QFont()
font.setPointSize(8)
self.backview_but.setFont(font)
self.backview_but.setFocusPolicy(QtCore.Qt.NoFocus)
self.backview_but.setObjectName("backview_but")
self.chartcontrolHBOX.addWidget(self.backview_but)
self.forwardview_but = QtWidgets.QPushButton(self.plotframe)
font = QtGui.QFont()
font.setPointSize(8)
self.forwardview_but.setFont(font)
self.forwardview_but.setFocusPolicy(QtCore.Qt.NoFocus)
self.forwardview_but.setObjectName("forwardview_but")
self.chartcontrolHBOX.addWidget(self.forwardview_but)
self.fullview_but = QtWidgets.QPushButton(self.plotframe)
font = QtGui.QFont()
font.setPointSize(8)
self.fullview_but.setFont(font)
self.fullview_but.setFocusPolicy(QtCore.Qt.NoFocus)
self.fullview_but.setObjectName("fullview_but")
self.chartcontrolHBOX.addWidget(self.fullview_but)
self.selected_bin_only_but = QtWidgets.QPushButton(self.plotframe)
font = QtGui.QFont()
font.setPointSize(8)
self.selected_bin_only_but.setFont(font)
self.selected_bin_only_but.setFocusPolicy(QtCore.Qt.NoFocus)
self.selected_bin_only_but.setCheckable(True)
self.selected_bin_only_but.setObjectName("selected_bin_only_but")
self.chartcontrolHBOX.addWidget(self.selected_bin_only_but)
self.histograph_image_to_clipboard_but = QtWidgets.QPushButton(self.plotframe)
font = QtGui.QFont()
font.setPointSize(8)
self.histograph_image_to_clipboard_but.setFont(font)
self.histograph_image_to_clipboard_but.setFocusPolicy(QtCore.Qt.ClickFocus)
self.histograph_image_to_clipboard_but.setObjectName("histograph_image_to_clipboard_but")
self.chartcontrolHBOX.addWidget(self.histograph_image_to_clipboard_but)
spacerItem3 = QtWidgets.QSpacerItem(40, 20, QtWidgets.QSizePolicy.Expanding, QtWidgets.QSizePolicy.Minimum)
self.chartcontrolHBOX.addItem(spacerItem3)
self.quit_but = QtWidgets.QPushButton(self.plotframe)
font = QtGui.QFont()
font.setPointSize(8)
self.quit_but.setFont(font)
self.quit_but.setFocusPolicy(QtCore.Qt.NoFocus)
self.quit_but.setObjectName("quit_but")
self.chartcontrolHBOX.addWidget(self.quit_but)
self.verticalLayout_3.addLayout(self.chartcontrolHBOX)
self.plotframebox = QtWidgets.QHBoxLayout()
self.plotframebox.setObjectName("plotframebox")
self.verticalLayout_3.addLayout(self.plotframebox)
self.verticalLayout_5.addWidget(self.plotframe)
self.plotframe.raise_()
self.retranslateUi(Histogram)
QtCore.QMetaObject.connectSlotsByName(Histogram)
def retranslateUi(self, Histogram):
_translate = QtCore.QCoreApplication.translate
Histogram.setWindowTitle(_translate("Histogram", "Histogram"))
self.wafernamelabel.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">This is the name of the wafer and directory currently under analysis.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">The directory name MUST match the wafer name.</p></body></html>"))
self.wafernamelabel.setText(_translate("Histogram", "Wafer Name"))
self.wafername.setToolTip(_translate("Histogram", "Wafer Name"))
self.Vgs_label.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Selected Vgs:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">This selects the Vgs for all analysis which requires the family of curves data</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">examples of which are Ron, Gon, TLM data, ratio Ron data etc...</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Analysis will be performed assuming the selected Vgs which selects a curve from the</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">family of curves. Changing this will update all data dependent upon Vgs.</p></body></html>"))
self.Vgs_label.setText(_translate("Histogram", "select Vgs for FOC"))
self.Vgs_comboBox.setToolTip(_translate("Histogram", "Gate voltage setting"))
self.Vgs_comboBox.setWhatsThis(_translate("Histogram", "This is the gate voltage setting"))
self.Vds_FOC_label.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Selected Vgs:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">This selects the Vgs for all analysis which requires the family of curves data</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">examples of which are Ron, Gon, TLM data, ratio Ron data etc...</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Analysis will be performed assuming the selected Vgs which selects a curve from the</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">family of curves. Changing this will update all data dependent upon Vgs.</p></body></html>"))
self.Vds_FOC_label.setText(_translate("Histogram", "Vds_FOC for |Idmax|@Vds"))
self.Yf_checkBox.setText(_translate("Histogram", " Y-function Analysis"))
self.deltaVgs_thres_label.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Selected Vgs:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">This selects the Vgs for all analysis which requires the family of curves data</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">examples of which are Ron, Gon, TLM data, ratio Ron data etc...</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Analysis will be performed assuming the selected Vgs which selects a curve from the</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">family of curves. Changing this will update all data dependent upon Vgs.</p></body></html>"))
self.deltaVgs_thres_label.setText(_translate("Histogram", "deltaVgsthres"))
self.delta_Vgs_thres.setText(_translate("Histogram", "-0.5"))
self.Yf_Vgsfitrange_label.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Selected Vgs:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">This selects the Vgs for all analysis which requires the family of curves data</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">examples of which are Ron, Gon, TLM data, ratio Ron data etc...</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Analysis will be performed assuming the selected Vgs which selects a curve from the</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">family of curves. Changing this will update all data dependent upon Vgs.</p></body></html>"))
self.Yf_Vgsfitrange_label.setText(_translate("Histogram", "Yf Vgs fit range fract"))
self.Yf_Vgsfitrange_frac.setText(_translate("Histogram", "0.1"))
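        # Y-function sketch (hypothetical helper; the project's analysis code is
        # not in this generated file): Y = Id/sqrt(gm) is linear in Vgs in strong
        # inversion, and the x-intercept of a line fit estimates Vth:
        #   gm = np.gradient(Id, Vgs)
        #   Y = Id / np.sqrt(gm)
        #   slope, icept = np.polyfit(Vgs[sel], Y[sel], 1)  # sel = Vgs fit window
        #   Vth = -icept / slope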
self.parameterlabel.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Parameter selector:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Selects parameter to be displayed on histogram.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">If you get the warning message "no devices"</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">then try to adjust the parameter selector to find a </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">parameter for which there are data. The parameter </p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">selector MUST be set to data that exists for any analysis</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">to proceed.</p></body></html>"))
self.parameterlabel.setText(_translate("Histogram", "Parameter"))
self.measurementtype.setToolTip(_translate("Histogram", "Data Format: Resistance (Ron) or Conductance (Gon)"))
self.measurementtype.setWhatsThis(_translate("Histogram", "Ron @ Vds=0V is the slope of Ids/Vds at Vds=0V\n"
"Ron @ |Vds|=maximum is the maximum Vds/Id at maximum Vds\n"
"Gon s are similar to the above but are conductances = 1/Ron"))
self.set_includes_label.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\"><span style=\" color:#000000;\">Boolean selector:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Selects devices for analysis based on their names</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Default is to analyze all devices.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">This is a reverse Polish Boolean evaluator</span></p>\n"
"<p align=\"justify\" style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Operators (binary-two arguments): and, or, xor - call them bx</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Operators (unary-one argument): not - call it ux</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Format for search terms (strings) with operators bx, ux is:</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">A B ba C ua bb D bc .......</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">where ba operates on A and B, ua operates on C, bb operates on the two results of</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">of ba and ua and bc this result and D. Example:</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Aa Bla and Cz not and D or is equivalent to:</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">((Aa and Bla) and not Cz) and D</span></p></body></html>"))
self.set_includes_label.setText(_translate("Histogram", "data filename filter"))
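        # The filename filter described above is a reverse-Polish boolean
        # evaluator over name substrings. A minimal sketch of the idea
        # (hypothetical helper, not the evaluator used elsewhere in this project):
        #   def rpn_name_filter(expr, name):
        #       stack = []
        #       for t in expr.split():
        #           if t == 'not':
        #               stack.append(not stack.pop())
        #           elif t in ('and', 'or', 'xor'):
        #               b, a = stack.pop(), stack.pop()
        #               stack.append({'and': a and b, 'or': a or b,
        #                             'xor': a != b}[t])
        #           else:
        #               stack.append(t in name)   # bare token = substring test
        #       return stack.pop()
        #   # e.g. rpn_name_filter("Aa Bla and Cz not and D or", devicename)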
self.set_includes.setToolTip(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Boolean selector:</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Selects devices for analysis based on their names</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\">Default is to analyze all devices.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">This is a reverse Polish Boolean evaluator</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Operators (binary-two arguments): and, or, xor - call them bo</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Operators (unary-one argument): not - call it ux</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Format for search terms (strings) with operators bx, ux is:</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">A B ba C ua bb D bc .......</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">where ba operates on A and B, ua operates on C, bb operates on the two results of</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">of ba and ua and bc this result and D. Example:</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Aa Bla and Cz not and D or is equivalent to:</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">((Aa and Bla) and not Cz) and D</span></p></body></html>"))
self.averagelabel.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Average of all data visible within the hysteresis plot.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">When Linear plots button is green and selected, this is the arithmetic mean of the data.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">When log plots (button turns red) is selected, this is the geometric (log average) mean.</p></body></html>"))
self.averagelabel.setText(_translate("Histogram", "average"))
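        # The two averages described above, as a sketch (hypothetical, numpy):
        #   arith_mean = np.mean(x)                  # linear-plot mode
        #   geo_mean = np.exp(np.mean(np.log(x)))    # log-plot mode (x > 0)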
self.standarddeviation.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Standard Deviation of all data visible within the hysteresis plot.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">When Linear plots button is green and selected, this is the simple standard deviation of the data.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">When log plots (button turns red) is selected, this is the standard deviation of the log of the data.</p></body></html>"))
self.standarddeviation.setText(_translate("Histogram", "standard deviation"))
self.standard_deviation.setToolTip(_translate("Histogram", "Of selected range"))
self.log_linear_histogram_but.setText(_translate("Histogram", "Linear plots"))
self.label_numberofdevices.setText(_translate("Histogram", "number of devices"))
self.selectmintype.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Lower limit of data analysis on the histogram displayed as number of standard deviations below mean or a simple value.</p></body></html>"))
self.selectmintype.setItemText(0, _translate("Histogram", "Std Dev below mean"))
self.selectmintype.setItemText(1, _translate("Histogram", "Value"))
self.selectmaxtype.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Upper limit of data analysis on the histogram displayed as number of standard deviations below mean or a simple value.</p></body></html>"))
self.selectmaxtype.setItemText(0, _translate("Histogram", "Std Dev above mean"))
self.selectmaxtype.setItemText(1, _translate("Histogram", "Value"))
self.range_lin_fit_label.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Range of Vds over which an Id vs Vds curve is fit to a line to determine Ron, Gon.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">The Id(Vds) curve is that at a Vgs selected by the user on the Vgs selector of this GUI window.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">This curve fit range starts at Vds=0 and extends to maximum negative Vds * the range fit.</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.range_lin_fit_label.setText(_translate("Histogram", "FOC Ron Range Fit"))
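        # Sketch of the Ron/Gon fit described above (hypothetical; numpy.polyfit
        # stands in for the project's actual fitting code):
        #   vmin = rangefit * Vds.min()              # window: 0 .. rangefit*Vds_min
        #   sel = (Vds >= vmin) & (Vds <= 0.)
        #   slope, _ = np.polyfit(Vds[sel], Id[sel], 1)
        #   Gon, Ron = slope, 1. / slope             # conductance and on-resistance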
self.transfer_curve_smoothing_factor_label.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Range of Vds over which an Id vs Vds curve is fit to a line to determine Ron, Gon.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">The Id(Vds) curve is that at a Vgs selected by the user on the Vgs selector of this GUI window.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">This curve fit range starts at Vds=0 and extends to maximum negative Vds * the range fit.</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.transfer_curve_smoothing_factor_label.setText(_translate("Histogram", "transfer curve smoothing factor"))
self.TLM_lin_fit_label.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Range of Vds over which an Id vs Vds curve is fit to a line to determine Ron, Gon.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">The Id(Vds) curve is that at a Vgs selected by the user on the Vgs selector of this GUI window.</p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">This curve fit range starts at Vds=0 and extends to maximum negative Vds * the range fit.</p>\n"
"<p style=\"-qt-paragraph-type:empty; margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><br /></p></body></html>"))
self.TLM_lin_fit_label.setText(_translate("Histogram", "TLM linear fit quality"))
self.minTLMlength_label.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\"><span style=\" color:#000000;\">Minimum channel length of TLM devices.</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\"><span style=\" color:#000000;\">Allows user to select the minimum available channel length of devices in the TLM</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\"><span style=\" color:#000000;\">structures to use in performing TLM analysis of Rc and Rsh (contact and sheet resistance)</span></p></body></html>"))
self.minTLMlength_label.setText(_translate("Histogram", "TLM min length um"))
self.TLMlengthminimum.setToolTip(_translate("Histogram", "Data Format: Resistance (Ron) or Conductance (Gon)"))
self.TLMlengthminimum.setWhatsThis(_translate("Histogram", "Ron @ Vds=0V is the slope of Ids/Vds at Vds=0V\n"
"Ron @ |Vds|=maximum is the maximum Vds/Id at maximum Vds\n"
"Gon s are similar to the above but are conductances = 1/Ron"))
self.maxTLMlength_label.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Maximum channel length of TLM devices.</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Allows user to select the maximum available channel length of devices in the TLM</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">structures to use in performing TLM analysis of Rc and Rsh (contact and sheet resistance)</span></p></body></html>"))
self.maxTLMlength_label.setText(_translate("Histogram", "TLM max length um"))
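        # TLM extraction sketch (hypothetical): fit total resistance against
        # channel length L; the intercept gives twice the contact resistance
        # and the slope gives the sheet resistance over the device width W:
        #   slope, icept = np.polyfit(L, Rtotal, 1)  # R_total = (Rsh/W)*L + 2*Rc
        #   Rc = icept / 2.                          # contact resistance per contact
        #   Rsh = slope * W                          # sheet resistance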
self.TLMlengthmaximum.setToolTip(_translate("Histogram", "Data Format: Resistance (Ron) or Conductance (Gon)"))
self.TLMlengthmaximum.setWhatsThis(_translate("Histogram", "Ron @ Vds=0V is the slope of Ids/Vds at Vds=0V\n"
"Ron @ |Vds|=maximum is the maximum Vds/Id at maximum Vds\n"
"Gon s are similar to the above but are conductances = 1/Ron"))
self.binsizepolicy_label.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">User-selection of the method by which histogram bin size is determined</p></body></html>"))
self.binsizepolicy_label.setText(_translate("Histogram", "Bin Size Policy"))
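        # Bin-size policies map naturally onto numpy's histogram estimators
        # (a sketch; the policy names offered by this GUI may differ):
        #   counts, edges = np.histogram(x, bins='fd')       # Freedman-Diaconis
        #   counts, edges = np.histogram(x, bins='sturges')  # Sturges' rule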
self.label_binsize.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">User manual setting for histogram bin size.</p></body></html>"))
self.label_binsize.setText(_translate("Histogram", "bin size stddev"))
self.opendirbut.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">User to select wafer directory to open and analyze.</span></p></body></html>"))
self.opendirbut.setText(_translate("Histogram", "&open directory"))
self.save_state_but.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">User saves data to open later</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Should reduce loading time of analysis.</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">WARNING: Not working yet!</span></p></body></html>"))
self.save_state_but.setText(_translate("Histogram", "&Save State"))
self.pack_database_but.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">User saves data to open later</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Should reduce loading time of analysis.</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">WARNING: Not working yet!</span></p></body></html>"))
self.pack_database_but.setText(_translate("Histogram", "Pack Database"))
self.open_filter_but.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">This opens a new window which allows the user to filter data for analysis by</span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">data values. For example, the user can exclude devices having |Idmax| less than or </span></p>\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">greater than the user-specified values. This is often used to remove bad devices from the analysis.</span></p></body></html>"))
self.open_filter_but.setText(_translate("Histogram", "filter"))
self.export_but.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">This opens a new window which allows the user to obtain TLM device Ron and other parameters from statistical averaged data for each TLM element.</span></p></body></html>"))
self.export_but.setText(_translate("Histogram", "Export statistics"))
self.device_list_but.setWhatsThis(_translate("Histogram", "<html><head/><body><p><span style=\" color:#000000;\">This opens a new window which allows the user to obtain a device listing of device names on the wafer with the devices\' measured/calculated parameters in columns.</span></p></body></html>"))
self.device_list_but.setText(_translate("Histogram", "device list"))
        self.Device_Listing_Table.setToolTip(_translate("Histogram", "<html><head/><body><p><span style=\" color:#000000; background-color:#ffffff;\">Device listing from the selected bin of the histogram. Note that ctrl-f opens a window which allows the user to enter a Boolean expression to selectively display devices.</span></p><p><span style=\" color:#000000; background-color:#ffffff;\">Left mouse click on a parameter (header) to sort. </span></p><p><span style=\" color:#000000; background-color:#ffffff;\">Shift+left mouse click on a parameter to select it for copy to clipboard - the selected columns will change color. </span></p><p><span style=\" color:#000000; background-color:#ffffff;\">After selecting all desired parameters, ctrl-c copies them to the clipboard. </span></p><p><span style=\" color:#000000; background-color:#ffffff;\">Right mouse click deselects all. </span></p><p><span style=\" color:#000000; background-color:#ffffff;\">Shift+right click copies individual cells to the clipboard. </span></p><p><span style=\" color:#000000; background-color:#ffffff;\">Left mouse click on a device name will allow plotting of selected device parameters.</span></p></body></html>"))
self.backview_but.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Send the histogram view back to the previous setting.</p></body></html>"))
self.backview_but.setText(_translate("Histogram", "&back"))
self.forwardview_but.setWhatsThis(_translate("Histogram", "<html><head/><body><p><span style=\" color:#000000;\">Send the histogram view forward to the next saved setting.</span></p></body></html>"))
self.forwardview_but.setText(_translate("Histogram", "&forward"))
self.fullview_but.setWhatsThis(_translate("Histogram", "<html><head/><body><p><span style=\" color:#000000;\">Send the histogram to full span to view all available data i.e. this is the default setting.</span></p></body></html>"))
self.fullview_but.setText(_translate("Histogram", "&full view"))
self.selected_bin_only_but.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Send the histogram view back to the previous setting.</p></body></html>"))
self.selected_bin_only_but.setText(_translate("Histogram", "selected bin only"))
self.histograph_image_to_clipboard_but.setWhatsThis(_translate("Histogram", "<!DOCTYPE HTML PUBLIC \"-//W3C//DTD HTML 4.0//EN\" \"http://www.w3.org/TR/REC-html40/strict.dtd\">\n"
"<html><head><meta name=\"qrichtext\" content=\"1\" /><style type=\"text/css\">\n"
"p, li { white-space: pre-wrap; }\n"
"</style></head><body style=\" font-family:\'Ubuntu\'; font-size:11pt; font-weight:400; font-style:normal;\">\n"
"<p style=\" margin-top:0px; margin-bottom:0px; margin-left:0px; margin-right:0px; -qt-block-indent:0; text-indent:0px;\"><span style=\" color:#000000;\">Send the histogram view back to the previous setting.</p></body></html>"))
self.histograph_image_to_clipboard_but.setText(_translate("Histogram", "histograph image->&clipboard"))
self.quit_but.setText(_translate("Histogram", "Quit"))
from devtable import DevTable
| [
"[email protected]"
]
| |
a8f2cafb277643c76e1a634c5fcab184d07b9eb5 | 04b1803adb6653ecb7cb827c4f4aa616afacf629 | /third_party/blink/web_tests/external/wpt/tools/third_party/pytest/src/_pytest/_code/__init__.py | 815c13b42c25bd314988dbaa7ff9f4e3d1d2e5c2 | [
"LGPL-2.0-or-later",
"LicenseRef-scancode-warranty-disclaimer",
"LGPL-2.1-only",
"GPL-1.0-or-later",
"GPL-2.0-only",
"LGPL-2.0-only",
"BSD-2-Clause",
"LicenseRef-scancode-other-copyleft",
"BSD-3-Clause",
"MIT",
"Apache-2.0"
]
| permissive | Samsung/Castanets | 240d9338e097b75b3f669604315b06f7cf129d64 | 4896f732fc747dfdcfcbac3d442f2d2d42df264a | refs/heads/castanets_76_dev | 2023-08-31T09:01:04.744346 | 2021-07-30T04:56:25 | 2021-08-11T05:45:21 | 125,484,161 | 58 | 49 | BSD-3-Clause | 2022-10-16T19:31:26 | 2018-03-16T08:07:37 | null | UTF-8 | Python | false | false | 410 | py | """ python inspection/code generation API """
from __future__ import absolute_import, division, print_function
from .code import Code # noqa
from .code import ExceptionInfo # noqa
from .code import Frame # noqa
from .code import Traceback # noqa
from .code import getrawcode # noqa
from .source import Source # noqa
from .source import compile_ as compile # noqa
from .source import getfslineno # noqa
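# Illustrative, commented-out usage sketch (added for clarity; the exact
# behavior is assumed from _pytest._code and is not guaranteed by this module):
#
#     from _pytest._code import Source
#     src = Source("    def f():\n        return 1")
#     print(src.deindent().lines)  # -> ['def f():', '    return 1']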
| [
"[email protected]"
]
| |
4dde79d5e3be0ffc2d8fdc9b8d3237fd2be57c5b | 5a52ccea88f90dd4f1acc2819997fce0dd5ffb7d | /alipay/aop/api/domain/Dowsure.py | c4c979d73ab936957a9778dbb5945bfedab00234 | [
"Apache-2.0"
]
| permissive | alipay/alipay-sdk-python-all | 8bd20882852ffeb70a6e929038bf88ff1d1eff1c | 1fad300587c9e7e099747305ba9077d4cd7afde9 | refs/heads/master | 2023-08-27T21:35:01.778771 | 2023-08-23T07:12:26 | 2023-08-23T07:12:26 | 133,338,689 | 247 | 70 | Apache-2.0 | 2023-04-25T04:54:02 | 2018-05-14T09:40:54 | Python | UTF-8 | Python | false | false | 961 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.constant.ParamConstants import *
class Dowsure(object):
def __init__(self):
self._application_code = None
@property
def application_code(self):
return self._application_code
@application_code.setter
def application_code(self, value):
self._application_code = value
def to_alipay_dict(self):
params = dict()
if self.application_code:
if hasattr(self.application_code, 'to_alipay_dict'):
params['application_code'] = self.application_code.to_alipay_dict()
else:
params['application_code'] = self.application_code
return params
@staticmethod
def from_alipay_dict(d):
if not d:
return None
o = Dowsure()
if 'application_code' in d:
o.application_code = d['application_code']
return o
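# A minimal round-trip sketch (illustrative addition; 'APP_001' is a
# made-up value, not a real application code):
#
#     d = Dowsure.from_alipay_dict({'application_code': 'APP_001'})
#     assert d.to_alipay_dict() == {'application_code': 'APP_001'}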
| [
"[email protected]"
]
| |
4905f1162de481d5c10d057cf7e2d91f01cd6fba | a3d32e0ff84958d194ced642441f5379c0032465 | /tests/functions/test_image_train_process.py | 3fe7e75cf95dfa56d3155c3a714ddfd2389acd77 | []
| no_license | TensorMSA/tensormsa_old | 406755511d05d4ec179c085337a05f73c0dde80a | ef058737f391de817c74398ef9a5d3a28f973c98 | refs/heads/master | 2021-06-18T11:58:29.349060 | 2017-04-20T10:17:43 | 2017-04-20T10:17:43 | 67,384,681 | 6 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,430 | py | import unittest, requests, os, json,random
from tfmsacore.utils.logger import tfmsa_logger
from django.core.files.uploadedfile import TemporaryUploadedFile
from tfmsacore.data import ImageManager
class TestImageTrainProcess(unittest.TestCase):
"""
./manage.py jenkins ./tests/functions --enable-coverage
./manage.py jenkins ./tests/functions
"""
rand_name = str(random.randrange(1,99999))
def test_image_train(self):
host_name = "{0}:{1}".format(os.environ['HOSTNAME'], "8989")
tfmsa_logger("[1] Image file format update")
resp = requests.post('http://' + host_name + '/api/v1/type/imagefile/base/mes/table/testtable2/format/nn0000090/',
json={"x_size": 32,"y_size": 32 })
if(json.loads(resp.json())['status'] != "200"):
raise Exception ("RESI Service Fail")
tfmsa_logger("[2] Network info update")
resp = requests.post('http://' + host_name + '/api/v1/type/common/nninfo/',
json={
"nn_id": "nn0000090",
"category": "SCM",
"subcate": "csv",
"name": "CENSUS_INCOME",
"desc": "INCOME PREDICT"
})
if (json.loads(resp.json())['status'] != "200"):
raise Exception("RESI Service Fail")
tfmsa_logger("[3] Network configuration update")
resp = requests.post('http://' + host_name + '/api/v1/type/cnn/conf/nn0000090/',
json={
"data":
{
"datalen": 1024,
"taglen": 2,
"matrix": [32, 32],
"learnrate": 0.01,
"epoch": 10
},
"layer":
[
{
"type": "input",
"active": "relu",
"cnnfilter": [2, 2],
"cnnstride": [2, 2],
"maxpoolmatrix": [2, 2],
"maxpoolstride": [2, 2],
"node_in_out": [1, 16],
"regualizer": "",
"padding": "SAME",
"droprate": ""
},
{
"type": "cnn",
"active": "relu",
"cnnfilter": [2, 2],
"cnnstride": [2, 2],
"maxpoolmatrix": [2, 2],
"maxpoolstride": [2, 2],
"node_in_out": [16, 32],
"regualizer": "",
"padding": "SAME",
"droprate": ""
},
{
"type": "reshape",
},
{
"type": "drop",
"active": "relu",
"regualizer": "",
"droprate": "0.5"
},
{
"type": "out",
"active": "softmax",
"cnnfilter": "",
"cnnstride": "",
"maxpoolmatrix": "",
"maxpoolstride": "",
"node_in_out": [32, 2],
"regualizer": "",
"padding": "SAME",
"droprate": ""
}
]
})
if (json.loads(resp.json())['status'] != "200"):
raise Exception("RESI Service Fail")
tfmsa_logger("[4] Train Neural Network")
resp = requests.post('http://' + host_name + '/api/v1/type/cnn/train/nn0000090/',
json={
"epoch": "10",
"testset": "10"
})
if (json.loads(resp.json())['status'] != "200"):
raise Exception("RESI Service Fail")
tfmsa_logger("[5] PASS TEST")
| [
"[email protected]"
]
| |
7cfee5b9df13834712ed1c7dfcb5aaac39cd1210 | cd8f7ecd20c58ce1ae0fe3840f7c7ee961aa5819 | /Third Maximum Number.py | e33e7d88fe709f5f961c58fc1b2f6c3993b73f63 | [
"Apache-2.0"
]
| permissive | sugia/leetcode | 9b0f2a3521b088f8f7e5633c2c6c17c76d33dcaf | 6facec2a54d1d9f133f420c9bce1d1043f57ebc6 | refs/heads/master | 2021-06-05T07:20:04.099488 | 2021-02-24T07:24:50 | 2021-02-24T07:24:50 | 29,124,136 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 855 | py | '''
Given a non-empty array of integers, return the third maximum number in this array. If it does not exist, return the maximum number. The time complexity must be in O(n).
Example 1:
Input: [3, 2, 1]
Output: 1
Explanation: The third maximum is 1.
Example 2:
Input: [1, 2]
Output: 2
Explanation: The third maximum does not exist, so the maximum (2) is returned instead.
Example 3:
Input: [2, 2, 3, 1]
Output: 1
Explanation: Note that the third maximum here means the third maximum distinct number.
Both numbers with value 2 are both considered as second maximum.
'''
class Solution(object):
def thirdMax(self, nums):
"""
:type nums: List[int]
:rtype: int
"""
        # Track the three largest distinct values in a single pass, since the
        # problem requires O(n) time (sorting would be O(n log n)).
        first = second = third = None
        for n in nums:
            if n in (first, second, third):
                continue
            if first is None or n > first:
                first, second, third = n, first, second
            elif second is None or n > second:
                second, third = n, second
            elif third is None or n > third:
                third = n
        return third if third is not None else first
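# Quick sanity check against the examples in the problem statement above
# (illustrative addition, not part of the original submission):
if __name__ == '__main__':
    s = Solution()
    assert s.thirdMax([3, 2, 1]) == 1
    assert s.thirdMax([1, 2]) == 2
    assert s.thirdMax([2, 2, 3, 1]) == 1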
| [
"[email protected]"
]
| |
0b420050e1479b0904e29b59e1c48a5160989fd1 | f392a5e4193d44c41e234696d093140cdf301497 | /tests/example2.py | 8b1a937795148bdddefeb027df7948a1d1727c74 | [
"Apache-2.0"
]
| permissive | GateNLP/gate-lf-python-data | fb151132c94e25f59947d6400692f23914dfa89e | 89880a82458f09702c1d6828ae341997e0b45f73 | refs/heads/master | 2021-03-27T08:55:26.304655 | 2019-05-31T11:44:29 | 2019-05-31T11:44:29 | 113,597,027 | 4 | 1 | Apache-2.0 | 2019-05-30T08:50:59 | 2017-12-08T16:52:39 | Python | UTF-8 | Python | false | false | 362 | py | from __future__ import print_function
from gatelfdata import Dataset
import sys
if len(sys.argv) != 2:
raise Exception("Need one parameter: meta file")
file = sys.argv[1]
ds = Dataset(file)
valset = ds.convert_to_file()
for b in ds.batches_converted(batch_size=20, as_numpy=False, pad_left=True):
print("Batch: len=", len(b))
print("Batch: data=", b)
| [
"[email protected]"
]
| |
8b9843406d7206f8d8eb6ef33274a88f5669773e | b727870804e5c7a474c271e1cf0ebfe05619ddfb | /keras44_5_wine_conv1d.py | 38577cf7df599d8d5b61c45ee04523731daff3ff | []
| no_license | marattang/keras | 843227592f7b3cb08034bfdc2e6319200e62e990 | cc78d1d70bfbe99e78f19ae11053ebbb87f20864 | refs/heads/main | 2023-08-03T21:50:53.438394 | 2021-09-10T05:11:15 | 2021-09-10T05:11:15 | 383,742,096 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,365 | py | import numpy as np
from sklearn.datasets import load_wine
from sklearn.model_selection import train_test_split
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Dense, Conv2D, GlobalAveragePooling2D, Flatten, LSTM, Conv1D
from sklearn.preprocessing import MaxAbsScaler, MinMaxScaler, RobustScaler, StandardScaler, PowerTransformer, QuantileTransformer
import matplotlib.pyplot as plt
from tensorflow.keras.callbacks import EarlyStopping
from matplotlib import font_manager, rc
from tensorflow.python.keras.layers.core import Dropout
font_path = "C:/Windows/Fonts/gulim.ttc"
font = font_manager.FontProperties(fname=font_path).get_name()
rc('font', family=font)
# Complete this exercise
# Target: accuracy of 0.8 or higher
dataset = load_wine()
x = dataset.data
y = dataset.target
print(dataset.DESCR)
print(dataset.feature_names)
print(np.unique(y))
y = to_categorical(y)
print(y.shape)
x_train, x_test, y_train, y_test = train_test_split(x, y, train_size=0.7, shuffle=True, random_state=66)
print(x_train)
print(x_train.shape)
scaler = PowerTransformer()
scaler.fit(x_train)
x_train = scaler.transform(x_train)
x_test = scaler.transform(x_test)
# print(x_train.shape)
# print(x_test.shape)
# 178 wine samples split 70/30 -> 124 train / 54 test; add a channel axis for Conv1D
x_train = x_train.reshape(124, 13, 1)
x_test = x_test.reshape(54, 13, 1)
#
# model = Sequential()
# model.add(LSTM(units=128, activation='relu', input_shape=(13, 1)))
# model.add(Dense(256, activation='relu'))
# model.add(Dense(128, activation='relu'))
# model.add(Dense(128, activation='relu'))
# model.add(Dense(64, activation='relu'))
# model.add(Dense(32, activation='relu'))
# model.add(Dropout(0.1))
# model.add(Dense(32, activation='relu'))
# model.add(Dense(3, activation='softmax'))
model = Sequential()
model.add(Conv1D(16, kernel_size=1, activation='relu', input_shape=(13, 1)))
model.add(Conv1D(8, 1))
model.add(Flatten())
model.add(Dense(256, activation='relu'))
model.add(Dense(128, activation='relu'))
model.add(Dense(128, activation='relu'))
model.add(Dense(64, activation='relu'))
model.add(Dense(32, activation='relu'))
model.add(Dropout(0.1))
model.add(Dense(32, activation='relu'))
model.add(Dense(3, activation='softmax'))
#
es = EarlyStopping(monitor='val_loss', mode='min', patience=15)
model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
# hist = model.fit(x_train, y_train, batch_size=32, epochs=500, validation_split=0.1, callbacks=[es])
hist = model.fit(x_train, y_train, batch_size=1, epochs=70, validation_split=0.05)
# plt.plot(hist.history['loss']) # x: epoch, y: hist.history['loss']
# plt.plot(hist.history['val_loss'])
# plt.xlabel('epochs')
# plt.ylabel('loss, val_loss')
# plt.title('loss, val_loss')
# plt.show()
#
loss = model.evaluate(x_test, y_test)
print('loss : ', loss[0])
print('accuracy : ', loss[1])
# DNN
# QuantileTransformer - accuracy : 0.9259259104728699
# MaxAbsScaler - accuracy : 0.9259259104728699
# MinMaxScaler - accuracy : 0.9629629850387573
# RobustScaler - accuracy : 0.9814814925193787
# StandardScaler - accuracy : 0.9814814925193787
# PowerTransformer - accuracy : 0.9814814925193787
# CNN
# accuracy : 0.9814814925193787
# RNN
# epochs 50 -> 70
# 하이퍼 파라미터 작업 후
# accuracy : 0.9444444179534912 -> accuracy : 1.0
# conv1d
# accuracy : 0.9814814925193787 | [
"[email protected]"
]
| |
0208a4a50bebc3bf813bc885b5acd3bc9bda9696 | 88c1f9ccb62e91d6b0574bcde1043921bdeb0126 | /client_cli/src/d1_cli/tests/test_cli.py | cf82f63f797c9ac5a31102d6f830c03b7f3c3656 | [
"Apache-2.0"
]
| permissive | jevans97utk/d1_python | 83b8de8780287c655779844f367b9189413da074 | 3ac4d4f3ca052d3e8641a6a329cab526c8ddcb0d | refs/heads/master | 2020-05-21T01:16:50.677816 | 2019-04-22T16:09:44 | 2019-04-22T16:09:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 28,974 | py | #!/usr/bin/env python
# This work was created by participants in the DataONE project, and is
# jointly copyrighted by participating institutions in DataONE. For
# more information on DataONE, see our web site at http://dataone.org.
#
# Copyright 2009-2019 DataONE
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Test CLI high level functionality."""
import contextlib
import io
import os
import re
import tempfile
import freezegun
import mock
import pytest
import responses
import d1_cli.impl.command_parser
import d1_cli.impl.exceptions
import d1_common.date_time
import d1_common.system_metadata
import d1_common.types.dataoneTypes
import d1_test.d1_test_case
import d1_test.instance_generator.random_data
import d1_test.mock_api.catch_all
import d1_test.mock_api.get
import d1_test.mock_api.get_log_records
import d1_test.mock_api.get_system_metadata
import d1_test.mock_api.list_nodes
import d1_test.mock_api.list_objects
import d1_client.mnclient
@freezegun.freeze_time("1977-03-27")
@d1_test.d1_test_case.reproducible_random_decorator("TestCLI")
class TestCLI(d1_test.d1_test_case.D1TestCase):
def setup_method(self, method):
cli = d1_cli.impl.command_parser.CLI()
cli.do_set("verbose true")
def test_1000(self, cn_client_v2):
"""preloop(): Successful initialization."""
cli = d1_cli.impl.command_parser.CLI()
cli.preloop()
def test_1010(self, cn_client_v2):
"""preloop(): Successful deinitialization."""
cli = d1_cli.impl.command_parser.CLI()
cli.preloop()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.postloop()
assert "Exiting" in out_stream.getvalue()
def test_1020(self, cn_client_v2):
"""precmd(): Successful line formattting."""
cli = d1_cli.impl.command_parser.CLI()
cli.preloop()
test_cmd_str = "somecommand arg1 arg2 arg3"
received_line = cli.precmd(test_cmd_str)
assert test_cmd_str in received_line
def test_1030(self, cn_client_v2):
"""default(): Yields unknown command."""
cli = d1_cli.impl.command_parser.CLI()
test_cmd_str = "somecommand arg1 arg2 arg3"
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.default(test_cmd_str)
assert "Unknown command: somecommand" in out_stream.getvalue()
def test_1040(self, cn_client_v2):
"""run_command_line_arguments():"""
cli = d1_cli.impl.command_parser.CLI()
test_cmd_str = "somecommand arg1 arg2 arg3"
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.default(test_cmd_str)
assert "Unknown command: somecommand" in out_stream.getvalue()
def test_1050(self, cn_client_v2):
"""do_help(): Valid command returns help string."""
cli = d1_cli.impl.command_parser.CLI()
cli.stdout = io.StringIO()
test_cmd_str = "get"
cli.do_help(test_cmd_str)
assert "The object is saved to <file>" in cli.stdout.getvalue()
def test_1060(self, cn_client_v2):
"""do_history(): Returns history."""
cli = d1_cli.impl.command_parser.CLI()
cli.preloop()
test_cmd_str = "somecommand1 arg1 arg2 arg3"
cli.precmd(test_cmd_str)
test_cmd_str = "somecommand2 arg1 arg2 arg3"
cli.precmd(test_cmd_str)
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_history("")
assert "somecommand1" in out_stream.getvalue()
assert "somecommand2" in out_stream.getvalue()
# do_exit()
def test_1070(self, cn_client_v2):
"""do_exit(): Gives option to cancel if the operation queue is not empty."""
self._do_exit("yes", 1)
def test_1080(self, cn_client_v2):
"""do_exit(): Does not exit if cancelled."""
self._do_exit("no", 0)
def _do_exit(self, answer_str, exit_call_count):
"""do_exit(): Gives option to cancel if the operation queue is not empty."""
cli = d1_cli.impl.command_parser.CLI()
cli.preloop()
fi, tmp_path = tempfile.mkstemp(
prefix="test_dataone_cli.", suffix=".tmp", text=True
)
os.close(fi)
cli.do_set("authoritative-mn urn:node:myTestMN")
cli.do_set("rights-holder test-rights-holder-subject")
create_operation = cli._command_processor._operation_maker.create(
"test_pid", tmp_path, "test_format_id"
)
cli._command_processor._operation_queue.append(create_operation)
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
with d1_test.d1_test_case.mock_input(answer_str):
with mock.patch("sys.exit", return_value="") as mock_method:
cli.do_exit("")
assert mock_method.call_count == exit_call_count
assert (
"There are 1 unperformed operations in the write operation queue"
in out_stream.getvalue()
)
def test_1090(self, cn_client_v2):
"""do_exit(): Calls sys.exit()"""
cli = d1_cli.impl.command_parser.CLI()
cli.preloop()
with mock.patch("sys.exit", return_value="") as mock_method:
cli.do_quit("")
assert mock_method.call_count > 0
def test_1100(self, cn_client_v2):
"""do_eof(): Calls sys.exit()"""
cli = d1_cli.impl.command_parser.CLI()
cli.preloop()
with mock.patch("sys.exit", return_value="") as mock_method:
cli.do_eof("")
assert mock_method.call_count > 0
def test_1110(self, cn_client_v2):
"""do_reset(), do_set(), do_save(), do_load(): Session to disk round trip."""
cli = d1_cli.impl.command_parser.CLI()
cli.preloop()
fi, path = tempfile.mkstemp(
prefix="test_dataone_cli.", suffix=".tmp", text=True
)
os.close(fi)
# Reset, set some values and save to file
cli.do_reset("")
cli.do_set("editor test_editor")
cli.do_set("cn-url test_cn-url")
cli.do_set("key-file test-key-file")
cli.do_save(path)
# Reset and check that values are at their defaults
cli.do_reset("")
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("editor")
assert "editor: nano" in out_stream.getvalue()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("cn-url")
assert "cn-url: https://cn.dataone.org/cn" in out_stream.getvalue()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("key-file")
assert "key-file: None" in out_stream.getvalue()
# Load from file and verify
cli.do_load(path)
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("editor")
assert "editor: test_editor" in out_stream.getvalue()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("cn-url")
assert "cn-url: test_cn-url" in out_stream.getvalue()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("key-file")
assert "key-file: test-key-file" in out_stream.getvalue()
def test_1120(self, cn_client_v2):
"""set: Command gives expected output on flag toggle."""
cli = d1_cli.impl.command_parser.CLI()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("verbose true")
assert 'verbose to "true"' in out_stream.getvalue()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("verbose false")
assert 'verbose to "false"' in out_stream.getvalue()
def test_1130(self, cn_client_v2):
"""set: Command gives expected output when setting count."""
cli = d1_cli.impl.command_parser.CLI()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("count 2")
assert 'count to "2"' in out_stream.getvalue()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("count 3")
assert 'count to "3"' in out_stream.getvalue()
def test_1140(self, cn_client_v2):
"""set: Command gives expected output when setting query string."""
cli = d1_cli.impl.command_parser.CLI()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("query a=b")
assert 'variable query to "a=b"' in out_stream.getvalue()
@d1_test.mock_api.catch_all.activate
def test_1150(self, cn_client_v2):
"""ping (no arguments): Ping the CN and MN that is specified in the session."""
d1_test.mock_api.catch_all.add_callback(d1_test.d1_test_case.MOCK_CN_BASE_URL)
d1_test.mock_api.catch_all.add_callback(d1_test.d1_test_case.MOCK_MN_BASE_URL)
cli = d1_cli.impl.command_parser.CLI()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("cn-url {}".format(d1_test.d1_test_case.MOCK_CN_BASE_URL))
cli.do_set("mn-url {}".format(d1_test.d1_test_case.MOCK_MN_BASE_URL))
cli.do_ping("")
def test_1160(self, cn_client_v2):
"""do_allowaccess(): Correctly sets access control."""
cli = d1_cli.impl.command_parser.CLI()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_allowaccess("test_subject_1 write")
cli.do_allowaccess("test_subject_2 write")
cli.do_allowaccess("test_subject_3 changePermission")
access_pyxb = cli._command_processor.get_session().get_access_control()
check_cnt = 0
for allow_pyxb in access_pyxb.allow:
if allow_pyxb in ("test_subject_1", "test_subject_2", "test_subject_3"):
check_cnt += 1
assert check_cnt == 3
assert (
'Set changePermission access for subject "test_subject_3"'
in out_stream.getvalue()
)
def test_1170(self, cn_client_v2):
"""do_denyaccess(): Subject without permissions raises InvalidArguments."""
cli = d1_cli.impl.command_parser.CLI()
cli.do_allowaccess("test_subject_1 write")
cli.do_allowaccess("test_subject_2 write")
cli.do_allowaccess("test_subject_3 changePermission")
with pytest.raises(d1_cli.impl.exceptions.InvalidArguments):
cli.do_denyaccess("unknown_subject")
def test_1180(self, cn_client_v2):
"""do_denyaccess(): Subject with permissions is removed."""
cli = d1_cli.impl.command_parser.CLI()
cli.do_allowaccess("test_subject_1 write")
cli.do_allowaccess("test_subject_2 write")
cli.do_allowaccess("test_subject_3 changePermission")
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("")
env_str = out_stream.getvalue()
assert "test_subject_3: changePermission" in env_str
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_denyaccess("test_subject_3")
assert 'Removed subject "test_subject_3"' in out_stream.getvalue()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("")
env_str = out_stream.getvalue()
assert "test_subject_1: write" in env_str
assert "test_subject_2: write" in env_str
assert "test_subject_3: changePermission" not in env_str
def test_1190(self, cn_client_v2):
"""do_clearaccess(): Removes all subjects."""
cli = d1_cli.impl.command_parser.CLI()
cli.do_allowaccess("test_subject_1 write")
cli.do_allowaccess("test_subject_2 write")
cli.do_allowaccess("test_subject_3 changePermission")
cli.do_clearaccess("")
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_set("")
env_str = out_stream.getvalue()
assert "test_subject_1: write" not in env_str
assert "test_subject_2: write" not in env_str
assert "test_subject_3: changePermission" not in env_str
def test_1200(self, cn_client_v2):
"""do_allowrep(), do_denyrep(): Toggles replication."""
cli = d1_cli.impl.command_parser.CLI()
cli.do_reset("")
cli.do_allowrep("")
assert (
cli._command_processor.get_session()
.get_replication_policy()
.get_replication_allowed()
)
cli.do_denyrep("")
assert (
not cli._command_processor.get_session()
.get_replication_policy()
.get_replication_allowed()
)
def test_1210(self, cn_client_v2):
"""do_preferrep(): Adds preferred replication targets."""
cli = d1_cli.impl.command_parser.CLI()
cli.do_reset("")
cli.do_preferrep("preferred-mn-1")
cli.do_preferrep("preferred-mn-2")
cli.do_preferrep("preferred-mn-3")
preferred_mn_list = (
cli._command_processor.get_session()
.get_replication_policy()
.get_preferred()
)
assert [
"preferred-mn-1",
"preferred-mn-2",
"preferred-mn-3",
] == preferred_mn_list
def test_1220(self, cn_client_v2):
"""do_blockrep(): Adds blocked replication targets."""
cli = d1_cli.impl.command_parser.CLI()
cli.do_reset("")
cli.do_blockrep("blocked-mn-1")
cli.do_blockrep("blocked-mn-2")
cli.do_blockrep("blocked-mn-3")
blocked_mn_list = (
cli._command_processor.get_session().get_replication_policy().get_blocked()
)
assert ["blocked-mn-1", "blocked-mn-2", "blocked-mn-3"] == blocked_mn_list
def test_1230(self, cn_client_v2):
"""do_removerep(): Adds blocked replication targets."""
cli = d1_cli.impl.command_parser.CLI()
cli.do_reset("")
cli.do_preferrep("preferred-mn-1")
cli.do_preferrep("preferred-mn-2")
cli.do_preferrep("preferred-mn-3")
cli.do_blockrep("blocked-mn-1")
cli.do_blockrep("blocked-mn-2")
cli.do_blockrep("blocked-mn-3")
cli.do_removerep("blocked-mn-2")
cli.do_removerep("preferred-mn-3")
preferred_mn_list = (
cli._command_processor.get_session()
.get_replication_policy()
.get_preferred()
)
assert ["preferred-mn-1", "preferred-mn-2"] == preferred_mn_list
blocked_mn_list = (
cli._command_processor.get_session().get_replication_policy().get_blocked()
)
assert ["blocked-mn-1", "blocked-mn-3"] == blocked_mn_list
def test_1240(self, cn_client_v2):
"""do_numberrep(): Sets preferred number of replicas."""
cli = d1_cli.impl.command_parser.CLI()
cli.do_reset("")
cli.do_numberrep("42")
received_num_replicas = (
cli._command_processor.get_session()
.get_replication_policy()
.get_number_of_replicas()
)
assert received_num_replicas == 42
def test_1250(self, cn_client_v2):
"""do_clearrep(): Resets replication policy to default."""
cli = d1_cli.impl.command_parser.CLI()
cli.do_reset("")
cli.do_preferrep("preferred-mn-1")
cli.do_preferrep("preferred-mn-2")
cli.do_blockrep("blocked-mn-1")
cli.do_blockrep("blocked-mn-2")
cli.do_numberrep("42")
cli.do_clearrep("")
preferred_mn_list = (
cli._command_processor.get_session()
.get_replication_policy()
.get_preferred()
)
assert not preferred_mn_list
blocked_mn_list = (
cli._command_processor.get_session().get_replication_policy().get_blocked()
)
assert not blocked_mn_list
@responses.activate
def test_1260(self, capsys):
"""list nodes: Gives expected output."""
d1_test.mock_api.list_nodes.add_callback("http://responses/cn")
cli = d1_cli.impl.command_parser.CLI()
cli.do_set("cn-url http://responses/cn")
cli.do_listnodes("")
stdout, stderr = capsys.readouterr()
self.sample.assert_equals(stdout, "list_nodes")
@responses.activate
def test_1270(self, cn_client_v2):
"""do_get(): Successful file download."""
d1_test.mock_api.get.add_callback("http://responses/cn")
cli = d1_cli.impl.command_parser.CLI()
cli.do_set("mn-url http://responses/cn")
with tempfile.NamedTemporaryFile() as tmp_file:
tmp_file_path = tmp_file.name
pid_str = "test_pid_1234"
cli.do_get("{} {}".format(pid_str, tmp_file_path))
with open(tmp_file_path, "rb") as f:
received_sciobj_bytes = f.read()
client = d1_client.mnclient.MemberNodeClient("http://responses/cn")
expected_sciobj_bytes = client.get(pid_str).content
assert received_sciobj_bytes == expected_sciobj_bytes
@responses.activate
def test_1280(self, cn_client_v2, caplog):
"""do_meta(): Successful system metadata download."""
d1_test.mock_api.get_system_metadata.add_callback("http://responses/cn")
cli = d1_cli.impl.command_parser.CLI()
cli.do_set("cn-url http://responses/cn")
with d1_test.d1_test_case.temp_file_name() as tmp_file_path:
cli.do_meta("test_pid_1234 {}".format(tmp_file_path))
with open(tmp_file_path, "rb") as f:
received_sysmeta_xml = f.read().decode("utf-8")
self.sample.assert_equals(received_sysmeta_xml, "do_meta")
@responses.activate
def test_1290(self, cn_client_v2):
"""do_list(): Successful object listing."""
d1_test.mock_api.list_objects.add_callback("http://responses/cn")
cli = d1_cli.impl.command_parser.CLI()
cli.do_set("mn-url http://responses/cn")
with d1_test.d1_test_case.temp_file_name() as tmp_file_path:
cli.do_list(tmp_file_path)
with open(tmp_file_path, "rb") as f:
received_object_list_xml = f.read().decode("utf-8")
self.sample.assert_equals(received_object_list_xml, "do_list")
@responses.activate
def test_1300(self, cn_client_v2):
"""do_log(): Successful object listing."""
d1_test.mock_api.get_log_records.add_callback("http://responses/cn")
cli = d1_cli.impl.command_parser.CLI()
cli.do_set("mn-url http://responses/cn")
with tempfile.NamedTemporaryFile() as tmp_file:
tmp_file_path = tmp_file.name
cli.do_log(tmp_file_path)
with open(tmp_file_path, "rb") as f:
received_event_log_pyxb = d1_common.types.dataoneTypes.CreateFromDocument(
f.read()
)
now = d1_common.date_time.utc_now()
for log_entry in received_event_log_pyxb.logEntry:
log_entry.dateLogged = now
self.sample.assert_equals(received_event_log_pyxb, "do_log", cn_client_v2)
#
# Write Operations
#
@d1_test.mock_api.catch_all.activate
@freezegun.freeze_time("1977-02-27")
def test_1310(self, cn_client_v2):
"""do_create(): Expected REST call is issued."""
d1_test.mock_api.catch_all.add_callback(d1_test.d1_test_case.MOCK_MN_BASE_URL)
cli = d1_cli.impl.command_parser.CLI()
with self._add_write_operation_to_queue(
cli, cli.do_create, "{pid} {tmp_file_path}"
):
self._assert_queued_operations(cli, 1, "create")
# Check cancel
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
with d1_test.d1_test_case.mock_input("no"):
with pytest.raises(d1_cli.impl.exceptions.InvalidArguments):
cli.do_run("")
assert "Continue" in out_stream.getvalue()
# Check create
with mock.patch("d1_cli.impl.client.CLIMNClient.create") as mock_client:
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
with d1_test.d1_test_case.mock_input("yes"):
cli.do_run("")
name, args, kwargs = mock_client.mock_calls[0]
create_pid_str, tmp_file, create_sysmeta_pyxb = args
d1_common.system_metadata.normalize_in_place(
create_sysmeta_pyxb, reset_timestamps=True
)
self.sample.assert_equals(create_sysmeta_pyxb, "do_create", cn_client_v2)
def test_1320(self, cn_client_v2):
"""do_clearqueue(): Queue can be cleared."""
cli = d1_cli.impl.command_parser.CLI()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
with self._add_write_operation_to_queue(
cli, cli.do_create, "{pid} {tmp_file_path}"
):
self._assert_queued_operations(cli, 1, "create")
with d1_test.d1_test_case.mock_input("yes"):
cli.do_clearqueue("")
self._assert_queue_empty(cli)
assert "You are about to clear" in out_stream.getvalue()
def test_1330(self, cn_client_v2):
"""do_update(): Task is added to queue."""
cli = d1_cli.impl.command_parser.CLI()
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
with self._add_write_operation_to_queue(
cli, cli.do_update, "old_pid {pid} {tmp_file_path}"
):
self._assert_queued_operations(cli, 1, "update")
with d1_test.d1_test_case.mock_input("yes"):
cli.do_clearqueue("")
self._assert_queue_empty(cli)
assert "You are about to clear" in out_stream.getvalue()
def test_1340(self, cn_client_v2):
"""do_package(): Task is added to queue."""
cli = d1_cli.impl.command_parser.CLI()
with self._add_write_operation_to_queue(
cli,
cli.do_package,
"{pid} scimeta_pid sciobj1_pid sciobj2_pid, sciobj3_pid",
):
self._assert_queued_operations(cli, 1, "create_package")
self._clear_queue(cli)
self._assert_queue_empty(cli)
def test_1350(self, cn_client_v2):
"""do_archive(): Tasks are added to queue for each pid."""
cli = d1_cli.impl.command_parser.CLI()
with self._add_write_operation_to_queue(
cli, cli.do_archive, "archive1_pid archive2_pid archive3_pid archive4_pid"
):
self._assert_queued_operations(cli, 4, "archive")
self._clear_queue(cli)
self._assert_queue_empty(cli)
def test_1360(self, cn_client_v2):
"""do_updateaccess(): Tasks are added to queue for each pid."""
cli = d1_cli.impl.command_parser.CLI()
with self._disable_check_for_authenticated_access():
with self._add_write_operation_to_queue(
cli, cli.do_updateaccess, "access1_pid access2_pid access3_pid"
):
self._assert_queued_operations(cli, 3, "update_access_policy")
self._clear_queue(cli)
self._assert_queue_empty(cli)
def test_1370(self, cn_client_v2):
"""do_updatereplication(): Tasks are added to queue for each pid."""
cli = d1_cli.impl.command_parser.CLI()
with self._disable_check_for_authenticated_access():
with self._add_write_operation_to_queue(
cli,
cli.do_updatereplication,
"replication1_pid replication2_pid replication3_pid",
):
self._assert_queued_operations(cli, 3, "update_replication_policy")
self._clear_queue(cli)
self._assert_queue_empty(cli)
def _assert_queue_empty(self, cli):
with pytest.raises(d1_cli.impl.exceptions.InvalidArguments):
cli.do_queue("")
def _clear_queue(self, cli):
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
with d1_test.d1_test_case.mock_input("yes"):
cli.do_clearqueue("")
assert "You are about to clear" in out_stream.getvalue()
@contextlib.contextmanager
def _add_write_operation_to_queue(
self, cli, write_fun, cmd_format_str, **kwargs_dict
):
cli.do_reset("")
cli.do_allowaccess("test_subject_1 write")
cli.do_allowaccess("test_subject_3 changePermission")
cli.do_preferrep("preferred-mn-2")
cli.do_blockrep("blocked-mn-1")
cli.do_blockrep("blocked-mn-2")
cli.do_numberrep("42")
cli.do_set("authoritative-mn urn:node:myTestMN")
cli.do_set("rights-holder test-rights-holder-subject")
cli.do_set("format-id test-format-id")
cli.do_set("cn-url {}".format(d1_test.d1_test_case.MOCK_CN_BASE_URL))
cli.do_set("mn-url {}".format(d1_test.d1_test_case.MOCK_MN_BASE_URL))
pid_str = "test_pid_{}".format(
d1_test.instance_generator.random_data.random_3_words()
)
with tempfile.NamedTemporaryFile(delete=False) as tmp_file:
tmp_file.write("sciobj_for_{}".format(pid_str).encode("utf-8"))
# Add a create task to the queue.
kwargs_dict.update({"pid": pid_str, "tmp_file_path": tmp_file.name})
with d1_test.d1_test_case.capture_std():
write_fun(cmd_format_str.format(**kwargs_dict))
yield pid_str
os.unlink(tmp_file.name)
@contextlib.contextmanager
def _disable_check_for_authenticated_access(self):
with mock.patch(
"d1_cli.impl.operation_validator.OperationValidator."
"_assert_authenticated_access",
return_value=True,
):
yield
def _assert_queued_operations(self, cli, num_operations, operation_str):
with d1_test.d1_test_case.capture_std() as (out_stream, err_stream):
cli.do_queue("")
queue_str = out_stream.getvalue()
assert re.search(r"operation:\s*{}".format(operation_str), queue_str)
assert re.search(r"\d+ of {}".format(num_operations), queue_str)
# def test_1380(self, cn_client_v2):
# """search: Expected Solr query is generated"""
# expect = '*:* dateModified:[* TO *]'
# args = ' '.join([_f for _f in ('id:knb-lter*',) if _f])
# cli = d1_cli.impl.command_parser.CLI()
# actual = cli._command_processor._create_solr_query(args)
# assert expect == actual
def test_1380(self, cn_client_v2):
"""search: Expected Solr query is generated."""
expect = "id:knb-lter* dateModified:[* TO *]"
args = " ".join([_f for _f in ("id:knb-lter*",) if _f])
cli = d1_cli.impl.command_parser.CLI()
actual = cli._command_processor._create_solr_query(args)
assert expect == actual
def test_1390(self, cn_client_v2):
"""search: Expected Solr query is generated."""
expect = "id:knb-lter* abstract:water dateModified:[* TO *]"
args = " ".join([_f for _f in ("id:knb-lter*",) if _f])
cli = d1_cli.impl.command_parser.CLI()
cli.do_set("query abstract:water")
actual = cli._command_processor._create_solr_query(args)
assert expect == actual
def test_1400(self, cn_client_v2):
"""search: Expected Solr query is generated."""
expect = "id:knb-lter* abstract:water dateModified:[* TO *]"
args = " ".join([_f for _f in ("id:knb-lter*",) if _f])
cli = d1_cli.impl.command_parser.CLI()
cli.do_set("query abstract:water")
actual = cli._command_processor._create_solr_query(args)
assert expect == actual
def test_1410(self, cn_client_v2):
"""search: Expected Solr query is generated."""
expect = "id:knb-lter* formatId:text/csv dateModified:[* TO *]"
args = " ".join([_f for _f in ("id:knb-lter*",) if _f])
cli = d1_cli.impl.command_parser.CLI()
cli.do_set("query None")
cli.do_set("search-format-id text/csv")
actual = cli._command_processor._create_solr_query(args)
assert expect == actual
| [
"[email protected]"
]
| |
582e0a4f9404ffe497957148713488fb28333b04 | 49f23f530d0cda7aadbb27be9c5bdefaa794d27f | /server/common_models/user.py | a5b3f4d6f5e5d6819209dd9b15cdda3c1a15dacb | [
"MIT"
]
| permissive | Soopro/totoro | 198f3a51ae94d7466136ee766be98cb559c991f1 | 6be1af50496340ded9879a6450c8208ac9f97e72 | refs/heads/master | 2020-05-14T09:22:21.942621 | 2019-08-03T20:55:23 | 2019-08-03T20:55:23 | 181,738,167 | 0 | 1 | MIT | 2019-10-29T13:43:24 | 2019-04-16T17:42:16 | Python | UTF-8 | Python | false | false | 2,223 | py | # coding=utf-8
from __future__ import absolute_import
from document import BaseDocument, ObjectId, INDEX_DESC
from utils.misc import now
class User(BaseDocument):
STATUS_BEGINNER, STATUS_VIP, STATUS_BANNED = (0, 1, 2)
MAX_QUERY = 120
structure = {
'login': unicode,
'password_hash': unicode,
'openid': unicode,
'unionid': unicode,
'credit': int,
'meta': dict,
'creation': int,
'updated': int,
'status': int,
}
sensitive_fields = ['meta']
required_fields = ['openid']
default_values = {
'login': u'',
'password_hash': u'',
'unionid': u'',
'credit': 0,
'meta': {},
'creation': now,
'updated': now,
'status': STATUS_BEGINNER
}
indexes = [
{
'fields': ['openid'],
'unique': True,
},
{
'fields': ['login'],
'unique': True,
},
{
'fields': ['creation'],
},
{
'fields': ['status'],
}
]
def find_all(self):
return self.find().sort('creation', INDEX_DESC)
    def find_activated(self):
        # NOTE: the original referenced self.STATUS_ACTIVATED, which is not
        # defined on this model; assuming "activated" maps to STATUS_VIP here.
        return self.find({
            'status': self.STATUS_VIP
        }).sort('creation', INDEX_DESC).limit(self.MAX_QUERY)
def find_by_status(self, status):
return self.find({
'status': status
}).sort('creation', INDEX_DESC).limit(self.MAX_QUERY)
def find_one_by_id(self, user_id):
return self.find_one({
'_id': ObjectId(user_id),
})
def find_one_by_login(self, login):
if not login:
return None
return self.find_one({
'login': login,
})
def find_one_by_openid(self, openid):
return self.find_one({
'openid': openid,
})
def displace_login(self, login, openid):
        # A login can exist only once across users.
return self.collection.update(
{'openid': {'$ne': openid}, 'login': login},
{'$set': {'login': u'', 'status': self.STATUS_BEGINNER}},
multi=True)
def count_used(self):
return self.find().count()
| [
"[email protected]"
]
| |
63b86d3db2a788557594680b422fe05b9c77afcf | d01f9ff2d7ba3c7c99158678adeaf082f3f15dbc | /model/cpn/ade.cpn.R50_v1c.v38.v2/train.py | 294cb1add0f70efdd177b711e5ca1fc5df2170d0 | [
"MIT"
]
| permissive | akinoriosamura/TorchSeg-mirror | d8e76d99e80d55c2555f4f8f7a7fc3f30ef5dec4 | 34033fe85fc24015bcef7a92aad39d2a25a001a5 | refs/heads/master | 2021-06-18T15:47:00.946788 | 2019-10-26T04:46:07 | 2019-10-26T04:46:07 | 217,657,156 | 0 | 0 | MIT | 2021-06-08T20:36:44 | 2019-10-26T04:46:39 | Python | UTF-8 | Python | false | false | 6,088 | py | from __future__ import division
import os.path as osp
import sys
import argparse
from tqdm import tqdm
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.distributed as dist
import torch.backends.cudnn as cudnn
from config import config
from dataloader import get_train_loader
from network import CPNet
from datasets import ADE
from utils.init_func import init_weight, group_weight
from engine.lr_policy import PolyLR
from engine.logger import get_logger
from engine.engine import Engine
from seg_opr.sync_bn import DataParallelModel, Reduce, BatchNorm2d
from seg_opr.seg_oprs import one_hot
try:
from apex.parallel import SyncBatchNorm, DistributedDataParallel
except ImportError:
raise ImportError(
"Please install apex from https://www.github.com/nvidia/apex .")
logger = get_logger()
torch.manual_seed(config.seed)
if torch.cuda.is_available():
torch.cuda.manual_seed(config.seed)
parser = argparse.ArgumentParser()
with Engine(custom_parser=parser) as engine:
args = parser.parse_args()
cudnn.benchmark = True
if engine.distributed:
torch.cuda.set_device(engine.local_rank)
# data loader
train_loader, train_sampler = get_train_loader(engine, ADE)
# config network and criterion
criterion = nn.CrossEntropyLoss(reduction='mean',
ignore_index=-1)
if engine.distributed:
logger.info('Use the Multi-Process-SyncBatchNorm')
BatchNorm2d = SyncBatchNorm
else:
BatchNorm2d = BatchNorm2d
model = CPNet(config.num_classes, criterion=criterion,
pretrained_model=config.pretrained_model,
norm_layer=BatchNorm2d)
init_weight(model.business_layer, nn.init.kaiming_normal_,
BatchNorm2d, config.bn_eps, config.bn_momentum,
mode='fan_in', nonlinearity='relu')
# group weight and config optimizer
base_lr = config.lr
# if engine.distributed:
# base_lr = config.lr * engine.world_size
params_list = []
params_list = group_weight(params_list, model.backbone,
BatchNorm2d, base_lr)
for module in model.business_layer:
params_list = group_weight(params_list, module, BatchNorm2d,
base_lr * 10)
# config lr policy
total_iteration = config.nepochs * config.niters_per_epoch
lr_policy = PolyLR(base_lr, config.lr_power, total_iteration)
optimizer = torch.optim.SGD(params_list,
lr=base_lr,
momentum=config.momentum,
weight_decay=config.weight_decay)
if engine.distributed:
if torch.cuda.is_available():
model.cuda()
model = DistributedDataParallel(model)
else:
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
model = DataParallelModel(model, engine.devices)
model.to(device)
engine.register_state(dataloader=train_loader, model=model,
optimizer=optimizer)
if engine.continue_state_object:
engine.restore_checkpoint()
optimizer.zero_grad()
model.train()
for epoch in range(engine.state.epoch, config.nepochs):
if engine.distributed:
train_sampler.set_epoch(epoch)
bar_format = '{desc}[{elapsed}<{remaining},{rate_fmt}]'
pbar = tqdm(range(config.niters_per_epoch), file=sys.stdout,
bar_format=bar_format)
dataloader = iter(train_loader)
for idx in pbar:
engine.update_iteration(epoch, idx)
minibatch = dataloader.next()
imgs = minibatch['data']
gts = minibatch['label']
imgs = imgs.cuda(non_blocking=True)
gts = gts.cuda(non_blocking=True)
b, h, w = gts.size()
scaled_gts = F.interpolate((gts.view(b, 1, h, w)).float(),
scale_factor=0.125,
mode="nearest")
b, c, h, w = scaled_gts.size()
scaled_gts = scaled_gts.squeeze_()
C = config.num_classes + 1
one_hot_gts = one_hot(scaled_gts, C).view(b, C, -1)
similarity_gts = torch.bmm(one_hot_gts.permute(0, 2, 1),
one_hot_gts)
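            # Shape note (added comment): one_hot_gts is (b, C, h*w), so the
            # bmm of its transpose with itself yields a (b, h*w, h*w) ideal
            # affinity map whose entry (i, j) is 1 iff pixels i and j share
            # the same ground-truth class.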
gts = gts - 1
loss = model(imgs, gts, similarity_gts)
# reduce the whole loss over multi-gpu
if engine.distributed:
dist.all_reduce(loss, dist.ReduceOp.SUM)
loss = loss / engine.world_size
else:
loss = Reduce.apply(*loss) / len(loss)
optimizer.zero_grad()
loss.backward()
optimizer.step()
current_idx = epoch * config.niters_per_epoch + idx
lr = lr_policy.get_lr(current_idx)
optimizer.param_groups[0]['lr'] = lr
optimizer.param_groups[1]['lr'] = lr
for i in range(2, len(optimizer.param_groups)):
optimizer.param_groups[i]['lr'] = lr * 10
print_str = 'Epoch{}/{}'.format(epoch, config.nepochs) \
+ ' Iter{}/{}:'.format(idx + 1, config.niters_per_epoch) \
+ ' lr=%.2e' % lr \
+ ' loss=%.2f' % loss.item()
pbar.set_description(print_str, refresh=False)
if (epoch >= config.nepochs - 20) or (
epoch % config.snapshot_iter == 0):
if engine.distributed and (engine.local_rank == 0):
engine.save_and_link_checkpoint(config.snapshot_dir,
config.log_dir,
config.log_dir_link)
elif not engine.distributed:
engine.save_and_link_checkpoint(config.snapshot_dir,
config.log_dir,
config.log_dir_link)
| [
"[email protected]"
]
| |
6f6c6ada1f54e063deb49df940dc1cc3650971d6 | b9008dc6326b30de1a16ba01a1f3143aa248f7c3 | /python/chapter3/ex01_10.py | 0eed47d612f626182ab96704303341e3153a0b74 | []
| no_license | wonjongah/multicampus_IoT | ce219f8b9875aa7738ef952a8702d818a571610e | 765a5cd7df09a869a4074d8eafce69f1d6cfda4a | refs/heads/master | 2023-02-13T12:30:19.924691 | 2021-01-08T10:17:42 | 2021-01-08T10:17:42 | 292,800,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 392 | py | country = "Korea"
if country == "Korea":
print("한국입니다")
if country != "Korea":
print("한국이 아닙니다")
if "korea" > "japan":
print("한국이 더 크다")
if "korea" < "japan":
print("일본이 더 크다")
if "Korea" > "korea":
print("Korea가 더 큽니다")
if "Korea" < "korea":
print("korea가 더 큽니다")
print(ord("K"))
print(ord("k")) | [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.