| column | type | range / values |
|---|---|---|
| blob_id | stringlengths | 40 – 40 |
| directory_id | stringlengths | 40 – 40 |
| path | stringlengths | 3 – 616 |
| content_id | stringlengths | 40 – 40 |
| detected_licenses | sequencelengths | 0 – 112 |
| license_type | stringclasses | 2 values |
| repo_name | stringlengths | 5 – 115 |
| snapshot_id | stringlengths | 40 – 40 |
| revision_id | stringlengths | 40 – 40 |
| branch_name | stringclasses | 777 values |
| visit_date | timestamp[us] | 2015-08-06 10:31:46 – 2023-09-06 10:44:38 |
| revision_date | timestamp[us] | 1970-01-01 02:38:32 – 2037-05-03 13:00:00 |
| committer_date | timestamp[us] | 1970-01-01 02:38:32 – 2023-09-06 01:08:06 |
| github_id | int64 (nullable ⌀) | 4.92k – 681M |
| star_events_count | int64 | 0 – 209k |
| fork_events_count | int64 | 0 – 110k |
| gha_license_id | stringclasses | 22 values |
| gha_event_created_at | timestamp[us] (nullable ⌀) | 2012-06-04 01:52:49 – 2023-09-14 21:59:50 |
| gha_created_at | timestamp[us] (nullable ⌀) | 2008-05-22 07:58:19 – 2023-08-21 12:35:19 |
| gha_language | stringclasses | 149 values |
| src_encoding | stringclasses | 26 values |
| language | stringclasses | 1 value |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 3 – 10.2M |
| extension | stringclasses | 188 values |
| content | stringlengths | 3 – 10.2M |
| authors | sequencelengths | 1 – 1 |
| author_id | stringlengths | 1 – 132 |
b644fdae6c47bf17c9975e034e80c39484262a6e | cafefb0b182567e5cabe22c44578bb712385e9f5 | /lib/gcloud/datastore/query.py | 15519a01f15b6211619350226205630a2d216c9a | [
"BSD-3-Clause"
] | permissive | gtaylor/evennia-game-index | fe0088e97087c0aaa0c319084e28b2c992c2c00b | b47f27f4dff2a0c32991cee605d95911946ca9a5 | refs/heads/master | 2022-11-25T20:28:23.707056 | 2022-11-07T17:47:25 | 2022-11-07T17:47:25 | 55,206,601 | 2 | 2 | BSD-3-Clause | 2018-04-19T05:41:12 | 2016-04-01T05:40:15 | Python | UTF-8 | Python | false | false | 17,855 | py |
# Copyright 2014 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create / interact with gcloud datastore queries."""
import base64
from gcloud._helpers import _ensure_tuple_or_list
from gcloud.datastore._generated import query_pb2 as _query_pb2
from gcloud.datastore import helpers
from gcloud.datastore.key import Key
class Query(object):
"""A Query against the Cloud Datastore.
This class serves as an abstraction for creating a query over data
stored in the Cloud Datastore.
:type client: :class:`gcloud.datastore.client.Client`
:param client: The client used to connect to datastore.
:type kind: string
:param kind: The kind to query.
:type project: string
:param project: The project associated with the query. If not passed,
uses the client's value.
:type namespace: string or None
:param namespace: The namespace to which to restrict results. If not
passed, uses the client's value.
:type ancestor: :class:`gcloud.datastore.key.Key` or None
:param ancestor: key of the ancestor to which this query's results are
restricted.
:type filters: sequence of (property_name, operator, value) tuples
:param filters: property filters applied by this query.
:type projection: sequence of string
:param projection: fields returned as part of query results.
:type order: sequence of string
:param order: field names used to order query results. Prepend '-'
to a field name to sort it in descending order.
:type distinct_on: sequence of string
:param distinct_on: field names used to group query results.
:raises: ValueError if ``project`` is not passed and no implicit
default is set.
"""
OPERATORS = {
'<=': _query_pb2.PropertyFilter.LESS_THAN_OR_EQUAL,
'>=': _query_pb2.PropertyFilter.GREATER_THAN_OR_EQUAL,
'<': _query_pb2.PropertyFilter.LESS_THAN,
'>': _query_pb2.PropertyFilter.GREATER_THAN,
'=': _query_pb2.PropertyFilter.EQUAL,
}
"""Mapping of operator strings and their protobuf equivalents."""
def __init__(self,
client,
kind=None,
project=None,
namespace=None,
ancestor=None,
filters=(),
projection=(),
order=(),
distinct_on=()):
self._client = client
self._kind = kind
self._project = project or client.project
self._namespace = namespace or client.namespace
self._ancestor = ancestor
self._filters = []
# Verify filters passed in.
for property_name, operator, value in filters:
self.add_filter(property_name, operator, value)
self._projection = _ensure_tuple_or_list('projection', projection)
self._order = _ensure_tuple_or_list('order', order)
self._distinct_on = _ensure_tuple_or_list('distinct_on', distinct_on)
@property
def project(self):
"""Get the project for this Query.
:rtype: str
"""
return self._project or self._client.project
@property
def namespace(self):
"""This query's namespace
:rtype: string or None
:returns: the namespace assigned to this query
"""
return self._namespace or self._client.namespace
@namespace.setter
def namespace(self, value):
"""Update the query's namespace.
:type value: string
"""
if not isinstance(value, str):
raise ValueError("Namespace must be a string")
self._namespace = value
@property
def kind(self):
"""Get the Kind of the Query.
:rtype: string
"""
return self._kind
@kind.setter
def kind(self, value):
"""Update the Kind of the Query.
:type value: string
:param value: updated kind for the query.
.. note::
The protobuf specification allows for ``kind`` to be repeated,
but the current implementation returns an error if more than
one value is passed. If the back-end changes in the future to
allow multiple values, this method will be updated to allow passing
either a string or a sequence of strings.
"""
if not isinstance(value, str):
raise TypeError("Kind must be a string")
self._kind = value
@property
def ancestor(self):
"""The ancestor key for the query.
:rtype: Key or None
"""
return self._ancestor
@ancestor.setter
def ancestor(self, value):
"""Set the ancestor for the query
:type value: Key
:param value: the new ancestor key
"""
if not isinstance(value, Key):
raise TypeError("Ancestor must be a Key")
self._ancestor = value
@ancestor.deleter
def ancestor(self):
"""Remove the ancestor for the query."""
self._ancestor = None
@property
def filters(self):
"""Filters set on the query.
:rtype: sequence of (property_name, operator, value) tuples.
"""
return self._filters[:]
def add_filter(self, property_name, operator, value):
"""Filter the query based on a property name, operator and a value.
Expressions take the form of::
.add_filter('<property>', '<operator>', <value>)
where property is a property stored on the entity in the datastore
and operator is one of ``OPERATORS``
(ie, ``=``, ``<``, ``<=``, ``>``, ``>=``)::
>>> from gcloud import datastore
>>> client = datastore.Client()
>>> query = client.query(kind='Person')
>>> query.add_filter('name', '=', 'James')
>>> query.add_filter('age', '>', 50)
:type property_name: string
:param property_name: A property name.
:type operator: string
:param operator: One of ``=``, ``<``, ``<=``, ``>``, ``>=``.
:type value: :class:`int`, :class:`str`, :class:`bool`,
:class:`float`, :class:`NoneType`,
:class:`datetime.datetime`,
:class:`gcloud.datastore.key.Key`
:param value: The value to filter on.
        :raises: :class:`ValueError` if ``operator`` is not one of the
specified values, or if a filter names ``'__key__'`` but
passes an invalid value (a key is required).
"""
if self.OPERATORS.get(operator) is None:
error_message = 'Invalid expression: "%s"' % (operator,)
choices_message = 'Please use one of: =, <, <=, >, >=.'
raise ValueError(error_message, choices_message)
if property_name == '__key__' and not isinstance(value, Key):
raise ValueError('Invalid key: "%s"' % value)
self._filters.append((property_name, operator, value))
@property
def projection(self):
"""Fields names returned by the query.
:rtype: sequence of string
:returns: Names of fields in query results.
"""
return self._projection[:]
@projection.setter
def projection(self, projection):
"""Set the fields returned the query.
:type projection: string or sequence of strings
:param projection: Each value is a string giving the name of a
property to be included in the projection query.
"""
if isinstance(projection, str):
projection = [projection]
self._projection[:] = projection
def keys_only(self):
"""Set the projection to include only keys."""
self._projection[:] = ['__key__']
def key_filter(self, key, operator='='):
"""Filter on a key.
:type key: :class:`gcloud.datastore.key.Key`
:param key: The key to filter on.
:type operator: string
:param operator: (Optional) One of ``=``, ``<``, ``<=``, ``>``, ``>=``.
Defaults to ``=``.
"""
self.add_filter('__key__', operator, key)
@property
def order(self):
"""Names of fields used to sort query results.
:rtype: sequence of string
"""
return self._order[:]
@order.setter
def order(self, value):
"""Set the fields used to sort query results.
Sort fields will be applied in the order specified.
:type value: string or sequence of strings
:param value: Each value is a string giving the name of the
property on which to sort, optionally preceded by a
hyphen (-) to specify descending order.
Omitting the hyphen implies ascending order.
"""
if isinstance(value, str):
value = [value]
self._order[:] = value
@property
def distinct_on(self):
"""Names of fields used to group query results.
:rtype: sequence of string
"""
return self._distinct_on[:]
@distinct_on.setter
def distinct_on(self, value):
"""Set fields used to group query results.
:type value: string or sequence of strings
:param value: Each value is a string giving the name of a
property to use to group results together.
"""
if isinstance(value, str):
value = [value]
self._distinct_on[:] = value
def fetch(self, limit=None, offset=0, start_cursor=None, end_cursor=None,
client=None):
"""Execute the Query; return an iterator for the matching entities.
For example::
>>> from gcloud import datastore
>>> client = datastore.Client()
>>> query = client.query(kind='Person')
>>> query.add_filter('name', '=', 'Sally')
>>> list(query.fetch())
[<Entity object>, <Entity object>, ...]
>>> list(query.fetch(1))
[<Entity object>]
:type limit: integer or None
:param limit: An optional limit passed through to the iterator.
:type offset: integer
:param offset: An optional offset passed through to the iterator.
:type start_cursor: bytes
:param start_cursor: An optional cursor passed through to the iterator.
:type end_cursor: bytes
:param end_cursor: An optional cursor passed through to the iterator.
:type client: :class:`gcloud.datastore.client.Client`
:param client: client used to connect to datastore.
If not supplied, uses the query's value.
:rtype: :class:`Iterator`
        :raises: ValueError if ``client`` is not passed and no implicit
default has been set.
"""
if client is None:
client = self._client
return Iterator(
self, client, limit, offset, start_cursor, end_cursor)
class Iterator(object):
"""Represent the state of a given execution of a Query.
:type query: :class:`gcloud.datastore.query.Query`
:param query: Query object holding permanent configuration (i.e.
                  things that don't change with each page of
                  a result set).
:type client: :class:`gcloud.datastore.client.Client`
:param client: The client used to make a request.
:type limit: integer
:param limit: (Optional) Limit the number of results returned.
:type offset: integer
:param offset: (Optional) Defaults to 0. Offset used to begin
a query.
:type start_cursor: bytes
:param start_cursor: (Optional) Cursor to begin paging through
query results.
:type end_cursor: bytes
:param end_cursor: (Optional) Cursor to end paging through
query results.
"""
_NOT_FINISHED = _query_pb2.QueryResultBatch.NOT_FINISHED
_FINISHED = (
_query_pb2.QueryResultBatch.NO_MORE_RESULTS,
_query_pb2.QueryResultBatch.MORE_RESULTS_AFTER_LIMIT,
)
def __init__(self, query, client, limit=None, offset=0,
start_cursor=None, end_cursor=None):
self._query = query
self._client = client
self._limit = limit
self._offset = offset
self._start_cursor = start_cursor
self._end_cursor = end_cursor
self._page = self._more_results = None
def next_page(self):
"""Fetch a single "page" of query results.
Low-level API for fine control: the more convenient API is
to iterate on the current Iterator.
:rtype: tuple, (entities, more_results, cursor)
"""
pb = _pb_from_query(self._query)
start_cursor = self._start_cursor
if start_cursor is not None:
pb.start_cursor = base64.urlsafe_b64decode(start_cursor)
end_cursor = self._end_cursor
if end_cursor is not None:
pb.end_cursor = base64.urlsafe_b64decode(end_cursor)
if self._limit is not None:
pb.limit.value = self._limit
pb.offset = self._offset
transaction = self._client.current_transaction
query_results = self._client.connection.run_query(
query_pb=pb,
project=self._query.project,
namespace=self._query.namespace,
transaction_id=transaction and transaction.id,
)
# NOTE: `query_results` contains an extra value that we don't use,
# namely `skipped_results`.
#
# NOTE: The value of `more_results` is not currently useful because
# the back-end always returns an enum
# value of MORE_RESULTS_AFTER_LIMIT even if there are no more
# results. See
# https://github.com/GoogleCloudPlatform/gcloud-python/issues/280
# for discussion.
entity_pbs, cursor_as_bytes, more_results_enum = query_results[:3]
if cursor_as_bytes == b'':
self._start_cursor = None
else:
self._start_cursor = base64.urlsafe_b64encode(cursor_as_bytes)
self._end_cursor = None
if more_results_enum == self._NOT_FINISHED:
self._more_results = True
elif more_results_enum in self._FINISHED:
self._more_results = False
else:
raise ValueError('Unexpected value returned for `more_results`.')
self._page = [
helpers.entity_from_protobuf(entity)
for entity in entity_pbs]
return self._page, self._more_results, self._start_cursor
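    # A sketch of manual paging with ``next_page`` (``iterator`` and
    # ``process`` are hypothetical; plain iteration, as in ``__iter__``
    # below, is usually more convenient):
    #
    #     entities, more_results, cursor = iterator.next_page()
    #     while True:
    #         process(entities)
    #         if not more_results:
    #             break
    #         entities, more_results, cursor = iterator.next_page()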
def __iter__(self):
"""Generator yielding all results matching our query.
:rtype: sequence of :class:`gcloud.datastore.entity.Entity`
"""
self.next_page()
while True:
for entity in self._page:
yield entity
if not self._more_results:
break
self.next_page()
def _pb_from_query(query):
"""Convert a Query instance to the corresponding protobuf.
:type query: :class:`Query`
:param query: The source query.
:rtype: :class:`gcloud.datastore._generated.query_pb2.Query`
:returns: A protobuf that can be sent to the protobuf API. N.b. that
it does not contain "in-flight" fields for ongoing query
executions (cursors, offset, limit).
"""
pb = _query_pb2.Query()
for projection_name in query.projection:
pb.projection.add().property.name = projection_name
if query.kind:
pb.kind.add().name = query.kind
composite_filter = pb.filter.composite_filter
composite_filter.op = _query_pb2.CompositeFilter.AND
if query.ancestor:
ancestor_pb = query.ancestor.to_protobuf()
# Filter on __key__ HAS_ANCESTOR == ancestor.
ancestor_filter = composite_filter.filters.add().property_filter
ancestor_filter.property.name = '__key__'
ancestor_filter.op = _query_pb2.PropertyFilter.HAS_ANCESTOR
ancestor_filter.value.key_value.CopyFrom(ancestor_pb)
for property_name, operator, value in query.filters:
pb_op_enum = query.OPERATORS.get(operator)
# Add the specific filter
property_filter = composite_filter.filters.add().property_filter
property_filter.property.name = property_name
property_filter.op = pb_op_enum
# Set the value to filter on based on the type.
if property_name == '__key__':
key_pb = value.to_protobuf()
property_filter.value.key_value.CopyFrom(key_pb)
else:
helpers._set_protobuf_value(property_filter.value, value)
if not composite_filter.filters:
pb.ClearField('filter')
for prop in query.order:
property_order = pb.order.add()
if prop.startswith('-'):
property_order.property.name = prop[1:]
property_order.direction = property_order.DESCENDING
else:
property_order.property.name = prop
property_order.direction = property_order.ASCENDING
for distinct_on_name in query.distinct_on:
pb.distinct_on.add().name = distinct_on_name
return pb
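# For example (hypothetical query), _pb_from_query(Query(client, kind='Person'))
# yields a protobuf with a single kind entry named 'Person' and, because no
# filters were added, the composite ``filter`` field cleared.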
| [
"[email protected]"
] | |
adb49559807c17d8ad1f202ae8ecd3b0b3c20d5c | 3338cc681e9760dbf7f7a3ed9bfe14e6ec57ad45 | /gcm_sender/apps.py | 6841decee05cccaa9e3ab7c7156fc3b97a7c4050 | [] | no_license | linxaddict/gcm_web_sender | bd72aed83d848f5924471766bd26d0bf725208a3 | 30ac733f72173327b8a910ae2a9c5a87a0fbe7c6 | refs/heads/master | 2021-01-10T08:45:30.030267 | 2016-02-29T20:03:15 | 2016-02-29T20:03:15 | 52,820,528 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 94 | py |
from django.apps import AppConfig
class GcmSenderConfig(AppConfig):
name = 'gcm_sender'
| [
"[email protected]"
] | |
55ba0ca91879be3c1ad97a693770b8fd4d88b8bc | cf0779621df542169096d73476de493c0eb7eecd | /setup.py | 9454c7f1200e950f028e3ec48532a413397f72cd | [
"MIT"
] | permissive | aixpact/microservices-api | e16884d204dbcc63e1f49a1feb707e5980bdbeec | 639725be630f4f049cef9251cb5946dfd846d234 | refs/heads/master | 2020-05-20T02:55:32.561419 | 2019-05-08T07:21:47 | 2019-05-08T07:21:47 | 185,343,743 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 285 | py |
from setuptools import setup
setup(
name='HelloApp-CLI',
version='1.0',
packages=['cli', 'cli.commands'],
include_package_data=True,
install_requires=[
'click',
],
entry_points="""
[console_scripts]
hello_app=cli.cli:cli
""",
)
| [
"[email protected]"
] | |
416b80855f6461627677d64403c3f36b99959cfc | 9ae6ce54bf9a2a86201961fdbd5e7b0ec913ff56 | /google/ads/googleads/v11/enums/types/conversion_origin.py | 636cddf2770b73e0085cb89f0705b9b6c383f762 | [
"Apache-2.0"
] | permissive | GerhardusM/google-ads-python | 73b275a06e5401e6b951a6cd99af98c247e34aa3 | 676ac5fcb5bec0d9b5897f4c950049dac5647555 | refs/heads/master | 2022-07-06T19:05:50.932553 | 2022-06-17T20:41:17 | 2022-06-17T20:41:17 | 207,535,443 | 0 | 0 | Apache-2.0 | 2019-09-10T10:58:55 | 2019-09-10T10:58:55 | null | UTF-8 | Python | false | false | 1,240 | py |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import proto # type: ignore
__protobuf__ = proto.module(
package="google.ads.googleads.v11.enums",
marshal="google.ads.googleads.v11",
manifest={"ConversionOriginEnum",},
)
class ConversionOriginEnum(proto.Message):
r"""Container for enum describing possible conversion origins.
"""
class ConversionOrigin(proto.Enum):
r"""The possible places where a conversion can occur."""
UNSPECIFIED = 0
UNKNOWN = 1
WEBSITE = 2
GOOGLE_HOSTED = 3
APP = 4
CALL_FROM_ADS = 5
STORE = 6
YOUTUBE_HOSTED = 7
__all__ = tuple(sorted(__protobuf__.manifest))
| [
"[email protected]"
] | |
efd46d8641ce515dc35ba0086dd38668077077c3 | 986a8c5de450fc436897de9aaff4c5f737074ee3 | /剑指offer/字符串/正则表达式匹配.py | aebcc76c678aad65e62c9dd65bc9248eb931a8d6 | [] | no_license | lovehhf/newcoder_py | 7a0ef03f0ea733ec925a10f06566040f6edafa67 | f8ae73deef1d9422ca7b0aa9f484dc96db58078c | refs/heads/master | 2020-04-27T18:20:19.082458 | 2019-05-24T15:30:13 | 2019-05-24T15:30:13 | 174,564,930 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,318 | py |
# -*- coding:utf-8 -*-
__author__ = 'huanghf'
"""
Problem description
Implement a function to match regular expressions containing '.' and '*'. In the pattern,
'.' matches any single character, while '*' means the character before it may appear any
number of times (including zero).
Here, a match means that all characters of the string match the entire pattern. For
example, the string "aaa" matches the patterns "a.a" and "ab*ac*a", but matches neither
"aa.a" nor "ab*a".
"""
class Solution:
def match(self, s, p):
"""
# s, pattern都是字符串
dp[i][j]: s的前i个字符与p的第j个字符是否匹配
:param s:
:param p:
:return:
"""
s = ' ' + s
p = ' ' + p
m, n = len(s), len(p)
dp = [[0] * n for _ in range(m)]
dp[0][0] = 1
for i in range(m):
for j in range(1, n):
if i > 0 and (s[i] == p[j] or p[j] == '.'):
dp[i][j] = dp[i - 1][j - 1]
if p[j] == '*':
dp[i][j] = dp[i][j - 2] | dp[i][j]
if i > 0 and (p[j - 1] == '.' or s[i] == p[j - 1]):
dp[i][j] = dp[i][j] | dp[i - 1][j] | dp[i - 1][j - 2]
# for i in dp:
# print(i)
return dp[-1][-1]
s = ""
p = "."
sol = Solution()
print(sol.match(s, p))
| [
"[email protected]"
] | |
72c5c0b01ae2c2fd37c6514e631e20eaf84a2f41 | 0cfd2faf3f52af67888f082bd9dc29f34ffdc810 | /python/2_0/watchdog.py | e434d9b6e239ae27dd1be2db6b4b5b98b0e61458 | [] | no_license | rsprenkels/kattis | 28bd078a797a9cfa76509145e39db77fe6a895cd | 7d51e8afb841c4bd4debaf5aef0c37f6da6f3abd | refs/heads/master | 2023-08-11T00:07:06.308151 | 2023-07-29T22:04:04 | 2023-07-29T22:04:04 | 69,284,864 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,097 | py |
import math
from typing import Tuple, Sequence
from operator import itemgetter  # used by multisort below
class Point:
def __init__(self, x, y):
self.x = x
self.y = y
def __add__(self, other):
return Point(self.x + other.x, self.y + other.y)
def __sub__(self, other):
return Point(self.x - other.x, self.y - other.y)
def __eq__(self, other):
"""Overrides the default implementation"""
if isinstance(other, Point):
return self.x == other.x and self.y == other.y
return False
def mhdist(self, other) -> int:
return abs(self.x - other.x) + abs(self.y - other.y)
def dist(self, other) -> float:
return math.sqrt(math.pow(abs(self.x - other.x), 2) + math.pow(abs(self.y - other.y), 2))
def length(self):
return math.sqrt(self.x * self.x + self.y * self.y)
def __repr__(self):
return f"{self.x} {self.y}"
def multisort(xs, specs):
for key, reverse in reversed(specs):
xs.sort(key=itemgetter(key), reverse=reverse)
return xs
def watchdog(roof_size: int, hatches: Sequence[Point]) -> Tuple[bool, Point]:
for x in range(1, roof_size):
for y in range(1, roof_size):
candidate = Point(x, y)
if candidate not in hatches:
longest_leash = max([candidate.dist(h) for h in hatches])
if candidate.x >= longest_leash and candidate.x <= roof_size - longest_leash and candidate.y >= longest_leash and candidate.y <= roof_size - longest_leash:
return (True, candidate)
return (False, Point(0,0))
# assert watchdog(10, [Point(6,6), Point(5,4)]) == (True, Point(3, 6))
# assert watchdog(20, [Point(1,1), Point(19,19)]) == (False, Point(0, 0))
for _ in range(int(input())):
    roof_size, num_hatches = map(int, input().split())
hatches = []
    for _ in range(num_hatches):
hatches.append(Point(*map(int, input().split())))
result = watchdog(roof_size, hatches)
if result[0]:
print(result[1])
else:
print('poodle')
# from 370.9 rank 1025 to 372.9 rank 1019
| [
"[email protected]"
] | |
697c6ce021892e01d425df78101ae11b6bfd2b4f | c418bd9d730bc17653611da7f0642bdd25cba65f | /djangosite/users/forms.py | 55126f872392526a2768044a49fa73651634d1de | [] | no_license | ErDeepakSingh/Ajax-State-City | ae18a4f4b8ef8e90932d8aed74553897d7ac9b3b | 72a31424bd9402ef2c76198ee80934ac399fccf9 | refs/heads/master | 2020-08-16T02:29:36.081445 | 2019-10-16T02:51:22 | 2019-10-16T02:51:22 | 215,443,208 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 308 | py |
from django import forms
from django.contrib.auth.models import User
from django.contrib.auth.forms import UserChangeForm
class UserUpdateForm(UserChangeForm):
    password = None  # drop the password hash field that UserChangeForm adds by default
class Meta:
model = User
fields = ['first_name', 'last_name', 'email']
# exclude = ['first_name']
| [
"[email protected]"
] | |
4e3fac772aa2d67c2aab6d3965c3ef63863ca614 | 574e874ebd7889c23a6292705dcd89594567bae8 | /code/Scheduler_Class.py | 3e9aa38a4a331038c83d92063010e9f28ec5c2e7 | [] | no_license | hammadhaleem/Switching-Fabric-Simulator | 6bb9c65b983c7a23b11fd60f5e6472bc269878c9 | 279a22c349d02fd061bc52fd77e4c6be223041b7 | refs/heads/master | 2021-01-17T06:28:08.162967 | 2015-05-08T08:52:18 | 2015-05-08T08:52:18 | 34,894,625 | 0 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,442 | py |
from Queue_Class import *
import time
class StandardScheduler(object):
def __init__(self, number_of_queue,data):
super(StandardScheduler, self).__init__()
self.number_of_queue = number_of_queue
self.input_queue_object = SuperMultiQueue(int(self.number_of_queue))
self.output_queue_object = SuperMultiQueue(int(self.number_of_queue))
self.input_queue_object.insert_data_in_queues(data)
def create_state_variable(self):
states = {}
data =self.Status()
for cards in data :
obj = {}
obj['pointer'] = None
obj['count'] = 0
obj['max'] = self.number_of_queue
for keys in data[cards] :
obj['count'] = obj['count'] + data[cards][keys]
states[cards] =obj
return states
def Test_Queues(self):
print "Input Queues"
self.input_queue_object.debug()
print "\nQutput Queues"
self.output_queue_object.debug()
def Get_Output(self):
return self.output_queue_object.get_data_stream()
    # Arguments: set_inp selects the input queue ((input port, output)),
    # set_out is the output card. Returns True if the exchange succeeded.
def Packet_Exchange(self,set_inp,set_out):
try:
data = self.input_queue_object.pop_from_queue(set_inp)
fake_list = [data]
out = data['source']
data['source']= set_out
data['outport'] = set_out
data['time_out'] =time.time()
self.output_queue_object.insert_data_in_queues(fake_list)
return True
except:
pass
return False
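    # A rough usage sketch (hypothetical packet dicts and port arguments;
    # assumes Queue_Class provides SuperMultiQueue with the interface used
    # above):
    #
    #     packets = [{'source': 0, 'payload': 'data'}]
    #     scheduler = StandardScheduler(4, packets)
    #     if scheduler.Packet_Exchange((0, 2), 2):
    #         print scheduler.Get_Output()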
def Status(self):
        return self.input_queue_object.generate_input_status()
| [
"[email protected]"
] | |
6d6487a513d54348666f0ce244513f8c0b0773b1 | 58f095f52d58afa9e8041c69fa903c5a9e4fa424 | /examples_UQ/TEST2.py | 3519916d32cf9db375c1fa916e9fb27bb71fdba2 | [
"BSD-3-Clause"
] | permissive | cdeil/mystic | e41b397e9113aee1843bc78b5b4ca30bd0168114 | bb30994987f36168b8f09431cb9c3823afd892cd | refs/heads/master | 2020-12-25T23:18:52.086894 | 2014-08-13T14:36:09 | 2014-08-13T14:36:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,723 | py |
#!/usr/bin/env python
#
# Author: Mike McKerns (mmckerns @caltech and @uqfoundation)
# Copyright (c) 2009-2014 California Institute of Technology.
# License: 3-clause BSD. The full license text is available at:
# - http://trac.mystic.cacr.caltech.edu/project/mystic/browser/mystic/LICENSE
#######################################################################
# scaling and mpi info; also optimizer configuration parameters
# hard-wired: use DE solver, don't use mpi, F-F' calculation
# (similar to concentration.in)
#######################################################################
scale = 1.0
#XXX: <mpi config goes here>
npop = 20
maxiter = 1000
maxfun = 1e+6
convergence_tol = 1e-4
crossover = 0.9
percent_change = 0.9
#######################################################################
# the model function
# (similar to Simulation.cpp)
#######################################################################
def function(x):
"""a simple model function
f = (x1*x2*x3)**(1/3)
Input:
- x -- 1-d array of coefficients [x1,x2,x3]
Output:
- f -- function result
"""
return (x[0]*x[1]*x[2])**(1.0/3.0)
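# Sanity check on the model (pure arithmetic): function([1.0, 8.0, 27.0])
# returns (1*8*27)**(1.0/3.0), i.e. 6.0 up to floating-point rounding.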
#######################################################################
# the subdiameter calculation
# (similar to driver.sh)
#######################################################################
def costFactory(i):
"""a cost factory for the cost function"""
def cost(rv):
"""compute the diameter as a calculation of cost
Input:
- rv -- 1-d array of model parameters
Output:
- diameter -- scale * | F(x) - F(x')|**2
"""
# prepare x and xprime
params = rv[:-1] #XXX: assumes Xi' is at rv[-1]
params_prime = rv[:i]+rv[-1:]+rv[i+1:-1] #XXX: assumes Xi' is at rv[-1]
# get the F(x) response
Fx = function(params)
# get the F(x') response
Fxp = function(params_prime)
# compute diameter
return -scale * (Fx - Fxp)**2
return cost
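# Worked example: costFactory(0) treats rv as [x1, x2, x3, x1'], so
# cost([3.0, 4.0, 1.0, 5.0]) compares F(3,4,1) with F(5,4,1) and returns
# -scale * (12**(1./3) - 20**(1./3))**2, roughly -0.18.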
#######################################################################
# the differential evolution optimizer
# (replaces the call to dakota)
#######################################################################
def dakota(cost,lb,ub):
from mystic.solvers import DifferentialEvolutionSolver2
from mystic.termination import CandidateRelativeTolerance as CRT
from mystic.strategy import Best1Exp
from mystic.monitors import VerboseMonitor, Monitor
from mystic.tools import getch, random_seed
random_seed(123)
#stepmon = VerboseMonitor(100)
stepmon = Monitor()
evalmon = Monitor()
ndim = len(lb) # [(1 + RVend) - RVstart] + 1
solver = DifferentialEvolutionSolver2(ndim,npop)
solver.SetRandomInitialPoints(min=lb,max=ub)
solver.SetStrictRanges(min=lb,max=ub)
solver.SetEvaluationLimits(maxiter,maxfun)
solver.SetEvaluationMonitor(evalmon)
solver.SetGenerationMonitor(stepmon)
tol = convergence_tol
solver.Solve(cost,termination=CRT(tol,tol),strategy=Best1Exp, \
CrossProbability=crossover,ScalingFactor=percent_change)
print solver.bestSolution
diameter = -solver.bestEnergy / scale
func_evals = solver.evaluations
return diameter, func_evals
#######################################################################
# loop over model parameters to calculate concentration of measure
# (similar to main.cc)
#######################################################################
def UQ(start,end,lower,upper):
diameters = []
function_evaluations = []
total_func_evals = 0
total_diameter = 0.0
for i in range(start,end+1):
lb = lower[start:end+1] + [lower[i]]
ub = upper[start:end+1] + [upper[i]]
#construct cost function and run optimizer
cost = costFactory(i)
subdiameter, func_evals = dakota(cost,lb,ub) #XXX: no initial conditions
function_evaluations.append(func_evals)
diameters.append(subdiameter)
total_func_evals += function_evaluations[-1]
total_diameter += diameters[-1]
print "subdiameters (squared): %s" % diameters
print "diameter (squared): %s" % total_diameter
print "func_evals: %s => %s" % (function_evaluations, total_func_evals)
return
#######################################################################
# rank, bounds, and restart information
# (similar to concentration.variables)
#######################################################################
if __name__ == '__main__':
RVstart = 0; RVend = 2
lower_bounds = [3.0,4.0,1.0]
upper_bounds = [5.0,10.0,10.0]
print " function: f = (x1*x2*x3)**(1/3)"
print " parameters: ['x1', 'x2', 'x3']"
print " lower bounds: %s" % lower_bounds
print " upper bounds: %s" % upper_bounds
print " ..."
UQ(RVstart,RVend,lower_bounds,upper_bounds)
| [
"mmckerns@968178ea-60bd-409e-af13-df8a517b6005"
] | mmckerns@968178ea-60bd-409e-af13-df8a517b6005 |
e4d4a51da27b89cb326de521ee285f8a1a09faee | f6b9758ed08708b9394e35b51a993c33c7f205da | /NtupleAnalyzer/test/compare_14-11-2016/compare_CERN.py | 0733a2528a019ed066d98887bc74c6173886d2a9 | [] | no_license | cirkovic/tHFCNC_sync | 706da41c3eaf3e372bc477d0ac92c73824e038b1 | c6126fd9a80f03f32734f5207a7651c28c8a0a6d | refs/heads/master | 2023-03-18T22:13:24.168785 | 2021-03-10T21:45:01 | 2021-03-10T21:45:01 | 337,604,400 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 21,672 | py |
from ROOT import *
import sys
def checkIfMerged(vals):
    # Repair tokens where two numeric columns were fused into one string
    # (detected by a token containing two decimal points) by splitting them.
    if True:
if vals[0].count('.') > 0:
v = vals[0]
vals[0] = v[:6]
vals.insert(1, v[6:])
#print vals
#if True:
# for j,v in enumerate(vals):
# if v.count('.') == 2:
# if vals[j][0] != '-':
# vals[j] = v[:7]
# vals.insert(j+1, v[7:])
# else:
# vals[j] = v[:8]
# vals.insert(j+1, v[8:])
if True:
for j,v in enumerate(vals):
if v.count('.') == 2:
#print v
#print v.find('.')
#print v[:v.find('.')+5]
#print v[v.find('.')+6:]
vals[j] = v[:v.find('.')+5]
vals.insert(j+1, v[v.find('.')+6:])
class PhysicsObject:
def __init__(self, l, sf="lepton"):
self.l = ' '.join(l.split())
#print self.l
vals = self.l.split(" ")
checkIfMerged(vals)
self.props = []
if sf == "lepton":
self.props.append(("evt", "%10d", int(vals[len(self.props)])))
self.props.append(("pt", "%10.5f", float(vals[len(self.props)])))
self.props.append(("eta", "%10.5f", float(vals[len(self.props)])))
self.props.append(("phi", "%10.5f", float(vals[len(self.props)])))
self.props.append(("E", "%10.5f", float(vals[len(self.props)])))
elif sf == "jet":
self.props.append(("evt", "%6d", int(vals[len(self.props)])))
self.props.append(("pt", "%10.5f", float(vals[len(self.props)])))
self.props.append(("eta", "%10.5f", float(vals[len(self.props)])))
self.props.append(("phi", "%10.5f", float(vals[len(self.props)])))
self.props.append(("E", "%10.5f", float(vals[len(self.props)])))
elif sf == "tau":
self.props.append(("evt", "%6d", int(vals[len(self.props)])))
self.props.append(("pt", "%10.5f", float(vals[len(self.props)])))
self.props.append(("eta", "%10.5f", float(vals[len(self.props)])))
self.props.append(("phi", "%10.5f", float(vals[len(self.props)])))
self.props.append(("E", "%10.5f", float(vals[len(self.props)])))
def getPrintString(self):
'''
%10d%10.5f%10.5f%10.5f%10.5f%5d%5d%15.5f%15.5f%15.5f%10.5f%10.5f%10.5f%10.5f%10.5f%10.5f%21.5f
'''
s = ""
for p in self.props:
s += (" "+(p[1] % p[2]))
return s
def getParameter(self, name):
ret = None
for p in self.props:
if p[0] == name:
ret = p
break
return ret
def getParVal(self, name):
ret = None
for p in self.props:
if p[0] == name:
ret = p[2]
break
return ret
class Lepton(PhysicsObject):
def __init__(self, l):
PhysicsObject.__init__(self, l, sf="lepton")
vals = self.l.split(" ")
#print vals
checkIfMerged(vals)
self.props.append(("pdgId", "%5d", int(vals[len(self.props)])))
self.props.append(("charge", "%5d", int(vals[len(self.props)])))
self.props.append(("miniRelIso", "%15.5f", float(vals[len(self.props)])))
#self.props.append(("miniRelIsoCharged", "%15.5f", float(vals[len(self.props)])))
self.props.append(("miniIsoCharged", "%15.5f", float(vals[len(self.props)])))
#self.props.append(("miniRelIsoNeutral", "%15.5f", float(vals[len(self.props)])))
self.props.append(("miniIsoNeutral", "%15.5f", float(vals[len(self.props)])))
self.props.append(("jetPtRel", "%10.5f", float(vals[len(self.props)])))
self.props.append(("jetCSV", "%10.5f", float(vals[len(self.props)])))
self.props.append(("jetPtRatio", "%10.5f", float(vals[len(self.props)])))
self.props.append(("sip3d", "%10.5f", float(vals[len(self.props)])))
self.props.append(("dxy", "%10.5f", float(vals[len(self.props)])))
self.props.append(("dz", "%10.5f", float(vals[len(self.props)])))
class Jet(PhysicsObject):
def __init__(self, l):
PhysicsObject.__init__(self, l, sf="jet")
vals = self.l.split(" ")
#print vals
checkIfMerged(vals)
self.props.append(("CSVv2", "%10.5f", float(vals[len(self.props)])))
self.props.append(("MET pt", "%10.5f", float(vals[len(self.props)])))
self.props.append(("MET phi", "%10.5f", float(vals[len(self.props)])))
class Tau(PhysicsObject):
def __init__(self, l):
PhysicsObject.__init__(self, l, sf="tau")
vals = self.l.split(" ")
#print vals
checkIfMerged(vals)
self.props.append(("dxy", "%10.5f", float(vals[len(self.props)])))
self.props.append(("dz", "%10.5f", float(vals[len(self.props)])))
self.props.append(("decayModeFinding", "%10.5f", float(vals[len(self.props)])))
self.props.append(("byLooseCombinedIsolationDeltaBetaCorr3Hits", "%10.5f", float(vals[len(self.props)])))
class Electron(Lepton):
def __init__(self, l):
Lepton.__init__(self, l)
vals = self.l.split(" ")
#print vals
checkIfMerged(vals)
self.props.append(("eleMVA", "%21.5f", float(vals[len(self.props)])))
class Muon(Lepton):
def __init__(self, l):
Lepton.__init__(self, l)
vals = self.l.split(" ")
#print vals
checkIfMerged(vals)
self.props.append(("segmentCompatibility", "%21.5f", float(vals[len(self.props)])))
def main(argv):
roundingErr = 0.01
with open(argv[1]) as f1:
content1 = f1.readlines()
with open(argv[2]) as f2:
content2 = f2.readlines()
objs1 = []
objs2 = []
if argv[0] == "electrons":
for l in content1[1:]:
objs1.append(Electron(l))
for l in content2[1:]:
objs2.append(Electron(l))
elif argv[0] == "muons":
for l in content1[1:]:
objs1.append(Muon(l))
for l in content2[1:]:
objs2.append(Muon(l))
elif argv[0] == "jets":
for l in content1[1:]:
objs1.append(Jet(l))
for l in content2[1:]:
objs2.append(Jet(l))
elif argv[0] == "taus":
for l in content1[1:]:
objs1.append(Tau(l))
for l in content2[1:]:
objs2.append(Tau(l))
evts1 = set([o.getParVal("evt") for o in objs1])
evts2 = set([o.getParVal("evt") for o in objs2])
ex1 = sorted(list(evts1-evts2))
ex2 = sorted(list(evts2-evts1))
ies = sorted(list(evts1.intersection(evts2)))
print
#print "<: ", ex1
#print ">: ", ex2
#print "=: ", ies
if argv[0] == "muons":
print "MUONS"
# print "absolute dxy, dz diffs skipped"
# print "jetPtRel rounding diffs skipped"
# print "jetPtRatio rounding diffs skipped"
print "all rounding diffs skipped"
# print "jetCSV == -10.0 vs. jetCSV == 0.0 skipped"
#print "skip jetCSV comparing"
#print "E rounding diffs skipped"
#print "skip jetPtRel comparing"
# print "skipp all miniIsoNeutral(0.0, < 0.0) diffs"
# print "miniIsoNeutral rounding diffs skipped"
print "<: ", ex1
print ">: ", ex2
print "=: ", ies
iesl = list(ies[:int(argv[3])]) if (int(argv[3]) != -1) else ies
for i in iesl:
ps1 = [o for o in objs1 if o.getParVal("evt") == i][0]
ps2 = [o for o in objs2 if o.getParVal("evt") == i][0]
# if len(ps1) > 1 or len(ps2) > 1:
# sys.exit()
dprsn = str(i)+" ["
for p in xrange(0, len(ps1.props)):
if ps1.props[p] != ps2.props[p]:
if False:
continue
# elif (ps1.props[p][0] in ["dxy", "dz"]) and abs(ps1.props[p][2]) == abs(ps2.props[p][2]): # absolute dxy, dz diffs skipped
# continue
# elif (ps1.props[p][0] in ["jetPtRel"]) and round(ps1.props[p][2], 2) == round(ps2.props[p][2], 2): # jetPtRel rounding diffs skipped
# elif (ps1.props[p][0] in ["jetPtRel"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2]<0.02 if ps1.props[p][2] != 0.0 else False): # jetPtRel rounding diffs skipped
# elif (ps1.props[p][0] in ["jetPtRel"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # jetPtRel rounding diffs skipped
# continue
# elif (ps1.props[p][0] in ["jetPtRatio"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # jetPtRatio rounding diffs skipped
# continue
elif (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # all rounding diffs skipped
continue
# elif (ps1.props[p][0] in ["jetCSV"]) and (ps1.props[p][2] == -10.0) and (ps2.props[p][2] == 0.0): # jetCSV == -10.0 vs. jetCSV == 0.0 skipped
# continue
#elif (ps1.props[p][0] in ["jetCSV"]) and (ps1.props[p][2] == -99.0) and (ps2.props[p][2] == 0.0): # jetCSV == -99.0 vs. jetCSV == 0.0 skipped
# continue
#elif (ps1.props[p][0] in ["jetCSV"]): # skip jetCSV comparing for muons
# continue
#elif (ps1.props[p][0] in ["jetPtRel"]): # skip jetPtRel comparing
# continue
#elif (ps1.props[p][0] in ["E"]) and round(ps1.props[p][2], 2) == round(ps2.props[p][2], 2): # E rounding diffs skipped
# continue
#elif (ps1.props[p][0] in ["eleMVA"]):
# print ps1.props[p], ps2.props[p]
# sys.exit()
# continue
# elif (ps1.props[p][0] in ["miniIsoNeutral"]) and (ps1.props[p][2] == 0.0 and ps2.props[p][2] < 0): # skipp all miniIsoNeutral(0.0, < 0.0) diffs
# continue
# elif (ps1.props[p][0] in ["miniIsoNeutral"]) and round(ps1.props[p][2], 2) == round(ps2.props[p][2], 2): # miniIsoNeutral rounding diffs skipped
# elif (ps1.props[p][0] in ["miniIsoNeutral"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # miniIsoNeutral rounding diffs skipped
# continue
else:
#dprsn += " "+ps1.props[p][0]
dprsn += " "+ps1.props[p][0]+"("+((ps1.props[p][1]) % (ps1.props[p][2])).replace(" ", "")+", "+((ps2.props[p][1]) % (ps2.props[p][2])).replace(" ", "")+")"
dprsn += " ]"
#print ps1[0].getPrintString()
#print ps2[0].getPrintString()
#print
print dprsn
elif argv[0] == "electrons":
print "ELECTRONS"
# print "absolute dxy, dz diffs skipped"
# print "jetPtRel rounding diffs skipped"
# print "jetPtRatio rounding diffs skipped"
print "all rounding diffs skipped"
# print "skip jetCSV comparing"
#print "E rounding diffs skipped"
# print "skipp all miniIsoNeutral(0.0, < 0.0) diffs"
# print "miniIsoNeutral rounding diffs skipped"
print "<: ", ex1
print ">: ", ex2
print "=: ", ies
iesl = list(ies[:int(argv[3])]) if (int(argv[3]) != -1) else ies
for i in iesl:
ps1 = [o for o in objs1 if o.getParVal("evt") == i][0]
ps2 = [o for o in objs2 if o.getParVal("evt") == i][0]
# if len(ps1) > 1 or len(ps2) > 1:
# sys.exit()
dprsn = str(i)+" ["
for p in xrange(0, len(ps1.props)):
if ps1.props[p] != ps2.props[p]:
if False:
continue
# elif (ps1.props[p][0] in ["dxy", "dz"]) and abs(ps1.props[p][2]) == abs(ps2.props[p][2]): # absolute dxy, dz diffs skipped
# continue
# elif (ps1.props[p][0] in ["jetPtRel"]) and round(ps1.props[p][2], 2) == round(ps2.props[p][2], 2): # jetPtRel rounding diffs skipped
# elif (ps1.props[p][0] in ["jetPtRel"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2]<0.02 if ps1.props[p][2] != 0.0 else False): # jetPtRel rounding diffs skipped
# elif (ps1.props[p][0] in ["jetPtRel"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # jetPtRel rounding diffs skipped
# continue
# elif (ps1.props[p][0] in ["jetPtRatio"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # jetPtRatio rounding diffs skipped
# continue
elif (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # all rounding diffs skipped
continue
# elif (ps1.props[p][0] in ["jetCSV"]): # skip jetCSV comparing
# continue
#elif (ps1.props[p][0] in ["eleMVA"]):
# print ps1.props[p], ps2.props[p]
# sys.exit()
# continue
#elif (ps1.props[p][0] in ["E"]) and round(ps1.props[p][2], 2) == round(ps2.props[p][2], 2): # E rounding diffs skipped
# continue
# elif (ps1.props[p][0] in ["miniIsoNeutral"]) and (ps1.props[p][2] == 0.0 and ps2.props[p][2] < 0): # skipp all miniIsoNeutral(0.0, < 0.0) diffs
# continue
# elif (ps1.props[p][0] in ["miniIsoNeutral"]) and round(ps1.props[p][2], 2) == round(ps2.props[p][2], 2): # miniIsoNeutral rounding diffs skipped
# elif (ps1.props[p][0] in ["miniIsoNeutral"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # miniIsoNeutral rounding diffs skipped
# continue
else:
#dprsn += " "+ps1.props[p][0]
dprsn += " "+ps1.props[p][0]+"("+((ps1.props[p][1]) % (ps1.props[p][2])).replace(" ", "")+", "+((ps2.props[p][1]) % (ps2.props[p][2])).replace(" ", "")+")"
dprsn += " ]"
#print ps1[0].getPrintString()
#print ps2[0].getPrintString()
#print
print dprsn
elif argv[0] == "jets":
print "JETS"
print "all rounding diffs skipped"
print "<: ", ex1
print ">: ", ex2
print "=: ", ies
iesl = list(ies[:int(argv[3])]) if (int(argv[3]) != -1) else ies
for i in iesl:
ps1 = [o for o in objs1 if o.getParVal("evt") == i][0]
ps2 = [o for o in objs2 if o.getParVal("evt") == i][0]
# if len(ps1) > 1 or len(ps2) > 1:
# sys.exit()
dprsn = str(i)+" ["
for p in xrange(0, len(ps1.props)):
if ps1.props[p] != ps2.props[p]:
if False:
continue
elif (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # all rounding diffs skipped
continue
# elif (ps1.props[p][0] in ["dxy", "dz"]) and abs(ps1.props[p][2]) == abs(ps2.props[p][2]): # absolute dxy, dz diffs skipped
# continue
# elif (ps1.props[p][0] in ["jetPtRel"]) and round(ps1.props[p][2], 2) == round(ps2.props[p][2], 2): # jetPtRel rounding diffs skipped
# elif (ps1.props[p][0] in ["jetPtRel"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2]<0.02 if ps1.props[p][2] != 0.0 else False): # jetPtRel rounding diffs skipped
# elif (ps1.props[p][0] in ["jetPtRel"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # jetPtRel rounding diffs skipped
# continue
# elif (ps1.props[p][0] in ["jetPtRatio"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # jetPtRatio rounding diffs skipped
# continue
# elif (ps1.props[p][0] in ["jetCSV"]): # skip jetCSV comparing
# continue
#elif (ps1.props[p][0] in ["eleMVA"]):
# print ps1.props[p], ps2.props[p]
# sys.exit()
# continue
#elif (ps1.props[p][0] in ["E"]) and round(ps1.props[p][2], 2) == round(ps2.props[p][2], 2): # E rounding diffs skipped
# continue
# elif (ps1.props[p][0] in ["miniIsoNeutral"]) and (ps1.props[p][2] == 0.0 and ps2.props[p][2] < 0): # skipp all miniIsoNeutral(0.0, < 0.0) diffs
# continue
# elif (ps1.props[p][0] in ["miniIsoNeutral"]) and round(ps1.props[p][2], 2) == round(ps2.props[p][2], 2): # miniIsoNeutral rounding diffs skipped
# elif (ps1.props[p][0] in ["miniIsoNeutral"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # miniIsoNeutral rounding diffs skipped
# continue
else:
#dprsn += " "+ps1.props[p][0]
dprsn += " "+ps1.props[p][0]+"("+((ps1.props[p][1]) % (ps1.props[p][2])).replace(" ", "")+", "+((ps2.props[p][1]) % (ps2.props[p][2])).replace(" ", "")+")"
dprsn += " ]"
#print ps1[0].getPrintString()
#print ps2[0].getPrintString()
#print
print dprsn
elif argv[0] == "taus":
print "TAUS"
print "all rounding diffs skipped"
print "<: ", ex1
print ">: ", ex2
print "=: ", ies
iesl = list(ies[:int(argv[3])]) if (int(argv[3]) != -1) else ies
for i in iesl:
ps1 = [o for o in objs1 if o.getParVal("evt") == i][0]
ps2 = [o for o in objs2 if o.getParVal("evt") == i][0]
# if len(ps1) > 1 or len(ps2) > 1:
# sys.exit()
dprsn = str(i)+" ["
for p in xrange(0, len(ps1.props)):
if ps1.props[p] != ps2.props[p]:
if False:
continue
elif (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # all rounding diffs skipped
continue
# elif (ps1.props[p][0] in ["dxy", "dz"]) and abs(ps1.props[p][2]) == abs(ps2.props[p][2]): # absolute dxy, dz diffs skipped
# continue
# elif (ps1.props[p][0] in ["jetPtRel"]) and round(ps1.props[p][2], 2) == round(ps2.props[p][2], 2): # jetPtRel rounding diffs skipped
# elif (ps1.props[p][0] in ["jetPtRel"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2]<0.02 if ps1.props[p][2] != 0.0 else False): # jetPtRel rounding diffs skipped
# elif (ps1.props[p][0] in ["jetPtRel"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # jetPtRel rounding diffs skipped
# continue
# elif (ps1.props[p][0] in ["jetPtRatio"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # jetPtRatio rounding diffs skipped
# continue
# elif (ps1.props[p][0] in ["jetCSV"]): # skip jetCSV comparing
# continue
#elif (ps1.props[p][0] in ["eleMVA"]):
# print ps1.props[p], ps2.props[p]
# sys.exit()
# continue
#elif (ps1.props[p][0] in ["E"]) and round(ps1.props[p][2], 2) == round(ps2.props[p][2], 2): # E rounding diffs skipped
# continue
# elif (ps1.props[p][0] in ["miniIsoNeutral"]) and (ps1.props[p][2] == 0.0 and ps2.props[p][2] < 0): # skipp all miniIsoNeutral(0.0, < 0.0) diffs
# continue
# elif (ps1.props[p][0] in ["miniIsoNeutral"]) and round(ps1.props[p][2], 2) == round(ps2.props[p][2], 2): # miniIsoNeutral rounding diffs skipped
# elif (ps1.props[p][0] in ["miniIsoNeutral"]) and (abs(ps1.props[p][2]-ps2.props[p][2])/ps1.props[p][2] < roundingErr if ps1.props[p][2] != 0.0 else False): # miniIsoNeutral rounding diffs skipped
# continue
else:
#dprsn += " "+ps1.props[p][0]
dprsn += " "+ps1.props[p][0]+"("+((ps1.props[p][1]) % (ps1.props[p][2])).replace(" ", "")+", "+((ps2.props[p][1]) % (ps2.props[p][2])).replace(" ", "")+")"
dprsn += " ]"
#print ps1[0].getPrintString()
#print ps2[0].getPrintString()
#print
print dprsn
#print list(set(evts1).intersection(evts2))
if __name__ == "__main__":
main(sys.argv[1:])
| [
"[email protected]"
] | |
5fdfc19ffacd51059a30914512e970f4bc922a3d | d1742451b25705fc128acc245524659628ab3e7d | /Codeforces with Python/1144B. Parity Alternated Deletions.py | 928f29fb4554e6e84745a2c4f5cd70b8bf179a79 | [] | no_license | Shovon588/Programming | ebab793a3c97aedddfcad5ea06e7e22f5c54a86e | e4922c9138998358eed09a1be7598f9b060c685f | refs/heads/master | 2022-12-23T18:29:10.141117 | 2020-10-04T17:29:32 | 2020-10-04T17:29:32 | 256,915,133 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 460 | py |
n=int(input())
a=list(map(int,input().split()))
codd=0;ceven=0;odd=[];even=[]
for i in a:
if i%2==0:
ceven+=1
even.append(i)
else:
codd+=1
odd.append(i)
even.sort(reverse=True)
odd.sort(reverse=True)
if abs(codd-ceven)<=1:
print(0)
else:
if ceven>codd:
temp=ceven-codd-1
print(sum(even[ceven-temp:]))
else:
temp=codd-ceven-1
print(sum(odd[codd-temp:]))
| [
"[email protected]"
] | |
62d61a289e5ed0d3b6b96274a6af852f57c89682 | 6b29d66ba7927129b68bc00db769f0edf1babaea | /SoftLayer/CLI/image/list.py | 74de2d02dfd63f8749ad77dcae506a180c1ccca1 | [
"MIT"
] | permissive | tdurden82/softlayer-python | 65f42923c347a164995dfc267829721032de261d | 0eed20fa4adedd3228d91d929bb8befb1e445e49 | refs/heads/master | 2021-01-17T10:01:48.087450 | 2015-10-19T18:38:53 | 2015-10-19T18:38:53 | 46,301,339 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,731 | py |
"""List images."""
# :license: MIT, see LICENSE for more details.
import SoftLayer
from SoftLayer.CLI import environment
from SoftLayer.CLI import formatting
from SoftLayer.CLI import image as image_mod
from SoftLayer import utils
import click
@click.command()
@click.option('--public/--private',
is_flag=True,
default=None,
help='Display only public or private images')
@environment.pass_env
def cli(env, public):
"""List images."""
image_mgr = SoftLayer.ImageManager(env.client)
images = []
if public in [False, None]:
for image in image_mgr.list_private_images(mask=image_mod.MASK):
images.append(image)
if public in [True, None]:
for image in image_mgr.list_public_images(mask=image_mod.MASK):
images.append(image)
table = formatting.Table(['guid',
'name',
'type',
'visibility',
'account'])
images = [image for image in images if image['parentId'] == '']
for image in images:
visibility = (image_mod.PUBLIC_TYPE if image['publicFlag']
else image_mod.PRIVATE_TYPE)
table.add_row([
image.get('globalIdentifier', formatting.blank()),
formatting.FormattedItem(image['name'],
click.wrap_text(image['name'], width=50)),
formatting.FormattedItem(
utils.lookup(image, 'imageType', 'keyName'),
utils.lookup(image, 'imageType', 'name')),
visibility,
image.get('accountId', formatting.blank()),
])
env.fout(table)
| [
"[email protected]"
] | |
93d181edc316819ffb8293d1f14bd6c16374f7a0 | 549270020f6c8724e2ef1b12e38d11b025579f8d | /recipes/rectanglebinpack/all/conanfile.py | f77182eff1815c5c7427bd8ec64b4c03ec271b88 | [
"MIT"
] | permissive | conan-io/conan-center-index | 1bcec065ccd65aa38b1fed93fbd94d9d5fe6bc43 | 3b17e69bb4e5601a850b6e006e44775e690bac33 | refs/heads/master | 2023-08-31T11:34:45.403978 | 2023-08-31T11:13:23 | 2023-08-31T11:13:23 | 204,671,232 | 844 | 1,820 | MIT | 2023-09-14T21:22:42 | 2019-08-27T09:43:58 | Python | UTF-8 | Python | false | false | 3,295 | py |
import os
from conan import ConanFile
from conan.tools.build import check_min_cppstd
from conan.tools.cmake import CMake, CMakeToolchain, cmake_layout
from conan.tools.files import apply_conandata_patches, copy, export_conandata_patches, get, load, save
required_conan_version = ">=1.53.0"
class RectangleBinPackConan(ConanFile):
name = "rectanglebinpack"
description = "The code can be used to solve the problem of packing a set of 2D rectangles into a larger bin."
license = "LicenseRef-rectanglebinpack-public-domain"
url = "https://github.com/conan-io/conan-center-index"
homepage = "https://github.com/juj/RectangleBinPack"
topics = ("rectangle", "packing", "bin")
package_type = "library"
settings = "os", "arch", "compiler", "build_type"
options = {
"shared": [True, False],
"fPIC": [True, False],
}
default_options = {
"shared": False,
"fPIC": True,
}
def export_sources(self):
export_conandata_patches(self)
def config_options(self):
if self.settings.os == "Windows":
del self.options.fPIC
def configure(self):
if self.options.shared:
self.options.rm_safe("fPIC")
def layout(self):
cmake_layout(self, src_folder="src")
def validate(self):
if self.settings.compiler.get_safe("cppstd"):
check_min_cppstd(self, 11)
def source(self):
get(self, **self.conan_data["sources"][self.version][0], strip_root=True)
def generate(self):
tc = CMakeToolchain(self)
tc.variables["CMAKE_WINDOWS_EXPORT_ALL_SYMBOLS"] = self.options.shared
tc.generate()
def build(self):
apply_conandata_patches(self)
cmake = CMake(self)
cmake.configure()
cmake.build()
def _extract_license(self):
readme_content = load(self, os.path.join(self.source_folder, "Readme.txt"), encoding="latin-1")
license_content = "\n".join(readme_content.splitlines()[-4:])
save(self, os.path.join(self.package_folder, "licenses", "LICENSE"), license_content)
def package(self):
self._extract_license()
copy(self, "*.h",
dst=os.path.join(self.package_folder, "include", self.name),
src=self.source_folder,
excludes="old/**")
copy(self, "*.dll",
dst=os.path.join(self.package_folder, "bin"),
src=self.build_folder,
keep_path=False)
for pattern in ["*.lib", "*.so", "*.dylib", "*.a"]:
copy(self, pattern,
dst=os.path.join(self.package_folder, "lib"),
src=self.build_folder,
keep_path=False)
def package_info(self):
self.cpp_info.libs = ["RectangleBinPack"]
self.cpp_info.set_property("cmake_file_name", "RectangleBinPack")
self.cpp_info.set_property("cmake_target_name", "RectangleBinPack::RectangleBinPack")
if self.settings.os in ["Linux", "FreeBSD"]:
self.cpp_info.system_libs = ["m"]
# TODO: to remove in conan v2 once cmake_find_package_* generators removed
self.cpp_info.names["cmake_find_package"] = "RectangleBinPack"
self.cpp_info.names["cmake_find_package_multi"] = "RectangleBinPack"
| [
"[email protected]"
] | |
7a774c68112f02b6e66c5d7a1627a7cd16eb9b9e | 1e33b2117a8b601ffb1c7e3af4d7663a7a8969d1 | /Sprint4_3ReportJesse.py | b66c3eaa60132599e3780adcdab7afee | [] | no_license | Botany-Downs-Secondary-College/mathsquiz-patchigalla | 5d72a52116928857b532495bf0360dc76d5e01b4 | 73959cb31e68a7676a41b4cde53c6a0a48c42bd7 | refs/heads/main | 2023-03-18T22:13:24.168785 | 2021-03-10T21:45:01 | 2021-03-10T21:45:01 | 337,604,400 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 10,017 | py |
#MathsQuiz.py
#MathsQuiz app for primary students
#P.Patchigalla, Feb 21
'''importing required modules - tkinter, ttk and random'''
from tkinter import*
from tkinter import ttk
from random import*
import datetime
'''Declare parent class called MathsQuiz; all widgets are created from this class'''
class MathsQuiz:
'''use init method for all widgets'''
def __init__(self,parent):
'''Widgets for Welcome Frame'''
self.Welcome = Frame(parent)
self.Welcome.grid(row=0, column=0)
self.TitleLabel = Label(self.Welcome, text = "Welcome to Maths Quiz",
bg = "black", fg = "white", width = 20, padx = 30, pady = 10,
font = ("Time", '14', "bold italic"))
self.TitleLabel.grid(columnspan = 2)
self.IntroLabel = Label(self.Welcome, text = 'The app helps primary students from 7-12 yrs.')
self.IntroLabel.grid(row = 1, columnspan = 2)
self.NextButton = Button(self.Welcome, text = 'Next', command = self.show_Questions)
self.NextButton.grid(row = 9, column = 1)
#Name and Age labels
self.NameLabel = Label(self.Welcome, text = "Name", anchor = W,
fg = "black", width = 10, padx = 30, pady = 10,font = ("Time", '12', "bold italic"))
self.NameLabel.grid(row = 2, column = 0)
self.AgeLabel = Label(self.Welcome, text = "Age", anchor=W,
fg = "black", width = 10, padx = 30, pady = 10,font = ("Time", '12', "bold italic"))
self.AgeLabel.grid(row=3, column = 0)
#Name and Age Entry widgets and creating name and age instance variables
self.name = StringVar()
self.name.set("")
self.NameEntry = ttk.Entry(self.Welcome, width = 20)
self.NameEntry.grid(row=2, column = 1, columnspan =2)
self.age = IntVar()
self.age.set(0)
self.AgeEntry = ttk.Entry(self.Welcome, width = 20)
self.AgeEntry.grid(row=3, column = 1)
#Warning, Difficulty level labels and Radio buttons
self.WarningLabel = Label(self.Welcome, text = "", anchor=W,
fg = "red", width = 20, padx = 30, pady = 10)
self.WarningLabel.grid(row=4, columnspan = 2)
self.DifficultyLabel = Label(self.Welcome, text = "Choose Difficulty level", anchor=W,
fg = "black", width = 10, padx = 30, pady = 10,font = ("Time", '12', "bold italic"))
self.DifficultyLabel.grid(row=5, column = 0)
self.difficulty = ["Easy", "Medium", "Hard"]
self.diff_lvl = StringVar() #use StringVar() for text in list or IntVar() for numbers in list
        self.diff_lvl.set(0) #.set(0) preselects the first radio button (Easy) when the buttons appear
self.diff_btns = []
        #The difficulty levels are stored in a list and the selected radio button corresponds to an index in the list, such as "Easy"
for i in range(len(self.difficulty)):
self.rb = Radiobutton(self.Welcome, variable = self.diff_lvl, value = i, text = self.difficulty[i],
anchor = W, padx = 50, width = "5", height = "2")
self.diff_btns.append(self.rb)
self.rb.grid(row = i+6, column = 0, sticky = W)
'''Widgets for Questions Frame'''
self.index = 0
self.score = 0
self.Questions = Frame(parent)
#self.Questions.grid(row=0, column=1)
'''update QuestionsLabel configure method to print question number'''
self.QuestionsLabel = Label(self.Questions, text = "",
bg = "black", fg = "white", width = 30, padx = 30, pady = 10,
font = ("Time", '14', "bold italic"))
self.QuestionsLabel.grid(columnspan = 3)
self.Problems = Label(self.Questions, text = "", pady = 10)
self.Problems.grid(row = 1, column = 0)
self.AnswerEntry = ttk.Entry(self.Questions, width = 20)
self.AnswerEntry.grid(row=1, column = 1)
#Create ScoreLabel to display Score
self.ScoreLabel = Label(self.Questions, text = "")
self.ScoreLabel.grid(row = 1, column = 2)
self.feedback = Label(self.Questions, text = "")
self.feedback.grid(row = 2, column = 0)
self.HomeButton = ttk.Button(self.Questions, text = 'Home', command = self.show_Welcome)
self.HomeButton.grid(row = 8, column = 0)
self.check_button = ttk.Button(self.Questions, text = 'Check answer', command = self.check_answer)
self.check_button.grid(row=8, column=1)
#self.next_button = ttk.Button(self.Questions, text = 'Next question', command = self.next_question)
#self.next_button.grid(row=8, column=2)
'''Widgets for Report Frame'''
self.Report_frame = Frame(parent)
'''Initialising Treeview Widget and configuring it for Report Frame'''
self.report_treeview = ttk.Treeview(self.Report_frame)
self.report_treeview.configure(columns=('age', 'score', 'date'))
self.report_treeview.heading('#0', text='Name', anchor='w')
self.report_treeview.column('#0', anchor='w')
self.report_treeview.heading('age', text='Age')
self.report_treeview.column('age', anchor='center')
self.report_treeview.heading('score', text='Score')
self.report_treeview.column('score', anchor='center')
self.report_treeview.heading('date', text='Date')
self.report_treeview.column('date', anchor='center')
self.ReportHomeButton = ttk.Button(self.Report_frame, text='Home', command=self.show_Welcome)
'''A method that removes Questions Frame'''
def show_Welcome(self):
self.Questions.grid_remove()
self.Report_frame.grid_remove()
self.NameEntry.delete(0, 'end')
self.AgeEntry.delete(0, 'end')
self.Welcome.grid(row=0, column=0)
def show_Questions(self):
try:
#Error checking for empty or non-text user entries for Name
if self.NameEntry.get() == "":
self.WarningLabel.configure(text = "Please enter name")
self.NameEntry.focus()
elif self.NameEntry.get().isalpha() == False:
self.WarningLabel.configure(text = "Pleasae enter text")
self.NameEntry.delete(0, END)
self.NameEntry.focus()
#Error checking for empty and age limit cases
elif self.AgeEntry.get() == "":
self.WarningLabel.configure(text = "Please enter age")
self.AgeEntry.focus()
elif int(self.AgeEntry.get()) > 12:
self.WarningLabel.configure(text = "You are too old!")
self.AgeEntry.delete(0, END)
self.AgeEntry.focus()
elif int(self.AgeEntry.get()) < 0:
self.WarningLabel.configure(text = "You are too old")
self.AgeEntry.delete(0, END)
self.AgeEntry.focus()
elif int(self.AgeEntry.get()) < 7:
self.WarningLabel.configure(text = "You are too young")
self.AgeEntry.delete(0, END)
self.AgeEntry.focus()
else: #if all conditions are met, then show Questions frame
self.name.set(self.NameEntry.get())
self.age.set(self.AgeEntry.get())
self.Welcome.grid_remove()
self.Questions.grid()
self.next_question()# call next question function
except ValueError:
self.WarningLabel.configure(text = "Please enter a number")
self.AgeEntry.delete(0, END)
self.AgeEntry.focus()
def next_question(self):
        '''Creates a question and stores the answer'''
x = randrange(10)
y = randrange(10)
self.answer = x + y
self.index += 1 #keep adding to index number
question_text = str(x) + " + " + str(y) + " = "
self.Problems.configure(text = question_text)
#update QuestionsLabel with question number
self.QuestionsLabel.configure(text = "Quiz Question " + str(self.index)+ "/5")
#limit number of questions to 5, then remove Questions frame and show Welcome frame
if self.index >= 6:
self.Questions.grid_remove()
self.Report_frame.grid(row=0, column=0)
self.report_treeview.grid(row=0, column=0)
self.ReportHomeButton.grid(row=1, column=0, sticky='w')
with open('scores.txt', 'a+') as scores_file:
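                #each line written below is "name age score date", e.g. "Jesse 10 4 2021-03-10"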
scores_file.write(self.name.get() + ' ' + str(self.age.get()) + ' ' + str(self.score) + ' ' + str(datetime.date.today()) + '\n')
self.update_report_treeview()
def check_answer(self):
try:
ans = int(self.AnswerEntry.get())
if ans == self.answer:
self.feedback.configure(text = "Correct")
self.score += 1 #add 1 to score for correct answer
score_text = "Score = " + str(self.score)
self.ScoreLabel.configure(text = score_text)
self.AnswerEntry.delete(0, END)
self.AnswerEntry.focus()
self.next_question()
else:
self.feedback.configure(text = "Incorrect", fg = "red")
self.AnswerEntry.delete(0, END)
self.AnswerEntry.focus()
self.next_question()
except ValueError:
self.feedback.configure(text = "Enter a number")
self.AnswerEntry.delete(0, END)
self.AnswerEntry.focus()
def update_report_treeview(self):
#Delete all existing rows from treeview
self.report_treeview.delete(*self.report_treeview.get_children())
with open('scores.txt', 'r') as scores_file:
scores = []
#Add each score to a 2d list, with each line as a list
for line in scores_file.readlines():
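                #e.g. the line "Jesse 10 4 2021-03-10" becomes ['Jesse', '10', '4', '2021-03-10']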
scores.append([line.split()[0], line.split()[1], line.split()[2], line.split()[3]])
            #Sort by latest date first, then by highest score(converted to int so "10" sorts above "9"), then by name
            scores.sort(key=lambda score: (score[3], int(score[2]), score[0]), reverse=True)
for score in scores:
self.report_treeview.insert('', 'end', text=score[0], values=(score[1], score[2], score[3]))
#Main routine
if __name__ == "__main__":
    root = Tk()
frames = MathsQuiz(root)
root.title("Quiz")
root.mainloop()
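#Usage note(assumes a standard Python 3 install, where tkinter is included): run this file directly;
#each finished quiz appends one line to scores.txt in the current working directory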
| [
"[email protected]"
] | |
9f08f06ac70f9695835d58acef5f3052583856ea | 6f47c9a25cacc91c83f520f040bbebe7479f433d | /python/ccxt/async_support/poloniexfutures.py | bee0ef2e586314a470cb2fec4bd86b45253197a3 | [
"MIT"
] | permissive | l3dlp-sandbox/ccxt | ae0a3b96a68d4d53cd725e96df2d245527995f51 | 60b2b2aa1f0a07086aa52f37bf8a22fd0e43cae1 | refs/heads/master | 2023-08-08T16:05:33.733548 | 2023-08-07T17:41:03 | 2023-08-07T17:41:03 | 205,384,557 | 0 | 0 | MIT | 2023-09-14T08:58:12 | 2019-08-30T13:09:25 | Python | UTF-8 | Python | false | false | 76,886 | py | # -*- coding: utf-8 -*-
# PLEASE DO NOT EDIT THIS FILE, IT IS GENERATED AND WILL BE OVERWRITTEN:
# https://github.com/ccxt/ccxt/blob/master/CONTRIBUTING.md#how-to-contribute-code
from ccxt.async_support.base.exchange import Exchange
from ccxt.abstract.poloniexfutures import ImplicitAPI
import hashlib
from ccxt.base.types import OrderSide
from ccxt.base.types import OrderType
from typing import Optional
from typing import List
from ccxt.base.errors import AccountSuspended
from ccxt.base.errors import ArgumentsRequired
from ccxt.base.errors import BadRequest
from ccxt.base.errors import InvalidOrder
from ccxt.base.errors import OrderNotFound
from ccxt.base.errors import NotSupported
from ccxt.base.errors import RateLimitExceeded
from ccxt.base.errors import ExchangeNotAvailable
from ccxt.base.errors import InvalidNonce
from ccxt.base.errors import AuthenticationError
from ccxt.base.decimal_to_precision import TICK_SIZE
from ccxt.base.precise import Precise
class poloniexfutures(Exchange, ImplicitAPI):
def describe(self):
return self.deep_extend(super(poloniexfutures, self).describe(), {
'id': 'poloniexfutures',
'name': 'Poloniex Futures',
'countries': ['US'],
# 30 requests per second
'rateLimit': 33.3,
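            # a sketch of how the cost weights below interact with rateLimit: an endpoint
            # with cost 10 is throttled to one call every ~333ms, i.e. ~3 requests per second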
'certified': False,
'pro': True,
'version': 'v1',
'has': {
'CORS': None,
'spot': False,
'margin': True,
'swap': True,
'future': False,
'option': None,
'createOrder': True,
'fetchBalance': True,
'fetchClosedOrders': True,
'fetchCurrencies': False,
'fetchFundingRate': True,
'fetchL3OrderBook': True,
'fetchMarkets': True,
'fetchMyTrades': True,
'fetchOHLCV': True,
'fetchOpenOrders': True,
'fetchOrder': True,
'fetchOrderBook': True,
'fetchOrdersByStatus': True,
'fetchPositions': True,
'fetchTicker': True,
'fetchTickers': True,
'fetchTime': True,
'fetchTrades': True,
'setMarginMode': True,
},
'timeframes': {
'1m': 1,
'5m': 5,
'15m': 15,
'30m': 30,
'1h': 60,
'2h': 120,
'4h': 480,
'12h': 720,
'1d': 1440,
'1w': 10080,
},
'urls': {
'logo': 'https://user-images.githubusercontent.com/1294454/27766817-e9456312-5ee6-11e7-9b3c-b628ca5626a5.jpg',
'api': {
'public': 'https://futures-api.poloniex.com',
'private': 'https://futures-api.poloniex.com',
},
'www': 'https://www.poloniex.com',
'doc': 'https://futures-docs.poloniex.com',
'fees': 'https://poloniex.com/fee-schedule',
'referral': 'https://poloniex.com/signup?c=UBFZJRPJ',
},
'api': {
'public': {
'get': {
'contracts/active': 10,
'contracts/{symbol}': 10,
'ticker': 10,
'tickers': 10, # v2
'level2/snapshot': 180.002,
'level2/depth': 180.002,
'level2/message/query': 180.002,
'level3/snapshot': 180.002, # v2
'trade/history': 10,
'interest/query': 10,
'index/query': 10,
'mark-price/{symbol}/current': 10,
'premium/query': 10,
'funding-rate/{symbol}/current': 10,
'timestamp': 10,
'status': 10,
'kline/query': 10,
},
'post': {
'bullet-public': 10,
},
},
'private': {
'get': {
'account-overview': 1,
'transaction-history': 1,
'orders': 1,
'stopOrders': 1,
'recentDoneOrders': 1,
'orders/{order-id}': 1,
'fills': 1,
'openOrderStatistics': 1,
'position': 1.5,
'positions': 1.5,
'funding-history': 1,
'marginType/query': 1,
},
'post': {
'orders': 1.5,
'batchOrders': 1.5,
'position/margin/auto-deposit-status': 1.5,
'position/margin/deposit-margin': 1.5,
'bullet-private': 1,
'marginType/change': 1,
},
'delete': {
'orders/{order-id}': 1.5,
'orders': 150.016,
'stopOrders': 150.016,
},
},
},
'precisionMode': TICK_SIZE,
'fees': {
'trading': {
'tierBased': False,
'percentage': True,
'taker': self.parse_number('0.00075'),
'maker': self.parse_number('0.0001'),
},
'funding': {
'tierBased': False,
'percentage': False,
'withdraw': {},
'deposit': {},
},
},
'commonCurrencies': {
},
'requiredCredentials': {
'apiKey': True,
'secret': True,
'password': True,
},
'options': {
'networks': {
'OMNI': 'omni',
'ERC20': 'eth',
'TRC20': 'trx',
},
'versions': {
'public': {
'GET': {
'ticker': 'v2',
'tickers': 'v2',
'level3/snapshot': 'v2',
},
},
},
},
'exceptions': {
'exact': {
'400': BadRequest, # Bad Request -- Invalid request format
'401': AuthenticationError, # Unauthorized -- Invalid API Key
'403': NotSupported, # Forbidden -- The request is forbidden
'404': NotSupported, # Not Found -- The specified resource could not be found
'405': NotSupported, # Method Not Allowed -- You tried to access the resource with an invalid method.
'415': BadRequest, # Content-Type -- application/json
'429': RateLimitExceeded, # Too Many Requests -- Access limit breached
'500': ExchangeNotAvailable, # Internal Server Error -- We had a problem with our server. Try again later.
'503': ExchangeNotAvailable, # Service Unavailable -- We're temporarily offline for maintenance. Please try again later.
'400001': AuthenticationError, # Any of KC-API-KEY, KC-API-SIGN, KC-API-TIMESTAMP, KC-API-PASSPHRASE is missing in your request header.
'400002': InvalidNonce, # KC-API-TIMESTAMP Invalid -- Time differs from server time by more than 5 seconds
'400003': AuthenticationError, # KC-API-KEY not exists
'400004': AuthenticationError, # KC-API-PASSPHRASE error
'400005': AuthenticationError, # Signature error -- Please check your signature
'400006': AuthenticationError, # The IP address is not in the API whitelist
'400007': AuthenticationError, # Access Denied -- Your API key does not have sufficient permissions to access the URI
'404000': NotSupported, # URL Not Found -- The requested resource could not be found
'400100': BadRequest, # Parameter Error -- You tried to access the resource with invalid parameters
'411100': AccountSuspended, # User is frozen -- Please contact us via support center
'500000': ExchangeNotAvailable, # Internal Server Error -- We had a problem with our server. Try again later.
},
'broad': {
'Position does not exist': OrderNotFound, # {"code":"200000", "msg":"Position does not exist"}
},
},
})
async def fetch_markets(self, params={}):
"""
retrieves data on all markets for poloniexfutures
see https://futures-docs.poloniex.com/#symbol-2
:param dict [params]: extra parameters specific to the exchange api endpoint
:returns dict[]: an array of objects representing market data
"""
response = await self.publicGetContractsActive(params)
#
# {
# "code": "200000",
# "data": [
# {
# symbol: 'APTUSDTPERP',
# takerFixFee: '0E-10',
# nextFundingRateTime: '20145603',
# makerFixFee: '0E-10',
# type: 'FFWCSX',
# predictedFundingFeeRate: '0.000000',
# turnoverOf24h: '386037.46704292',
# initialMargin: '0.05',
# isDeleverage: True,
# createdAt: '1666681959000',
# fundingBaseSymbol: '.APTINT8H',
# lowPriceOf24h: '4.34499979019165',
# lastTradePrice: '4.4090000000',
# indexPriceTickSize: '0.001',
# fairMethod: 'FundingRate',
# takerFeeRate: '0.00040',
# order: '102',
# updatedAt: '1671076377000',
# displaySettleCurrency: 'USDT',
# indexPrice: '4.418',
# multiplier: '1.0',
# maxLeverage: '20',
# fundingQuoteSymbol: '.USDTINT8H',
# quoteCurrency: 'USDT',
# maxOrderQty: '1000000',
# maxPrice: '1000000.0000000000',
# maintainMargin: '0.025',
# status: 'Open',
# displayNameMap: [Object],
# openInterest: '2367',
# highPriceOf24h: '4.763999938964844',
# fundingFeeRate: '0.000000',
# volumeOf24h: '83540.00000000',
# riskStep: '500000',
# isQuanto: True,
# maxRiskLimit: '20000',
# rootSymbol: 'USDT',
# baseCurrency: 'APT',
# firstOpenDate: '1666701000000',
# tickSize: '0.001',
# markMethod: 'FairPrice',
# indexSymbol: '.PAPTUSDT',
# markPrice: '4.418',
# minRiskLimit: '1000000',
# settlementFixFee: '0E-10',
# settlementSymbol: '',
# priceChgPctOf24h: '-0.0704',
# fundingRateSymbol: '.APTUSDTPERPFPI8H',
# makerFeeRate: '0.00010',
# isInverse: False,
# lotSize: '1',
# settleCurrency: 'USDT',
# settlementFeeRate: '0.0'
# },
# ]
# }
#
result = []
data = self.safe_value(response, 'data', [])
dataLength = len(data)
for i in range(0, dataLength):
market = data[i]
id = self.safe_string(market, 'symbol')
baseId = self.safe_string(market, 'baseCurrency')
quoteId = self.safe_string(market, 'quoteCurrency')
settleId = self.safe_string(market, 'rootSymbol')
base = self.safe_currency_code(baseId)
quote = self.safe_currency_code(quoteId)
settle = self.safe_currency_code(settleId)
symbol = base + '/' + quote + ':' + settle
inverse = self.safe_value(market, 'isInverse')
status = self.safe_string(market, 'status')
multiplier = self.safe_string(market, 'multiplier')
tickSize = self.safe_number(market, 'indexPriceTickSize')
lotSize = self.safe_number(market, 'lotSize')
limitAmountMax = self.safe_number(market, 'maxOrderQty')
limitPriceMax = self.safe_number(market, 'maxPrice')
result.append({
'id': id,
'symbol': symbol,
'base': base,
'quote': quote,
'settle': settle,
'baseId': baseId,
'quoteId': quoteId,
'settleId': settleId,
'type': 'swap',
'spot': False,
'margin': False,
'swap': True,
'future': False,
'option': False,
'active': (status == 'Open'),
'contract': True,
'linear': not inverse,
'inverse': inverse,
'taker': self.safe_number(market, 'takerFeeRate'),
'maker': self.safe_number(market, 'makerFeeRate'),
'contractSize': self.parse_number(Precise.string_abs(multiplier)),
'expiry': None,
'expiryDatetime': None,
'strike': None,
'optionType': None,
'precision': {
'amount': lotSize,
'price': tickSize,
},
'limits': {
'leverage': {
'min': self.parse_number('1'),
'max': self.safe_number(market, 'maxLeverage'),
},
'amount': {
'min': lotSize,
'max': limitAmountMax,
},
'price': {
'min': tickSize,
'max': limitPriceMax,
},
'cost': {
'min': None,
'max': None,
},
},
'info': market,
})
return result
def parse_ticker(self, ticker, market=None):
#
# {
# "symbol": "BTCUSDTPERP", # Market of the symbol
# "sequence": 45, # Sequence number which is used to judge the continuity of the pushed messages
# "side": "sell", # Transaction side of the last traded taker order
# "price": 3600.00, # Filled price
# "size": 16, # Filled quantity
# "tradeId": "5c9dcf4170744d6f5a3d32fb", # Order ID
# "bestBidSize": 795, # Best bid size
# "bestBidPrice": 3200.00, # Best bid
# "bestAskPrice": 3600.00, # Best ask size
# "bestAskSize": 284, # Best ask
# "ts": 1553846081210004941 # Filled time - nanosecond
# }
#
# {
# "volume": 30449670, #24h Volume
# "turnover": 845169919063, #24h Turnover
# "lastPrice": 3551, #Last price
# "priceChgPct": 0.0043, #24h Change
# "ts": 1547697294838004923 #Snapshot time(nanosecond)
# }
#
marketId = self.safe_string(ticker, 'symbol')
symbol = self.safe_symbol(marketId, market)
timestamp = self.safe_integer_product(ticker, 'ts', 0.000001)
last = self.safe_string_2(ticker, 'price', 'lastPrice')
percentage = Precise.string_mul(self.safe_string(ticker, 'priceChgPct'), '100')
return self.safe_ticker({
'symbol': symbol,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'high': None,
'low': None,
'bid': self.safe_string(ticker, 'bestBidPrice'),
'bidVolume': self.safe_string(ticker, 'bestBidSize'),
'ask': self.safe_string(ticker, 'bestAskPrice'),
'askVolume': self.safe_string(ticker, 'bestAskSize'),
'vwap': None,
'open': None,
'close': last,
'last': last,
'previousClose': None,
'change': None,
'percentage': percentage,
'average': None,
'baseVolume': self.safe_string_2(ticker, 'size', 'volume'),
'quoteVolume': self.safe_string(ticker, 'turnover'),
'info': ticker,
}, market)
async def fetch_ticker(self, symbol: str, params={}):
"""
fetches a price ticker, a statistical calculation with the information calculated over the past 24 hours for a specific market
see https://futures-docs.poloniex.com/#get-real-time-ticker-2-0
:param str symbol: unified symbol of the market to fetch the ticker for
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:returns dict: a `ticker structure <https://docs.ccxt.com/#/?id=ticker-structure>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = await self.publicGetTicker(self.extend(request, params))
#
# {
# code: '200000',
# data: {
# sequence: '11574719',
# symbol: 'BTCUSDTPERP',
# side: 'sell',
# size: '1',
# price: '16990.1',
# bestBidSize: '3',
# bestBidPrice: '16990.1',
# bestAskPrice: '16991.0',
# tradeId: '639c8a529fd7cf0001af4157',
# bestAskSize: '505',
# ts: '1671203410721232337'
# }
# }
#
return self.parse_ticker(self.safe_value(response, 'data', {}), market)
async def fetch_tickers(self, symbols: Optional[List[str]] = None, params={}):
"""
        fetches price tickers for multiple markets, statistical calculations with the information calculated over the past 24 hours for each market
see https://futures-docs.poloniex.com/#get-real-time-ticker-of-all-symbols
:param str[]|None symbols: unified symbols of the markets to fetch the ticker for, all market tickers are returned if not assigned
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:returns dict: a dictionary of `ticker structures <https://docs.ccxt.com/#/?id=ticker-structure>`
"""
await self.load_markets()
response = await self.publicGetTickers(params)
return self.parse_tickers(self.safe_value(response, 'data', []), symbols)
async def fetch_order_book(self, symbol: str, limit: Optional[int] = None, params={}):
"""
fetches information on open orders with bid(buy) and ask(sell) prices, volumes and other data
see https://futures-docs.poloniex.com/#get-full-order-book-level-2
see https://futures-docs.poloniex.com/#get-full-order-book-level-3
:param str symbol: unified symbol of the market to fetch the order book for
:param int [limit]: the maximum amount of order book entries to return
        :param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:returns dict: A dictionary of `order book structures <https://docs.ccxt.com/#/?id=order-book-structure>` indexed by market symbols
"""
await self.load_markets()
level = self.safe_number(params, 'level')
params = self.omit(params, 'level')
if level is not None and level != 2 and level != 3:
raise BadRequest(self.id + ' fetchOrderBook() can only return level 2 & 3')
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = None
if level == 3:
response = await self.publicGetLevel3Snapshot(self.extend(request, params))
else:
response = await self.publicGetLevel2Snapshot(self.extend(request, params))
# L2
#
# {
# "code": "200000",
# "data": {
# "symbol": "BTCUSDTPERP",
# "sequence": 1669149851334,
# "asks": [
# [
# 16952,
# 12
# ],
# ],
# "bids": [
# [
# 16951.9,
# 13
# ],
# ],
# }
#
# L3
#
# {
# "code": "200000",
# "data": {
# "symbol": "BTCUSDTPERP",
# "sequence": 1669149851334,
# "asks": [
# [
# "639c95388cba5100084eabce",
# "16952.0",
# "1",
# 1671206200542484700
# ],
# ],
# "bids": [
# [
# "626659d83385c200072e690b",
# "17.0",
# "1000",
# 1650874840161291000
# ],
# ],
# }
#
data = self.safe_value(response, 'data', {})
timestamp = self.safe_integer_product(data, 'ts', 0.000001)
orderbook = None
if level == 3:
orderbook = self.parse_order_book(data, market['symbol'], timestamp, 'bids', 'asks', 1, 2)
else:
orderbook = self.parse_order_book(data, market['symbol'], timestamp, 'bids', 'asks', 0, 1)
orderbook['nonce'] = self.safe_integer(data, 'sequence')
return orderbook
async def fetch_l3_order_book(self, symbol: str, limit: Optional[int] = None, params={}):
"""
fetches level 3 information on open orders with bid(buy) and ask(sell) prices, volumes and other data
see https://futures-docs.poloniex.com/#get-full-order-book-level-3
:param str symbol: unified market symbol
:param int [limit]: max number of orders to return, default is None
        :param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:returns dict: an `order book structure <https://docs.ccxt.com/#/?id=order-book-structure>`
"""
await self.load_markets()
market = self.market(symbol)
        return await self.fetch_order_book(market['id'], None, {'level': 3})
def parse_trade(self, trade, market=None):
#
# fetchTrades(public)
#
# {
# "sequence": 11827985,
# "side": "buy",
# "size": 101,
# "price": "16864.0000000000",
# "takerOrderId": "639c986f0ac2470007be75ee",
# "makerOrderId": "639c986fa69d280007b76111",
# "tradeId": "639c986f9fd7cf0001afd7ee",
# "ts": 1671207023485924400
# }
#
# fetchMyTrades
#
# {
# "symbol": "BTCUSDTPERP", #Ticker symbol of the contract
# "tradeId": "5ce24c1f0c19fc3c58edc47c", #Trade ID
# "orderId": "5ce24c16b210233c36ee321d", # Order ID
# "side": "sell", #Transaction side
# "liquidity": "taker", #Liquidity- taker or maker
# "price": "8302", #Filled price
# "size": 10, #Filled amount
# "value": "0.001204529", #Order value
# "feeRate": "0.0005", #Floating fees
# "fixFee": "0.00000006", #Fixed fees
# "feeCurrency": "XBT", #Charging currency
# "stop": "", #A mark to the stop order type
# "fee": "0.0000012022", #Transaction fee
# "orderType": "limit", #Order type
# "tradeType": "trade", #Trade type(trade, liquidation, ADL or settlement)
# "createdAt": 1558334496000, #Time the order created
# "settleCurrency": "XBT", #settlement currency
# "tradeTime": 1558334496000000000 #trade time in nanosecond
# }
#
marketId = self.safe_string(trade, 'symbol')
market = self.safe_market(marketId, market, '-')
id = self.safe_string(trade, 'tradeId')
orderId = self.safe_string(trade, 'orderId')
takerOrMaker = self.safe_string(trade, 'liquidity')
timestamp = self.safe_integer(trade, 'ts')
if timestamp is not None:
timestamp = self.parse_to_int(timestamp / 1000000)
else:
timestamp = self.safe_integer(trade, 'createdAt')
# if it's a historical v1 trade, the exchange returns timestamp in seconds
if ('dealValue' in trade) and (timestamp is not None):
timestamp = timestamp * 1000
priceString = self.safe_string(trade, 'price')
amountString = self.safe_string(trade, 'size')
side = self.safe_string(trade, 'side')
fee = None
feeCostString = self.safe_string(trade, 'fee')
if feeCostString is not None:
feeCurrencyId = self.safe_string(trade, 'feeCurrency')
feeCurrency = self.safe_currency_code(feeCurrencyId)
if feeCurrency is None:
feeCurrency = market['quote'] if (side == 'sell') else market['base']
fee = {
'cost': feeCostString,
'currency': feeCurrency,
'rate': self.safe_string(trade, 'feeRate'),
}
type = self.safe_string(trade, 'orderType')
if type == 'match':
type = None
costString = self.safe_string(trade, 'value')
return self.safe_trade({
'info': trade,
'id': id,
'order': orderId,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'symbol': market['symbol'],
'type': type,
'takerOrMaker': takerOrMaker,
'side': side,
'price': priceString,
'amount': amountString,
'cost': costString,
'fee': fee,
}, market)
async def fetch_trades(self, symbol: str, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
get the list of most recent trades for a particular symbol
see https://futures-docs.poloniex.com/#historical-data
:param str symbol: unified symbol of the market to fetch trades for
:param int [since]: timestamp in ms of the earliest trade to fetch
:param int [limit]: the maximum amount of trades to fetch
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:returns Trade[]: a list of `trade structures <https://docs.ccxt.com/en/latest/manual.html?#public-trades>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = await self.publicGetTradeHistory(self.extend(request, params))
#
# {
# "code": "200000",
# "data": [
# {
# "sequence": 11827985,
# "side": "buy",
# "size": 101,
# "price": "16864.0000000000",
# "takerOrderId": "639c986f0ac2470007be75ee",
# "makerOrderId": "639c986fa69d280007b76111",
# "tradeId": "639c986f9fd7cf0001afd7ee",
# "ts": 1671207023485924400
# },
# }
#
trades = self.safe_value(response, 'data', [])
return self.parse_trades(trades, market, since, limit)
async def fetch_time(self, params={}):
"""
fetches the current integer timestamp in milliseconds from the poloniexfutures server
see https://futures-docs.poloniex.com/#time
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:returns int: the current integer timestamp in milliseconds from the poloniexfutures server
"""
response = await self.publicGetTimestamp(params)
#
# {
# "code":"200000",
# "msg":"success",
# "data":1546837113087
# }
#
return self.safe_integer(response, 'data')
async def fetch_ohlcv(self, symbol: str, timeframe='1m', since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetches historical candlestick data containing the open, high, low, and close price, and the volume of a market
see https://futures-docs.poloniex.com/#k-chart
:param str symbol: unified symbol of the market to fetch OHLCV data for
:param str timeframe: the length of time each candle represents
:param int [since]: timestamp in ms of the earliest candle to fetch
:param int [limit]: the maximum amount of candles to fetch
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:returns int[][]: A list of candles ordered, open, high, low, close, volume
"""
await self.load_markets()
market = self.market(symbol)
marketId = market['id']
parsedTimeframe = self.safe_integer(self.timeframes, timeframe)
request = {
'symbol': marketId,
}
if parsedTimeframe is not None:
request['granularity'] = parsedTimeframe
else:
request['granularity'] = timeframe
duration = self.parse_timeframe(timeframe) * 1000
endAt = self.milliseconds()
if since is not None:
request['from'] = since
if limit is None:
limit = self.safe_integer(self.options, 'fetchOHLCVLimit', 200)
endAt = self.sum(since, limit * duration)
request['to'] = endAt
elif limit is not None:
since = endAt - limit * duration
request['from'] = since
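        # illustrative window math for the branches above: timeframe='15m' gives duration=900000 ms,
        # so limit=200 covers 200 * 900000 ms = 50 hours of candles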
response = await self.publicGetKlineQuery(self.extend(request, params))
#
# {
# "code": "200000",
# "data": [
# [1636459200000, 4779.3, 4792.1, 4768.7, 4770.3, 78051],
# [1636460100000, 4770.25, 4778.55, 4757.55, 4777.25, 80164],
# [1636461000000, 4777.25, 4791.45, 4774.5, 4791.3, 51555]
# ]
# }
#
data = self.safe_value(response, 'data', [])
return self.parse_ohlcvs(data, market, timeframe, since, limit)
def parse_balance(self, response):
result = {
'info': response,
'timestamp': None,
'datetime': None,
}
data = self.safe_value(response, 'data')
currencyId = self.safe_string(data, 'currency')
code = self.safe_currency_code(currencyId)
account = self.account()
account['free'] = self.safe_string(data, 'availableBalance')
account['total'] = self.safe_string(data, 'accountEquity')
result[code] = account
return self.safe_balance(result)
async def fetch_balance(self, params={}):
"""
query for balance and get the amount of funds available for trading or funds locked in orders
see https://futures-docs.poloniex.com/#get-account-overview
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:returns dict: a `balance structure <https://docs.ccxt.com/en/latest/manual.html?#balance-structure>`
"""
await self.load_markets()
currencyId = self.safe_string(params, 'currency')
request = {}
if currencyId is not None:
currency = self.currency(currencyId)
request = {
'currency': currency['id'],
}
response = await self.privateGetAccountOverview(self.extend(request, params))
#
# {
# code: '200000',
# data: {
# accountEquity: 0.00005,
# unrealisedPNL: 0,
# marginBalance: 0.00005,
# positionMargin: 0,
# orderMargin: 0,
# frozenFunds: 0,
# availableBalance: 0.00005,
# currency: 'XBT'
# }
# }
#
return self.parse_balance(response)
async def create_order(self, symbol: str, type: OrderType, side: OrderSide, amount, price=None, params={}):
"""
Create an order on the exchange
see https://futures-docs.poloniex.com/#place-an-order
:param str symbol: Unified CCXT market symbol
:param str type: 'limit' or 'market'
:param str side: 'buy' or 'sell'
:param float amount: the amount of currency to trade
        :param float price: *ignored in "market" orders* the price at which the order is to be fulfilled, in units of the quote currency
:param dict [params]: Extra parameters specific to the exchange API endpoint
:param float [params.leverage]: Leverage size of the order
:param float [params.stopPrice]: The price at which a trigger order is triggered at
:param bool [params.reduceOnly]: A mark to reduce the position size only. Set to False by default. Need to set the position size when reduceOnly is True.
:param str [params.timeInForce]: GTC, GTT, IOC, or FOK, default is GTC, limit orders only
:param str [params.postOnly]: Post only flag, invalid when timeInForce is IOC or FOK
:param str [params.clientOid]: client order id, defaults to uuid if not passed
:param str [params.remark]: remark for the order, length cannot exceed 100 utf8 characters
:param str [params.stop]: 'up' or 'down', defaults to 'up' if side is sell and 'down' if side is buy, requires stopPrice
:param str [params.stopPriceType]: TP, IP or MP, defaults to TP
:param bool [params.closeOrder]: set to True to close position
        :param bool [params.forceHold]: A mark to forcibly hold the funds for an order, even though it's an order to reduce the position size. This helps the order stay on the order book and not get canceled when the position size changes. Set to False by default.
:returns dict: an `order structure <https://docs.ccxt.com/#/?id=order-structure>`
"""
await self.load_markets()
market = self.market(symbol)
        # clientOid is a required param and cannot be reused, so default to a fresh uuid
clientOrderId = self.safe_string_2(params, 'clientOid', 'clientOrderId', self.uuid())
params = self.omit(params, ['clientOid', 'clientOrderId'])
if amount < 1:
raise InvalidOrder(self.id + ' createOrder() minimum contract order amount is 1')
preciseAmount = int(self.amount_to_precision(symbol, amount))
request = {
'clientOid': clientOrderId,
'side': side,
'symbol': market['id'],
'type': type, # limit or market
'size': preciseAmount,
'leverage': 1,
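            # defaults to 1x leverage; a 'leverage' key in params overrides this
            # when merged by self.extend(request, params) below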
}
stopPrice = self.safe_value_2(params, 'triggerPrice', 'stopPrice')
if stopPrice:
request['stop'] = 'up' if (side == 'buy') else 'down'
stopPriceType = self.safe_string(params, 'stopPriceType', 'TP')
request['stopPriceType'] = stopPriceType
request['stopPrice'] = self.price_to_precision(symbol, stopPrice)
timeInForce = self.safe_string_upper(params, 'timeInForce')
if type == 'limit':
if price is None:
raise ArgumentsRequired(self.id + ' createOrder() requires a price argument for limit orders')
else:
request['price'] = self.price_to_precision(symbol, price)
if timeInForce is not None:
request['timeInForce'] = timeInForce
postOnly = self.safe_value(params, 'postOnly', False)
hidden = self.safe_value(params, 'hidden')
if postOnly and (hidden is not None):
raise BadRequest(self.id + ' createOrder() does not support the postOnly parameter together with a hidden parameter')
iceberg = self.safe_value(params, 'iceberg')
if iceberg:
visibleSize = self.safe_value(params, 'visibleSize')
if visibleSize is None:
raise ArgumentsRequired(self.id + ' createOrder() requires a visibleSize parameter for iceberg orders')
params = self.omit(params, ['timeInForce', 'stopPrice', 'triggerPrice']) # Time in force only valid for limit orders, exchange error when gtc for market orders
response = await self.privatePostOrders(self.extend(request, params))
#
# {
# code: "200000",
# data: {
# orderId: "619717484f1d010001510cde",
# },
# }
#
data = self.safe_value(response, 'data', {})
return {
'id': self.safe_string(data, 'orderId'),
'clientOrderId': None,
'timestamp': None,
'datetime': None,
'lastTradeTimestamp': None,
'symbol': None,
'type': None,
'side': None,
'price': None,
'amount': None,
'cost': None,
'average': None,
'filled': None,
'remaining': None,
'status': None,
'fee': None,
'trades': None,
'timeInForce': None,
'postOnly': None,
'stopPrice': None,
'info': response,
}
async def cancel_order(self, id: str, symbol: Optional[str] = None, params={}):
"""
cancels an open order
see https://futures-docs.poloniex.com/#cancel-an-order
:param str id: order id
:param str symbol: unified symbol of the market the order was made in
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/#/?id=order-structure>`
"""
await self.load_markets()
request = {
'order-id': id,
}
response = await self.privateDeleteOrdersOrderId(self.extend(request, params))
#
# {
# code: "200000",
# data: {
# cancelledOrderIds: [
# "619714b8b6353000014c505a",
# ],
# cancelFailedOrders: [
# {
# orderId: "63a9c5c2b9e7d70007eb0cd5",
# orderState: "2"
# }
# ],
# },
# }
#
data = self.safe_value(response, 'data')
cancelledOrderIds = self.safe_value(data, 'cancelledOrderIds')
cancelledOrderIdsLength = len(cancelledOrderIds)
if cancelledOrderIdsLength == 0:
raise InvalidOrder(self.id + ' cancelOrder() order already cancelled')
return self.parse_order(data)
async def fetch_positions(self, symbols: Optional[List[str]] = None, params={}):
"""
fetch all open positions
see https://futures-docs.poloniex.com/#get-position-list
:param str[]|None symbols: list of unified market symbols
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:returns dict[]: a list of `position structure <https://docs.ccxt.com/#/?id=position-structure>`
"""
await self.load_markets()
response = await self.privateGetPositions(params)
#
# {
# "code": "200000",
# "data": [
# {
# "id": "615ba79f83a3410001cde321",
# "symbol": "ETHUSDTM",
# "autoDeposit": False,
# "maintMarginReq": 0.005,
# "riskLimit": 1000000,
# "realLeverage": 18.61,
# "crossMode": False,
# "delevPercentage": 0.86,
# "openingTimestamp": 1638563515618,
# "currentTimestamp": 1638576872774,
# "currentQty": 2,
# "currentCost": 83.64200000,
# "currentComm": 0.05018520,
# "unrealisedCost": 83.64200000,
# "realisedGrossCost": 0.00000000,
# "realisedCost": 0.05018520,
# "isOpen": True,
# "markPrice": 4225.01,
# "markValue": 84.50020000,
# "posCost": 83.64200000,
# "posCross": 0.0000000000,
# "posInit": 3.63660870,
# "posComm": 0.05236717,
# "posLoss": 0.00000000,
# "posMargin": 3.68897586,
# "posMaint": 0.50637594,
# "maintMargin": 4.54717586,
# "realisedGrossPnl": 0.00000000,
# "realisedPnl": -0.05018520,
# "unrealisedPnl": 0.85820000,
# "unrealisedPnlPcnt": 0.0103,
# "unrealisedRoePcnt": 0.2360,
# "avgEntryPrice": 4182.10,
# "liquidationPrice": 4023.00,
# "bankruptPrice": 4000.25,
# "settleCurrency": "USDT",
# "isInverse": False
# }
# ]
# }
#
data = self.safe_value(response, 'data')
return self.parse_positions(data, symbols)
def parse_position(self, position, market=None):
#
# {
# "code": "200000",
# "data": [
# {
# "id": "615ba79f83a3410001cde321", # Position ID
# "symbol": "ETHUSDTM", # Symbol
# "autoDeposit": False, # Auto deposit margin or not
# "maintMarginReq": 0.005, # Maintenance margin requirement
# "riskLimit": 1000000, # Risk limit
# "realLeverage": 25.92, # Leverage of the order
# "crossMode": False, # Cross mode or not
# "delevPercentage": 0.76, # ADL ranking percentile
# "openingTimestamp": 1638578546031, # Open time
# "currentTimestamp": 1638578563580, # Current timestamp
# "currentQty": 2, # Current postion quantity
# "currentCost": 83.787, # Current postion value
# "currentComm": 0.0167574, # Current commission
# "unrealisedCost": 83.787, # Unrealised value
# "realisedGrossCost": 0.0, # Accumulated realised gross profit value
# "realisedCost": 0.0167574, # Current realised position value
# "isOpen": True, # Opened position or not
# "markPrice": 4183.38, # Mark price
# "markValue": 83.6676, # Mark value
# "posCost": 83.787, # Position value
# "posCross": 0.0, # added margin
# "posInit": 3.35148, # Leverage margin
# "posComm": 0.05228309, # Bankruptcy cost
# "posLoss": 0.0, # Funding fees paid out
# "posMargin": 3.40376309, # Position margin
# "posMaint": 0.50707892, # Maintenance margin
# "maintMargin": 3.28436309, # Position margin
# "realisedGrossPnl": 0.0, # Accumulated realised gross profit value
# "realisedPnl": -0.0167574, # Realised profit and loss
# "unrealisedPnl": -0.1194, # Unrealised profit and loss
# "unrealisedPnlPcnt": -0.0014, # Profit-loss ratio of the position
# "unrealisedRoePcnt": -0.0356, # Rate of return on investment
# "avgEntryPrice": 4189.35, # Average entry price
# "liquidationPrice": 4044.55, # Liquidation price
# "bankruptPrice": 4021.75, # Bankruptcy price
# "settleCurrency": "USDT", # Currency used to clear and settle the trades
# "isInverse": False
# }
# ]
# }
#
symbol = self.safe_string(position, 'symbol')
market = self.safe_market(symbol, market)
timestamp = self.safe_integer(position, 'currentTimestamp')
size = self.safe_string(position, 'currentQty')
side = None
if Precise.string_gt(size, '0'):
side = 'long'
elif Precise.string_lt(size, '0'):
side = 'short'
notional = Precise.string_abs(self.safe_string(position, 'posCost'))
initialMargin = self.safe_string(position, 'posInit')
initialMarginPercentage = Precise.string_div(initialMargin, notional)
# marginRatio = Precise.string_div(maintenanceRate, collateral)
unrealisedPnl = self.safe_string(position, 'unrealisedPnl')
crossMode = self.safe_value(position, 'crossMode')
# currently crossMode is always set to False and only isolated positions are supported
marginMode = 'cross' if crossMode else 'isolated'
return {
'info': position,
'id': None,
'symbol': self.safe_string(market, 'symbol'),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'initialMargin': self.parse_number(initialMargin),
'initialMarginPercentage': self.parse_number(initialMarginPercentage),
'maintenanceMargin': self.safe_number(position, 'posMaint'),
'maintenanceMarginPercentage': self.safe_number(position, 'maintMarginReq'),
'entryPrice': self.safe_number(position, 'avgEntryPrice'),
'notional': self.parse_number(notional),
'leverage': self.safe_number(position, 'realLeverage'),
'unrealizedPnl': self.parse_number(unrealisedPnl),
'contracts': self.parse_number(Precise.string_abs(size)),
'contractSize': self.safe_value(market, 'contractSize'),
'marginRatio': None,
'liquidationPrice': self.safe_number(position, 'liquidationPrice'),
'markPrice': self.safe_number(position, 'markPrice'),
'collateral': self.safe_number(position, 'maintMargin'),
'marginMode': marginMode,
'side': side,
'percentage': self.parse_number(Precise.string_div(unrealisedPnl, initialMargin)),
}
async def fetch_funding_history(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetch the history of funding payments paid and received on self account
see https://futures-docs.poloniex.com/#get-funding-history
:param str symbol: unified market symbol
:param int [since]: the earliest time in ms to fetch funding history for
:param int [limit]: the maximum number of funding history structures to retrieve
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:returns dict: a `funding history structure <https://docs.ccxt.com/#/?id=funding-history-structure>`
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' fetchFundingHistory() requires a symbol argument')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
if since is not None:
request['startAt'] = since
if limit is not None:
# * Since is ignored if limit is defined
request['maxCount'] = limit
response = await self.privateGetFundingHistory(self.extend(request, params))
#
# {
# "code": "200000",
# "data": {
# "dataList": [
# {
# "id": 239471298749817,
# "symbol": "ETHUSDTM",
# "timePoint": 1638532800000,
# "fundingRate": 0.000100,
# "markPrice": 4612.8300000000,
# "positionQty": 12,
# "positionCost": 553.5396000000,
# "funding": -0.0553539600,
# "settleCurrency": "USDT"
# },
# ...
# ],
# "hasMore": True
# }
# }
#
data = self.safe_value(response, 'data')
dataList = self.safe_value(data, 'dataList', [])
dataListLength = len(dataList)
fees = []
for i in range(0, dataListLength):
listItem = dataList[i]
timestamp = self.safe_integer(listItem, 'timePoint')
fees.append({
'info': listItem,
'symbol': symbol,
'code': self.safe_currency_code(self.safe_string(listItem, 'settleCurrency')),
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'id': self.safe_number(listItem, 'id'),
'amount': self.safe_number(listItem, 'funding'),
'fundingRate': self.safe_number(listItem, 'fundingRate'),
'markPrice': self.safe_number(listItem, 'markPrice'),
'positionQty': self.safe_number(listItem, 'positionQty'),
'positionCost': self.safe_number(listItem, 'positionCost'),
})
return fees
async def cancel_all_orders(self, symbol: Optional[str] = None, params={}):
"""
cancel all open orders
:param str symbol: unified market symbol, only orders in the market of self symbol are cancelled when symbol is not None
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:param dict [params.stop]: When True, all the trigger orders will be cancelled
:returns dict[]: a list of `order structures <https://docs.ccxt.com/#/?id=order-structure>`
"""
await self.load_markets()
request = {}
if symbol is not None:
request['symbol'] = self.market_id(symbol)
stop = self.safe_value(params, 'stop')
method = 'privateDeleteStopOrders' if stop else 'privateDeleteOrders'
response = await getattr(self, method)(self.extend(request, params))
#
# {
# code: "200000",
# data: {
# cancelledOrderIds: [
# "619714b8b6353000014c505a",
# ],
# },
# }
#
data = self.safe_value(response, 'data')
result = []
cancelledOrderIds = self.safe_value(data, 'cancelledOrderIds')
cancelledOrderIdsLength = len(cancelledOrderIds)
for i in range(0, cancelledOrderIdsLength):
cancelledOrderId = self.safe_string(cancelledOrderIds, i)
result.append({
'id': cancelledOrderId,
'clientOrderId': None,
'timestamp': None,
'datetime': None,
'lastTradeTimestamp': None,
'symbol': None,
'type': None,
'side': None,
'price': None,
'amount': None,
'cost': None,
'average': None,
'filled': None,
'remaining': None,
'status': None,
'fee': None,
'trades': None,
'timeInForce': None,
'postOnly': None,
'stopPrice': None,
'info': response,
})
return result
async def fetch_orders_by_status(self, status, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetches a list of orders placed on the exchange
see https://futures-docs.poloniex.com/#get-order-list
see https://futures-docs.poloniex.com/#get-untriggered-stop-order-list
        :param str status: 'open' or 'closed', only 'open' is valid for stop orders
:param str symbol: unified symbol for the market to retrieve orders from
:param int [since]: timestamp in ms of the earliest order to retrieve
:param int [limit]: The maximum number of orders to retrieve
:param dict [params]: exchange specific parameters
:param bool [params.stop]: set to True to retrieve untriggered stop orders
:param int [params.until]: End time in ms
:param str [params.side]: buy or sell
:param str [params.type]: limit or market
:returns: An `array of order structures <https://docs.ccxt.com/#/?id=order-structure>`
"""
await self.load_markets()
stop = self.safe_value(params, 'stop')
until = self.safe_integer_2(params, 'until', 'till')
params = self.omit(params, ['stop', 'until', 'till'])
if status == 'closed':
status = 'done'
request = {}
if not stop:
            request['status'] = 'active' if status == 'open' else 'done'
elif status != 'open':
raise BadRequest(self.id + ' fetchOrdersByStatus() can only fetch untriggered stop orders')
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['startAt'] = since
if until is not None:
request['endAt'] = until
method = 'privateGetStopOrders' if stop else 'privateGetOrders'
response = await getattr(self, method)(self.extend(request, params))
#
# {
# "code": "200000",
# "data": {
# "totalNum": 1,
# "totalPage": 1,
# "pageSize": 50,
# "currentPage": 1,
# "items": [
# {
# "symbol": "ADAUSDTPERP",
# "leverage": "1",
# "hidden": False,
# "forceHold": False,
# "closeOrder": False,
# "type": "limit",
# "isActive": True,
# "createdAt": 1678936920000,
# "orderTime": 1678936920480905922,
# "price": "0.3",
# "iceberg": False,
# "stopTriggered": False,
# "id": "64128b582cc0710007a3c840",
# "value": "3",
# "timeInForce": "GTC",
# "updatedAt": 1678936920000,
# "side": "buy",
# "stopPriceType": "",
# "dealValue": "0",
# "dealSize": 0,
# "settleCurrency": "USDT",
# "stp": "",
# "filledValue": "0",
# "postOnly": False,
# "size": 1,
# "stop": "",
# "filledSize": 0,
# "reduceOnly": False,
# "marginType": 1,
# "cancelExist": False,
# "clientOid": "ba669f39-dfcc-4664-9801-a42d06e59c2e",
# "status": "open"
# }
# ]
# }
# }
#
responseData = self.safe_value(response, 'data', {})
orders = self.safe_value(responseData, 'items', [])
ordersLength = len(orders)
result = []
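        # the endpoint can return orders in several states, keep only those matching the requested status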
for i in range(0, ordersLength):
order = orders[i]
orderStatus = self.safe_string(order, 'status')
if status == orderStatus:
result.append(orders[i])
return self.parse_orders(result, market, since, limit)
async def fetch_open_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetch all unfilled currently open orders
see https://futures-docs.poloniex.com/#get-order-list
see https://futures-docs.poloniex.com/#get-untriggered-stop-order-list
:param str symbol: unified market symbol
:param int [since]: the earliest time in ms to fetch open orders for
:param int [limit]: the maximum number of open orders structures to retrieve
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:param int [params.till]: end time in ms
:param str [params.side]: buy or sell
:param str [params.type]: limit, or market
:returns Order[]: a list of `order structures <https://docs.ccxt.com/#/?id=order-structure>`
"""
return await self.fetch_orders_by_status('open', symbol, since, limit, params)
async def fetch_closed_orders(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetches information on multiple closed orders made by the user
see https://futures-docs.poloniex.com/#get-order-list
see https://futures-docs.poloniex.com/#get-untriggered-stop-order-list
:param str symbol: unified market symbol of the market orders were made in
:param int [since]: the earliest time in ms to fetch orders for
        :param int [limit]: the maximum number of order structures to retrieve
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:param int [params.till]: end time in ms
:param str [params.side]: buy or sell
:param str [params.type]: limit, or market
:returns Order[]: a list of `order structures <https://docs.ccxt.com/#/?id=order-structure>`
"""
return await self.fetch_orders_by_status('closed', symbol, since, limit, params)
async def fetch_order(self, id=None, symbol: Optional[str] = None, params={}):
"""
fetches information on an order made by the user
see https://futures-docs.poloniex.com/#get-details-of-a-single-order
see https://futures-docs.poloniex.com/#get-single-order-by-clientoid
:param str symbol: unified symbol of the market the order was made in
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:returns dict: An `order structure <https://docs.ccxt.com/#/?id=order-structure>`
"""
await self.load_markets()
request = {}
method = 'privateGetOrdersOrderId'
if id is None:
clientOrderId = self.safe_string_2(params, 'clientOid', 'clientOrderId')
if clientOrderId is None:
raise InvalidOrder(self.id + ' fetchOrder() requires parameter id or params.clientOid')
request['clientOid'] = clientOrderId
method = 'privateGetOrdersByClientOid'
params = self.omit(params, ['clientOid', 'clientOrderId'])
else:
request['order-id'] = id
response = await getattr(self, method)(self.extend(request, params))
#
# {
# "code": "200000",
# "data": {
# "symbol": "ADAUSDTPERP",
# "leverage": "1",
# "hidden": False,
# "forceHold": False,
# "closeOrder": False,
# "type": "market",
# "isActive": False,
# "createdAt": 1678929587000,
# "orderTime": 1678929587248115582,
# "iceberg": False,
# "stopTriggered": False,
# "id": "64126eb38c6919000737dcdc",
# "value": "3.1783",
# "timeInForce": "GTC",
# "updatedAt": 1678929587000,
# "side": "buy",
# "stopPriceType": "",
# "dealValue": "3.1783",
# "dealSize": 1,
# "settleCurrency": "USDT",
# "trades": [
# {
# "feePay": "0.00158915",
# "tradeId": "64126eb36803eb0001ff99bc"
# }
# ],
# "endAt": 1678929587000,
# "stp": "",
# "filledValue": "3.1783",
# "postOnly": False,
# "size": 1,
# "stop": "",
# "filledSize": 1,
# "reduceOnly": False,
# "marginType": 1,
# "cancelExist": False,
# "clientOid": "d19e8fcb-2df4-44bc-afd4-67dd42048246",
# "status": "done"
# }
# }
#
market = self.market(symbol) if (symbol is not None) else None
responseData = self.safe_value(response, 'data')
return self.parse_order(responseData, market)
def parse_order(self, order, market=None):
#
# createOrder
#
# {
# code: "200000",
# data: {
# orderId: "619717484f1d010001510cde",
# },
# }
#
# fetchOrder
#
# {
# "symbol": "ADAUSDTPERP",
# "leverage": "1",
# "hidden": False,
# "forceHold": False,
# "closeOrder": False,
# "type": "market",
# "isActive": False,
# "createdAt": 1678929587000,
# "orderTime": 1678929587248115582,
# "iceberg": False,
# "stopTriggered": False,
# "id": "64126eb38c6919000737dcdc",
# "value": "3.1783",
# "timeInForce": "GTC",
# "updatedAt": 1678929587000,
# "side": "buy",
# "stopPriceType": "",
# "dealValue": "3.1783",
# "dealSize": 1,
# "settleCurrency": "USDT",
# "trades": [
# {
# "feePay": "0.00158915",
# "tradeId": "64126eb36803eb0001ff99bc"
# }
# ],
# "endAt": 1678929587000,
# "stp": "",
# "filledValue": "3.1783",
# "postOnly": False,
# "size": 1,
# "stop": "",
# "filledSize": 1,
# "reduceOnly": False,
# "marginType": 1,
# "cancelExist": False,
# "clientOid": "d19e8fcb-2df4-44bc-afd4-67dd42048246",
# "status": "done"
# }
#
# cancelOrder
#
# {
# cancelledOrderIds: [
# "619714b8b6353000014c505a",
# ],
# cancelFailedOrders: [
# {
# orderId: "63a9c5c2b9e7d70007eb0cd5",
# orderState: "2"
# }
# ],
# },
#
marketId = self.safe_string(order, 'symbol')
market = self.safe_market(marketId, market)
timestamp = self.safe_integer(order, 'createdAt')
        # price is zero for market orders
        # omitZero is applied inside safe_order
feeCurrencyId = self.safe_string(order, 'feeCurrency')
filled = self.safe_string(order, 'dealSize')
rawCost = self.safe_string_2(order, 'dealFunds', 'filledValue')
average = None
if Precise.string_gt(filled, '0'):
contractSize = self.safe_string(market, 'contractSize')
if market['linear']:
average = Precise.string_div(rawCost, Precise.string_mul(contractSize, filled))
else:
average = Precise.string_div(Precise.string_mul(contractSize, filled), rawCost)
# precision reported by their api is 8 d.p.
# average = Precise.string_div(rawCost, Precise.string_mul(filled, market['contractSize']))
# bool
isActive = self.safe_value(order, 'isActive', False)
cancelExist = self.safe_value(order, 'cancelExist', False)
status = 'open' if isActive else 'closed'
id = self.safe_string(order, 'id')
if 'cancelledOrderIds' in order:
cancelledOrderIds = self.safe_value(order, 'cancelledOrderIds')
id = self.safe_string(cancelledOrderIds, 0)
return self.safe_order({
'info': order,
'id': id,
'clientOrderId': self.safe_string(order, 'clientOid'),
'symbol': self.safe_string(market, 'symbol'),
'type': self.safe_string(order, 'type'),
'timeInForce': self.safe_string(order, 'timeInForce'),
'postOnly': self.safe_value(order, 'postOnly'),
'side': self.safe_string(order, 'side'),
'amount': self.safe_string(order, 'size'),
'price': self.safe_string(order, 'price'),
'stopPrice': self.safe_string(order, 'stopPrice'),
'cost': self.safe_string(order, 'dealValue'),
'filled': filled,
'remaining': None,
'timestamp': timestamp,
'datetime': self.iso8601(timestamp),
'fee': {
'currency': self.safe_currency_code(feeCurrencyId),
'cost': self.safe_string(order, 'fee'),
},
'status': 'canceled' if cancelExist else status,
'lastTradeTimestamp': None,
'average': average,
'trades': None,
}, market)
async def fetch_funding_rate(self, symbol: str, params={}):
"""
fetch the current funding rate
see https://futures-docs.poloniex.com/#get-premium-index
:param str symbol: unified market symbol
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:returns dict: a `funding rate structure <https://docs.ccxt.com/#/?id=funding-rate-structure>`
"""
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
}
response = await self.publicGetFundingRateSymbolCurrent(self.extend(request, params))
#
# {
# "symbol": ".BTCUSDTPERPFPI8H",
# "granularity": 28800000,
# "timePoint": 1558000800000,
# "value": 0.00375,
# "predictedValue": 0.00375
# }
#
data = self.safe_value(response, 'data')
fundingTimestamp = self.safe_integer(data, 'timePoint')
        # the website displays the previous funding rate as "funding rate"
return {
'info': data,
'symbol': market['symbol'],
'markPrice': None,
'indexPrice': None,
'interestRate': None,
'estimatedSettlePrice': None,
'timestamp': None,
'datetime': None,
'fundingRate': self.safe_number(data, 'predictedValue'),
'fundingTimestamp': None,
'fundingDatetime': None,
'nextFundingRate': None,
'nextFundingTimestamp': None,
'nextFundingDatetime': None,
'previousFundingRate': self.safe_number(data, 'value'),
'previousFundingTimestamp': fundingTimestamp,
'previousFundingDatetime': self.iso8601(fundingTimestamp),
}
async def fetch_my_trades(self, symbol: Optional[str] = None, since: Optional[int] = None, limit: Optional[int] = None, params={}):
"""
fetch all trades made by the user
see https://futures-docs.poloniex.com/#get-fills
:param str symbol: unified market symbol
:param int [since]: the earliest time in ms to fetch trades for
:param int [limit]: the maximum number of trades structures to retrieve
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
        :param str orderIdFills: fills for a specific order(other parameters can be ignored if specified)
        :param str side: buy or sell
        :param str type: limit, market, limit_stop or market_stop
        :param int endAt: end time(millisecond)
:returns Trade[]: a list of `trade structures <https://docs.ccxt.com/#/?id=trade-structure>`
"""
await self.load_markets()
        request = {}
market = None
if symbol is not None:
market = self.market(symbol)
request['symbol'] = market['id']
if since is not None:
request['startAt'] = since
response = await self.privateGetFills(self.extend(request, params))
#
# {
# "code": "200000",
# "data": {
# "currentPage":1,
# "pageSize":1,
# "totalNum":251915,
# "totalPage":251915,
# "items":[
# {
# "symbol": "BTCUSDTPERP", #Ticker symbol of the contract
# "tradeId": "5ce24c1f0c19fc3c58edc47c", #Trade ID
# "orderId": "5ce24c16b210233c36ee321d", # Order ID
# "side": "sell", #Transaction side
# "liquidity": "taker", #Liquidity- taker or maker
# "price": "8302", #Filled price
# "size": 10, #Filled amount
# "value": "0.001204529", #Order value
# "feeRate": "0.0005", #Floating fees
# "fixFee": "0.00000006", #Fixed fees
# "feeCurrency": "XBT", #Charging currency
# "stop": "", #A mark to the stop order type
# "fee": "0.0000012022", #Transaction fee
# "orderType": "limit", #Order type
# "tradeType": "trade", #Trade type(trade, liquidation, ADL or settlement)
# "createdAt": 1558334496000, #Time the order created
# "settleCurrency": "XBT", #settlement currency
# "tradeTime": 1558334496000000000 #trade time in nanosecond
# }
# ]
# }
# }
#
data = self.safe_value(response, 'data', {})
        trades = self.safe_value(data, 'items', [])  # default to an empty list of trades
return self.parse_trades(trades, market, since, limit)
async def set_margin_mode(self, marginMode, symbol, params={}):
"""
set margin mode to 'cross' or 'isolated'
see https://futures-docs.poloniex.com/#change-margin-mode
:param int marginMode: 0(isolated) or 1(cross)
:param str symbol: unified market symbol
:param dict [params]: extra parameters specific to the poloniexfutures api endpoint
:returns dict: response from the exchange
"""
if symbol is None:
raise ArgumentsRequired(self.id + ' setMarginMode() requires a symbol argument')
if (marginMode != 0) and (marginMode != 1):
raise ArgumentsRequired(self.id + ' setMarginMode() marginMode must be 0(isolated) or 1(cross)')
await self.load_markets()
market = self.market(symbol)
request = {
'symbol': market['id'],
'marginType': marginMode,
}
return await self.privatePostMarginTypeChange(request)
def sign(self, path, api='public', method='GET', params={}, headers=None, body=None):
url = self.urls['api'][api]
versions = self.safe_value(self.options, 'versions', {})
apiVersions = self.safe_value(versions, api, {})
methodVersions = self.safe_value(apiVersions, method, {})
defaultVersion = self.safe_string(methodVersions, path, self.version)
version = self.safe_string(params, 'version', defaultVersion)
tail = '/api/' + version + '/' + self.implode_params(path, params)
url += tail
query = self.omit(params, path)
        queryLength = query  # the query dict itself; used below only as a non-empty check
if api == 'public':
if queryLength:
url += '?' + self.urlencode(query)
else:
self.check_required_credentials()
endpoint = '/api/v1/' + self.implode_params(path, params)
bodyEncoded = self.urlencode(query)
if method != 'GET' and method != 'HEAD':
body = query
else:
if queryLength and bodyEncoded != '':
url += '?' + bodyEncoded
endpoint += '?' + bodyEncoded
now = str(self.milliseconds())
endpart = ''
if body is not None:
body = self.json(query)
endpart = body
payload = now + method + endpoint + endpart
signature = self.hmac(self.encode(payload), self.encode(self.secret), hashlib.sha256, 'base64')
headers = {
'PF-API-SIGN': signature,
'PF-API-TIMESTAMP': now,
'PF-API-KEY': self.apiKey,
'PF-API-PASSPHRASE': self.password,
}
headers['Content-Type'] = 'application/json'
return {'url': url, 'method': method, 'body': body, 'headers': headers}
def handle_errors(self, code, reason, url, method, headers, body, response, requestHeaders, requestBody):
if not response:
self.throw_broadly_matched_exception(self.exceptions['broad'], body, body)
return None
#
# bad
# {"code": "400100", "msg": "validation.createOrder.clientOidIsRequired"}
# good
# {code: '200000', data: {...}}
#
errorCode = self.safe_string(response, 'code')
message = self.safe_string(response, 'msg', '')
feedback = self.id + ' ' + message
self.throw_exactly_matched_exception(self.exceptions['exact'], message, feedback)
self.throw_exactly_matched_exception(self.exceptions['exact'], errorCode, feedback)
self.throw_broadly_matched_exception(self.exceptions['broad'], body, feedback)
return None
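# --- Added illustration (not part of the original exchange class) ---------
# A minimal standalone sketch of the signing scheme that sign() implements
# above: payload = timestamp + HTTP method + endpoint + body, HMAC-SHA256
# keyed with the API secret, then base64-encoded. The credential and
# timestamp values below are placeholders.
import base64
import hmac as _hmac
def pf_signature(secret, timestamp_ms, method, endpoint, body=''):
    payload = timestamp_ms + method + endpoint + body
    digest = _hmac.new(secret.encode(), payload.encode(), hashlib.sha256).digest()
    return base64.b64encode(digest).decode()
# e.g. pf_signature('MY_SECRET', '1700000000000', 'GET', '/api/v1/fills')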
| [
"[email protected]"
] | |
aaf71114a0ea633a8470abae97853ce02b109f69 | 365c85a280596d88082c1f150436453f96e18c15 | /Python/Interview/电梯与爬楼.py | 40bfb9e1b5e9d4195c9faf6bda94d32585345960 | [] | no_license | Crisescode/leetcode | 0177c1ebd47b0a63476706562bcf898f35f1c4f2 | c3a60010e016995f06ad4145e174ae19668e15af | refs/heads/master | 2023-06-01T06:29:41.992368 | 2023-05-16T12:32:10 | 2023-05-16T12:32:10 | 243,040,322 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 489 | py | import sys
class Solution:
    def find_floor(self, floor, nums):
        # Total walking distance if the elevator stops at each candidate
        # floor; keep the floor with the minimal total.
        res = [0 for i in range(floor)]
        min_res, min_index = sys.maxsize, 0
        for i in range(floor):
            for j in range(len(nums)):
                res[i] += abs(i - nums[j])
            # compare only after the full total for floor i is accumulated
            if min_res > res[i]:
                min_res = res[i]
                min_index = i
        print(min_res)
        print(min_index)
if __name__ == "__main__":
Solution().find_floor(10, [1, 3, 8, 10, 9])
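# --- Added illustration (not part of the original file) -------------------
# A known shortcut for this problem: sum(|x - n| for n in nums) is minimized
# at a median of nums, so sorting gives the answer without scanning every
# floor (assuming the median is an allowed floor).
def find_floor_median(nums):
    floor = sorted(nums)[len(nums) // 2]
    return floor, sum(abs(floor - n) for n in nums)
# find_floor_median([1, 3, 8, 10, 9]) -> (8, 15), matching the scan above.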
| [
"[email protected]"
] | |
56087d81dafd62bf5c993992aa00023609074dce | acc7137e34fdc950fbb2593b2c4b0355c975faa3 | /diffy_api/schemas.py | 4efb43587c24a360186b0e9ac3a5e8e8c37271bb | [
"Apache-2.0"
] | permissive | kevgliss/diffy | ba8b01b2c0daa81faa39d69e1380eea16b84d732 | 681d5edd4a72e47a924d4b1b1136d40efa52b631 | refs/heads/master | 2020-03-14T20:10:21.797782 | 2018-05-01T22:09:16 | 2018-05-01T22:09:16 | 131,772,394 | 0 | 0 | null | 2018-05-01T23:10:15 | 2018-05-01T23:10:14 | null | UTF-8 | Python | false | false | 1,166 | py | """
.. module: diffy.schemas
:platform: Unix
:copyright: (c) 2018 by Netflix Inc., see AUTHORS for more
:license: Apache, see LICENSE for more details.
.. moduleauthor:: Kevin Glisson <[email protected]>
"""
from marshmallow import fields
from diffy.schema import (
TargetPluginSchema,
PersistencePluginSchema,
CollectionPluginSchema,
PayloadPluginSchema,
AnalysisPluginSchema
)
from diffy_api.common.schema import DiffyInputSchema
class BaselineSchema(DiffyInputSchema):
target_key = fields.String(required=True)
incident_id = fields.String(required=True)
target_plugin = fields.Nested(TargetPluginSchema, missing={})
persistence_plugin = fields.Nested(PersistencePluginSchema, missing={})
collection_plugin = fields.Nested(CollectionPluginSchema, missing={})
payload_plugin = fields.Nested(PayloadPluginSchema, missing={})
class AnalysisSchema(BaselineSchema):
analysis_plugin = fields.Nested(AnalysisPluginSchema, missing={})
baseline_input_schema = BaselineSchema()
baseline_output_schema = BaselineSchema()
analysis_input_schema = AnalysisSchema()
analysis_output_schema = AnalysisSchema()
| [
"[email protected]"
] | |
697bac709aa09a2bdbb3d97f1417cfb4bbcc306d | f4b011992dd468290d319d078cbae4c015d18338 | /Array/Container_With_most_Water.py | 1c1adf9cf2aa2c41e70c6b93616bcd5d35dbfaf5 | [] | no_license | Neeraj-kaushik/Geeksforgeeks | deca074ca3b37dcb32c0136b96f67beb049f9592 | c56de368db5a6613d59d9534de749a70b9530f4c | refs/heads/master | 2023-08-06T05:00:43.469480 | 2021-10-07T13:37:33 | 2021-10-07T13:37:33 | 363,420,292 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 309 | py | def container(li):
    # Brute force over every wall pair: area = width * shorter wall height.
    best = 0  # renamed from `max` to avoid shadowing the builtin
    for i in range(len(li)-1):
        for j in range(i+1, len(li)):
            area = (j - i) * min(li[i], li[j])
            if best < area:
                best = area
    print(best)
n = int(input())  # element count; read to consume the first input line
li = [int(x) for x in input().split()]
container(li)
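# --- Added illustration (not part of the original file) -------------------
# The standard two-pointer technique solves the same problem in O(n): start
# from the outer walls and always move the shorter wall inward, since it is
# the one limiting the current area.
def container_two_pointer(li):
    i, j, best = 0, len(li) - 1, 0
    while i < j:
        best = max(best, (j - i) * min(li[i], li[j]))
        if li[i] <= li[j]:
            i += 1
        else:
            j -= 1
    return best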
| [
"[email protected]"
] | |
f78f2203b461289608061f69e6fbe921db6e52b3 | 8afb5afd38548c631f6f9536846039ef6cb297b9 | /GIT-USERS/TOM-Lambda/CSEU2-Graphs-gp/src/challenge2/isl.py | ac24bdb92ca489c6b326e7896af3ef9e3e78d855 | [
"MIT"
] | permissive | bgoonz/UsefulResourceRepo2.0 | d87588ffd668bb498f7787b896cc7b20d83ce0ad | 2cb4b45dd14a230aa0e800042e893f8dfb23beda | refs/heads/master | 2023-03-17T01:22:05.254751 | 2022-08-11T03:18:22 | 2022-08-11T03:18:22 | 382,628,698 | 10 | 12 | MIT | 2022-10-10T14:13:54 | 2021-07-03T13:58:52 | null | UTF-8 | Python | false | false | 2,689 | py | from util import Queue
# island count problem
def island_counter(arr):
    rows = len(arr)
    cols = len(arr[0])
    count = 0
    for i in range(rows):
        for j in range(cols):
            if arr[i][j] == 1:
                deleteOnes(arr, i, j, rows, cols)
                count += 1
    return count
def deleteOnes(grid, i, j, rows, cols):
    # BFS flood fill: zero out every cell of the island containing (i, j)
    q = Queue()
    q.enqueue([i, j])
    grid[i][j] = 0
    while q.size() > 0:
        node = q.dequeue()
        row = node[0]
        col = node[1]
        for row2, col2 in (
            (row + 1, col),
            (row - 1, col),
            (row, col + 1),
            (row, col - 1),
        ):
            if 0 <= row2 < rows and 0 <= col2 < cols and grid[row2][col2] != 0:
                grid[row2][col2] = 0
                q.enqueue([row2, col2])
islands = [
    [0, 1, 0, 1, 0],
    [1, 1, 0, 1, 1],
    [0, 0, 1, 0, 0],
    [1, 0, 1, 0, 0],
    [1, 1, 0, 0, 0],
]
island_counter(islands)  # 4
islands = [
    [1, 0, 0, 1, 1, 0, 1, 1, 0, 1],
    [0, 0, 1, 1, 0, 1, 0, 0, 0, 0],
    [0, 1, 1, 1, 0, 0, 0, 1, 0, 1],
    [0, 0, 1, 0, 0, 1, 0, 0, 1, 1],
    [0, 0, 1, 1, 0, 1, 0, 1, 1, 0],
    [0, 1, 0, 1, 1, 1, 0, 1, 0, 0],
    [0, 0, 1, 0, 0, 1, 1, 0, 0, 0],
    [1, 0, 1, 1, 0, 0, 0, 1, 1, 0],
    [0, 1, 1, 0, 0, 0, 1, 1, 0, 0],
    [0, 0, 1, 1, 0, 1, 0, 0, 1, 0],
]
island_counter(islands)  # 13
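# --- Added illustration (not part of the original file) -------------------
# The same count via recursive DFS flood fill, for contrast with the BFS
# above (fine for small grids; deep recursion can overflow on large ones).
def island_counter_dfs(grid):
    rows, cols = len(grid), len(grid[0])
    def sink(r, c):
        if 0 <= r < rows and 0 <= c < cols and grid[r][c] == 1:
            grid[r][c] = 0
            for r2, c2 in ((r + 1, c), (r - 1, c), (r, c + 1), (r, c - 1)):
                sink(r2, c2)
    count = 0
    for r in range(rows):
        for c in range(cols):
            if grid[r][c] == 1:
                sink(r, c)
                count += 1
    return count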
| [
"[email protected]"
] | |
97b2ab61542ae094603a2691a04ef0fffc95cf21 | 8a25ada37271acd5ea96d4a4e4e57f81bec221ac | /home/pi/GrovePi/Software/Python/others/temboo/Library/eBay/Trading/EndItem.py | 87aa0de2ea3a105cb636525193a819e0e0d694df | [
"MIT",
"Apache-2.0"
] | permissive | lupyuen/RaspberryPiImage | 65cebead6a480c772ed7f0c4d0d4e08572860f08 | 664e8a74b4628d710feab5582ef59b344b9ffddd | refs/heads/master | 2021-01-20T02:12:27.897902 | 2016-11-17T17:32:30 | 2016-11-17T17:32:30 | 42,438,362 | 7 | 8 | null | null | null | null | UTF-8 | Python | false | false | 4,780 | py | # -*- coding: utf-8 -*-
###############################################################################
#
# EndItem
# Ends the specified item listing before the date and time that it is scheduled to end per the listing duration.
#
# Python versions 2.6, 2.7, 3.x
#
# Copyright 2014, Temboo Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
#
#
###############################################################################
from temboo.core.choreography import Choreography
from temboo.core.choreography import InputSet
from temboo.core.choreography import ResultSet
from temboo.core.choreography import ChoreographyExecution
import json
class EndItem(Choreography):
def __init__(self, temboo_session):
"""
Create a new instance of the EndItem Choreo. A TembooSession object, containing a valid
set of Temboo credentials, must be supplied.
"""
super(EndItem, self).__init__(temboo_session, '/Library/eBay/Trading/EndItem')
def new_input_set(self):
return EndItemInputSet()
def _make_result_set(self, result, path):
return EndItemResultSet(result, path)
def _make_execution(self, session, exec_id, path):
return EndItemChoreographyExecution(session, exec_id, path)
class EndItemInputSet(InputSet):
"""
An InputSet with methods appropriate for specifying the inputs to the EndItem
Choreo. The InputSet object is used to specify input parameters when executing this Choreo.
"""
def set_EndingReason(self, value):
"""
Set the value of the EndingReason input for this Choreo. ((required, string) The reason the listing is ending early. Valid values are: LostOrBroken, NotAvailable, Incorrect, OtherListingError, CustomCode, SellToHighBidder, or Sold.)
"""
super(EndItemInputSet, self)._set_input('EndingReason', value)
def set_ItemID(self, value):
"""
Set the value of the ItemID input for this Choreo. ((required, string) The ID of the item to end.)
"""
super(EndItemInputSet, self)._set_input('ItemID', value)
def set_ResponseFormat(self, value):
"""
Set the value of the ResponseFormat input for this Choreo. ((optional, string) The format that the response should be in. Valid values are: json (the default) and xml.)
"""
super(EndItemInputSet, self)._set_input('ResponseFormat', value)
def set_SandboxMode(self, value):
"""
Set the value of the SandboxMode input for this Choreo. ((conditional, boolean) Indicates that the request should be made to the sandbox endpoint instead of the production endpoint. Set to 1 to enable sandbox mode.)
"""
super(EndItemInputSet, self)._set_input('SandboxMode', value)
def set_SellerInventoryID(self, value):
"""
        Set the value of the SellerInventoryID input for this Choreo. ((optional, string) Unique identifier that the seller specified when they listed the Half.com item. This parameter only applies to Half.com.)
"""
super(EndItemInputSet, self)._set_input('SellerInventoryID', value)
def set_SiteID(self, value):
"""
Set the value of the SiteID input for this Choreo. ((optional, string) The eBay site ID that you want to access. Defaults to 0 indicating the US site.)
"""
super(EndItemInputSet, self)._set_input('SiteID', value)
def set_UserToken(self, value):
"""
Set the value of the UserToken input for this Choreo. ((required, string) A valid eBay Auth Token.)
"""
super(EndItemInputSet, self)._set_input('UserToken', value)
class EndItemResultSet(ResultSet):
"""
A ResultSet with methods tailored to the values returned by the EndItem Choreo.
The ResultSet object is used to retrieve the results of a Choreo execution.
"""
def getJSONFromString(self, str):
return json.loads(str)
def get_Response(self):
"""
Retrieve the value for the "Response" output from this Choreo execution. (The response from eBay.)
"""
return self._output.get('Response', None)
class EndItemChoreographyExecution(ChoreographyExecution):
def _make_result_set(self, response, path):
return EndItemResultSet(response, path)
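# --- Added illustration (not part of the original file) -------------------
# Rough usage sketch following the usual Temboo SDK pattern; the session
# credentials and input values below are placeholders:
#
#     from temboo.core.session import TembooSession
#     session = TembooSession('ACCOUNT', 'APP_NAME', 'APP_KEY')
#     choreo = EndItem(session)
#     inputs = choreo.new_input_set()
#     inputs.set_UserToken('EBAY_AUTH_TOKEN')
#     inputs.set_ItemID('110043671232')
#     inputs.set_EndingReason('NotAvailable')
#     results = choreo.execute_with_results(inputs)
#     print(results.get_Response())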
| [
"[email protected]"
] | |
65261f757b8d466feca4479d5346beec5a78e31b | e6d862a9df10dccfa88856cf16951de8e0eeff2b | /Core/worker/python-aiohttp/api_server/models/worker_performance_event_duty_details.py | f29762066c70ea15c67959153241105705c85d5b | [] | no_license | AllocateSoftware/API-Stubs | c3de123626f831b2bd37aba25050c01746f5e560 | f19d153f8e9a37c7fb1474a63c92f67fc6c8bdf0 | refs/heads/master | 2022-06-01T07:26:53.264948 | 2020-01-09T13:44:41 | 2020-01-09T13:44:41 | 232,816,845 | 0 | 0 | null | 2022-05-20T21:23:09 | 2020-01-09T13:34:35 | C# | UTF-8 | Python | false | false | 3,121 | py | # coding: utf-8
from datetime import date, datetime
from typing import List, Dict, Type
from api_server.models.base_model_ import Model
from api_server import util
class WorkerPerformanceEventDutyDetails(Model):
"""NOTE: This class is auto generated by OpenAPI Generator (https://openapi-generator.tech).
Do not edit the class manually.
"""
def __init__(self, duty_id: str=None, _from: datetime=None, to: datetime=None):
"""WorkerPerformanceEventDutyDetails - a model defined in OpenAPI
:param duty_id: The duty_id of this WorkerPerformanceEventDutyDetails.
:param _from: The _from of this WorkerPerformanceEventDutyDetails.
:param to: The to of this WorkerPerformanceEventDutyDetails.
"""
self.openapi_types = {
'duty_id': str,
'_from': datetime,
'to': datetime
}
self.attribute_map = {
'duty_id': 'dutyId',
'_from': 'from',
'to': 'to'
}
self._duty_id = duty_id
self.__from = _from
self._to = to
@classmethod
def from_dict(cls, dikt: dict) -> 'WorkerPerformanceEventDutyDetails':
"""Returns the dict as a model
:param dikt: A dict.
:return: The WorkerPerformanceEvent_dutyDetails of this WorkerPerformanceEventDutyDetails.
"""
return util.deserialize_model(dikt, cls)
@property
def duty_id(self):
"""Gets the duty_id of this WorkerPerformanceEventDutyDetails.
ID of the duty within the allocate system
:return: The duty_id of this WorkerPerformanceEventDutyDetails.
:rtype: str
"""
return self._duty_id
@duty_id.setter
def duty_id(self, duty_id):
"""Sets the duty_id of this WorkerPerformanceEventDutyDetails.
ID of the duty within the allocate system
:param duty_id: The duty_id of this WorkerPerformanceEventDutyDetails.
:type duty_id: str
"""
self._duty_id = duty_id
@property
def _from(self):
"""Gets the _from of this WorkerPerformanceEventDutyDetails.
When the duty started
:return: The _from of this WorkerPerformanceEventDutyDetails.
:rtype: datetime
"""
return self.__from
@_from.setter
def _from(self, _from):
"""Sets the _from of this WorkerPerformanceEventDutyDetails.
When the duty started
:param _from: The _from of this WorkerPerformanceEventDutyDetails.
:type _from: datetime
"""
self.__from = _from
@property
def to(self):
"""Gets the to of this WorkerPerformanceEventDutyDetails.
When the duty ended
:return: The to of this WorkerPerformanceEventDutyDetails.
:rtype: datetime
"""
return self._to
@to.setter
def to(self, to):
"""Sets the to of this WorkerPerformanceEventDutyDetails.
When the duty ended
:param to: The to of this WorkerPerformanceEventDutyDetails.
:type to: datetime
"""
self._to = to
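# --- Added illustration (not part of the original file) -------------------
# Example of deserializing from the wire format; keys follow attribute_map,
# and the values here are made up:
#
#     details = WorkerPerformanceEventDutyDetails.from_dict({
#         'dutyId': 'D-123',
#         'from': '2020-01-01T08:00:00Z',
#         'to': '2020-01-01T16:00:00Z',
#     })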
| [
"[email protected]"
] | |
0f79ee95751b41818a702247f7f40d6f88f71c80 | 7950c4faf15ec1dc217391d839ddc21efd174ede | /leetcode-cn/1220.0_Count_Vowels_Permutation.py | 01e3682d75f3280c9a127e70607bc6363f6959ee | [] | no_license | lixiang2017/leetcode | f462ecd269c7157aa4f5854f8c1da97ca5375e39 | f93380721b8383817fe2b0d728deca1321c9ef45 | refs/heads/master | 2023-08-25T02:56:58.918792 | 2023-08-22T16:43:36 | 2023-08-22T16:43:36 | 153,090,613 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 709 | py | '''
Runtime: 120 ms, faster than 63.64% of Python3 submissions
Memory: 15.1 MB, less than 59.09% of Python3 submissions
Test cases passed: 43 / 43
'''
class Solution:
def countVowelPermutation(self, n: int) -> int:
'''
a -> e
e -> a/i
i -> a/e/o/u
o -> i/u
u -> a
'''
MOD = 10 ** 9 + 7
a1 = e1 = i1 = o1 = u1 = 1
a2 = e2 = i2 = o2 = u2 = 0
for i in range(n - 1):
a2, e2, i2, o2, u2 = e1 + i1 + u1, a1 + i1, e1 + o1, i1, i1 + o1
a1, e1, i1, o1, u1 = a2 % MOD, e2 % MOD, i2 % MOD, o2 % MOD, u2 % MOD
return (a1 + e1 + i1 + o1 + u1) % MOD
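# --- Added illustration (not part of the original file) -------------------
# Known answers from the problem's examples:
#     Solution().countVowelPermutation(1) == 5
#     Solution().countVowelPermutation(2) == 10
#     Solution().countVowelPermutation(5) == 68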
| [
"[email protected]"
] | |
f19f605ddf8db8b480c00c74ed23b523b12ed70d | 319d3dfc79d6249bf6d6dab1c51a7d5d0af3c860 | /tests/test_line_set_data.py | 00aa7bc5bec56fa3340c23db28ff5477fc06cb6e | [
"MIT"
] | permissive | jameshensman/matplotlib2tikz | 1d365b6a9e91453492a17ec28c5eb74f2279e26e | 450712b4014799ec5f151f234df84335c90f4b9d | refs/heads/master | 2023-01-24T07:59:53.641565 | 2022-02-28T11:27:51 | 2022-02-28T11:27:51 | 169,421,478 | 1 | 0 | MIT | 2019-02-06T14:47:09 | 2019-02-06T14:47:08 | null | UTF-8 | Python | false | false | 320 | py | # from <https://github.com/nschloe/tikzplotlib/issues/339>
import matplotlib.pyplot as plt
def plot():
fig = plt.figure()
line = plt.plot(0, 0, "kx")[0]
line.set_data(0, 0)
return fig
def test():
from .helpers import assert_equality
assert_equality(plot, "test_line_set_data_reference.tex")
| [
"[email protected]"
] | |
b56d1a4f34a8e9cc9ae7192fc5b710a3e1a0ee47 | c3523080a63c7e131d8b6e0994f82a3b9ed901ce | /django/hello_world/hello_world_project/my_app/views.py | 93892aad6dc8f05c979061a01916dbfbfe83c670 | [] | no_license | johnlawrenceaspden/hobby-code | 2c77ffdc796e9fe863ae66e84d1e14851bf33d37 | d411d21aa19fa889add9f32454915d9b68a61c03 | refs/heads/master | 2023-08-25T08:41:18.130545 | 2023-08-06T12:27:29 | 2023-08-06T12:27:29 | 377,510 | 6 | 4 | null | 2023-02-22T00:57:49 | 2009-11-18T19:57:01 | Clojure | UTF-8 | Python | false | false | 655 | py | from django.shortcuts import render
# Create your views here.
from django.http import HttpResponse
import datetime
def index(request):
return HttpResponse(
"Hello, World!"
"<a href='secret'>secrets</a>"
"<a href='geeks_view'>geeks_view</a>"
"<a href='template'>template</a>"
)
def secret(request):
return HttpResponse("Secrets!")
def geeks_view(request):
now = datetime.datetime.now()
html = "Hello, World<br/> time is {} <br/> ".format(now)
return HttpResponse(html)
def template_view(request):
return render(request, "template_view.html")
# <a href="{% url 'secret' %}">secrets</a>
| [
"[email protected]"
] | |
7b1190e83ad63f84b348c940915312eff30c3b58 | 444a9480bce2035565332d4d4654244c0b5cd47b | /research/nlp/gpt2/src/utils/metric_method.py | 721d3f0619b1251f5d85a89639ac74cfe067a333 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-proprietary-license"
] | permissive | mindspore-ai/models | 7ede9c6454e77e995e674628204e1c6e76bd7b27 | eab643f51336dbf7d711f02d27e6516e5affee59 | refs/heads/master | 2023-07-20T01:49:34.614616 | 2023-07-17T11:43:18 | 2023-07-17T11:43:18 | 417,393,380 | 301 | 92 | Apache-2.0 | 2023-05-17T11:22:28 | 2021-10-15T06:38:37 | Python | UTF-8 | Python | false | false | 6,095 | py | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""metric method for downstream task"""
import string
import re
from collections import Counter
import numpy as np
from .rouge_score import get_rouge_score
from .bleu import compute_bleu
class LastWordAccuracy():
"""
LastWordAccuracy class is for lambada task (predict the final word of sentence)
"""
def __init__(self):
self.acc_num = 0
self.total_num = 0
def normalize(self, word):
"""normalization"""
word = word.lstrip()
word = word.rstrip()
def remove_punc(text):
exclude = set(string.punctuation)
return ''.join(ch for ch in text if ch not in exclude)
def lower(text):
return text.lower()
return remove_punc(lower(word))
def update(self, predict_label, gold_label):
if isinstance(predict_label, str) and isinstance(gold_label, str):
predict_label = [predict_label]
gold_label = [gold_label]
for predict_word, gold_word in zip(predict_label, gold_label):
self.total_num += 1
if self.normalize(predict_word) == self.normalize(gold_word):
self.acc_num += 1
class Accuracy():
"""
calculate accuracy
"""
def __init__(self):
self.acc_num = 0
self.total_num = 0
def update(self, logits, labels):
"""accuracy update"""
labels = np.reshape(labels, -1)
logits_id = np.argmax(logits, axis=-1)
print(" | Preict Label: {} Gold Label: {}".format(logits_id, labels))
self.acc_num += np.sum(labels == logits_id)
self.total_num += len(labels)
print("\n| Accuracy = {} \n".format(self.acc_num / self.total_num))
class F1():
"""calculate F1 score"""
def __init__(self):
self.f1_score = 0.0
def get_normalize_answer_token(self, string_):
"""Lower text and remove punctuation, article and extra whitespace."""
def remove_articles(text):
regex = re.compile(r'\b(a|an|the)\b', re.UNICODE)
return re.sub(regex, ' ', text)
def white_space_fix(text):
return ' '.join(text.split())
def remove_punc(text):
exclude = set(string.punctuation)
return ''.join(char for char in text if char not in exclude)
def lower(text):
return text.lower()
return white_space_fix(remove_articles(remove_punc(lower(string_)))).split()
def update(self, pred_answer, gold_answer):
"""F1 update"""
common = Counter(pred_answer) & Counter(gold_answer)
num_same = sum(common.values())
# the number of same tokens between pred_answer and gold_answer
precision = 1.0 * num_same / len(pred_answer) if pred_answer else 0
recall = 1.0 * num_same / len(gold_answer) if gold_answer else 0
if ' '.join(pred_answer).strip() == "" and ' '.join(gold_answer).strip() == "":
self.f1_score += 1
else:
self.f1_score += 2 * precision * recall / float(precision + recall) if (precision + recall) != 0 else 0.0
print('| precision: {}, recall: {}\n'.format(precision, recall))
class BLEU():
"""calculate BLEU score"""
def __init__(self, tokenizer=None, max_order=4, smooth=True):
self.bleu = 0.0
self.total_num = 0
self.tokenizer = tokenizer
self.max_order = max_order
self.smooth = smooth
def sum_bleu(self, references, translations, max_order, smooth):
"""calculate the sum of bleu score"""
all_result = []
bleu_avg = 0.0
for refer, trans in zip(references, translations):
result = compute_bleu([[refer]], [trans], max_order, smooth)
all_result.append(result)
bleu_avg += result[0]
bleu_avg /= len(references)
return bleu_avg, all_result
def update(self, hypotheses, references):
"""BLEU update"""
hypo_l = []
ref_l = []
if self.tokenizer is not None:
for hypo, ref in zip(hypotheses, references):
if ref.strip() == '':
print("Reference is None, skip it !")
continue
if hypo.strip() == '':
print("translation is None, skip it !")
continue
hypo_l.append(self.tokenizer.encode(hypo))
ref_l.append(self.tokenizer.encode(ref))
if hypo_l and ref_l:
hypotheses = hypo_l
references = ref_l
bleu_avg, _ = self.sum_bleu(references, hypotheses, self.max_order, self.smooth)
self.bleu += bleu_avg * 100
self.total_num += 1
print("============== BLEU: {} ==============".format(float(self.bleu / self.total_num)))
class Rouge():
'''
Get Rouge Score
'''
def __init__(self):
self.Rouge1 = 0.0
self.Rouge2 = 0.0
self.RougeL = 0.0
self.total_num = 0
def update(self, hypothesis, targets):
scores = get_rouge_score(hypothesis, targets)
self.Rouge1 += scores['rouge-1']['f'] * 100
self.Rouge2 += scores['rouge-2']['f'] * 100
self.RougeL += scores['rouge-l']['f'] * 100
self.total_num += 1
print("=============== ROUGE: {} ===============".format(
(self.Rouge1 + self.Rouge2 + self.RougeL) / float(3.0 * self.total_num)))
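# --- Added illustration (not part of the original module) -----------------
# Token-level F1 usage sketch (the module is imported rather than run
# directly, because of its relative imports); the answers below are made up:
#
#     f1 = F1()
#     pred = f1.get_normalize_answer_token('The cat sat')
#     gold = f1.get_normalize_answer_token('a cat sat down')
#     f1.update(pred, gold)   # precision 1.0, recall 2/3 -> adds an F1 of 0.8
#     print(f1.f1_score)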
| [
"[email protected]"
] | |
8c77d762715a190f6a21873d09291edc7d9199dd | 491c1e520a64e3ebd5349130f35047aaed1e70ec | /stack/monotonic_stack/739 dailyTemperatures.py | c8944e497eca5d5c03af42c94c50ed5f70a3e33b | [] | no_license | pangyouzhen/data-structure | 33a7bd7790c8db3e018114d85a137f5f3d6b92f8 | cd46cf08a580c418cc40a68bf9b32371fc69a803 | refs/heads/master | 2023-05-26T12:02:30.800301 | 2023-05-21T08:07:57 | 2023-05-21T08:07:57 | 189,315,047 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,414 | py | from typing import List
class Solution:
    # Brute-force solution
def dailyTemperatures_(self, temperatures: List[int]) -> List[int]:
if not temperatures:
return []
l = len(temperatures)
res = [0] * l
for i in range(l):
for j in range(i + 1, l):
if temperatures[j] > temperatures[i]:
print(f"{temperatures[j]} > {temperatures[i]}")
res[i] = j - i
break
return res
    # Monotonic stack use case: for each number, the first number to its
    # right that is larger (here we store the distance to it)
    def dailyTemperatures(self, temperatures: List[int]) -> List[int]:
        l = len(temperatures)
        ans = [0] * l
        stack = []
        for i in range(l):
            temperature = temperatures[i]
            while stack and temperature > temperatures[stack[-1]]:
                prev_index = stack.pop()
                ans[prev_index] = i - prev_index
            stack.append(i)
        return ans
    # Variant answering the original TODO ("what changes if we want the
    # value instead?"): store the next greater *value* itself, 0 when none.
    def dailyTemperatures_value(self, temperatures: List[int]) -> List[int]:
        ans = [0] * len(temperatures)
        stack = []
        for i, t in enumerate(temperatures):
            while stack and t > temperatures[stack[-1]]:
                ans[stack.pop()] = t
            stack.append(i)
        return ans
if __name__ == '__main__':
temperatures = [73, 74, 75, 71, 69, 72, 76, 73]
# temperatures = [30, 40, 50, 60]
# temperatures = [30, 60, 90]
func = Solution().dailyTemperatures
print(func(temperatures))
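    # Added for illustration: expected answers for the sample input above,
    # including the value variant completed from the original TODO.
    assert Solution().dailyTemperatures(temperatures) == [1, 1, 4, 2, 1, 1, 0, 0]
    assert Solution().dailyTemperatures_value(temperatures) == [74, 75, 76, 72, 72, 76, 0, 0]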
| [
"[email protected]"
] | |
301c10bb286366de50022142a49a5e4c3d4219c9 | c80b3cc6a8a144e9858f993c10a0e11e633cb348 | /plugins/gateway-acl/acl/api/group.py | a647f836086fc9940457cbd96486ebaae5fd5068 | [] | no_license | cristidomsa/Ally-Py | e08d80b67ea5b39b5504f4ac048108f23445f850 | e0b3466b34d31548996d57be4a9dac134d904380 | refs/heads/master | 2021-01-18T08:41:13.140590 | 2013-11-06T09:51:56 | 2013-11-06T09:51:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,462 | py | '''
Created on Aug 7, 2013
@package: gateway acl
@copyright: 2012 Sourcefabric o.p.s.
@license: http://www.gnu.org/licenses/gpl-3.0.txt
@author: Gabriel Nistor
API specifications for access group.
'''
from .domain_acl import modelACL
from acl.api.acl import IAclPrototype
from acl.api.compensate import ICompensatePrototype
from ally.api.config import service, query
from ally.api.criteria import AsBooleanOrdered
from ally.api.option import SliceAndTotal # @UnusedImport
from ally.support.api.entity_named import Entity, IEntityService, QEntity
# --------------------------------------------------------------------
@modelACL
class Group(Entity):
'''
Defines the group of ACL access.
Name - the group unique name.
IsAnonymous - if true it means that the group should be delivered for anonymous access.
Description - a description explaining the group.
'''
IsAnonymous = bool
Description = str
# --------------------------------------------------------------------
@query(Group)
class QGroup(QEntity):
'''
Provides the query for group.
'''
isAnonymous = AsBooleanOrdered
# --------------------------------------------------------------------
@service((Entity, Group), (QEntity, QGroup), ('ACL', Group))
class IGroupService(IEntityService, IAclPrototype, ICompensatePrototype):
'''
The ACL access group service used for allowing accesses based on group.
'''
| [
"[email protected]"
] | |
9377ca1d4e1a8f7e874803665efdc587668509ce | 4bc19f4dd098ebedcb6ee78af0ae12cb633671fe | /rekvizitka/templatetags/signin_tags.py | 8226953cbaa61707f115f701fe9c2a6a61ba2593 | [] | no_license | StanislavKraev/rekvizitka | 958ab0e002335613a724fb14a8e4123f49954446 | ac1f30e7bb2e987b3b0bda4c2a8feda4d3f5497f | refs/heads/master | 2021-01-01T05:44:56.372748 | 2016-04-27T19:20:26 | 2016-04-27T19:20:26 | 57,240,406 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 613 | py | # -*- coding: utf-8 -*-
from django import template
from rek.rekvizitka.forms import SigninForm, SignupForm
register = template.Library()
@register.inclusion_tag("includes/navigation/index.html", takes_context=True)
def signin_form(context):
result = {}
for d in context:
result.update(d)
if 'top_signin_form' not in result:
result['top_signin_form'] = SigninForm()
if 'signup_form' not in result:
result['signup_form'] = SignupForm()
if 'request' in context:
result['show_login_form'] = 'next' in context['request'].GET
return result
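# --- Added illustration (not part of the original module) -----------------
# Typical use of this inclusion tag from a template (assuming this module
# lives in an app's templatetags/ package and the app is in INSTALLED_APPS):
#
#     {% load signin_tags %}
#     {% signin_form %}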
| [
"[email protected]"
] | |
430c29e62b60c6a030c6eebfbbf4f5c8806ae29f | f48f9798819b12669a8428f1dc0639e589fb1113 | /office/misc/zekr/actions.py | 7f611db22f147aa0500f55fdb6215f5233806360 | [] | no_license | vdemir/PiSiPackages-pardus-2011-devel | 781aac6caea2af4f9255770e5d9301e499299e28 | 7e1867a7f00ee9033c70cc92dc6700a50025430f | refs/heads/master | 2020-12-30T18:58:18.590419 | 2012-03-12T03:16:34 | 2012-03-12T03:16:34 | 51,609,831 | 1 | 0 | null | 2016-02-12T19:05:41 | 2016-02-12T19:05:40 | null | UTF-8 | Python | false | false | 1,131 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright 2011 TUBITAK/BILGEM
# Licensed under the GNU General Public License, version 2.
# See the file http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt
from pisi.actionsapi import autotools
from pisi.actionsapi import pisitools
from pisi.actionsapi import shelltools
from pisi.actionsapi import get
WorkDir = "%s/%s" % (get.ARCH(), get.srcNAME())
BASEDIR = "/usr/share/java/zekr"
def setup():
shelltools.system("ant clean")
def build():
shelltools.system("ant")
def install():
pisitools.insinto(BASEDIR, "*")
pisitools.dosym("%s/zekr.sh" % BASEDIR, "/usr/bin/zekr")
pisitools.dodoc("doc/changes.txt", "doc/license/*", "doc/readme.txt")
# Remove redundant files
pisitools.removeDir("%s/build" % BASEDIR)
pisitools.remove("%s/build.xml" % BASEDIR)
pisitools.remove("%s/readme.txt" % BASEDIR)
# Javadoc generation
# shelltools.system("ant javadoc")
# shelltools.copytree("build/docs/javadocs", "%s/%s/%s" %(get.installDIR(), get.docDIR(), get.srcNAME()))
# shelltools.unlinkDir("%s%s/build" % (get.installDIR(), BASEDIR))
| [
"[email protected]"
] | |
30d47c3b4db546a33d6f8b9cc2e181c424689c59 | 23e0629881270a881e68b2b07c6b8bc8b53c4127 | /glmtools/test/residual_test_int_scaling_nc.py | 746dcf06ef19d887e7e25f87429c712f3319eb1e | [
"BSD-3-Clause"
] | permissive | fluxtransport/glmtools | 7a78ed697ef3515869fa5c46afa9cd1b03700514 | ae17d95b61af011cf966392ba94863c5928053b7 | refs/heads/master | 2022-09-12T00:20:31.378392 | 2020-06-05T16:25:48 | 2020-06-05T16:25:48 | 266,206,763 | 1 | 1 | BSD-3-Clause | 2020-05-22T21:02:06 | 2020-05-22T21:02:06 | null | UTF-8 | Python | false | false | 1,655 | py | """
This set of tests reads the GLM data two ways, one by applying the unsigned integer conversion
manually, and the other by using the automatic method implemented in the library.
It was used to test PR #658 developed in response to issue #656 on the unidata/netcdf4-python library.
The second, full-auto method should work if the version (>=1.2.8) of netcdf4-python post-dates this PR.
These tests were developed with GLM data dating after 24 April 2017, but may not work with
later production upgrades if the unsigned int encoding method used in the production system changes.
The correct answers are:
-139.505
-43.7424
"""
filename = '/data/LCFA-production/OR_GLM-L2-LCFA_G16_s20171161230400_e20171161231000_c20171161231027.nc'
some_flash = 6359
import netCDF4
nc = netCDF4.Dataset(filename)
event_lons = nc.variables['event_lon']
event_lons.set_auto_scale(False)
scale_factor = event_lons.scale_factor
add_offset = event_lons.add_offset
event_lons = event_lons[:].astype('u2')
event_lons_fixed = (event_lons[:])*scale_factor+add_offset
nc.close()
print("Manual scaling")
print(event_lons_fixed.min())
print(event_lons_fixed.max())
# lon_fov = (-156.06, -22.94)
# dlon_fov = lon_fov[1]-lon_fov[0]
# lat_fov = (-66.56, 66.56)
# scale_factor = 0.00203128 # from file and spec; same for both
# ------
filename = '/data/LCFA-production/OR_GLM-L2-LCFA_G16_s20171161230400_e20171161231000_c20171161231027.nc'
some_flash = 6359
import netCDF4
nc = netCDF4.Dataset(filename)
event_lons = nc.variables['event_lon']
event_lons_fixed = event_lons[:]
nc.close()
print("Auto scaling")
print(event_lons_fixed.min())
print(event_lons_fixed.max())
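# --- Added illustration (not part of the original script) -----------------
# Sanity check tying both paths to the documented answers for this granule:
#
#     assert abs(event_lons_fixed.min() - (-139.505)) < 1e-3
#     assert abs(event_lons_fixed.max() - (-43.7424)) < 1e-3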
| [
"[email protected]"
] | |
165573a4fe8aadb00a0a2ffec9a278040aa43dc8 | 3c8c2af952f2a785ca648c44954961a198c2ac6b | /tensorflower/Examples/NeuralNetworkExample/simple_neural_network_with_eager.py | a0be16daa338f9e52c6f3ae4318fdfcd600cdaf4 | [] | no_license | SCismycat/TensorFlowLearning | 0b8db07ca24f6a7ac75ddc9a91f7a13c71709104 | e5fe6359e2ae9fdfc6a6824efdfc2660c7a9d061 | refs/heads/master | 2020-11-25T18:30:55.859591 | 2020-01-03T12:45:24 | 2020-01-03T12:45:24 | 228,793,975 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,190 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# @Author : Leslee
# @Email : [email protected]
# @Time : 2019.11.27 14:05
import tensorflower as tf
tf.enable_eager_execution()
tfe = tf.contrib.eager
# Import MNIST data
from tensorflower.examples.tutorials.mnist import input_data
mnist = input_data.read_data_sets("/tmp/data/", one_hot=False)
# Parameters
learning_rate = 0.001
num_steps = 1000
batch_size = 128
display_step = 100
# Network Parameters
n_hidden_1 = 256 # 1st layer number of neurons
n_hidden_2 = 256 # 2nd layer number of neurons
num_input = 784 # MNIST data input (img shape: 28*28)
num_classes = 10 # MNIST total classes (0-9 digits)
# Batch the dataset up front
dataset = tf.data.Dataset.from_tensor_slices(
(mnist.train.images,mnist.train.labels))
dataset = dataset.repeat().batch(batch_size).prefetch(batch_size)
dataset_iter = tfe.Iterator(dataset)
# Define the neural network, using the eager API and tf.layers API
class NeuralNetwork(tfe.Network):
def __init__(self):
        # Define each layer
super(NeuralNetwork, self).__init__()
self.layer1 = self.track_layer(
tf.layers.Dense(n_hidden_1,activation=tf.nn.relu))
self.layer2 = self.track_layer(
tf.layers.Dense(n_hidden_2,activation=tf.nn.relu))
self.out_layer = self.track_layer(tf.layers.Dense(num_classes))
def call(self,x):
x = self.layer1(x)
x = self.layer2(x)
return self.out_layer(x)
neural_network = NeuralNetwork()
# Cross-entropy loss function
def loss_fn(inference_fn,inputs,labels):
return tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=inference_fn(inputs),labels=labels))
# Compute accuracy
def accuracy_fn(inference_fn,inputs,labels):
prediction = tf.nn.softmax(inference_fn(inputs))
correct_pred = tf.equal(tf.argmax(prediction,1),labels)
return tf.reduce_mean(tf.cast(correct_pred,tf.float32))
optimizer = tf.train.AdamOptimizer(learning_rate)
# Compute gradients
grad = tfe.implicit_gradients(loss_fn)
# Start training
average_loss = 0.
average_acc = 0.
for step in range(num_steps):
d = dataset_iter.next()
# Images
x_batch = d[0]
y_batch = tf.cast(d[1],dtype=tf.int64)
    # Compute the loss for the whole batch
batch_loss = loss_fn(neural_network,x_batch,y_batch)
average_loss += batch_loss
    # Compute the accuracy for the whole batch
batch_accuracy = accuracy_fn(neural_network,x_batch,y_batch)
average_acc += batch_accuracy
if step == 0:
        # Print the initial cost before optimization
print("Initial loss= {:.9f}".format(average_loss))
optimizer.apply_gradients(grad(neural_network,x_batch,y_batch))
    # Print progress details
if (step+1) % display_step == 0 or step == 0:
if step >0:
average_loss/=display_step
average_acc /= display_step
print("Step:", '%04d' % (step + 1), " loss=",
"{:.9f}".format(average_loss), " accuracy=",
"{:.4f}".format(average_acc))
average_loss = 0.
average_acc = 0.
test_X = mnist.test.images
test_Y = mnist.test.labels
test_acc = accuracy_fn(neural_network,test_X,test_Y)
print("Testset Accuracy: {:.4f}".format(test_acc))
| [
"[email protected]"
] | |
c3a297697958ee4ea8b8da950940b83f77a8ac04 | 72c01ee9f863f2b6ab02bf01e0e0e16595aeee4a | /session_3/hello.py | 57d217a9b41da8d94c1986b8db70f3dfb7cbb708 | [] | no_license | voronind/la | 1151d3066b0500864e59fda6d665e47d1f329f95 | 4f73f9e68c085b1d60cdc435f0c850ff27976182 | refs/heads/master | 2021-05-27T11:37:06.035679 | 2013-12-03T19:15:43 | 2013-12-03T19:15:43 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 90 | py | #!/usr/bin/env python
import os
print('Hello, {user}!'.format(user=os.environ['USER']))
| [
"[email protected]"
] | |
65949528935c33a7194c89fc9126372595d6568f | 1adc05008f0caa9a81cc4fc3a737fcbcebb68995 | /hardhat/recipes/python/wtforms.py | d5f3bc0b4cb6ebf88755c2e29ac9acc1dcd3dcdf | [
"MIT",
"BSD-3-Clause"
] | permissive | stangelandcl/hardhat | 4aa995518697d19b179c64751108963fa656cfca | 1ad0c5dec16728c0243023acb9594f435ef18f9c | refs/heads/master | 2021-01-11T17:19:41.988477 | 2019-03-22T22:18:44 | 2019-03-22T22:18:52 | 79,742,340 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 347 | py | from .base import PipBaseRecipe
class WTFormsRecipe(PipBaseRecipe):
def __init__(self, *args, **kwargs):
super(WTFormsRecipe, self).__init__(*args, **kwargs)
self.sha256 = 'ffdf10bd1fa565b8233380cb77a304cd' \
'36fd55c73023e91d4b803c96bc11d46f'
self.name = 'wtforms'
self.version = '2.1'
| [
"[email protected]"
] | |
ee4aa5b7b8776a3802c3d5fa8988bbc65c97c2f1 | 169cf85b51fc4aec751292e75d671f5df3afc6fb | /tests/test_pyutils.py | c8d938400f6a91bd6d7e9f475afe110cbabb4017 | [] | no_license | juliotux/astropop | e1461e377783710b7ad529700c3e399977489e23 | f6cb6f4aa9183be4c171779a35bad6624afbbbf9 | refs/heads/main | 2023-08-31T06:40:34.474068 | 2023-07-09T15:08:10 | 2023-07-09T15:08:10 | 127,958,109 | 14 | 12 | null | 2023-04-03T12:06:23 | 2018-04-03T19:25:33 | Python | UTF-8 | Python | false | false | 11,701 | py | # Licensed under a 3-clause BSD style license - see LICENSE.rst
# flake8: noqa: F403, F405
import pytest
import shlex
from astropop.py_utils import string_fix, process_list, \
check_iterable, batch_key_replace, \
run_command, IndexedDict, check_number, \
broadcast
import numpy as np
from astropop.testing import *
from astropop.logger import logger, log_to_list
@pytest.mark.parametrize('v, exp', [(1, True),
(1.5, True),
('2', False),
('2.5', False),
(1+3j, False),
(5j, False),
('A', False),
('AB', False),
([1, 2, 3], False),
(np.array([1, 2]), False),
(np.int_('3'), True),
(np.float_(3), True),
(False, False),
((1, 2, 3), False)])
def test_check_number(v, exp):
if exp:
assert_true(check_number(v))
else:
assert_false(check_number(v))
@pytest.mark.parametrize("inp, enc, res", [("a!^1ö~[😀", None, "a!^1ö~[😀"),
("a!^1ö~[😀", "utf-8", "a!^1ö~[😀"),
("a!1[", 'latin-1', "a!1["),
(b'bytes', None, 'bytes'),
(42, None, "42")])
def test_string_fix(inp, enc, res):
if enc is not None:
inp = inp.encode(enc)
assert_equal(string_fix(inp, enc), res)
else:
assert_equal(string_fix(inp), res)
class Test_RunCommand():
com = (["bash", "-c", 'for i in {1..10}; do echo "$i"; sleep 0.1; done'],
"bash -c 'for i in {1..10}; do echo \"$i\"; sleep 0.1; done'")
com2 = ("bash -c 'echo \"this is an error\" 1>&2'",
["bash", "-c", 'echo "this is an error" 1>&2'])
    # This test breaks all the others
# def test_nested_async(self):
# import asyncio
# async def async_func():
# run_command(['ls', '/'])
# asyncio.run(async_func())
def test_process_error(self):
import subprocess
with pytest.raises(subprocess.CalledProcessError):
run_command('python -c "import sys; sys.exit(1000)"')
@pytest.mark.parametrize('com', com)
def test_run_command(self, com):
stdout = []
stderr = []
_, out, err = run_command(com, stdout=stdout, stderr=stderr,
stdout_loglevel='WARN')
assert_is(out, stdout)
assert_is(err, stderr)
assert_equal(stdout, [str(i) for i in range(1, 11)])
assert_equal(stderr, [])
@pytest.mark.parametrize('com', com2)
def test_run_command_stderr(self, com):
stdout = []
stderr = []
_, out, err = run_command(com, stdout=stdout, stderr=stderr,
stdout_loglevel='WARN')
assert_is(out, stdout)
assert_is(err, stderr)
assert_equal(stdout, [])
assert_equal(stderr, ['this is an error'])
@pytest.mark.parametrize('com', com)
def test_logging(self, com):
logger.setLevel('DEBUG')
logl = []
expect_log = []
if not isinstance(com, list):
com = shlex.split(com)
# expect_log += ['Converting string using shlex']
logcmd = com
logcmd = " ".join(logcmd)
expect_log += [f"Runing: {logcmd}"]
expect_log += list(range(1, 11))
expect_log += [f"Done with process: {logcmd}"]
lh = log_to_list(logger, logl)
stdout = []
stderr = []
_, out, err = run_command(com, stdout=stdout, stderr=stderr,
stdout_loglevel='WARN')
assert_is(out, stdout)
assert_is(err, stderr)
assert_equal(stdout, [str(i) for i in range(1, 11)])
assert_equal(stderr, [])
assert_equal(logl, expect_log)
logger.removeHandler(lh)
@pytest.mark.parametrize('com', com2)
def test_logging_err(self, com):
logger.setLevel('DEBUG')
logl = []
expect_log = []
if not isinstance(com, list):
com = shlex.split(com)
# expect_log += ['Converting string using shlex']
logcmd = com
logcmd = " ".join(logcmd)
expect_log += [f"Runing: {logcmd}"]
expect_log += ['this is an error']
expect_log += [f"Done with process: {logcmd}"]
lh = log_to_list(logger, logl)
stdout = []
stderr = []
_, out, err = run_command(com, stdout=stdout, stderr=stderr,
stdout_loglevel='DEBUG',
stderr_loglevel='ERROR')
assert_is(out, stdout)
assert_is(err, stderr)
assert_equal(stdout, [])
assert_equal(stderr, ['this is an error'])
assert_equal(logl, expect_log)
logger.removeHandler(lh)
class Test_ProcessList():
def test_process_list(self):
def dummy_func(i):
i = 1
return i
a = np.zeros(20)
b = np.ones(20)
c = process_list(dummy_func, a)
assert_equal(b, c)
assert_false(np.array_equal(a, c))
def test_process_list_with_args(self):
def dummy_func(i, a, b):
return (i+a)*b
i_array = np.arange(20)
a_val = 2
b_val = 3
res = process_list(dummy_func, i_array, a_val, b=b_val)
assert_equal((i_array+a_val)*b_val, res)
class Test_CheckIterable():
def test_check_iterabel_array(self):
a = [1, 2, 3, 4, 5]
assert_true(check_iterable(a))
def test_check_iterabel_string(self):
a = '12345'
assert_false(check_iterable(a))
def test_check_iterabel_nparray(self):
a = np.zeros(20)
assert_true(check_iterable(a))
def test_check_iterabel_number(self):
a = 10
assert_false(check_iterable(a))
def test_check_iterabel_range(self):
a = range(10)
assert_true(check_iterable(a))
def test_check_iterabel_dict(self):
a = dict(a=1, b=2, c=3, d=4)
assert_true(check_iterable(a))
assert_true(check_iterable(a.items()))
assert_true(check_iterable(a.keys()))
assert_true(check_iterable(a.values()))
class Test_BatchKeyReplace():
def test_batch_key_replace(self):
dic1 = {'a': '{b} value', 'b': '6{c}', 'c': 2}
batch_key_replace(dic1)
assert_equal(dic1['a'], '62 value')
def test_batch_key_replace_list(self):
dic1 = {'a': '{b} value', 'b': ['6{c}', '4{d}'], 'c': 1, 'd': 2}
batch_key_replace(dic1)
assert_equal(dic1['a'], "['61', '42'] value")
class Test_Broadcast():
def test_broadcast(self):
a = np.arange(10)
b = np.arange(1, 11)
c = 2
bc = broadcast(a, b, c)
iterator = iter(bc)
indx = 0
for i in range(10):
assert_equal(next(iterator), [a[indx], b[indx], c])
indx += 1
with pytest.raises(StopIteration):
next(iterator)
def test_broadcast_empty(self):
with pytest.raises(ValueError):
broadcast()
def test_broadcast_wrong_shape(self):
a = np.arange(10)
b = np.arange(5)
with pytest.raises(ValueError):
broadcast(a, b)
def test_broadcast_only_scalars(self):
a = 1
b = 2
c = 3
bc = broadcast(a, b, c)
for i in bc:
assert_equal(i, [a, b, c])
def test_broadcast_superpass_32_limit(self):
arr = [np.arange(10)]*64
bc = broadcast(*arr)
assert_equal(len(bc), 10)
it = iter(bc)
for i in range(10):
assert_equal(next(it), [i]*64)
def test_broadcast_iters_only_scalars(self):
bc = broadcast(1, 2, 3)
assert_equal(bc.iters, [[1], [2], [3]])
def test_broadcast_iters(self):
bc = broadcast(np.arange(10), 3, 2)
assert_equal(bc.iters, [np.arange(10), [3]*10, [2]*10])
class Test_IndexedDict():
def test_indexeddict_create(self):
d = dict(a=1, b=2, c=3)
i = IndexedDict(a=1, b=2, c=3)
assert_is_instance(i, dict)
assert_equal(len(d), len(i))
# Python 3.6 and above ensure items order
assert_equal(list(d.keys()), list(i.keys()))
assert_equal(list(d.values()), list(i.values()))
assert_equal(i, d)
def test_indexeddict_insert_at(self):
a = IndexedDict(a=1, b=2, c=3, d=4)
a.insert_at(2, 'e', 5)
assert_equal(a, {'a': 1, 'b': 2, 'e': 5, 'c': 3, 'd': 4})
def test_indexeddict_insert_at_first(self):
a = IndexedDict(a=1, b=2, c=3, d=4)
a.insert_at(0, 'e', 5)
assert_equal(a, {'e': 5, 'a': 1, 'b': 2, 'c': 3, 'd': 4})
def test_indexeddict_insert_at_last(self):
a = IndexedDict(a=1, b=2, c=3, d=4)
a.insert_at(4, 'e', 5)
assert_equal(a, {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5})
def test_indexeddict_insert_at_away(self):
a = IndexedDict(a=1, b=2, c=3, d=4)
a.insert_at(42, 'e', 5)
assert_equal(a, {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5})
def test_indexeddict_insert_at_negative(self):
a = IndexedDict(a=1, b=2, c=3, d=4)
a.insert_at(-2, 'e', 5)
assert_equal(a, {'a': 1, 'b': 2, 'c': 3, 'e': 5, 'd': 4})
def test_indexeddict_after(self):
a = IndexedDict(a=1, b=2, c=3, d=4)
a.insert_after('b', 'e', 5)
assert_equal(a, {'a': 1, 'b': 2, 'e': 5, 'c': 3, 'd': 4})
def test_indexeddict_before(self):
a = IndexedDict(a=1, b=2, c=3, d=4)
a.insert_before('b', 'e', 5)
assert_equal(a, {'a': 1, 'e': 5, 'b': 2, 'c': 3, 'd': 4})
def test_indexeddict_existing_before_before(self):
a = IndexedDict(a=1, b=2, c=3, d=4)
a.insert_before('b', 'c', 3)
assert_equal(a, {'a': 1, 'c': 3, 'b': 2, 'd': 4})
def test_indexeddict_existing_after_before(self):
a = IndexedDict(a=1, b=2, c=3, d=4, e=5)
a.insert_before('e', 'c', 4)
assert_equal(a, {'a': 1, 'b': 2, 'd': 4, 'c': 4, 'e': 5})
def test_indexeddict_existing_before_after(self):
a = IndexedDict(a=1, b=2, c=3, d=4)
a.insert_after('b', 'c', 3)
assert_equal(a, {'a': 1, 'c': 3, 'b': 2, 'd': 4})
def test_indexeddict_existing_after_after(self):
a = IndexedDict(a=1, b=2, c=3, d=4, e=5)
a.insert_after('e', 'c', 4)
assert_equal(a, {'a': 1, 'b': 2, 'd': 4, 'c': 4, 'e': 5})
def test_indexeddict_first(self):
a = IndexedDict(a=1, b=2, c=3, d=4)
a.insert_before('a', 'e', 5)
assert_equal(a, {'e': 5, 'a': 1, 'b': 2, 'c': 3, 'd': 4})
def test_indexeddict_last(self):
a = IndexedDict(a=1, b=2, c=3, d=4)
a.insert_after('d', 'e', 5)
assert_equal(a, {'a': 1, 'b': 2, 'c': 3, 'd': 4, 'e': 5})
@pytest.mark.parametrize('val, res', [('a', 0), ('b', 1),
('c', 2), ('d', 3)])
def test_indexeddict_index(self, val, res):
a = IndexedDict(a=1, b=2, c=3, d=4)
assert_equal(a.index(val), res)
def test_indexeddict_invalid_key(self):
a = IndexedDict(a=1, b=2, c=3, d=4)
with pytest.raises(KeyError):
a.index('e')
| [
"[email protected]"
] | |
fb2ae029326cdc5260c2c34c847575975d292b52 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03449/s515079577.py | ed708e8b8f9b546f606183d8cdb3a5cc6cd6ae6f | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 929 | py | import math
from math import gcd,pi,sqrt
INF = float("inf")
MOD = 10**9 + 7
import sys
sys.setrecursionlimit(10**6)
import itertools
import bisect
from collections import Counter,deque
def i_input(): return int(input())
def i_map(): return map(int, input().split())
def i_list(): return list(i_map())
def i_row(N): return [i_input() for _ in range(N)]
def i_row_list(N): return [i_list() for _ in range(N)]
def s_input(): return input()
def s_map(): return input().split()
def s_list(): return list(s_map())
def s_row(N): return [s_input() for _ in range(N)]
def s_row_str(N): return [s_list() for _ in range(N)]
def s_row_list(N): return [list(s_input()) for _ in range(N)]
def main():
n = i_input()
a = i_list()
a.append(0)
b = i_list()
b.append(0)
m = 0
for i in range(n):
trial = sum(a[:i+1]) + sum(b[i:])
m = max(m, trial)
print(m)
if __name__=="__main__":
main()
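# --- Added illustration (not part of the original solution) ---------------
# main() recomputes both sums inside the loop (O(n^2)); the same maximum can
# be taken in one pass with running prefix/suffix sums:
def max_candies(a, b):
    best, prefix, suffix = 0, 0, sum(b)
    for x, y in zip(a, b):
        prefix += x
        best = max(best, prefix + suffix)
        suffix -= y
    return best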
| [
"[email protected]"
] | |
3a60fe79b32100607536ae6536cd91c46be9e2ed | eb9c3dac0dca0ecd184df14b1fda62e61cc8c7d7 | /google/cloud/mediatranslation/v1beta1/mediatranslation-v1beta1-py/noxfile.py | 235477334e1e16d28e4cf484fb7e4a2071eaf77b | [
"Apache-2.0"
] | permissive | Tryweirder/googleapis-gen | 2e5daf46574c3af3d448f1177eaebe809100c346 | 45d8e9377379f9d1d4e166e80415a8c1737f284d | refs/heads/master | 2023-04-05T06:30:04.726589 | 2021-04-13T23:35:20 | 2021-04-13T23:35:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,994 | py | # -*- coding: utf-8 -*-
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import os
import shutil
import nox # type: ignore
@nox.session(python=['3.6', '3.7', '3.8', '3.9'])
def unit(session):
"""Run the unit test suite."""
session.install('coverage', 'pytest', 'pytest-cov', 'asyncmock', 'pytest-asyncio')
session.install('-e', '.')
session.run(
'py.test',
'--quiet',
'--cov=google/cloud/mediatranslation_v1beta1/',
'--cov-config=.coveragerc',
'--cov-report=term',
'--cov-report=html',
os.path.join('tests', 'unit', ''.join(session.posargs))
)
@nox.session(python=['3.6', '3.7'])
def mypy(session):
"""Run the type checker."""
session.install('mypy')
session.install('.')
session.run(
'mypy',
'--explicit-package-bases',
'google',
)
@nox.session(python='3.6')
def docs(session):
"""Build the docs for this library."""
session.install("-e", ".")
session.install("sphinx<3.0.0", "alabaster", "recommonmark")
shutil.rmtree(os.path.join("docs", "_build"), ignore_errors=True)
session.run(
"sphinx-build",
"-W", # warnings as errors
"-T", # show full traceback on exception
"-N", # no colors
"-b",
"html",
"-d",
os.path.join("docs", "_build", "doctrees", ""),
os.path.join("docs", ""),
os.path.join("docs", "_build", "html", ""),
)
| [
"bazel-bot-development[bot]@users.noreply.github.com"
] | bazel-bot-development[bot]@users.noreply.github.com |
d6c5fb6544862ff09a8094ced5cad5531d23f3bd | ede51c5a9031e896155d6abc2c43295e94bc2abe | /apps/account/views.py | 2bbe7486e5275e07307fc6759e35f1f5250a6e5b | [
"MIT"
] | permissive | ChinaParrot/loonflow | 1b49ae352476dae8dfc8156ab89a79b5174160f1 | 5cc57fbb0ab328cd90aaeb6dd9daf16bf39c49be | refs/heads/master | 2020-05-20T03:08:19.538246 | 2019-05-06T15:52:21 | 2019-05-06T15:52:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,688 | py | import json
from django.shortcuts import redirect
from django.utils.decorators import method_decorator
from django.views import View
from django.contrib.auth import authenticate, login, logout
from django.contrib.auth.decorators import login_required
from service.account.account_base_service import AccountBaseService
from service.format_response import api_response
@method_decorator(login_required, name='dispatch')
class LoonUserView(View):
def get(self, request, *args, **kwargs):
"""
        Get the user list
:param request:
:param args:
:param kwargs:
:return:
"""
request_data = request.GET
search_value = request_data.get('search_value', '')
per_page = int(request_data.get('per_page', 10))
page = int(request_data.get('page', 1))
user_result_object_list, msg = AccountBaseService().get_user_list(search_value, page, per_page)
if user_result_object_list is not False:
data = dict(value=user_result_object_list, per_page=msg['per_page'], page=msg['page'], total=msg['total'])
code, msg, = 0, ''
else:
code, data = -1, ''
return api_response(code, msg, data)
@method_decorator(login_required, name='dispatch')
class LoonRoleView(View):
def get(self, request, *args, **kwargs):
"""
        List user roles
:param request:
:param args:
:param kwargs:
:return:
"""
request_data = request.GET
search_value = request_data.get('search_value', '')
per_page = int(request_data.get('per_page', 10))
page = int(request_data.get('page', 1))
role_result_object_list, msg = AccountBaseService().get_role_list(search_value, page, per_page)
if role_result_object_list is not False:
data = dict(value=role_result_object_list, per_page=msg['per_page'], page=msg['page'], total=msg['total'])
code, msg, = 0, ''
else:
code, data = -1, ''
return api_response(code, msg, data)
@method_decorator(login_required, name='dispatch')
class LoonDeptView(View):
def get(self, request, *args, **kwargs):
"""
        List departments
:param request:
:param args:
:param kwargs:
:return:
"""
request_data = request.GET
search_value = request_data.get('search_value', '')
per_page = int(request_data.get('per_page', 10))
page = int(request_data.get('page', 1))
dept_result_object_list, msg = AccountBaseService().get_dept_list(search_value, page, per_page)
if dept_result_object_list is not False:
data = dict(value=dept_result_object_list, per_page=msg['per_page'], page=msg['page'], total=msg['total'])
code, msg, = 0, ''
else:
code, data = -1, ''
return api_response(code, msg, data)
@method_decorator(login_required, name='dispatch')
class LoonAppTokenView(View):
def get(self, request, *args, **kwargs):
"""
        List API call-permission tokens
:param request:
:param args:
:param kwargs:
:return:
"""
request_data = request.GET
search_value = request_data.get('search_value', '')
per_page = int(request_data.get('per_page', 10))
page = int(request_data.get('page', 1))
token_result_object_list, msg = AccountBaseService().get_token_list(search_value, page, per_page)
if token_result_object_list is not False:
data = dict(value=token_result_object_list, per_page=msg['per_page'], page=msg['page'], total=msg['total'])
code, msg, = 0, ''
else:
code, data = -1, ''
return api_response(code, msg, data)
def post(self, request, *args, **kwargs):
"""
        Add an API call-permission record
:param request:
:param args:
:param kwargs:
:return:
"""
json_str = request.body.decode('utf-8')
if not json_str:
            return api_response(-1, 'POST body is empty', {})
request_data_dict = json.loads(json_str)
app_name = request_data_dict.get('app_name', '')
ticket_sn_prefix = request_data_dict.get('ticket_sn_prefix', '')
workflow_ids = request_data_dict.get('workflow_ids', '')
# username = request.user.username
username = request.META.get('HTTP_USERNAME')
flag, msg = AccountBaseService().add_token_record(app_name, ticket_sn_prefix, workflow_ids, username)
if flag is False:
code, data = -1, {}
else:
code, data = 0, {'id': msg}
return api_response(code, msg, data)
@method_decorator(login_required, name='dispatch')
class LoonAppTokenDetailView(View):
def get(self, request, *args, **kwargs):
"""
        Get token detail
:param request:
:param args:
:param kwargs:
:return:
"""
app_token_id = kwargs.get('app_token_id')
        # not implemented yet: return an explicit response instead of None
        return api_response(-1, 'token detail for id {} is not implemented yet'.format(app_token_id), {})
def patch(self, request, *args, **kwargs):
"""
        Edit a token record
:param request:
:param args:
:param kwargs:
:return:
"""
app_token_id = kwargs.get('app_token_id')
json_str = request.body.decode('utf-8')
if not json_str:
            return api_response(-1, 'patch data is empty', {})
request_data_dict = json.loads(json_str)
app_name = request_data_dict.get('app_name', '')
ticket_sn_prefix = request_data_dict.get('ticket_sn_prefix', '')
workflow_ids = request_data_dict.get('workflow_ids', '')
flag, msg = AccountBaseService.update_token_record(app_token_id, app_name, ticket_sn_prefix, workflow_ids)
if flag is False:
code, data = -1, {}
else:
code, data = 0, {}
return api_response(code, msg, data)
def delete(self, request, *args, **kwargs):
"""
        Delete a token record
:param request:
:param args:
:param kwargs:
:return:
"""
app_token_id = kwargs.get('app_token_id')
flag, msg = AccountBaseService.del_token_record(app_token_id)
if flag is False:
code, data = -1, {}
else:
code, data = 0, {}
return api_response(code, msg, data)
class LoonLoginView(View):
def post(self, request, *args, **kwargs):
"""
        Login authentication
:param request:
:param args:
:param kwargs:
:return:
"""
json_str = request.body.decode('utf-8')
if not json_str:
            return api_response(-1, 'post data is empty', {})
request_data_dict = json.loads(json_str)
username = request_data_dict.get('username', '')
password = request_data_dict.get('password', '')
user = authenticate(username=username, password=password)
if user is not None:
login(request, user)
return api_response(0, '', {})
else:
            return api_response(-1, 'username or password is invalid', {})
class LoonLogoutView(View):
def get(self, request, *args, **kwargs):
"""
        Logout
:param request:
:param args:
:param kwargs:
:return:
"""
logout(request)
return redirect('/manage')
class LoonUserRoleView(View):
def get(self, request, *args, **kwargs):
"""
        Get role info for a given user
"""
user_id = kwargs.get('user_id', 0)
search_value = request.GET.get('search_value', '')
role_info_list, msg = AccountBaseService.get_user_role_info_by_user_id(user_id, search_value)
if role_info_list is not False:
data = dict(value=role_info_list, per_page=msg['per_page'], page=msg['page'], total=msg['total'])
            code, msg = 0, ''
else:
code, data = -1, ''
return api_response(code, msg, data)
class LoonRoleUserView(View):
def get(self, request, *args, **kwargs):
"""
        Get user info for a given role
:param request:
:param args:
:param kwargs:
:return:
"""
role_id = kwargs.get('role_id', 0)
search_value = request.GET.get('search_value', '')
user_info_list, msg = AccountBaseService.get_role_user_info_by_role_id(role_id, search_value)
if user_info_list is not False:
data = dict(value=user_info_list, per_page=msg['per_page'], page=msg['page'], total=msg['total'])
            code, msg = 0, ''
else:
code, data = -1, ''
return api_response(code, msg, data)
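# The URL wiring below is a minimal sketch for illustration only; the route
# paths and the module path are assumptions, not taken from this project:
#
# from django.urls import path
# from apps.account.views import (LoonUserView, LoonRoleView, LoonDeptView,
#                                 LoonAppTokenView, LoonAppTokenDetailView,
#                                 LoonLoginView, LoonLogoutView)
#
# urlpatterns = [
#     path('users', LoonUserView.as_view()),
#     path('roles', LoonRoleView.as_view()),
#     path('depts', LoonDeptView.as_view()),
#     path('app_tokens', LoonAppTokenView.as_view()),
#     path('app_tokens/<int:app_token_id>', LoonAppTokenDetailView.as_view()),
#     path('login', LoonLoginView.as_view()),
#     path('logout', LoonLogoutView.as_view()),
# ]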
| [
"[email protected]"
] | |
7b2e484e63c5fc45c04b1b476c54ad6f48df39f1 | a34b9c6cb03e13e0b13f55f3fcd23e974187a19b | /ample/utils.py | c31b1cbbbeb76b55ddb36918d3661cf0b51c3a1e | [
"MIT"
] | permissive | hhcho/ample | a9496e18943da1116a804cee27a2759905ce29a1 | cdb28d1ff1d285d851350e0446d0dc5e48a7a561 | refs/heads/master | 2020-04-10T19:08:10.569231 | 2018-12-12T17:42:03 | 2018-12-12T17:42:03 | 161,224,274 | 0 | 0 | MIT | 2018-12-12T17:42:04 | 2018-12-10T19:14:06 | Python | UTF-8 | Python | false | false | 1,633 | py | import errno
from fbpca import pca
import datetime
import numpy as np
import os
from sklearn.random_projection import SparseRandomProjection as JLSparse
import sys
# Default parameters.
DIMRED = 100
def log(string):
string = str(string)
sys.stdout.write(str(datetime.datetime.now()) + ' | [ample] ')
sys.stdout.write(string + '\n')
sys.stdout.flush()
def reduce_dimensionality(X, method='svd', dimred=DIMRED, raw=False):
    if method == 'svd':
        # Randomized PCA/SVD (fbpca); project onto the top singular vectors.
        k = min((dimred, X.shape[0], X.shape[1]))
        U, s, Vt = pca(X, k=k, raw=raw)
        return U[:, range(k)] * s[range(k)]
    elif method == 'jl_sparse':
        # Sparse Johnson-Lindenstrauss random projection.
        jls = JLSparse(n_components=dimred)
        return jls.fit_transform(X).toarray()
    elif method == 'hvg':
        # Keep the `dimred` features with the highest dispersion.
        X = X.tocsc()
        disp = dispersion(X)
        highest_disp_idx = np.argsort(disp)[::-1][:dimred]
        return X[:, highest_disp_idx].toarray()
    else:
        sys.stderr.write('ERROR: Unknown method {}.\n'.format(method))
        sys.exit(1)
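# Minimal usage sketch (the random matrix below is illustrative only; any
# scipy.sparse sample-by-feature matrix would do):
#
# import numpy as np
# from scipy.sparse import csr_matrix
# X = csr_matrix(np.random.poisson(0.5, size=(1000, 2000)).astype(float))
# X_dimred = reduce_dimensionality(X, method='hvg', dimred=100)
# log('Reduced to shape {}'.format(X_dimred.shape))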
def dispersion(X, eps=1e-10):
mean = X.mean(0).A1
X_nonzero = X[:, mean > eps]
nonzero_mean = X_nonzero.mean(0).A1
nonzero_var = (X_nonzero.multiply(X_nonzero)).mean(0).A1
del X_nonzero
nonzero_dispersion = (nonzero_var / nonzero_mean)
dispersion = np.zeros(X.shape[1])
dispersion[mean > eps] = nonzero_dispersion
dispersion[mean <= eps] = float('-inf')
return dispersion
def mkdir_p(path):
try:
os.makedirs(path)
except OSError as exc: # Python >2.5
if exc.errno == errno.EEXIST and os.path.isdir(path):
pass
else:
raise
| [
"[email protected]"
] | |
9206f7deeac420809be8a7ba2e64e36170b26ce7 | 254f1c347c1c9412b8e7d2c41d4b53eae57e8ead | /analysis/hchii_candidates.py | e69ab1aa3cec535cb56308252e05a014f131f73c | [] | no_license | keflavich/MGPS | 16d9b2343e4e78609d77c9138341c04273d62b10 | a9f9dbaead132c42dd74de9915d2996c1fb0cf02 | refs/heads/master | 2021-06-03T06:33:49.335347 | 2020-11-04T01:21:45 | 2020-11-04T01:21:45 | 136,971,627 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,125 | py | from dendrocat.aperture import Circle, Annulus
from astropy import wcs
from astropy.io import fits
from astropy.stats import mad_std
from astropy.convolution import convolve_fft, Gaussian2DKernel
from astropy import units as u
from astropy import coordinates
from astropy.table import Column, Table
import regions
import pylab as pl
from paths import catalog_figure_path, catalog_path, overview_figure_path
from files import files
from constants import mustang_central_frequency, mustang_beam_fwhm
from astropy.visualization import (MinMaxInterval, AsinhStretch,
PercentileInterval,
ImageNormalize)
reglist = regions.io.read_ds9('cutout_regions.reg')
cutout_regions = {reg.meta['label']: reg for reg in reglist}
for regname,fn in files.items():
for threshold,min_npix in ((4, 100),): # (6, 15), (8, 15), (10, 15)):
for min_delta in (1, ):
print(f"{regname}, {fn}")
catalog = Table.read(f'{catalog_path}/{regname}_dend_contour_thr{threshold}_minn{min_npix}_mind{min_delta}_crossmatch.ipac', format='ascii.ipac')
| [
"[email protected]"
] | |
3bcaece774afadaf9337a2966e2f51dc0850ba20 | cb25407fc1480f771391bb09e36dad123ec9fca2 | /bin/backupz.py | 48644d780bc7d83d269ca55a769e30ee3e83314d | [] | no_license | prataprc/zeta | f68925c9dfbf70331eae59ff5cf173956d249696 | 9c3bc88c56c67d0fff5c0790d768ad6cac79642f | refs/heads/master | 2021-01-20T01:51:20.194893 | 2017-04-25T08:07:41 | 2017-04-25T08:07:41 | 89,334,343 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,182 | py | #! /usr/bin/env python
# This file is subject to the terms and conditions defined in
# file 'LICENSE', which is part of this source code package.
# Copyright (c) 2009 SKR Farms (P) LTD.
# Gotcha :
# Notes :
# * The back-up direcotry structure,
# <bkpdir>
# |---<name>
# |---<name>-bkp-<timestamp> (backup directory)
# | |----<name> (hard-link to deployed dir)
# | |----<name>-sql-<timestamp> (sqldumpfile)
# |
# |---<name>-bkp-<timestamp>.tar.gz
# |---backupz-log-<timestamp>
import sys
import getopt
from optparse import OptionParser
import os
from os.path import basename, abspath, dirname, isdir, isfile, join
import shutil as sh
import time
progname = basename( __file__ )
usage = "usage: %prog [options] name deploydir bkpdir"
pyver = "%s.%s" % sys.version_info[:2]
python = 'python%s' % pyver
timest = time.localtime()
timestr = '%s.%s.%s.%s' % timest[:4]
options = None
def _cmdexecute( cmd, log=True ) :
if log :
print >> options.logfd, " %s" % cmd
rc = os.system( cmd )
if rc != 0 :
print >> options.logfd, "Command failed `%s`" % cmd
sys.exit(1)
def cmdoptions() :
op = OptionParser( usage=usage )
#op.add_option( "--eggs", dest="fetcheggs", default="",
# help="Fetch all the egg files to the <fetcheggs> directory" )
#op.add_option( "-H", dest="noindex", action="store_true", default=False,
# help="Do not look up into python package index" )
options, args = op.parse_args()
return op, options, args
def backupsql( name, destfile ) :
cmd = 'mysqldump %s -u %s --password=%s#321 > %s' % (
name, name, name, destfile )
_cmdexecute( cmd )
if __name__ == '__main__' :
op, options, args = cmdoptions()
if len(args) == 3 :
options.name = args[0]
        options.deploydir = abspath(args[1])
options.bkpdir = join( abspath(args[2]), options.name,
'%s-bkp-%s' % (options.name, timestr) )
options.sqldump = join( options.bkpdir,
'%s-sql-%s' % (options.name, timestr) )
options.logfile = join( dirname(options.bkpdir),
'backupz-log-%s' % timestr )
options.targz = join( dirname(options.bkpdir),
'%s-bkp-%s.tar.gz' % (options.name, timestr) )
os.makedirs( options.bkpdir )
options.logfd = open( options.logfile, 'w' )
# Symbolically link deployed directory for backup
os.symlink( options.deploydir, join( options.bkpdir, options.name ) )
# SQL dump
backupsql( options.name,
join( options.bkpdir, '%s-%s.sql' % (options.name, timestr) )
)
# Tar and gzip
cmd = 'tar cfhlz %s %s' % ( options.targz, options.bkpdir )
_cmdexecute( cmd )
# Remove the original tar tree
sh.rmtree( options.bkpdir )
else :
op.print_help()
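# Example invocation (the paths below are illustrative assumptions):
#   python backupz.py zeta /opt/deploy/zeta /var/backups
# This produces /var/backups/zeta/zeta-bkp-<timestamp>.tar.gz holding a
# snapshot of the deployed tree plus a mysqldump of the `zeta` database.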
| [
"[email protected]"
] | |
963002037c164929ea7727c36d3bf9cd05df3dd9 | bfc25f1ad7bfe061b57cfab82aba9d0af1453491 | /data/external/repositories/126714/kaggle-avazu-master/script/rare.py | 43114a58f77b0a7a3318461e834e50e3edd2f92b | [
"MIT"
] | permissive | Keesiu/meta-kaggle | 77d134620ebce530d183467202cf45639d9c6ff2 | 87de739aba2399fd31072ee81b391f9b7a63f540 | refs/heads/master | 2020-03-28T00:23:10.584151 | 2018-12-20T19:09:50 | 2018-12-20T19:09:50 | 147,406,338 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 870 | py | import marshal
def stat(input, isTest):
    # Accumulate occurrence counts of the device id, device ip and one extra
    # categorical column into the global dict d.
    f = open(input)
    line = f.readline()  # skip the header line
    count = 0
    while True:
        line = f.readline()
        if not line:
            break
        count += 1
        if count % 100000 == 0:
            print count
        lis = line.split(",")
        # the test file has no click column, so every column shifts by one
        index = 11
        if isTest:
            index = 10
        id = "i_" + lis[index]       # device id
        ip = "j_" + lis[index + 1]   # device ip
        iid = "v_" + lis[len(lis) - 7]
        if id in d:
            d[id] += 1
        else:
            d[id] = 1
        if ip in d:
            d[ip] += 1
        else:
            d[ip] = 1
        if iid in d:
            d[iid] += 1
        else:
            d[iid] = 1
    f.close()
d = {}
stat("../train_c", False)
stat("../test_c", True)
# keep only the features seen 10 times or fewer across train+test ("rare")
rare_d = {}
for k in d:
    if d[k] <= 10:
        rare_d[k] = d[k]
marshal.dump(rare_d, open("../rare_d", "w"))
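# Sketch of one way rare_d might be consumed downstream (the bucketing
# scheme below is an assumption for illustration, not from this repo):
#
# rare_d = marshal.load(open("../rare_d"))
# def map_feature(f):
#     # collapse every rare value of a field into one shared "rare" bucket
#     return f.split("_")[0] + "_rare" if f in rare_d else f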
"[email protected]"
] | |
ffc88f94db44f760091df1a143e9f2971d8b52db | 3700369b3c560e47dbc27c8b059b6f000a361f83 | /webapp/models.py | 9dc630d78b90b422702a3c3562405d4fb3819577 | [] | no_license | Aitmatow/instagram | 0a44bc05db6308ccb4648d55932613d1377915d1 | 2b8e19e899316720d1f0626a7587f1b895c77a6f | refs/heads/master | 2022-11-23T00:14:05.189119 | 2019-12-16T08:47:42 | 2019-12-16T08:47:42 | 227,972,959 | 1 | 0 | null | 2022-11-22T04:54:20 | 2019-12-14T05:46:03 | Python | UTF-8 | Python | false | false | 929 | py | from django.db import models
QUOTE_NEW = 'new'
QUOTE_APPROVED = 'approved'
QUOTE_STATUS_CHOICES = (
    (QUOTE_NEW, 'New'),
    (QUOTE_APPROVED, 'Approved')
)
# Create your models here.
class Quote(models.Model):
    text = models.TextField(max_length=2000, verbose_name='Quote text')
    # auto_now_add stamps the record once at creation (auto_now would
    # overwrite the date on every save)
    created_at = models.DateTimeField(auto_now_add=True, verbose_name='Date added')
    status = models.CharField(max_length=20, choices=QUOTE_STATUS_CHOICES, default=QUOTE_NEW, verbose_name='Status')
    author_name = models.CharField(max_length=50, verbose_name='Added by')
    author_email = models.EmailField(verbose_name='Email')
    rating = models.IntegerField(default=0, verbose_name='Rating')
    def __str__(self):
        return self.text[:20] + '....'
    class Meta:
        verbose_name = 'Quote'
        verbose_name_plural = 'Quotes'
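# Minimal ORM usage sketch (all values are illustrative only):
#
# quote = Quote.objects.create(text='To be or not to be',
#                              author_name='Bob',
#                              author_email='[email protected]')
# top_quotes = Quote.objects.filter(status=QUOTE_APPROVED).order_by('-rating')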
"[email protected]"
] | |
3de72fca9b59c9ff80421a1ee18a4fe0e90610bd | 7e743d01ab6e3c23d24ac9d31db6f46628826958 | /cmasher/colormaps/waterlily/waterlily.py | ac10a3c72f8f0d71627bbf6f5fabf50cbe3408a5 | [
"BSD-3-Clause"
] | permissive | telegraphic/CMasher | 603fdbeaabd4e2404d199c596dc6436b180f025d | 0c518b8b7cdea2207eacaaf28b460da73f6b2903 | refs/heads/master | 2022-11-18T10:46:19.238033 | 2020-07-21T07:00:00 | 2020-07-21T07:00:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25,668 | py | # %% IMPORTS
# Package imports
from matplotlib.cm import register_cmap
from matplotlib.colors import ListedColormap
# All declaration
__all__ = ['cmap']
# Author declaration
__author__ = "Ellert van der Velden (@1313e)"
# Package declaration
__package__ = 'cmasher'
# %% GLOBALS AND DEFINITIONS
# Type of this colormap (according to viscm)
cm_type = "diverging"
# RGB-values of this colormap
cm_data = [[0.12434357, 0.00588452, 0.21400096],
[0.12931129, 0.00675787, 0.22011754],
[0.13427605, 0.00761301, 0.22627813],
[0.13923743, 0.00844618, 0.23248445],
[0.14419489, 0.00925374, 0.23873832],
[0.14914773, 0.01003226, 0.24504157],
[0.15409628, 0.01077599, 0.25139847],
[0.15903865, 0.01148361, 0.25780936],
[0.16397490, 0.01214941, 0.26427895],
[0.16890307, 0.01277188, 0.27080848],
[0.17382209, 0.01334730, 0.27740161],
[0.17873060, 0.01387220, 0.28406219],
[0.18362636, 0.01434494, 0.29079310],
[0.18850717, 0.01476335, 0.29759833],
[0.19336978, 0.01512770, 0.30448066],
[0.19821038, 0.01543943, 0.31144297],
[0.20302543, 0.01569841, 0.31849056],
[0.20780909, 0.01591090, 0.32562507],
[0.21255503, 0.01608416, 0.33284912],
[0.21725536, 0.01622978, 0.34016407],
[0.22190089, 0.01636307, 0.34757095],
[0.22647956, 0.01650931, 0.35506605],
[0.23097774, 0.01669935, 0.36264494],
[0.23537868, 0.01697681, 0.37029727],
[0.23966261, 0.01740014, 0.37800537],
[0.24380764, 0.01804107, 0.38574545],
[0.24778982, 0.01898922, 0.39348282],
[0.25158529, 0.02034719, 0.40117401],
[0.25517266, 0.02222479, 0.40876847],
[0.25853565, 0.02473093, 0.41621127],
[0.26166567, 0.02795975, 0.42345155],
[0.26456273, 0.03198252, 0.43044714],
[0.26723479, 0.03684228, 0.43716929],
[0.26969569, 0.04248896, 0.44360388],
[0.27196284, 0.04848981, 0.44974895],
[0.27405475, 0.05471025, 0.45561291],
[0.27598966, 0.06106965, 0.46121003],
[0.27778467, 0.06750507, 0.46655789],
[0.27945476, 0.07396949, 0.47167607],
[0.28101352, 0.08042854, 0.47658345],
[0.28247268, 0.08685793, 0.48129838],
[0.28384236, 0.09324093, 0.48583798],
[0.28513137, 0.09956639, 0.49021794],
[0.28634701, 0.10582750, 0.49445266],
[0.28749631, 0.11201977, 0.49855477],
[0.28858462, 0.11814153, 0.50253603],
[0.28961712, 0.12419213, 0.50640676],
[0.29059833, 0.13017201, 0.51017629],
[0.29153215, 0.13608239, 0.51385302],
[0.29242208, 0.14192500, 0.51744454],
[0.29327113, 0.14770193, 0.52095768],
[0.29408197, 0.15341549, 0.52439862],
[0.29485717, 0.15906795, 0.52777294],
[0.29559924, 0.16466147, 0.53108570],
[0.29630976, 0.17019880, 0.53434154],
[0.29699110, 0.17568189, 0.53754465],
[0.29764460, 0.18111329, 0.54069888],
[0.29827178, 0.18649525, 0.54380773],
[0.29887444, 0.19182963, 0.54687447],
[0.29945363, 0.19711869, 0.54990205],
[0.30001054, 0.20236443, 0.55289318],
[0.30054631, 0.20756873, 0.55585038],
[0.30106201, 0.21273339, 0.55877600],
[0.30155859, 0.21786013, 0.56167217],
[0.30203696, 0.22295063, 0.56454090],
[0.30249793, 0.22800646, 0.56738403],
[0.30294227, 0.23302915, 0.57020328],
[0.30337068, 0.23802016, 0.57300024],
[0.30378382, 0.24298087, 0.57577639],
[0.30418229, 0.24791260, 0.57853310],
[0.30456679, 0.25281657, 0.58127171],
[0.30493787, 0.25769398, 0.58399340],
[0.30529589, 0.26254606, 0.58669927],
[0.30564131, 0.26737393, 0.58939035],
[0.30597473, 0.27217857, 0.59206767],
[0.30629654, 0.27696100, 0.59473213],
[0.30660696, 0.28172230, 0.59738453],
[0.30690651, 0.28646330, 0.60002572],
[0.30719552, 0.29118493, 0.60265644],
[0.30747413, 0.29588812, 0.60527731],
[0.30774293, 0.30057358, 0.60788908],
[0.30800193, 0.30524222, 0.61049227],
[0.30825149, 0.30989477, 0.61308745],
[0.30849187, 0.31453196, 0.61567516],
[0.30872319, 0.31915455, 0.61825584],
[0.30894582, 0.32376318, 0.62082997],
[0.30915975, 0.32835858, 0.62339790],
[0.30936539, 0.33294131, 0.62596007],
[0.30956268, 0.33751209, 0.62851674],
[0.30975200, 0.34207144, 0.63106829],
[0.30993331, 0.34662001, 0.63361494],
[0.31010692, 0.35115830, 0.63615698],
[0.31027282, 0.35568691, 0.63869459],
[0.31043127, 0.36020633, 0.64122798],
[0.31058231, 0.36471711, 0.64375731],
[0.31072612, 0.36921970, 0.64628271],
[0.31086280, 0.37371461, 0.64880430],
[0.31099249, 0.37820230, 0.65132216],
[0.31111533, 0.38268321, 0.65383636],
[0.31123142, 0.38715778, 0.65634691],
[0.31134094, 0.39162643, 0.65885384],
[0.31144402, 0.39608958, 0.66135714],
[0.31154081, 0.40054761, 0.66385675],
[0.31163148, 0.40500090, 0.66635262],
[0.31171621, 0.40944984, 0.66884467],
[0.31179520, 0.41389476, 0.67133278],
[0.31186865, 0.41833603, 0.67381681],
[0.31193680, 0.42277397, 0.67629662],
[0.31199992, 0.42720890, 0.67877201],
[0.31205825, 0.43164113, 0.68124278],
[0.31211216, 0.43607095, 0.68370870],
[0.31216189, 0.44049867, 0.68616949],
[0.31220796, 0.44492453, 0.68862492],
[0.31225061, 0.44934882, 0.69107462],
[0.31229048, 0.45377175, 0.69351832],
[0.31232786, 0.45819361, 0.69595559],
[0.31236353, 0.46261456, 0.69838612],
[0.31239787, 0.46703487, 0.70080943],
[0.31243174, 0.47145469, 0.70322513],
[0.31246573, 0.47587424, 0.70563272],
[0.31250071, 0.48029366, 0.70803171],
[0.31253759, 0.48471311, 0.71042159],
[0.31257717, 0.48913278, 0.71280175],
[0.31262078, 0.49355270, 0.71517170],
[0.31266920, 0.49797308, 0.71753069],
[0.31272396, 0.50239396, 0.71987816],
[0.31278644, 0.50681539, 0.72221344],
[0.31285779, 0.51123752, 0.72453568],
[0.31293998, 0.51566030, 0.72684425],
[0.31303476, 0.52008375, 0.72913836],
[0.31314366, 0.52450796, 0.73141704],
[0.31326906, 0.52893282, 0.73367953],
[0.31341322, 0.53335830, 0.73592494],
[0.31357856, 0.53778430, 0.73815229],
[0.31376746, 0.54221081, 0.74036052],
[0.31398291, 0.54663764, 0.74254862],
[0.31422803, 0.55106464, 0.74471556],
[0.31450612, 0.55549161, 0.74686019],
[0.31482075, 0.55991833, 0.74898134],
[0.31517574, 0.56434455, 0.75107779],
[0.31557523, 0.56876997, 0.75314825],
[0.31602366, 0.57319425, 0.75519142],
[0.31652582, 0.57761700, 0.75720593],
[0.31708684, 0.58203779, 0.75919038],
[0.31771222, 0.58645611, 0.76114332],
[0.31840787, 0.59087143, 0.76306326],
[0.31918007, 0.59528313, 0.76494869],
[0.32003532, 0.59969060, 0.76679791],
[0.32098080, 0.60409308, 0.76860934],
[0.32202424, 0.60848970, 0.77038148],
[0.32317357, 0.61287960, 0.77211265],
[0.32443695, 0.61726187, 0.77380097],
[0.32582371, 0.62163533, 0.77544516],
[0.32734269, 0.62599895, 0.77704320],
[0.32900405, 0.63035134, 0.77859393],
[0.33081742, 0.63469128, 0.78009540],
[0.33279358, 0.63901718, 0.78154661],
[0.33494302, 0.64332754, 0.78294608],
[0.33727653, 0.64762069, 0.78429266],
[0.33980514, 0.65189480, 0.78558566],
[0.34253965, 0.65614801, 0.78682444],
[0.34549065, 0.66037837, 0.78800872],
[0.34866835, 0.66458383, 0.78913863],
[0.35208231, 0.66876231, 0.79021477],
[0.35574127, 0.67291169, 0.79123827],
[0.35965281, 0.67702987, 0.79221088],
[0.36382312, 0.68111477, 0.79313499],
[0.36825671, 0.68516444, 0.79401368],
[0.37295618, 0.68917704, 0.79485074],
[0.37792196, 0.69315094, 0.79565068],
[0.38315216, 0.69708476, 0.79641869],
[0.38864250, 0.70097742, 0.79716057],
[0.39438623, 0.70482817, 0.79788271],
[0.40037420, 0.70863664, 0.79859195],
[0.40659514, 0.71240286, 0.79929533],
[0.41303585, 0.71612724, 0.80000005],
[0.41968153, 0.71981060, 0.80071327],
[0.42651620, 0.72345411, 0.80144199],
[0.43352311, 0.72705927, 0.80219282],
[0.44068514, 0.73062784, 0.80297193],
[0.44798499, 0.73416184, 0.80378517],
[0.45540582, 0.73766347, 0.80463763],
[0.46293189, 0.74113497, 0.80553339],
[0.47054756, 0.74457873, 0.80647656],
[0.47823856, 0.74799715, 0.80747031],
[0.48599174, 0.75139259, 0.80851720],
[0.49379555, 0.75476731, 0.80961897],
[0.50163802, 0.75812375, 0.81077799],
[0.50951018, 0.76146396, 0.81199484],
[0.51740343, 0.76479002, 0.81327029],
[0.52531010, 0.76810393, 0.81460481],
[0.53322356, 0.77140754, 0.81599852],
[0.54113819, 0.77470259, 0.81745128],
[0.54904863, 0.77799078, 0.81896303],
[0.55695031, 0.78127373, 0.82053353],
[0.56484078, 0.78455267, 0.82216148],
[0.57271595, 0.78782912, 0.82384685],
[0.58057376, 0.79110425, 0.82558859],
[0.58841174, 0.79437931, 0.82738612],
[0.59622922, 0.79765517, 0.82923797],
[0.60402262, 0.80093330, 0.83114456],
[0.61179265, 0.80421427, 0.83310394],
[0.61953824, 0.80749903, 0.83511535],
[0.62725857, 0.81078844, 0.83717801],
[0.63495307, 0.81408331, 0.83929113],
[0.64262139, 0.81738440, 0.84145388],
[0.65026337, 0.82069239, 0.84366540],
[0.65787900, 0.82400793, 0.84592486],
[0.66546844, 0.82733162, 0.84823141],
[0.67303197, 0.83066400, 0.85058419],
[0.68056996, 0.83400559, 0.85298238],
[0.68808289, 0.83735685, 0.85542516],
[0.69557018, 0.84071852, 0.85791228],
[0.70303254, 0.84409099, 0.86044288],
[0.71047142, 0.84747442, 0.86301580],
[0.71788585, 0.85086963, 0.86563112],
[0.72527708, 0.85427682, 0.86828781],
[0.73264583, 0.85769631, 0.87098521],
[0.73999166, 0.86112874, 0.87372319],
[0.74731621, 0.86457417, 0.87650067],
[0.75461912, 0.86803319, 0.87931756],
[0.76190122, 0.87150605, 0.88217322],
[0.76916383, 0.87499286, 0.88506677],
[0.77640537, 0.87849452, 0.88799879],
[0.78362842, 0.88201078, 0.89096784],
[0.79083329, 0.88554198, 0.89397361],
[0.79801871, 0.88908894, 0.89701653],
[0.80518675, 0.89265150, 0.90009549],
[0.81233783, 0.89622995, 0.90321016],
[0.81947230, 0.89982462, 0.90636025],
[0.82659011, 0.90343594, 0.90954568],
[0.83369156, 0.90706423, 0.91276619],
[0.84077761, 0.91070961, 0.91602124],
[0.84784862, 0.91437236, 0.91931059],
[0.85490492, 0.91805279, 0.92263401],
[0.86194682, 0.92175119, 0.92599126],
[0.86897464, 0.92546784, 0.92938214],
[0.87598869, 0.92920304, 0.93280643],
[0.88298922, 0.93295708, 0.93626395],
[0.88997650, 0.93673027, 0.93975451],
[0.89695077, 0.94052289, 0.94327794],
[0.90391222, 0.94433526, 0.94683409],
[0.91086103, 0.94816769, 0.95042279],
[0.91779733, 0.95202051, 0.95404393],
[0.92472121, 0.95589406, 0.95769737],
[0.93163272, 0.95978870, 0.96138299],
[0.93853161, 0.96370486, 0.96510081],
[0.94541738, 0.96764310, 0.96885089],
[0.95229045, 0.97160364, 0.97263292],
[0.95915055, 0.97558695, 0.97644685],
[0.96599731, 0.97959353, 0.98029261],
[0.97283020, 0.98362395, 0.98417016],
[0.97964739, 0.98767922, 0.98807995],
[0.98644895, 0.99175974, 0.99202153],
[0.99323371, 0.99586633, 0.99599488],
[1.00000000, 1.00000000, 1.00000000],
[0.99382469, 0.99580955, 0.99432110],
[0.98760164, 0.99166330, 0.98862986],
[0.98133740, 0.98755931, 0.98291625],
[0.97503997, 0.98349458, 0.97717497],
[0.96871626, 0.97946629, 0.97140333],
[0.96237164, 0.97547209, 0.96560034],
[0.95601185, 0.97150923, 0.95976730],
[0.94963976, 0.96757616, 0.95390497],
[0.94325909, 0.96367088, 0.94801568],
[0.93687235, 0.95979187, 0.94210153],
[0.93048036, 0.95593835, 0.93616375],
[0.92408561, 0.95210878, 0.93020504],
[0.91768886, 0.94830240, 0.92422693],
[0.91129032, 0.94451872, 0.91823051],
[0.90489147, 0.94075664, 0.91221790],
[0.89849256, 0.93701566, 0.90619023],
[0.89209319, 0.93329557, 0.90014800],
[0.88569452, 0.92959547, 0.89409297],
[0.87929669, 0.92591493, 0.88802599],
[0.87289905, 0.92225388, 0.88194719],
[0.86650216, 0.91861170, 0.87585767],
[0.86010629, 0.91498793, 0.86975822],
[0.85371140, 0.91138223, 0.86364935],
[0.84731670, 0.90779464, 0.85753082],
[0.84092280, 0.90422454, 0.85140359],
[0.83452970, 0.90067160, 0.84526808],
[0.82813733, 0.89713553, 0.83912458],
[0.82174544, 0.89361612, 0.83297320],
[0.81535347, 0.89011330, 0.82681372],
[0.80896190, 0.88662654, 0.82064685],
[0.80257057, 0.88315559, 0.81447273],
[0.79617936, 0.87970020, 0.80829144],
[0.78978811, 0.87626012, 0.80210307],
[0.78339667, 0.87283512, 0.79590767],
[0.77700472, 0.86942502, 0.78970513],
[0.77061205, 0.86602960, 0.78349540],
[0.76421875, 0.86264852, 0.77727873],
[0.75782467, 0.85928154, 0.77105510],
[0.75142964, 0.85592843, 0.76482449],
[0.74503349, 0.85258894, 0.75858685],
[0.73863604, 0.84926286, 0.75234213],
[0.73223714, 0.84594994, 0.74609027],
[0.72583661, 0.84264994, 0.73983119],
[0.71943428, 0.83936264, 0.73356480],
[0.71302998, 0.83608780, 0.72729102],
[0.70662354, 0.83282518, 0.72100975],
[0.70021479, 0.82957454, 0.71472089],
[0.69380357, 0.82633564, 0.70842432],
[0.68738971, 0.82310825, 0.70211992],
[0.68097304, 0.81989211, 0.69580759],
[0.67455294, 0.81668717, 0.68948672],
[0.66812958, 0.81349304, 0.68315752],
[0.66170286, 0.81030946, 0.67681990],
[0.65527262, 0.80713617, 0.67047373],
[0.64883872, 0.80397291, 0.66411886],
[0.64240101, 0.80081943, 0.65775515],
[0.63595841, 0.79767582, 0.65138150],
[0.62951158, 0.79454151, 0.64499858],
[0.62306045, 0.79141621, 0.63860627],
[0.61660476, 0.78829970, 0.63220428],
[0.61014331, 0.78519209, 0.62579136],
[0.60367711, 0.78209270, 0.61936849],
[0.59720594, 0.77900127, 0.61293539],
[0.59072831, 0.77591802, 0.60649044],
[0.58424556, 0.77284215, 0.60003495],
[0.57775664, 0.76977369, 0.59356774],
[0.57126149, 0.76671233, 0.58708865],
[0.56476033, 0.76365767, 0.58059777],
[0.55825218, 0.76060971, 0.57409393],
[0.55173780, 0.75756786, 0.56757778],
[0.54521614, 0.75453214, 0.56104804],
[0.53868762, 0.75150206, 0.55450494],
[0.53215208, 0.74847735, 0.54794811],
[0.52560864, 0.74545795, 0.54137634],
[0.51905823, 0.74244322, 0.53479036],
[0.51250000, 0.73943309, 0.52818895],
[0.50593365, 0.73642732, 0.52157144],
[0.49935973, 0.73342539, 0.51493803],
[0.49277805, 0.73042701, 0.50828809],
[0.48618824, 0.72743197, 0.50162076],
[0.47958991, 0.72444003, 0.49493507],
[0.47298359, 0.72145070, 0.48823100],
[0.46636921, 0.71846366, 0.48150783],
[0.45974679, 0.71547856, 0.47476485],
[0.45311640, 0.71249506, 0.46800133],
[0.44647816, 0.70951278, 0.46121648],
[0.43983228, 0.70653133, 0.45440955],
[0.43317907, 0.70355031, 0.44757974],
[0.42651828, 0.70056948, 0.44072551],
[0.41985059, 0.69758833, 0.43384622],
[0.41317687, 0.69460632, 0.42694128],
[0.40649778, 0.69162299, 0.42000968],
[0.39981272, 0.68863823, 0.41304872],
[0.39312423, 0.68565110, 0.40605913],
[0.38643180, 0.68266150, 0.39903778],
[0.37973812, 0.67966850, 0.39198500],
[0.37304275, 0.67667203, 0.38489700],
[0.36634870, 0.67367115, 0.37777377],
[0.35965765, 0.67066532, 0.37061319],
[0.35297165, 0.66765394, 0.36341301],
[0.34629317, 0.66463637, 0.35617090],
[0.33962524, 0.66161188, 0.34888442],
[0.33297150, 0.65857969, 0.34155107],
[0.32633633, 0.65553892, 0.33416823],
[0.31972491, 0.65248860, 0.32673327],
[0.31314248, 0.64942789, 0.31924201],
[0.30659623, 0.64635564, 0.31169136],
[0.30009369, 0.64327077, 0.30407659],
[0.29364480, 0.64017187, 0.29639406],
[0.28726040, 0.63705756, 0.28863847],
[0.28095360, 0.63392625, 0.28080436],
[0.27474016, 0.63077607, 0.27288617],
[0.26863813, 0.62760506, 0.26487647],
[0.26267002, 0.62441075, 0.25676914],
[0.25686147, 0.62119053, 0.24855490],
[0.25124393, 0.61794122, 0.24022510],
[0.24585511, 0.61465902, 0.23177057],
[0.24074014, 0.61133945, 0.22318124],
[0.23595315, 0.60797718, 0.21444629],
[0.23155932, 0.60456563, 0.20555579],
[0.22763671, 0.60109683, 0.19649969],
[0.22427864, 0.59756085, 0.18727215],
[0.22159446, 0.59394541, 0.17787608],
[0.21970884, 0.59023557, 0.16832692],
[0.21875529, 0.58641365, 0.15866581],
[0.21885793, 0.58246043, 0.14897700],
[0.22009608, 0.57835850, 0.13940533],
[0.22245044, 0.57409882, 0.13016467],
[0.22576171, 0.56968811, 0.12150790],
[0.22974550, 0.56515118, 0.11365853],
[0.23407808, 0.56052386, 0.10674215],
[0.23848634, 0.55584188, 0.10077283],
[0.24278697, 0.55113355, 0.09568718],
[0.24687702, 0.54641845, 0.09138767],
[0.25070795, 0.54170912, 0.08777129],
[0.25426366, 0.53701323, 0.08474270],
[0.25754688, 0.53233510, 0.08221878],
[0.26056796, 0.52767728, 0.08012845],
[0.26334064, 0.52304119, 0.07841111],
[0.26588045, 0.51842744, 0.07701528],
[0.26820294, 0.51383615, 0.07589709],
[0.27032300, 0.50926715, 0.07501892],
[0.27225402, 0.50472022, 0.07434809],
[0.27400849, 0.50019497, 0.07385633],
[0.27559818, 0.49569090, 0.07351934],
[0.27703238, 0.49120778, 0.07331473],
[0.27832162, 0.48674492, 0.07322459],
[0.27947412, 0.48230192, 0.07323212],
[0.28049758, 0.47787835, 0.07332273],
[0.28139919, 0.47347376, 0.07348371],
[0.28218562, 0.46908766, 0.07370402],
[0.28286261, 0.46471970, 0.07397352],
[0.28343564, 0.46036946, 0.07428352],
[0.28390985, 0.45603654, 0.07462643],
[0.28429006, 0.45172052, 0.07499567],
[0.28458007, 0.44742115, 0.07538470],
[0.28478415, 0.44313801, 0.07578856],
[0.28490602, 0.43887075, 0.07620262],
[0.28494907, 0.43461906, 0.07662274],
[0.28491652, 0.43038262, 0.07704527],
[0.28481132, 0.42616112, 0.07746700],
[0.28463599, 0.42195434, 0.07788471],
[0.28439348, 0.41776190, 0.07829633],
[0.28408628, 0.41358350, 0.07869974],
[0.28371630, 0.40941900, 0.07909240],
[0.28328605, 0.40526800, 0.07947311],
[0.28279736, 0.40113032, 0.07984003],
[0.28225231, 0.39700566, 0.08019202],
[0.28165244, 0.39289384, 0.08052756],
[0.28099980, 0.38879452, 0.08084604],
[0.28029571, 0.38470756, 0.08114612],
[0.27954177, 0.38063269, 0.08142708],
[0.27873954, 0.37656965, 0.08168838],
[0.27789035, 0.37251823, 0.08192932],
[0.27699548, 0.36847822, 0.08214928],
[0.27605604, 0.36444944, 0.08234761],
[0.27507335, 0.36043163, 0.08252408],
[0.27404853, 0.35642457, 0.08267831],
[0.27298263, 0.35242806, 0.08280996],
[0.27187665, 0.34844188, 0.08291875],
[0.27073158, 0.34446585, 0.08300442],
[0.26954834, 0.34049973, 0.08306678],
[0.26832781, 0.33654334, 0.08310564],
[0.26707080, 0.33259648, 0.08312079],
[0.26577808, 0.32865896, 0.08311205],
[0.26445049, 0.32473055, 0.08307943],
[0.26308876, 0.32081105, 0.08302283],
[0.26169362, 0.31690026, 0.08294219],
[0.26026570, 0.31299798, 0.08283742],
[0.25880553, 0.30910406, 0.08270826],
[0.25731388, 0.30521825, 0.08255493],
[0.25579133, 0.30134033, 0.08237742],
[0.25423833, 0.29747015, 0.08217546],
[0.25265547, 0.29360749, 0.08194916],
[0.25104334, 0.28975212, 0.08169866],
[0.24940226, 0.28590390, 0.08142361],
[0.24773284, 0.28206257, 0.08112428],
[0.24603549, 0.27822793, 0.08080061],
[0.24431057, 0.27439981, 0.08045246],
[0.24255862, 0.27057794, 0.08008011],
[0.24077983, 0.26676218, 0.07968318],
[0.23897479, 0.26295224, 0.07926209],
[0.23714362, 0.25914798, 0.07881643],
[0.23528689, 0.25534911, 0.07834662],
[0.23340468, 0.25155548, 0.07785225],
[0.23149749, 0.24776679, 0.07733371],
[0.22956545, 0.24398289, 0.07679069],
[0.22760892, 0.24020347, 0.07622341],
[0.22562811, 0.23642835, 0.07563177],
[0.22362324, 0.23265727, 0.07501578],
[0.22159459, 0.22888997, 0.07437557],
[0.21954224, 0.22512625, 0.07371087],
[0.21746649, 0.22136579, 0.07302195],
[0.21536746, 0.21760838, 0.07230872],
[0.21324527, 0.21385374, 0.07157112],
[0.21110013, 0.21010159, 0.07080928],
[0.20893210, 0.20635167, 0.07002313],
[0.20674127, 0.20260369, 0.06921265],
[0.20452777, 0.19885735, 0.06837794],
[0.20229165, 0.19511235, 0.06751901],
[0.20003290, 0.19136842, 0.06663574],
[0.19775160, 0.18762521, 0.06572831],
[0.19544775, 0.18388240, 0.06479672],
[0.19312130, 0.18013969, 0.06384095],
[0.19077219, 0.17639672, 0.06286104],
[0.18840038, 0.17265315, 0.06185709],
[0.18600578, 0.16890862, 0.06082917],
[0.18358823, 0.16516277, 0.05977729],
[0.18114755, 0.16141524, 0.05870151],
[0.17868358, 0.15766562, 0.05760198],
[0.17619607, 0.15391353, 0.05647879],
[0.17368476, 0.15015856, 0.05533204],
[0.17114930, 0.14640032, 0.05416183],
[0.16858932, 0.14263837, 0.05296828],
[0.16600443, 0.13887229, 0.05175158],
[0.16339415, 0.13510163, 0.05051188],
[0.16075793, 0.13132595, 0.04924932],
[0.15809517, 0.12754478, 0.04796409],
[0.15540519, 0.12375769, 0.04665629],
[0.15268725, 0.11996418, 0.04532612],
[0.14994054, 0.11616378, 0.04397374],
[0.14716414, 0.11235600, 0.04259926],
[0.14435705, 0.10854034, 0.04120279],
[0.14151819, 0.10471630, 0.03977633],
[0.13864637, 0.10088335, 0.03834104],
[0.13574030, 0.09704098, 0.03691515],
[0.13279861, 0.09318862, 0.03549973],
[0.12981983, 0.08932571, 0.03409578],
[0.12680239, 0.08545164, 0.03270415],
[0.12374463, 0.08156576, 0.03132562],
[0.12064478, 0.07766739, 0.02996087],
[0.11750102, 0.07375577, 0.02861046],
[0.11431142, 0.06983006, 0.02727485],
[0.11107399, 0.06588932, 0.02595442],
[0.10778667, 0.06193249, 0.02464948]]
# Create ListedColormap object for this colormap
cmap = ListedColormap(cm_data, name="cmr.waterlily", N=len(cm_data))
cmap_r = cmap.reversed()
# Register (reversed) cmap in MPL
register_cmap(cmap=cmap)
register_cmap(cmap=cmap_r)
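# Minimal usage sketch (the import path is inferred from this file's
# location; the data below is illustrative only):
#
# import numpy as np
# import matplotlib.pyplot as plt
# import cmasher.colormaps.waterlily.waterlily  # registers "cmr.waterlily"
#
# z = np.random.randn(64, 64)
# plt.imshow(z, cmap='cmr.waterlily', vmin=-3, vmax=3)  # diverging: centered at 0
# plt.colorbar()
# plt.show()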
| [
"[email protected]"
] | |
cd94f45eb0dc8a9695b7edda556ed08c23785c4f | 663d89c6d26b66673d2df136366dab6f36f17ee9 | /audiovisual/indico_audiovisual/blueprint.py | f281f0bd19160c861bf5b607ed314e0150b5c730 | [
"MIT"
] | permissive | rama270677/indico-plugins-cern | 1a0a421bd45ce3f8bcea60d04ab4edca92fc5421 | 4ab66be5d633f31922be1ee8fd9d3a0905610924 | refs/heads/master | 2022-12-01T04:44:28.861197 | 2020-08-21T14:35:13 | 2020-08-21T14:35:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 561 | py | # This file is part of the CERN Indico plugins.
# Copyright (C) 2014 - 2020 CERN
#
# The CERN Indico plugins are free software; you can redistribute
# them and/or modify them under the terms of the MIT License; see
# the LICENSE file for more details.
from __future__ import unicode_literals
from indico.core.plugins import IndicoPluginBlueprint
from indico_audiovisual.controllers import RHRequestList
blueprint = IndicoPluginBlueprint('audiovisual', __name__, url_prefix='/service/audiovisual')
blueprint.add_url_rule('/', 'request_list', RHRequestList)
| [
"[email protected]"
] | |
34f23e5d5803c1e8ef372ec6d8a00f6416b33083 | d5ba475a6a782b0eed5d134b66eb8c601c41421c | /terrascript/data/docker.py | e4f799274d97d85094ebb96f999803db056d4c25 | [
"BSD-2-Clause",
"Python-2.0"
] | permissive | amlodzianowski/python-terrascript | ab42a06a5167e53ad8093b656a9bf14a03cb031d | 142b1a4d1164d1012ac8865d12fdcc72f1e7ae75 | refs/heads/master | 2021-05-19T11:59:47.584554 | 2020-03-26T07:13:47 | 2020-03-26T07:13:47 | 251,688,045 | 0 | 0 | BSD-2-Clause | 2020-03-31T18:00:22 | 2020-03-31T18:00:22 | null | UTF-8 | Python | false | false | 225 | py | # terrascript/data/docker.py
import terrascript
class docker_registry_image(terrascript.Data):
pass
class docker_network(terrascript.Data):
pass
__all__ = [
"docker_registry_image",
"docker_network",
]
| [
"[email protected]"
] | |
62038fb02730d5e2937c4c3c32633477e176b0f2 | 66f1bfc0c12a7491dd4cb7b5767ba1f0e39d9f37 | /image_classification/utils/file_processing.py | 49b111c2eff61a391d905df091019195df2619b2 | [] | no_license | CaravanPassenger/pytorch-learning-notes | a3352c8715f17841f4cbf297f20c58f7f040ec03 | d3e44fad42809f23762c9028d8b1d478acf42ab2 | refs/heads/master | 2023-08-29T09:05:36.018059 | 2021-09-30T10:23:40 | 2021-09-30T10:23:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,337 | py | # -*-coding: utf-8 -*-
"""
@Project: IntelligentManufacture
@File : file_processing.py
@Author : panjq
@E-mail : [email protected]
@Date : 2019-02-14 15:08:19
"""
import glob
import os
import shutil
import json
import numpy as np
import pandas as pd
def read_json_data(json_path):
    # read data from a JSON file
    with open(json_path, 'r') as f:
        json_data = json.load(f)
    return json_data
def write_json_path(out_json_path, json_data):
    # write JSON data to a file
    with open(out_json_path, 'w') as f:
        json.dump(json_data, f)
def write_data(filename, content_list, mode='w'):
    """Save list[list[]] data to a txt file.
    :param filename: file name
    :param content_list: the data to save, type -> list
    :param mode: write mode: 'w' or 'a'
    :return: void
    """
    with open(filename, mode=mode, encoding='utf-8') as f:
        for line_list in content_list:
            # join the list into a single string
            line = " ".join('%s' % id for id in line_list)
            f.write(line + "\n")
def write_list_data(filename, list_data, mode='w'):
    """Save list[] data to a txt file, one element per line.
    :param filename: file name
    :param list_data: the data to save, type -> list
    :param mode: write mode: 'w' or 'a'
    :return: void
    """
    with open(filename, mode=mode, encoding='utf-8') as f:
        for line in list_data:
            # convert each element to a string and write it on its own line
            f.write(str(line) + "\n")
def read_data(filename, split=" ", convertNum=True):
    """
    Read data from a txt file.
    :param filename: file name
    :param split: delimiter
    :param convertNum: whether to convert the strings in the list to int/float numbers
    :return: list of rows read from the txt file
    Python has three functions for trimming leading/trailing characters and whitespace:
    strip : removes leading and trailing characters/whitespace (\n, \r, \t, ' ')
    lstrip: removes leading characters/whitespace
    rstrip: removes trailing characters/whitespace
    Note: these only trim the head and tail of a string, never the middle.
"""
    with open(filename, mode="r", encoding='utf-8') as f:
content_list = f.readlines()
if split is None:
content_list = [content.rstrip() for content in content_list]
return content_list
else:
content_list = [content.rstrip().split(split) for content in content_list]
if convertNum:
                for i, line in enumerate(content_list):
                    line_data = []
                    for l in line:
                        if is_int(l):  # True if the string is an integer (isdigit only covers integers)
                            line_data.append(int(l))
                        elif is_float(l):  # True if the string is a decimal number
                            line_data.append(float(l))
                        else:
                            line_data.append(l)
                    content_list[i] = line_data
return content_list
def is_int(str):
    # check whether the string is an integer
try:
x = int(str)
return isinstance(x, int)
except ValueError:
return False
def is_float(str):
    # check whether the string is an integer or a float
try:
x = float(str)
return isinstance(x, float)
except ValueError:
return False
def list2str(content_list):
    content_str_list = []
for line_list in content_list:
line_str = " ".join('%s' % id for id in line_list)
content_str_list.append(line_str)
return content_str_list
def get_images_list(image_dir, postfix=['*.jpg'], basename=False):
    '''
    Get a list of image files.
    :param image_dir: image directory
    :param postfix: suffix patterns, possibly several, e.g. ['*.jpg','*.png']
    :param basename: return file names only (True) or full file paths (False)
    :return:
'''
    images_list = []
    for format in postfix:
        image_format = os.path.join(image_dir, format)
        image_list = glob.glob(image_format)
        if not image_list == []:
            images_list += image_list
    images_list = sorted(images_list)
    if basename:
        images_list = get_basename(images_list)
    return images_list
def get_basename(file_list):
    dest_list = []
    for file_path in file_list:
        basename = os.path.basename(file_path)
        dest_list.append(basename)
    return dest_list
def copyfile(srcfile, dstfile):
    if not os.path.isfile(srcfile):
        print("%s not exist!" % (srcfile))
    else:
        fpath, fname = os.path.split(dstfile)  # split into directory and file name
        if not os.path.exists(fpath):
            os.makedirs(fpath)  # create the directory
        shutil.copyfile(srcfile, dstfile)  # copy the file
        # print("copy %s -> %s"%( srcfile,dstfile))
def create_dir(parent_dir, dir1=None, filename=None):
    out_path = parent_dir
    if dir1:
        out_path = os.path.join(parent_dir, dir1)
    if not os.path.exists(out_path):
        os.makedirs(out_path)
    if filename:
        out_path = os.path.join(out_path, filename)
    return out_path
def create_file_path(filename):
    basename = os.path.basename(filename)
    dirname = os.path.dirname(filename)
    create_dir(dirname, dir1=None, filename=basename)
def merge_list(data1, data2):
'''
    Merge two lists element-wise.
    :param data1:
    :param data2:
    :return: the merged list
'''
if not len(data1) == len(data2):
return
all_data = []
for d1, d2 in zip(data1, data2):
all_data.append(d1 + d2)
return all_data
def split_list(data, split_index=1):
'''
    Split data into two parts.
    :param data: list
    :param split_index: position at which to split
:return:
'''
data1 = []
data2 = []
for d in data:
d1 = d[0:split_index]
d2 = d[split_index:]
data1.append(d1)
data2.append(d2)
return data1, data2
def getFilePathList(file_dir):
'''
    Get all file paths under file_dir, including files in subdirectories.
    :param file_dir:
:return:
'''
filePath_list = []
for walk in os.walk(file_dir):
part_filePath_list = [os.path.join(walk[0], file) for file in walk[2]]
filePath_list.extend(part_filePath_list)
return filePath_list
def get_files_list(file_dir, postfix=None):
    '''
    Get all files under file_dir whose suffix matches postfix, including subdirectories.
    :param file_dir:
    :param postfix: e.g. ['*.jpg','*.png']; postfix=None means all files
    :return:
    '''
    file_list = []
    filePath_list = getFilePathList(file_dir)
    if postfix is None:
        file_list = filePath_list
    else:
        postfix = [p.split('.')[-1] for p in postfix]
        for file in filePath_list:
            basename = os.path.basename(file)  # file name part of the path
            postfix_name = basename.split('.')[-1]
            if postfix_name in postfix:
file_list.append(file)
file_list.sort()
return file_list
def get_files_labels(files_dir, postfix=None):
    '''
    Get all file paths under files_dir together with their labels; each label is
    the name of the file's parent directory, i.e. one directory per class.
    :param files_dir:
    :postfix: suffix filter
    :return: filePath_list (all file paths), label_list (corresponding labels)
'''
# filePath_list = getFilePathList(files_dir)
    filePath_list = get_files_list(files_dir, postfix=postfix)
    # print("files nums:{}".format(len(filePath_list)))
    # collect the label of every sample
label_list = []
for filePath in filePath_list:
label = filePath.split(os.sep)[-2]
label_list.append(label)
labels_set = list(set(label_list))
    # print("labels:{}".format(labels_set))
    # label frequency counts
# print(pd.value_counts(label_list))
return filePath_list, label_list
def decode_label(label_list, name_table):
    '''
    Decode labels into names using name_table.
:param label_list:
:param name_table:
:return:
'''
    name_list = []
for label in label_list:
name = name_table[label]
name_list.append(name)
return name_list
def encode_label(name_list, name_table, unknow=0):
    '''
    Encode names into labels using name_table.
    :param name_list:
    :param name_table:
    :param unknow: label assigned to unknown names, 0 by default; usually index 0 of name_table is the background class, so unknown names are also treated as background
:return:
'''
    label_list = []
for name in name_list:
if name in name_table:
index = name_table.index(name)
else:
index = unknow
label_list.append(index)
return label_list
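# Minimal usage sketch for the label helpers (the names are illustrative):
#
# name_table = ['background', 'cat', 'dog']
# labels = encode_label(['dog', 'cat', 'bird'], name_table)  # -> [2, 1, 0]
# names = decode_label(labels, name_table)  # -> ['dog', 'cat', 'background']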
def print_dict(dict_data, save_path):
    list_config = []
    for key in dict_data:
        info = "conf.{}={}".format(key, dict_data[key])
        print(info)
        list_config.append(info)
    if save_path is not None:
        with open(save_path, "w") as f:
            for info in list_config:
                f.writelines(info + "\n")
if __name__ == '__main__':
    filename = 'test.txt'
    w_data = [['1.jpg', 'dog', 200, 300, 1.0], ['2.jpg', 'dog', 20, 30, -2]]
    print("w_data=", w_data)
    write_data(filename, w_data, mode='w')
r_data = read_data(filename)
print('r_data=', r_data)
| [
"[email protected]"
] | |
b8a56eb177c481f347143f82e27e2e8e2fac50ec | 4d790ddf9f1432f5b682009c0e4e953e7443c768 | /Gardener/python/variables/allBtagPogScaleFactorsICHEP.py | 9fa3911f38b57d39c2cc82b63f036ad6bae798c2 | [] | no_license | pmatorras/LatinoAnalysis | cc6d80ce18efd4ef2d99548477a0498f811b0cca | 4b34d0d4f473088c30db3cd5a00ffece7127a3e8 | refs/heads/master | 2023-07-20T06:02:48.097585 | 2019-09-13T09:25:56 | 2019-09-13T09:25:56 | 210,785,511 | 1 | 0 | null | 2019-09-25T07:43:46 | 2019-09-25T07:43:46 | null | UTF-8 | Python | false | false | 41,154 | py | import optparse
import numpy
import ROOT
import os.path
from LatinoAnalysis.Gardener.gardening import TreeCloner
#from HWWAnalysis.ShapeAnalysis.triggerEffCombiner import TriggerEff
#___. __ __________ _________ ___________ __
#\_ |___/ |______ ____\______ \____ ____ / _____/ ____ _____ _______ ____\_ _____/____ _____/ |_ ___________ ______
# | __ \ __\__ \ / ___\| ___/ _ \ / ___\\_____ \_/ ___\\__ \\_ __ \_/ __ \| __) \__ \ _/ ___\ __\/ _ \_ __ \/ ___/
# | \_\ \ | / __ \_/ /_/ > | ( <_> ) /_/ > \ \___ / __ \| | \/\ ___/| \ / __ \\ \___| | ( <_> ) | \/\___ \
# |___ /__| (____ /\___ /|____| \____/\___ /_______ /\___ >____ /__| \___ >___ / (____ /\___ >__| \____/|__| /____ >
# \/ \//_____/ /_____/ \/ \/ \/ \/ \/ \/ \/ \/
class allBtagPogScaleFactorsICHEP(TreeCloner):
def __init__(self):
pass
def __del__(self):
pass
def help(self):
return '''Add a scale factor derived according to POG recommendations, method 1a in https://twiki.cern.ch/twiki/bin/view/CMS/BTagSFMethods#1a_Event_reweighting_using_scale'''
def addOptions(self,parser):
description = self.help()
group = optparse.OptionGroup(parser,self.label, description)
group.add_option('-c','--cmssw',dest='cmssw',help='cmssw version req for met vars',default='763')
return group
#def checkOptions(self,opts):
# pass
def checkOptions(self,opts):
#def _readSF (self):
#ROOT.gSystem.Load('libCondFormatsBTagObjects')
cmssw_base = os.getenv('CMSSW_BASE')
effFile = "data/efficiencyMCFile76X_all.py"
if opts.cmssw == "ICHEP2016":
effFile = "data/efficiencyMCFile80X_all.py"
efficienciesMC_CMVA = {}
efficienciesMC_CSV = {}
efffile_path = cmssw_base+'/src/LatinoAnalysis/Gardener/python/'+effFile
if effFile == None :
print " Please provide an input file with the MC efficiencies "
elif os.path.exists(efffile_path) :
handle = open(efffile_path,'r')
exec(handle)
handle.close()
else:
print "cannot find file", effFile
self.efficiencyMC_CMVA = efficienciesMC_CMVA
self.efficiencyMC_CSV = efficienciesMC_CSV
self.minpt = 20
self.maxpt = 290
self.mineta = 0
self.maxeta = 2.4
wpl = 0
wpm = 1
wpt = 2
wps = 3
#compile code to read scale factors
self.cmssw = opts.cmssw
self.cmvaSfFile = 'cMVAv2.csv'
self.csvSfFile = 'CSVv2.csv'
if self.cmssw == "ICHEP2016":
self.cmvaSfFile = "cMVAv2_ICHEP2016.csv"
self.csvSfFile = "CSVv2_ICHEP2016.csv"
#ROOT.gROOT.ProcessLine(".L "+cmssw_base+'/src/LatinoAnalysis/Gardener/python/variables/BTagCalibrationStandaloneStandalone.cc+')
try:
ROOT.gROOT.LoadMacro(cmssw_base+'/src/LatinoAnalysis/Gardener/python/variables/BTagCalibrationStandalone.cc+g')
except RuntimeError:
ROOT.gROOT.LoadMacro(cmssw_base+'/src/LatinoAnalysis/Gardener/python/variables/BTagCalibrationStandalone.cc++g')
#ROOT.gROOT.ProcessLine('.L BTagCalibrationStandaloneStandalone.cc+')
print "CMVA scale factors from", cmssw_base+'/src/LatinoAnalysis/Gardener/python/data/'+self.cmvaSfFile
print "CSVv2 scale factors from", cmssw_base+'/src/LatinoAnalysis/Gardener/python/data/'+self.csvSfFile
### Readers for cMVAv2 re-shaping (1 nominal + 9 Up variations + 9 Down variations)
self.calibCMVA = ROOT.BTagCalibrationStandalone("cMVAv2", cmssw_base+'/src/LatinoAnalysis/Gardener/python/data/'+self.cmvaSfFile)
if self.cmssw != "ICHEP2016":
self.readerCentralCMVAshape = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "central")
self.readerCentralCMVAshape_up_jes = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "up_jes")
self.readerCentralCMVAshape_down_jes = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "down_jes")
self.readerCentralCMVAshape_up_lf = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "up_lf")
self.readerCentralCMVAshape_down_lf = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "down_lf")
self.readerCentralCMVAshape_up_hfstats1 = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "up_hfstats1")
self.readerCentralCMVAshape_down_hfstats1 = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "down_hfstats1")
self.readerCentralCMVAshape_up_hfstats2 = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "up_hfstats2")
self.readerCentralCMVAshape_down_hfstats2 = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "down_hfstats2")
self.readerCentralCMVAshape_up_cferr1 = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "up_cferr1")
self.readerCentralCMVAshape_down_cferr1 = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "down_cferr1")
self.readerCentralCMVAshape_up_cferr2 = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "up_cferr2")
self.readerCentralCMVAshape_down_cferr2 = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "down_cferr2")
self.readerCentralCMVAshape_up_hf = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "up_hf")
self.readerCentralCMVAshape_down_hf = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "down_hf")
self.readerCentralCMVAshape_up_lfstats1 = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "up_lfstats1")
self.readerCentralCMVAshape_down_lfstats1 = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "down_lfstats1")
self.readerCentralCMVAshape_up_lfstats2 = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "up_lfstats2")
self.readerCentralCMVAshape_down_lfstats2 = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, wps, "iterativefit", "down_lfstats2")
### Readers for CSVv2 re-shaping (1 nominal + 9 Up variations + 9 Down variations)
self.calibCSV = ROOT.BTagCalibrationStandalone("CSVv2", cmssw_base+'/src/LatinoAnalysis/Gardener/python/data/'+self.csvSfFile)
if self.cmssw != "ICHEP2016":
self.readerCentralCSVshape = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "central")
self.readerCentralCSVshape_up_jes = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "up_jes")
self.readerCentralCSVshape_down_jes = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "down_jes")
self.readerCentralCSVshape_up_lf = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "up_lf")
self.readerCentralCSVshape_down_lf = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "down_lf")
self.readerCentralCSVshape_up_hfstats1 = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "up_hfstats1")
self.readerCentralCSVshape_down_hfstats1 = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "down_hfstats1")
self.readerCentralCSVshape_up_hfstats2 = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "up_hfstats2")
self.readerCentralCSVshape_down_hfstats2 = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "down_hfstats2")
self.readerCentralCSVshape_up_cferr1 = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "up_cferr1")
self.readerCentralCSVshape_down_cferr1 = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "down_cferr1")
self.readerCentralCSVshape_up_cferr2 = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "up_cferr2")
self.readerCentralCSVshape_down_cferr2 = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "down_cferr2")
self.readerCentralCSVshape_up_hf = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "up_hf")
self.readerCentralCSVshape_down_hf = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "down_hf")
self.readerCentralCSVshape_up_lfstats1 = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "up_lfstats1")
self.readerCentralCSVshape_down_lfstats1 = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "down_lfstats1")
self.readerCentralCSVshape_up_lfstats2 = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "up_lfstats2")
self.readerCentralCSVshape_down_lfstats2 = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, wps, "iterativefit", "down_lfstats2")
### Readers for CMVA and CSV working point based
self.wps = ["L", "M", "T"]
self.taggers=["CMVA", "CSV"]
self.flavors=["udsg", "bc"]
self.variations=["central", "up", "down"]
self.readers = {}
self.readers["CMVA"]={}
self.readers["CSV"]={}
for iwp,wp in enumerate(self.wps):
self.readers["CMVA"][wp] = {}
self.readers["CSV"][wp] = {}
for flavor in self.flavors:
self.readers["CMVA"][wp][flavor] = {}
self.readers["CSV"][wp][flavor] = {}
if flavor == "bc":
sampleCMVA = "ttbar"
sampleCSV = "mujets"
else:
sampleCMVA = "incl"
sampleCSV ="incl"
if self.cmssw == "ICHEP2016":
sampleCMVA = "hww"
sampleCSV = "hww"
for variation in self.variations:
self.readers["CMVA"][wp][flavor][variation] = ROOT.BTagCalibrationStandaloneReader(self.calibCMVA, iwp, sampleCMVA, variation)
self.readers["CSV"][wp][flavor][variation] = ROOT.BTagCalibrationStandaloneReader(self.calibCSV, iwp, sampleCSV, variation)
def _getEffMC (self, algo, wp, kindJet, pt, eta):
# fix underflow and overflow
if pt < self.minpt:
pt = self.minpt
if pt > self.maxpt:
pt = self.maxpt
if eta < self.mineta:
eta = self.mineta
if eta > self.maxeta:
eta = self.maxeta
if not (wp=='L' or wp=='M' or wp=='T'):
print "ERROR: wp ", wp, " do not exist or the format is wrong. Please provide a correct wp."
print "Available wps are 'L', 'M' or 'T'."
if algo == "CMVA":
if (kindJet,wp) in self.efficiencyMC_CMVA.keys() :
# get the efficiency
for point in self.efficiencyMC_CMVA[(kindJet,wp)] :
# pt eta eff
# (( 0.0, 10.0), (0.0, 1.5), 0.980 ),
if ( pt >= point[0][0] and pt < point[0][1] and
eta >= point[1][0] and eta < point[1][1] ) :
return point[2]
# default ... it should never happen!
print " default ???", pt, eta, kindJet
return 1.0
elif algo == "CSV":
if (kindJet,wp) in self.efficiencyMC_CSV.keys() :
# get the efficiency
for point in self.efficiencyMC_CSV[(kindJet,wp)] :
# pt eta eff
# (( 0.0, 10.0), (0.0, 1.5), 0.980 ),
if ( pt >= point[0][0] and pt < point[0][1] and
eta >= point[1][0] and eta < point[1][1] ) :
#print "kindJet, wp = ", kindJet, " ", wp
#print "pt, eta, SF = ", pt, " ", eta, " ", point[2]
return point[2]
# default ... it should never happen!
print " default ???", pt, eta, kindJet
return 1.0
else:
print "ERROR: algo ", algo, " is not available. Please specify a correct algo."
print "Available algos are 'CMVA' and 'CSV'."
        # unknown jet flavor or algo ... fall back to a neutral default
return 1.0
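    # Method 1a reference (sketch): the per-event weight accumulated via the
    # pMC/pData counters follows the POG recipe linked in help(); the jet
    # loop itself is omitted here:
    #
    #   P(MC)   = prod_{tagged} eff_i        * prod_{untagged} (1 - eff_i)
    #   P(Data) = prod_{tagged} SF_i * eff_i * prod_{untagged} (1 - SF_i * eff_i)
    #   weight  = P(Data) / P(MC)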
def resetCounters(self):
for tagger in self.taggers:
for wp in self.wps:
self.pMC[tagger][wp] = 1.
for variation in self.variations:
self.pData[tagger][wp][variation]["undef"]=1.
if variation != "central":
for flavor in self.flavors:
self.pData[tagger][wp][variation][flavor]=1.
def process(self,**kwargs):
tree = kwargs['tree']
input = kwargs['input']
output = kwargs['output']
self.connect(tree,input)
#self._readSF()
branchlist = ["bPogSF", "bPogSFUp", "bPogSFDown",
"bPogSF_CMVAreshape",
"bPogSF_CMVAreshape_up_jes", "bPogSF_CMVAreshape_down_jes",
"bPogSF_CMVAreshape_up_lf", "bPogSF_CMVAreshape_down_lf",
"bPogSF_CMVAreshape_up_hf", "bPogSF_CMVAreshape_down_hf",
"bPogSF_CMVAreshape_up_hfstats1", "bPogSF_CMVAreshape_down_hfstats1",
"bPogSF_CMVAreshape_up_hfstats2", "bPogSF_CMVAreshape_down_hfstats2",
"bPogSF_CMVAreshape_up_lfstats1", "bPogSF_CMVAreshape_down_lfstats1",
"bPogSF_CMVAreshape_up_lfstats2", "bPogSF_CMVAreshape_down_lfstats2",
"bPogSF_CMVAreshape_up_cferr1", "bPogSF_CMVAreshape_down_cferr1",
"bPogSF_CMVAreshape_up_cferr2", "bPogSF_CMVAreshape_down_cferr2",
"bPogSF_CSVreshape",
"bPogSF_CSVreshape_up_jes", "bPogSF_CSVreshape_down_jes",
"bPogSF_CSVreshape_up_lf", "bPogSF_CSVreshape_down_lf",
"bPogSF_CSVreshape_up_hf", "bPogSF_CSVreshape_down_hf",
"bPogSF_CSVreshape_up_hfstats1", "bPogSF_CSVreshape_down_hfstats1",
"bPogSF_CSVreshape_up_hfstats2", "bPogSF_CSVreshape_down_hfstats2",
"bPogSF_CSVreshape_up_lfstats1", "bPogSF_CSVreshape_down_lfstats1",
"bPogSF_CSVreshape_up_lfstats2", "bPogSF_CSVreshape_down_lfstats2",
"bPogSF_CSVreshape_up_cferr1", "bPogSF_CSVreshape_down_cferr1",
"bPogSF_CSVreshape_up_cferr2", "bPogSF_CSVreshape_down_cferr2",
]
for tagger in self.taggers:
for wp in self.wps:
for variation in self.variations:
suffix = "_"+variation
if variation == "central":
suffix = ""
namebranch = 'bPogSF_'+tagger+wp+suffix
branchlist.append(namebranch)
if variation != "central":
for flavor in self.flavors:
suffix = "_"+flavor+"_"+variation
namebranch = 'bPogSF_'+tagger+wp+suffix
branchlist.append(namebranch)
self.clone(output, branchlist)
bPogSFAll = {}
self.pData = {}
self.pMC = {}
for tagger in self.taggers:
bPogSFAll[tagger]={}
self.pData[tagger]={}
self.pMC[tagger]={}
for wp in self.wps:
bPogSFAll[tagger][wp]={}
self.pMC[tagger][wp] = 1.
self.pData[tagger][wp]={}
for variation in self.variations:
bPogSFAll[tagger][wp][variation]={}
self.pData[tagger][wp][variation]={}
                #"undef" holds the correlated variations, independent of jet flavor
bPogSFAll[tagger][wp][variation]["undef"] = numpy.ones(1, dtype=numpy.float32)
self.pData[tagger][wp][variation]["undef"]=1.
suffix = "_"+variation
if variation == "central":
suffix = ""
namebranch = 'bPogSF_'+tagger+wp+suffix
self.otree.Branch(namebranch, bPogSFAll[tagger][wp][variation]["undef"], namebranch+"/F")
if variation != "central":
for flavor in self.flavors:
bPogSFAll[tagger][wp][variation][flavor] = numpy.ones(1, dtype=numpy.float32)
self.pData[tagger][wp][variation][flavor] = 1.
suffix = "_"+flavor+"_"+variation
namebranch = 'bPogSF_'+tagger+wp+suffix
self.otree.Branch(namebranch, bPogSFAll[tagger][wp][variation][flavor], namebranch+"/F")
bPogSF = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF',bPogSF,'bPogSF/F')
bPogSFUp = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSFUp',bPogSFUp,'bPogSFUp/F')
bPogSFDown = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSFDown',bPogSFDown,'bPogSFDown/F')
if self.cmssw != "ICHEP2016":
# Re-shaping weights for cMVAv2
bPogSF_CMVAreshape = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape',bPogSF_CMVAreshape,'bPogSF_CMVAreshape/F')
bPogSF_CMVAreshape_up_jes = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_up_jes',bPogSF_CMVAreshape_up_jes,'bPogSF_CMVAreshape_up_jes/F')
bPogSF_CMVAreshape_down_jes = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_down_jes',bPogSF_CMVAreshape_down_jes,'bPogSF_CMVAreshape_down_jes/F')
bPogSF_CMVAreshape_up_lf = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_up_lf',bPogSF_CMVAreshape_up_lf,'bPogSF_CMVAreshape_up_lf/F')
bPogSF_CMVAreshape_down_lf = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_down_lf',bPogSF_CMVAreshape_down_lf,'bPogSF_CMVAreshape_down_lf/F')
bPogSF_CMVAreshape_up_hf = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_up_hf',bPogSF_CMVAreshape_up_hf,'bPogSF_CMVAreshape_up_hf/F')
bPogSF_CMVAreshape_down_hf = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_down_hf',bPogSF_CMVAreshape_down_hf,'bPogSF_CMVAreshape_down_hf/F')
bPogSF_CMVAreshape_up_hfstats1 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_up_hfstats1',bPogSF_CMVAreshape_up_hfstats1,'bPogSF_CMVAreshape_up_hfstats1/F')
bPogSF_CMVAreshape_down_hfstats1 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_down_hfstats1',bPogSF_CMVAreshape_down_hfstats1,'bPogSF_CMVAreshape_down_hfstats1/F')
bPogSF_CMVAreshape_up_hfstats2 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_up_hfstats2',bPogSF_CMVAreshape_up_hfstats2,'bPogSF_CMVAreshape_up_hfstats2/F')
bPogSF_CMVAreshape_down_hfstats2 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_down_hfstats2',bPogSF_CMVAreshape_down_hfstats2,'bPogSF_CMVAreshape_down_hfstats2/F')
bPogSF_CMVAreshape_up_lfstats1 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_up_lfstats1',bPogSF_CMVAreshape_up_lfstats1,'bPogSF_CMVAreshape_up_lfstats1/F')
bPogSF_CMVAreshape_down_lfstats1 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_down_lfstats1',bPogSF_CMVAreshape_down_lfstats1,'bPogSF_CMVAreshape_down_lfstats1/F')
bPogSF_CMVAreshape_up_lfstats2 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_up_lfstats2',bPogSF_CMVAreshape_up_lfstats2,'bPogSF_CMVAreshape_up_lfstats2/F')
bPogSF_CMVAreshape_down_lfstats2 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_down_lfstats2',bPogSF_CMVAreshape_down_lfstats2,'bPogSF_CMVAreshape_down_lfstats2/F')
bPogSF_CMVAreshape_up_cferr1 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_up_cferr1',bPogSF_CMVAreshape_up_cferr1,'bPogSF_CMVAreshape_up_cferr1/F')
bPogSF_CMVAreshape_down_cferr1 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_down_cferr1',bPogSF_CMVAreshape_down_cferr1,'bPogSF_CMVAreshape_down_cferr1/F')
bPogSF_CMVAreshape_up_cferr2 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_up_cferr2',bPogSF_CMVAreshape_up_cferr2,'bPogSF_CMVAreshape_up_cferr2/F')
bPogSF_CMVAreshape_down_cferr2 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CMVAreshape_down_cferr2',bPogSF_CMVAreshape_down_cferr2,'bPogSF_CMVAreshape_down_cferr2/F')
# Re-shaping weights for CSVv2
bPogSF_CSVreshape = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape',bPogSF_CSVreshape,'bPogSF_CSVreshape/F')
bPogSF_CSVreshape_up_jes = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_up_jes',bPogSF_CSVreshape_up_jes,'bPogSF_CSVreshape_up_jes/F')
bPogSF_CSVreshape_down_jes = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_down_jes',bPogSF_CSVreshape_down_jes,'bPogSF_CSVreshape_down_jes/F')
bPogSF_CSVreshape_up_lf = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_up_lf',bPogSF_CSVreshape_up_lf,'bPogSF_CSVreshape_up_lf/F')
bPogSF_CSVreshape_down_lf = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_down_lf',bPogSF_CSVreshape_down_lf,'bPogSF_CSVreshape_down_lf/F')
bPogSF_CSVreshape_up_hf = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_up_hf',bPogSF_CSVreshape_up_hf,'bPogSF_CSVreshape_up_hf/F')
bPogSF_CSVreshape_down_hf = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_down_hf',bPogSF_CSVreshape_down_hf,'bPogSF_CSVreshape_down_hf/F')
bPogSF_CSVreshape_up_hfstats1 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_up_hfstats1',bPogSF_CSVreshape_up_hfstats1,'bPogSF_CSVreshape_up_hfstats1/F')
bPogSF_CSVreshape_down_hfstats1 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_down_hfstats1',bPogSF_CSVreshape_down_hfstats1,'bPogSF_CSVreshape_down_hfstats1/F')
bPogSF_CSVreshape_up_hfstats2 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_up_hfstats2',bPogSF_CSVreshape_up_hfstats2,'bPogSF_CSVreshape_up_hfstats2/F')
bPogSF_CSVreshape_down_hfstats2 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_down_hfstats2',bPogSF_CSVreshape_down_hfstats2,'bPogSF_CSVreshape_down_hfstats2/F')
bPogSF_CSVreshape_up_lfstats1 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_up_lfstats1',bPogSF_CSVreshape_up_lfstats1,'bPogSF_CSVreshape_up_lfstats1/F')
bPogSF_CSVreshape_down_lfstats1 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_down_lfstats1',bPogSF_CSVreshape_down_lfstats1,'bPogSF_CSVreshape_down_lfstats1/F')
bPogSF_CSVreshape_up_lfstats2 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_up_lfstats2',bPogSF_CSVreshape_up_lfstats2,'bPogSF_CSVreshape_up_lfstats2/F')
bPogSF_CSVreshape_down_lfstats2 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_down_lfstats2',bPogSF_CSVreshape_down_lfstats2,'bPogSF_CSVreshape_down_lfstats2/F')
bPogSF_CSVreshape_up_cferr1 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_up_cferr1',bPogSF_CSVreshape_up_cferr1,'bPogSF_CSVreshape_up_cferr1/F')
bPogSF_CSVreshape_down_cferr1 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_down_cferr1',bPogSF_CSVreshape_down_cferr1,'bPogSF_CSVreshape_down_cferr1/F')
bPogSF_CSVreshape_up_cferr2 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_up_cferr2',bPogSF_CSVreshape_up_cferr2,'bPogSF_CSVreshape_up_cferr2/F')
bPogSF_CSVreshape_down_cferr2 = numpy.ones(1, dtype=numpy.float32)
self.otree.Branch('bPogSF_CSVreshape_down_cferr2',bPogSF_CSVreshape_down_cferr2,'bPogSF_CSVreshape_down_cferr2/F')
nentries = self.itree.GetEntries()
print 'Total number of entries: ',nentries
# avoid dots to go faster
itree = self.itree
otree = self.otree
print '- Starting eventloop'
step = 5000
for i in xrange(nentries):
itree.GetEntry(i)
## print event count
if i > 0 and i%step == 0.:
print i,'events processed.'
self.resetCounters()
if self.cmssw != "ICHEP2016":
# CMVA reshaper
bPogSF_CMVAreshape[0] = 1.
bPogSF_CMVAreshape_up_jes[0] = 1.
bPogSF_CMVAreshape_down_jes[0] = 1.
bPogSF_CMVAreshape_up_lf[0] = 1.
bPogSF_CMVAreshape_down_lf[0] = 1.
bPogSF_CMVAreshape_up_hf[0] = 1.
bPogSF_CMVAreshape_down_hf[0] = 1.
bPogSF_CMVAreshape_up_hfstats1[0] = 1.
bPogSF_CMVAreshape_down_hfstats1[0] = 1.
bPogSF_CMVAreshape_up_hfstats2[0] = 1.
bPogSF_CMVAreshape_down_hfstats2[0] = 1.
bPogSF_CMVAreshape_up_lfstats1[0] = 1.
bPogSF_CMVAreshape_down_lfstats1[0] = 1.
bPogSF_CMVAreshape_up_lfstats2[0] = 1.
bPogSF_CMVAreshape_down_lfstats2[0] = 1.
bPogSF_CMVAreshape_up_cferr1[0] = 1.
bPogSF_CMVAreshape_down_cferr1[0] = 1.
bPogSF_CMVAreshape_up_cferr2[0] = 1.
bPogSF_CMVAreshape_down_cferr2[0] = 1.
bPogSF_CSVreshape[0] = 1.
bPogSF_CSVreshape_up_jes[0] = 1.
bPogSF_CSVreshape_down_jes[0] = 1.
bPogSF_CSVreshape_up_lf[0] = 1.
bPogSF_CSVreshape_down_lf[0] = 1.
bPogSF_CSVreshape_up_hf[0] = 1.
bPogSF_CSVreshape_down_hf[0] = 1.
bPogSF_CSVreshape_up_hfstats1[0] = 1.
bPogSF_CSVreshape_down_hfstats1[0] = 1.
bPogSF_CSVreshape_up_hfstats2[0] = 1.
bPogSF_CSVreshape_down_hfstats2[0] = 1.
bPogSF_CSVreshape_up_lfstats1[0] = 1.
bPogSF_CSVreshape_down_lfstats1[0] = 1.
bPogSF_CSVreshape_up_lfstats2[0] = 1.
bPogSF_CSVreshape_down_lfstats2[0] = 1.
bPogSF_CSVreshape_up_cferr1[0] = 1.
bPogSF_CSVreshape_down_cferr1[0] = 1.
bPogSF_CSVreshape_up_cferr2[0] = 1.
bPogSF_CSVreshape_down_cferr2[0] = 1.
njet = 0
for iJet in xrange(len(itree.std_vector_jet_pt)) :
pt = itree.std_vector_jet_pt [iJet]
eta = itree.std_vector_jet_eta [iJet]
flavour = itree.std_vector_jet_HadronFlavour [iJet]
cmva = itree.std_vector_jet_cmvav2 [iJet]
csv = itree.std_vector_jet_csvv2ivf [iJet]
tagged = {}
tagged["CMVA"]={}
tagged["CMVA"]["L"] = itree.std_vector_jet_cmvav2 [iJet] > -0.715
tagged["CMVA"]["M"] = itree.std_vector_jet_cmvav2 [iJet] > 0.185
tagged["CMVA"]["T"] = itree.std_vector_jet_cmvav2 [iJet] > 0.875
tagged["CSV"]={}
tagged["CSV"]["L"] = itree.std_vector_jet_csvv2ivf [iJet] > 0.460
tagged["CSV"]["M"] = itree.std_vector_jet_csvv2ivf [iJet] > 0.800
tagged["CSV"]["T"] = itree.std_vector_jet_csvv2ivf [iJet] > 0.935
if pt > self.minpt and abs(eta) < self.maxeta:
                kindJet = 'b' # default: treat as b jet
idJet = 0
if abs (flavour) == 4 :
kindJet = 'c'
idJet = 1
elif abs (flavour) == 0 :
kindJet = 'l'
idJet = 2
elif flavour == 5:
kindJet = 'b'
idJet = 0
else:
print "BIG PROBLEM! Hadron Flavor is neither 0, 4 or 5"
#print "pt, eta, idJet, kindJet", pt, eta, idJet, kindJet
#print "~~~~~~~~~~~~~~~~ jet ", njet
if self.cmssw != "ICHEP2016":
sfCMVAshape = self.readerCentralCMVAshape.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape[0] *= sfCMVAshape
#print "CMVA : idJet = ", idJet, " pt = ", pt, " eta = ", eta, " cmva = ", cmva, " SF = ", sfCMVAshape, " weight = ", bPogSF_CMVAreshape[0]
sfCMVAshape_up_jes = self.readerCentralCMVAshape_up_jes.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_up_jes[0] *= sfCMVAshape_up_jes
sfCMVAshape_down_jes = self.readerCentralCMVAshape_down_jes.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_down_jes[0] *= sfCMVAshape_down_jes
#print "CMVA JES UP: idJet = ", idJet, " pt = ", pt, " eta = ", eta, " cmva = ", cmva, " SF = ", sfCMVAshape_up_jes, " weight = ", bPogSF_CMVAreshape_up_jes[0]
sfCMVAshape_up_lf = self.readerCentralCMVAshape_up_lf.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_up_lf[0] *= sfCMVAshape_up_lf
sfCMVAshape_down_lf = self.readerCentralCMVAshape_down_lf.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_down_lf[0] *= sfCMVAshape_down_lf
sfCMVAshape_up_hf = self.readerCentralCMVAshape_up_hf.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_up_hf[0] *= sfCMVAshape_up_hf
sfCMVAshape_down_hf = self.readerCentralCMVAshape_down_hf.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_down_hf[0] *= sfCMVAshape_down_hf
sfCMVAshape_up_hfstats1 = self.readerCentralCMVAshape_up_hfstats1.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_up_hfstats1[0] *= sfCMVAshape_up_hfstats1
sfCMVAshape_down_hfstats1 = self.readerCentralCMVAshape_down_hfstats1.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_down_hfstats1[0] *= sfCMVAshape_down_hfstats1
sfCMVAshape_up_hfstats2 = self.readerCentralCMVAshape_up_hfstats2.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_up_hfstats2[0] *= sfCMVAshape_up_hfstats2
sfCMVAshape_down_hfstats2 = self.readerCentralCMVAshape_down_hfstats2.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_down_hfstats2[0] *= sfCMVAshape_down_hfstats2
sfCMVAshape_up_lfstats1 = self.readerCentralCMVAshape_up_lfstats1.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_up_lfstats1[0] *= sfCMVAshape_up_lfstats1
sfCMVAshape_down_lfstats1 = self.readerCentralCMVAshape_down_lfstats1.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_down_lfstats1[0] *= sfCMVAshape_down_lfstats1
sfCMVAshape_up_lfstats2 = self.readerCentralCMVAshape_up_lfstats2.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_up_lfstats2[0] *= sfCMVAshape_up_lfstats2
sfCMVAshape_down_lfstats2 = self.readerCentralCMVAshape_down_lfstats2.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_down_lfstats2[0] *= sfCMVAshape_down_lfstats2
sfCMVAshape_up_cferr1 = self.readerCentralCMVAshape_up_cferr1.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_up_cferr1[0] *= sfCMVAshape_up_cferr1
sfCMVAshape_down_cferr1 = self.readerCentralCMVAshape_down_cferr1.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_down_cferr1[0] *= sfCMVAshape_down_cferr1
sfCMVAshape_up_cferr2 = self.readerCentralCMVAshape_up_cferr2.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_up_cferr2[0] *= sfCMVAshape_up_cferr2
sfCMVAshape_down_cferr2 = self.readerCentralCMVAshape_down_cferr2.evaluate(idJet, eta, pt, cmva)
bPogSF_CMVAreshape_down_cferr2[0] *= sfCMVAshape_down_cferr2
sfCSVshape = self.readerCentralCSVshape.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape[0] *= sfCSVshape
#print "CSV : idJet = ", idJet, " pt = ", pt, " eta = ", eta, " csv = ", csv, " SF = ", sfCSVshape, " weight = ", bPogSF_CSVreshape[0]
sfCSVshape_up_jes = self.readerCentralCSVshape_up_jes.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_up_jes[0] *= sfCSVshape_up_jes
sfCSVshape_down_jes = self.readerCentralCSVshape_down_jes.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_down_jes[0] *= sfCSVshape_down_jes
sfCSVshape_up_lf = self.readerCentralCSVshape_up_lf.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_up_lf[0] *= sfCSVshape_up_lf
sfCSVshape_down_lf = self.readerCentralCSVshape_down_lf.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_down_lf[0] *= sfCSVshape_down_lf
sfCSVshape_up_hf = self.readerCentralCSVshape_up_hf.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_up_hf[0] *= sfCSVshape_up_hf
sfCSVshape_down_hf = self.readerCentralCSVshape_down_hf.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_down_hf[0] *= sfCSVshape_down_hf
sfCSVshape_up_hfstats1 = self.readerCentralCSVshape_up_hfstats1.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_up_hfstats1[0] *= sfCSVshape_up_hfstats1
sfCSVshape_down_hfstats1 = self.readerCentralCSVshape_down_hfstats1.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_down_hfstats1[0] *= sfCSVshape_down_hfstats1
sfCSVshape_up_hfstats2 = self.readerCentralCSVshape_up_hfstats2.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_up_hfstats2[0] *= sfCSVshape_up_hfstats2
sfCSVshape_down_hfstats2 = self.readerCentralCSVshape_down_hfstats2.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_down_hfstats2[0] *= sfCSVshape_down_hfstats2
sfCSVshape_up_lfstats1 = self.readerCentralCSVshape_up_lfstats1.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_up_lfstats1[0] *= sfCSVshape_up_lfstats1
sfCSVshape_down_lfstats1 = self.readerCentralCSVshape_down_lfstats1.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_down_lfstats1[0] *= sfCSVshape_down_lfstats1
sfCSVshape_up_lfstats2 = self.readerCentralCSVshape_up_lfstats2.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_up_lfstats2[0] *= sfCSVshape_up_lfstats2
sfCSVshape_down_lfstats2 = self.readerCentralCSVshape_down_lfstats2.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_down_lfstats2[0] *= sfCSVshape_down_lfstats2
sfCSVshape_up_cferr1 = self.readerCentralCSVshape_up_cferr1.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_up_cferr1[0] *= sfCSVshape_up_cferr1
sfCSVshape_down_cferr1 = self.readerCentralCSVshape_down_cferr1.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_down_cferr1[0] *= sfCSVshape_down_cferr1
sfCSVshape_up_cferr2 = self.readerCentralCSVshape_up_cferr2.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_up_cferr2[0] *= sfCSVshape_up_cferr2
sfCSVshape_down_cferr2 = self.readerCentralCSVshape_down_cferr2.evaluate(idJet, eta, pt, csv)
bPogSF_CSVreshape_down_cferr2[0] *= sfCSVshape_down_cferr2
effMC = {}
sf = {}
if (idJet != 2):
thisflavor = "bc"
else:
thisflavor = "udsg"
#get the SF
for tagger in self.taggers:
effMC[tagger]={}
sf[tagger]={}
for wp in self.wps:
effMC[tagger][wp]=self._getEffMC(tagger, wp, kindJet, pt, abs(eta))
sf[tagger][wp]={}
for variation in self.variations:
# b/c
if (idJet != 2) :
sf[tagger][wp][variation] = self.readers[tagger][wp]["bc"][variation].evaluate(idJet, eta, pt)
if (pt < 30) and variation != "central":
#double the uncertainty for b/c jets below 30 GeV
sf[tagger][wp][variation] = 2*(self.readers[tagger][wp]["bc"][variation].evaluate(idJet, eta, pt) - \
sf[tagger][wp]["central"]) + \
sf[tagger][wp]["central"]
# udsg
else:
sf[tagger][wp][variation] = self.readers[tagger][wp]["udsg"][variation].evaluate(idJet, eta, pt)
#use the SF to determine event probabilities
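                    # Per-event weight (this matches the BTag POG "Method 1a" recipe):
                    #   P(MC)   = prod_tagged eff    * prod_untagged (1 - eff)
                    #   P(Data) = prod_tagged eff*SF * prod_untagged (1 - eff*SF)
                    #   weight  = P(Data) / P(MC), accumulated jet by jet below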
for tagger in self.taggers:
for wp in self.wps:
if tagged[tagger][wp]:
self.pMC[tagger][wp] = self.pMC[tagger][wp]*effMC[tagger][wp]
else:
self.pMC[tagger][wp] = self.pMC[tagger][wp]*(1.-effMC[tagger][wp])
for variation in self.variations:
if tagged[tagger][wp]:
self.pData[tagger][wp][variation]["undef"] = self.pData[tagger][wp][variation]["undef"]*effMC[tagger][wp]*sf[tagger][wp][variation]
else:
self.pData[tagger][wp][variation]["undef"] = self.pData[tagger][wp][variation]["undef"]*(1.-effMC[tagger][wp]*sf[tagger][wp][variation])
if variation != "central":
for flavor in self.flavors:
#if the flavor of this jet is the same as the flavor for which we are computing
#the variation, then we need the varied SF
#otherwise we take the central SF
if thisflavor == flavor:
flavorsf = sf[tagger][wp][variation]
else:
flavorsf = sf[tagger][wp]["central"]
if tagged[tagger][wp]:
self.pData[tagger][wp][variation][flavor] = self.pData[tagger][wp][variation][flavor]*effMC[tagger][wp]*flavorsf
else:
self.pData[tagger][wp][variation][flavor] = self.pData[tagger][wp][variation][flavor]*(1.-effMC[tagger][wp]*flavorsf)
njet += 1
#print "flavour, effMC, sf", flavour, effMC, sf
#print "pData, pMC", pData, pMC
#print "bPogSF_CMVAreshape[0] = ", bPogSF_CMVAreshape[0]
#print "bPogSF_CMVAreshape_up_jes[0] = ", bPogSF_CMVAreshape_up_jes[0]
for tagger in self.taggers:
for wp in self.wps:
for variation in self.variations:
bPogSFAll[tagger][wp][variation]["undef"][0] = self.pData[tagger][wp][variation]["undef"]/self.pMC[tagger][wp]
if variation != "central":
for flavor in self.flavors:
bPogSFAll[tagger][wp][variation][flavor][0] = self.pData[tagger][wp][variation][flavor]/self.pMC[tagger][wp]
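            # the legacy bPogSF / bPogSFUp / bPogSFDown branches mirror the
            # CMVA Loose working-point event weights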
bPogSF[0] = bPogSFAll["CMVA"]["L"]["central"]["undef"]
bPogSFUp[0] = bPogSFAll["CMVA"]["L"]["up"]["undef"]
bPogSFDown[0] = bPogSFAll["CMVA"]["L"]["down"]["undef"]
otree.Fill()
self.disconnect()
print '- Eventloop completed'
| [
"[email protected]"
] | |
f5ebb98bb66da12d55b669a91f4934411791b362 | 629f2bcdfb0902e013c16792184d4d809e40b775 | /notebooks/tests/group_images_by_cycle_for_panoramic_stitching/test_group_images_by_cycle.py | f438b251676b15c959701135a2187adec3264157 | [
"BSD-3-Clause"
] | permissive | neutronimaging/python_notebooks | 7d7a1df33300c4b952873efdfb358098a658896d | 70a43a76eaf08f4ac63db3df7fbfb2e5cdb1216e | refs/heads/next | 2023-08-30T20:05:20.225198 | 2023-07-05T16:38:10 | 2023-07-05T16:38:10 | 99,945,953 | 8 | 7 | BSD-3-Clause | 2022-11-03T12:03:30 | 2017-08-10T16:56:26 | Jupyter Notebook | UTF-8 | Python | false | false | 4,106 | py | from pathlib import Path
from notebooks.__code.group_images_by_cycle_for_panoramic_stitching.group_images_by_cycle import GroupImagesByCycle
import glob
class TestGroupImagesByCycle:
def setup_method(self):
data_path = Path(__file__).parent.parent
self.data_path = str(data_path)
tiff_path = Path(data_path) / 'data' / 'images' / 'tiff'
list_of_files = glob.glob(str(tiff_path) + '/*.tif')
list_of_files.sort()
self.list_of_files = list_of_files
full_tiff_path = Path(data_path) / 'data' / 'images' / 'data_with_acquisition_cycle'
full_list_of_files = glob.glob(str(full_tiff_path) + '/*.tif')
full_list_of_files.sort()
self.full_list_of_files = full_list_of_files
self.list_of_metadata_key = [65045, 65041]
def test_create_master_dictionary(self):
o_group = GroupImagesByCycle(list_of_files=self.list_of_files,
list_of_metadata_key=self.list_of_metadata_key)
o_group.create_master_dictionary()
dict_expected = {self.data_path + '/data/images/tiff/image001.tif': {
'MotLongAxis': '170.000000',
'MotLiftTable': '115.000000'},
self.data_path + '/data/images/tiff/image002.tif': {
'MotLongAxis': '135.000000',
'MotLiftTable': '115.000000'},
self.data_path + '/data/images/tiff/image003.tif': {
'MotLongAxis': '100.000000',
'MotLiftTable': '115.000000'},
self.data_path + '/data/images/tiff/image004.tif': {
'MotLongAxis': '100.000000',
'MotLiftTable': '70.000000'},
self.data_path + '/data/images/tiff/image005.tif': {
'MotLongAxis': '100.000000',
'MotLiftTable': '30.000000'},
self.data_path + '/data/images/tiff/image006.tif': {
'MotLongAxis': '135.000000',
'MotLiftTable': '30.000000'},
self.data_path + '/data/images/tiff/image007.tif': {
'MotLongAxis': '170.000000',
'MotLiftTable': '30.000000'},
self.data_path + '/data/images/tiff/image008.tif': {
'MotLongAxis': '170.000000',
'MotLiftTable': '70.000000'},
self.data_path + '/data/images/tiff/image009.tif': {
'MotLongAxis': '135.000000',
'MotLiftTable': '70.000000'},
}
dict_returned = o_group.master_dictionary
for _file in dict_expected.keys():
_expected = dict_expected[_file]
_returned = dict_returned[_file]
for _key in _expected.keys():
assert _expected[_key] == _returned[_key]
def test_group_dictionary(self):
o_group = GroupImagesByCycle(list_of_files=self.full_list_of_files,
list_of_metadata_key=self.list_of_metadata_key)
o_group.create_master_dictionary()
o_group.group()
assert len(o_group.dictionary_of_groups.keys()) == 3
expected_list_group0 = self.full_list_of_files[:9]
assert len(o_group.dictionary_of_groups[0]) == len(expected_list_group0)
for _file_returned, _file_expected in zip(o_group.dictionary_of_groups[0], expected_list_group0):
assert _file_expected == _file_returned
expected_list_group1 = self.full_list_of_files[9:18]
assert len(o_group.dictionary_of_groups[1]) == len(expected_list_group1)
for _file_returned, _file_expected in zip(o_group.dictionary_of_groups[1], expected_list_group1):
assert _file_expected == _file_returned
| [
"[email protected]"
] | |
294de16b645cac37a71d2da6cad69031b535576e | 274563cbc93b8dfb93eb574babc4ab5109a20de2 | /basic/simple_draw_text.py | b0220f9f159a97fc9bc3f69992c39cab7a2cb8b9 | [] | no_license | land-pack/opencv-example | ed349cc4196b017ecfadc20d3419542dbb580111 | bef764c31e7fb3aaaa91fdceddc4617d5c9baedd | refs/heads/master | 2021-09-12T19:42:41.570268 | 2018-04-20T07:01:55 | 2018-04-20T07:01:55 | 109,978,004 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 255 | py | import numpy as np
import cv2
img = np.zeros((512, 512, 3), np.uint8)
font = cv2.FONT_HERSHEY_SIMPLEX
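# putText args: image, text, bottom-left origin (x, y), fontFace, fontScale,
# color as (B, G, R), thickness, lineType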
cv2.putText(img, 'Frank AK', (10, 500), font, 4, (255, 255, 255), 2, cv2.LINE_AA)
cv2.imshow('Text', img)
cv2.waitKey(0)
cv2.destroyAllWindows()
| [
"[email protected]"
] | |
bc9e7e89d2e6baf54b81ba09c4a086e283f0e331 | d67bd00f8fe819bd3011ce154c19cbc765d59f1d | /branches/4.0_buildout/openlegis/sagl/skins/cadastros/auxiliares/tipo_norma_juridica/titulo_salvar_pysc.py | 74f5c0d14c877874fc08a3e7eefbbd31d9f42463 | [] | no_license | openlegis-br/sagl | 90f87bdbbaa8a6efe0ccb5691ea8424575288c46 | eabf7529eefe13a53ed088250d179a92218af1ed | refs/heads/master | 2023-08-31T12:29:39.382474 | 2023-08-29T16:12:01 | 2023-08-29T16:12:01 | 32,593,838 | 17 | 1 | null | 2023-08-29T06:16:55 | 2015-03-20T16:11:04 | Python | UTF-8 | Python | false | false | 376 | py | ## Script (Python) "titulo_salvar_proc"
##bind container=container
##bind context=context
##bind namespace=
##bind script=script
##bind subpath=traverse_subpath
##parameters=id, title
##title=
##
if hasattr(context.sapl_documentos.modelo.norma,id):
arquivo = getattr(context.sapl_documentos.modelo.norma,id)
arquivo.manage_changeProperties(title=title)
return title
| [
"[email protected]"
] | |
89659d4b65962b9ea76b4d78d503da8bc52d4d1e | 9fa490196c2f7b2e102ed1b3c512403a9a5655e3 | /src/examples/lookup_example.py | 518da3720badec9cef9a519de4d0c031e807af3f | [] | no_license | TRomijn/EMAworkbench | 742d29d997e05d8dce4150dc09207d2b1fe10e95 | 02a211f95c1e0a634aba1d1cadadbeba33b1e27e | refs/heads/master | 2021-01-18T20:49:47.501239 | 2017-03-10T13:31:00 | 2017-03-10T13:31:00 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,657 | py | '''
Created on Oct 1, 2012
This is a simple example of the lookup uncertainty provided for
use in conjunction with vensim models. This example is largely based on
`Eker et al. (2014) <http://onlinelibrary.wiley.com/doi/10.1002/sdr.1518/suppinfo>`_
@author: sibeleker
@author: jhkwakkel
'''
from __future__ import (division, unicode_literals, print_function,
absolute_import)
import matplotlib.pyplot as plt
from ema_workbench.em_framework import Outcome, ModelEnsemble
from ema_workbench.util import ema_logging
from ema_workbench.connectors.vensim import (LookupUncertainty,
VensimModel)
from ema_workbench.analysis.plotting import lines
from ema_workbench.analysis.plotting_util import BOXPLOT
class Burnout(VensimModel):
model_file = r'\BURNOUT.vpm'
outcomes = [Outcome('Accomplishments to Date', time=True),
Outcome('Energy Level', time=True),
Outcome('Hours Worked Per Week', time=True),
Outcome('accomplishments per hour', time=True)]
def __init__(self, working_directory, name):
super(Burnout, self).__init__(working_directory, name)
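        # Assumed semantics (after Eker et al. 2014; not verified against the
        # connector source): each tuple bounds one shape parameter of the named
        # lookup family ('hearne1', 'hearne2', 'approximation'), and the two
        # trailing scalars bound the y-range of the generated lookup curve.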
self.uncertainties = [LookupUncertainty('hearne2',[(-1, 3), (-2, 1), (0, 0.9), (0.1, 1), (0.99, 1.01), (0.99, 1.01)],
"accomplishments per hour lookup", self, 0, 1),
LookupUncertainty('hearne2', [(-0.75, 0.75), (-0.75, 0.75), (0, 1.5), (0.1, 1.6), (-0.3, 1.5), (0.25, 2.5)],
"fractional change in expectations from perceived adequacy lookup", self, -1, 1),
LookupUncertainty('hearne2', [(-2, 2), (-1, 2), (0, 1.5), (0.1, 1.6), (0.5, 2), (0.5, 2)],
"effect of perceived adequacy on energy drain lookup", self, 0, 10),
LookupUncertainty('hearne2', [(-2, 2), (-1, 2), (0, 1.5), (0.1, 1.6), (0.5, 1.5), (0.1, 2)],
"effect of perceived adequacy of hours worked lookup", self, 0, 2.5),
LookupUncertainty('hearne2', [(-1, 1), (-1, 1), (0, 0.9), (0.1, 1), (0.5, 1.5), (1, 1.5)],
"effect of energy levels on hours worked lookup", self, 0, 1.5),
LookupUncertainty('hearne2', [(-1, 1), (-1, 1), (0, 0.9), (0.1, 1), (0.5, 1.5), (1, 1.5)],
"effect of high energy on further recovery lookup", self, 0, 1.25),
LookupUncertainty('hearne2', [(-2, 2), (-1, 1), (0, 100), (20, 120), (0.5, 1.5), (0.5, 2)],
"effect of hours worked on energy recovery lookup", self, 0, 1.5),
LookupUncertainty('approximation', [(-0.5, 0.35), (3, 5), (1, 10), (0.2, 0.4), (0, 120)],
"effect of hours worked on energy drain lookup", self, 0, 3),
LookupUncertainty('hearne1', [(0, 1), (0, 0.15), (1, 1.5), (0.75, 1.25)],
"effect of low energy on further depletion lookup", self, 0, 1)]
self._delete_lookup_uncertainties()
if __name__ == "__main__":
ema_logging.log_to_stderr(ema_logging.INFO)
model = Burnout(r'./models/burnout', "burnout")
ensemble = ModelEnsemble()
ensemble.model_structures = model
#run policy with old cases
results = ensemble.perform_experiments(100)
lines(results, 'Energy Level', density=BOXPLOT)
plt.show()
| [
"[email protected]"
] | |
0684fd9bf23ea4983d08f5962ec01b548213fbe2 | 870639af1487cf59b548f56c9cd1a45928c1e2c2 | /homeassistant/components/fronius/__init__.py | f605e57bac24ef8b38185962242913b82a2bc039 | [
"Apache-2.0"
] | permissive | atmurray/home-assistant | 9f050944d26c084f8f21e8612a7b90c0ae909763 | 133cb2c3b0e782f063c8a30de4ff55a5c14b9b03 | refs/heads/dev | 2023-03-19T04:26:40.743852 | 2021-11-27T05:58:25 | 2021-11-27T05:58:25 | 234,724,430 | 2 | 0 | Apache-2.0 | 2023-02-22T06:18:36 | 2020-01-18T11:27:02 | Python | UTF-8 | Python | false | false | 8,197 | py | """The Fronius integration."""
from __future__ import annotations
import asyncio
from collections.abc import Callable
import logging
from typing import TypeVar
from pyfronius import Fronius, FroniusError
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import ATTR_MODEL, ATTR_SW_VERSION, CONF_HOST
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers import device_registry as dr
from homeassistant.helpers.aiohttp_client import async_get_clientsession
from homeassistant.helpers.entity import DeviceInfo
from .const import DOMAIN, SOLAR_NET_ID_SYSTEM, FroniusDeviceInfo
from .coordinator import (
FroniusCoordinatorBase,
FroniusInverterUpdateCoordinator,
FroniusLoggerUpdateCoordinator,
FroniusMeterUpdateCoordinator,
FroniusPowerFlowUpdateCoordinator,
FroniusStorageUpdateCoordinator,
)
_LOGGER = logging.getLogger(__name__)
PLATFORMS: list[str] = ["sensor"]
FroniusCoordinatorType = TypeVar("FroniusCoordinatorType", bound=FroniusCoordinatorBase)
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up fronius from a config entry."""
host = entry.data[CONF_HOST]
fronius = Fronius(async_get_clientsession(hass), host)
solar_net = FroniusSolarNet(hass, entry, fronius)
await solar_net.init_devices()
hass.data.setdefault(DOMAIN, {})[entry.entry_id] = solar_net
hass.config_entries.async_setup_platforms(entry, PLATFORMS)
# reload on config_entry update
entry.async_on_unload(entry.add_update_listener(async_update_entry))
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
solar_net = hass.data[DOMAIN].pop(entry.entry_id)
while solar_net.cleanup_callbacks:
solar_net.cleanup_callbacks.pop()()
return unload_ok
async def async_update_entry(hass: HomeAssistant, entry: ConfigEntry) -> None:
"""Update a given config entry."""
await hass.config_entries.async_reload(entry.entry_id)
class FroniusSolarNet:
"""The FroniusSolarNet class routes received values to sensor entities."""
def __init__(
self, hass: HomeAssistant, entry: ConfigEntry, fronius: Fronius
) -> None:
"""Initialize FroniusSolarNet class."""
self.hass = hass
self.cleanup_callbacks: list[Callable[[], None]] = []
self.config_entry = entry
self.coordinator_lock = asyncio.Lock()
self.fronius = fronius
self.host: str = entry.data[CONF_HOST]
# entry.unique_id is either logger uid or first inverter uid if no logger available
# prepended by "solar_net_" to have individual device for whole system (power_flow)
self.solar_net_device_id = f"solar_net_{entry.unique_id}"
self.system_device_info: DeviceInfo | None = None
self.inverter_coordinators: list[FroniusInverterUpdateCoordinator] = []
self.logger_coordinator: FroniusLoggerUpdateCoordinator | None = None
self.meter_coordinator: FroniusMeterUpdateCoordinator | None = None
self.power_flow_coordinator: FroniusPowerFlowUpdateCoordinator | None = None
self.storage_coordinator: FroniusStorageUpdateCoordinator | None = None
async def init_devices(self) -> None:
"""Initialize DataUpdateCoordinators for SolarNet devices."""
if self.config_entry.data["is_logger"]:
self.logger_coordinator = FroniusLoggerUpdateCoordinator(
hass=self.hass,
solar_net=self,
logger=_LOGGER,
name=f"{DOMAIN}_logger_{self.host}",
)
await self.logger_coordinator.async_config_entry_first_refresh()
# _create_solar_net_device uses data from self.logger_coordinator when available
self.system_device_info = await self._create_solar_net_device()
_inverter_infos = await self._get_inverter_infos()
for inverter_info in _inverter_infos:
coordinator = FroniusInverterUpdateCoordinator(
hass=self.hass,
solar_net=self,
logger=_LOGGER,
name=f"{DOMAIN}_inverter_{inverter_info.solar_net_id}_{self.host}",
inverter_info=inverter_info,
)
await coordinator.async_config_entry_first_refresh()
self.inverter_coordinators.append(coordinator)
self.meter_coordinator = await self._init_optional_coordinator(
FroniusMeterUpdateCoordinator(
hass=self.hass,
solar_net=self,
logger=_LOGGER,
name=f"{DOMAIN}_meters_{self.host}",
)
)
self.power_flow_coordinator = await self._init_optional_coordinator(
FroniusPowerFlowUpdateCoordinator(
hass=self.hass,
solar_net=self,
logger=_LOGGER,
name=f"{DOMAIN}_power_flow_{self.host}",
)
)
self.storage_coordinator = await self._init_optional_coordinator(
FroniusStorageUpdateCoordinator(
hass=self.hass,
solar_net=self,
logger=_LOGGER,
name=f"{DOMAIN}_storages_{self.host}",
)
)
async def _create_solar_net_device(self) -> DeviceInfo:
"""Create a device for the Fronius SolarNet system."""
solar_net_device: DeviceInfo = DeviceInfo(
configuration_url=self.fronius.url,
identifiers={(DOMAIN, self.solar_net_device_id)},
manufacturer="Fronius",
name="SolarNet",
)
if self.logger_coordinator:
_logger_info = self.logger_coordinator.data[SOLAR_NET_ID_SYSTEM]
solar_net_device[ATTR_MODEL] = _logger_info["product_type"]["value"]
solar_net_device[ATTR_SW_VERSION] = _logger_info["software_version"][
"value"
]
device_registry = await dr.async_get_registry(self.hass)
device_registry.async_get_or_create(
config_entry_id=self.config_entry.entry_id,
**solar_net_device,
)
return solar_net_device
async def _get_inverter_infos(self) -> list[FroniusDeviceInfo]:
"""Get information about the inverters in the SolarNet system."""
try:
_inverter_info = await self.fronius.inverter_info()
except FroniusError as err:
raise ConfigEntryNotReady from err
inverter_infos: list[FroniusDeviceInfo] = []
for inverter in _inverter_info["inverters"]:
solar_net_id = inverter["device_id"]["value"]
unique_id = inverter["unique_id"]["value"]
device_info = DeviceInfo(
identifiers={(DOMAIN, unique_id)},
manufacturer=inverter["device_type"].get("manufacturer", "Fronius"),
model=inverter["device_type"].get(
"model", inverter["device_type"]["value"]
),
name=inverter.get("custom_name", {}).get("value"),
via_device=(DOMAIN, self.solar_net_device_id),
)
inverter_infos.append(
FroniusDeviceInfo(
device_info=device_info,
solar_net_id=solar_net_id,
unique_id=unique_id,
)
)
return inverter_infos
@staticmethod
async def _init_optional_coordinator(
coordinator: FroniusCoordinatorType,
) -> FroniusCoordinatorType | None:
"""Initialize an update coordinator and return it if devices are found."""
try:
await coordinator.async_config_entry_first_refresh()
except ConfigEntryNotReady:
return None
# keep coordinator only if devices are found
# else ConfigEntryNotReady raised form KeyError
# in FroniusMeterUpdateCoordinator._get_fronius_device_data
return coordinator
| [
"[email protected]"
] | |
e3b04d3b778413a7f94a95da22d271e008cd9655 | 02a0fff2786e5c24eb3f5aa505475705bf460196 | /tests/test_openapi_schema.py | 914f84912e37c7b5fb5c2630232f6b138132a2e5 | [
"MIT"
] | permissive | ismohamedi/django-ninja | 5b63b0d45c0b22a36996e97f82fa289e91ccdffb | 774f7f2af536dbedf527376f1ed6724b2456a54f | refs/heads/master | 2023-03-12T22:30:16.991598 | 2021-02-19T14:55:30 | 2021-02-19T14:55:30 | 341,249,874 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,295 | py | from typing import List
from unittest.mock import patch
from ninja import NinjaAPI, Schema
from django.test import Client, override_settings
from copy import copy
api = NinjaAPI()
class Payload(Schema):
i: int
f: float
class Response(Schema):
i: int
f: float
@api.post("/test", response=Response)
def method(request, data: Payload):
return data.dict()
@api.post("/test_list", response=List[Response])
def method_list(request, data: List[Payload]):
return []
def test_schema_views(client: Client):
assert client.get("/api/").status_code == 404
assert client.get("/api/docs").status_code == 200
assert client.get("/api/openapi.json").status_code == 200
def test_schema_views_no_INSTALLED_APPS(client: Client):
"Making sure that cdn and included js works fine"
from django.conf import settings
# removing ninja from settings:
INSTALLED_APPS = [i for i in settings.INSTALLED_APPS if i != "ninja"]
@override_settings(INSTALLED_APPS=INSTALLED_APPS)
def call_docs():
assert client.get("/api/docs").status_code == 200
call_docs()
def test_schema():
schema = api.get_openapi_schema()
from pprint import pprint
# --------------------------------------------------------------
method = schema["paths"]["/api/test"]["post"]
assert method["requestBody"] == {
"content": {
"application/json": {"schema": {"$ref": "#/components/schemas/Payload"}}
},
"required": True,
}
assert method["responses"] == {
200: {
"content": {
"application/json": {
"schema": {"$ref": "#/components/schemas/Response"}
}
},
"description": "OK",
}
}
# --------------------------------------------------------------
method_list = schema["paths"]["/api/test_list"]["post"]
assert method_list["requestBody"] == {
"content": {
"application/json": {
"schema": {
"items": {"$ref": "#/components/schemas/Payload"},
"title": "Data",
"type": "array",
}
}
},
"required": True,
}
assert method_list["responses"] == {
200: {
"content": {
"application/json": {
"schema": {
"items": {"$ref": "#/components/schemas/Response"},
"title": "Response",
"type": "array",
}
}
},
"description": "OK",
}
}
assert schema["components"]["schemas"] == {
"Payload": {
"properties": {
"f": {"title": "F", "type": "number"},
"i": {"title": "I", "type": "integer"},
},
"required": ["i", "f"],
"title": "Payload",
"type": "object",
},
"Response": {
"properties": {
"f": {"title": "F", "type": "number"},
"i": {"title": "I", "type": "integer"},
},
"required": ["i", "f"],
"title": "Response",
"type": "object",
},
}
| [
"[email protected]"
] | |
88d46720752a93211916300c547dbdb1f076b09f | 0bb474290e13814c2498c086780da5096453da05 | /diverta2019/A/main.py | 9d653fcacf100b95d6cac66ff44625485aa86670 | [] | no_license | ddtkra/atcoder | 49b6205bf1bf6a50106b4ae94d2206a324f278e0 | eb57c144b5c2dbdd4abc432ecd8b1b3386244e30 | refs/heads/master | 2022-01-25T15:38:10.415959 | 2020-03-18T09:22:08 | 2020-03-18T09:22:08 | 208,825,724 | 1 | 0 | null | 2022-01-21T20:10:20 | 2019-09-16T14:51:01 | Python | UTF-8 | Python | false | false | 560 | py | #!.//usr/bin/env python3
import sys
def solve(N: int, K: int):
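    # Choosing K consecutive integers out of 1..N leaves N - K + 1 possible
    # starting points, hence the count below (assuming that is the task).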
print(N-(K-1))
return
# Generated by 1.1.4 https://github.com/kyuridenamida/atcoder-tools (tips: You use the default template now. You can remove this line by using your custom template)
def main():
def iterate_tokens():
for line in sys.stdin:
for word in line.split():
yield word
tokens = iterate_tokens()
N = int(next(tokens)) # type: int
K = int(next(tokens)) # type: int
solve(N, K)
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
08787715e5c39a2256e078d7d66ba33fc723d3cb | b2b9cd537c4c6a216d9b1ee32008cc8e98552405 | /Oriented_crawler/simple_thread/needheader.py | b7e1afc46eb45b39712ffaf80679f9f2e63e5943 | [] | no_license | liyanfeng0127/python2_bdrw | ce982813645294b884d73cd2bbc4de5a33fa2cd5 | 52eba0d67d30ed5ce23e01dde69db35a8ed65787 | refs/heads/master | 2021-05-08T06:50:53.916210 | 2017-10-12T10:41:05 | 2017-10-12T10:41:05 | 106,676,637 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 960 | py | #-*—coding:utf8-*-
import requests
import re
# The next three lines handle the encoding setup; you can ignore them for now.
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
# hea is a dict we build ourselves; it stores the User-Agent header
# In the page: right-click "Inspect Element", refresh, open "Network", pick any request on the left, then "Headers" and find 'User-Agent'
hea = {'User-Agent':'Mozilla/5.0 (Windows NT 6.3; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/41.0.2272.118 Safari/537.36'}
# html = requests.get('http://jp.tingroom.com/yuedu/yd300p/')
html = requests.get('http://jp.tingroom.com/yuedu/yd300p/',headers = hea)
html.encoding = 'utf-8' # convert the encoding to utf-8, otherwise Chinese text shows up garbled
# print html.text
# title = re.findall('color:#666666;">(.*?)</span>',html.text,re.S)
# for each in title:
# print each
#
chinese = re.findall('color: #039;">(.*?)</a>',html.text,re.S)
for each in chinese:
print each | [
"[email protected]"
] | |
c273bf8f309abad0d41156555a5b0f898dfb8ff1 | 6bf492920985e3741440ba53e1c7f8426b66ac1f | /snakemake_rules/rules/gatk/gatk_variant_indel_JEXL_filtration.smk | 1bcc6f56005ae9290a2133ced08430e36f8d7e21 | [
"MIT"
] | permissive | ukaraoz/snakemake-rules | 5b2ba7c9ec19d88b56067a46f66fd0c72e48c368 | 07e96afeb39307cdf35ecc8482dc1f8b62c120b9 | refs/heads/master | 2020-03-31T15:20:44.444006 | 2018-09-07T08:53:47 | 2018-09-07T08:53:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,512 | smk | # -*- snakemake -*-
include: 'gatk.settings.smk'
include: 'gatk_select_indel_variants.smk'
config_default = {'gatk' :{'variant_indel_JEXL_filtration' : _gatk_config_rule_default.copy()}}
config_default['gatk']['variant_indel_JEXL_filtration'].update(
{
'expressions':
["QD < 2.0", "ReadPosRankSum < -20.0", "FS > 200.0",
"SOR > 10.0"]
})
update_config(config_default, config)
config = config_default
rule gatk_variant_indel_JEXL_filtration:
"""Run GATK VariantFiltration
Perform hard filtering using JEXL expressions
"""
wildcard_constraints:
suffix = "(.vcf|.vcf.gz)"
params: cmd = config['gatk']['cmd'] + " -T " + VARIANT_FILTRATION,
options = " ".join([
" ".join(["--filterName GATKStandard{e} --filterExpression \"{exp}\"".format(e=exp.split()[0], exp=exp) \
for exp in config['gatk']['variant_indel_JEXL_filtration']['expressions']])
]),
quote = "" if config['gatk']['cmd'].startswith("gatk") else "",
runtime = config['gatk']['variant_indel_JEXL_filtration']['runtime']
input: vcf = "{prefix}{suffix}", ref = config['gatk']['variant_indel_JEXL_filtration']['ref']
output: vcf = "{prefix}.filteredINDEL{suffix}"
threads: config['gatk']['variant_indel_JEXL_filtration']['threads']
conda: "env.yaml"
shell: "{params.cmd} {params.quote}{params.options}{params.quote} -R {input.ref} --variant {input.vcf} --out {output.vcf}"
| [
"[email protected]"
] | |
fb291cab56eb80122ba5cbe060bfae906dc43ad0 | 6baac3bec174cbb3542d844b34cec3870633e7bf | /red_blue.py | f1ecc21136b6b888b03b5d416a9b4635190f2a47 | [] | no_license | tyday/RedBlue | abcbdbe65fe13ae468cffaf7d9b1b5b1aba34a0f | 4e54d3e4ca67981cc32ba659ec8f0b5aed93b291 | refs/heads/main | 2023-01-07T19:26:40.147228 | 2020-11-07T01:32:50 | 2020-11-07T01:32:50 | 310,451,484 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,067 | py | # a program to play red vs blue
# It's played on a grid. The players alternate turns, each choosing an
# open square. Selecting a square turns it and every adjacent square
# your color. The game ends when all squares are filled.
# the winner is the side with the most squares
# created from this idea
# https://red-blue.herokuapp.com/
# https://github.com/akulchhillar/Red-Blue
# https://www.reddit.com/r/flask/comments/jkdxh0/hi_all_here_is_a_board_game_for_two_players_made/
# Flow:
# Display Status
# Wait for turn
# Evaluate/Update Board
# Check for win
# Save Game
from random import choice
from enum import Enum
class Cell_Value(Enum):
RED = -1
BLUE = 1
NEUTRAL = 0
class Red_Blue():
def __init__(self, width = 8, height = 8,
display_status = True,
red_player = 'human',
blue_player = 'human'):
player_type = {
'human': self.get_human_move,
'random': self.get_random_computer_move
}
        self.board_width = width
self.board = [Cell_Value.NEUTRAL] * width * height
self.game_over = False
self.player = {
Cell_Value.BLUE: player_type[blue_player],
Cell_Value.RED: player_type[red_player]
}
# self.red_player = red_player
# self.blue_player = blue_player
self.display_status = display_status
self.history = []
self.turn = Cell_Value.RED
def get_available_moves(self):
available_moves = []
for i in range(len(self.board)):
if self.board[i].name == 'NEUTRAL':
available_moves.append(i)
return available_moves
def get_random_move(self):
available_moves = self.get_available_moves()
random_selection = choice(available_moves)
return random_selection
def check_if_move_is_valid(self,move):
        try:
            return int(move) in self.get_available_moves()
        except (TypeError, ValueError):
            return False
def get_adjacent_cells(self, cell):
adjacent_cells = []
# Above
if cell - self.board_width >= 0:
adjacent_cells.append(cell-self.board_width)
# Below
if cell + self.board_width < len(self.board):
adjacent_cells.append(cell+self.board_width)
# Left
if cell % self.board_width != 0:
adjacent_cells.append(cell-1)
# Right
if (cell+1)%self.board_width !=0:
adjacent_cells.append(cell+1)
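        # e.g. with board_width = 8: cell 0 (top-left corner) -> [8, 1],
        # cell 9 (interior) -> [1, 17, 8, 10], cell 7 (top-right) -> [15, 6]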
return adjacent_cells
def get_center_cell(self, data):
        # recovers the selected (center) cell when we only know which
        # cells were flipped, not which one was clicked
for cell in data:
cell_adjacents = self.get_adjacent_cells(cell)
cell_overlap = [c for c in cell_adjacents if c in data]
if len(cell_overlap) > 1:
return cell
return None
def player_action(self):
self.player[self.turn]()
def get_human_move(self):
        move = None
        while move not in self.get_available_moves():
            move = input("Select cell: ")
            if move == 'q':
                # abort the turn instead of passing 'q' to self.move()
                return
            try:
                move = int(move)
            except ValueError:
                print('Selection must be an integer.')
                continue
            if move not in self.get_available_moves():
                print('Invalid move... please select again')
        self.move(move)
def get_random_computer_move(self):
move = self.get_random_move()
self.move(move)
def change_player(self):
if self.turn == Cell_Value.RED:
self.turn = Cell_Value.BLUE
else:
self.turn = Cell_Value.RED
def show_status(self):
r,b = self.red_blue_score()
print(f'Turn: {self.turn.name} -- Score RED: {r}, BLUE: {b}')
def show_board(self):
row = ''
for i, cell in enumerate(self.board):
if cell == Cell_Value.RED:
row += ' RR '
elif cell == Cell_Value.BLUE:
row += ' BB '
else:
row += f' {i:2d} '
if (i+1) % self.board_width == 0:
# we've reached the end of the row.
print(row)
print()
row = ''
def move(self, move):
all_cells = self.get_adjacent_cells(move)
all_cells.append(move)
for cell in all_cells:
self.board[cell] = self.turn
board = [cell.value for cell in self.board]
self.history.append([self.turn.value, move, board])
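        # history row layout: [player value, chosen cell, board snapshot as ints];
        # check_game_status() later prepends the final winner to every row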
def red_blue_score(self):
red = len([cell for cell in self.board if cell.name == Cell_Value.RED.name])
blue = len([cell for cell in self.board if cell.name == Cell_Value.BLUE.name])
return red, blue
def check_game_status(self):
if len(self.get_available_moves()) == 0:
# game is over,
# so append the winner of the game to the game history
self.game_over = True
print('Winner!!!')
red_cells = [cell for cell in self.board if cell.name == Cell_Value.RED.name]
blue_cells = [cell for cell in self.board if cell.name == Cell_Value.BLUE.name]
winner = 0
if len(red_cells) > len(blue_cells):
winner = Cell_Value.RED.value
elif len(blue_cells) > len(red_cells):
winner = Cell_Value.BLUE.value
else:
winner = Cell_Value.NEUTRAL.value
for item in self.history:
item.insert(0,winner)
def play_game(self):
while self.game_over is False:
if self.display_status:
self.show_status()
self.show_board()
self.player_action()
self.change_player()
self.check_game_status()
print(self.history)
        self.show_status()
if __name__=='__main__':
game = Red_Blue(display_status=False, red_player='random', blue_player='random')
game.play_game()
| [
"[email protected]"
] | |
c8a08a597f157e4b39c207a5d7d93895a325beea | d93fe0484fc3b32c8fd9b33cc66cfd636a148ec4 | /AtCoder/ARC-C/004probC.py | 2dc3017bd13cc5dfe8e5d05bb87bb841727be9e2 | [] | no_license | wattaihei/ProgrammingContest | 0d34f42f60fa6693e04c933c978527ffaddceda7 | c26de8d42790651aaee56df0956e0b206d1cceb4 | refs/heads/master | 2023-04-22T19:43:43.394907 | 2021-05-02T13:05:21 | 2021-05-02T13:05:21 | 264,400,706 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 435 | py | import sys
input = sys.stdin.readline
from fractions import gcd
X, Y = map(int, input().split("/"))
g = gcd(X, Y)
x = X//g
y = Y//g
n_min = (2*x-y)//(y**2)
n_max = (2*x+y)//(y**2) + 1
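# Derivation (assuming the task asks for N and M with (1 + 2 + ... + N - M)/N = X/Y):
# writing N = y*n, the removed term is M = N*(N+1)/2 - x*n, and requiring
# 0 < M <= N brackets n between (2x - y)/y^2 and (2x + y)/y^2, giving the
# n_min/n_max scan bounds above (n_max gets +1 as a safety margin).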
ans = []
for n in range(n_min, n_max+1):
if n <= 0: continue
N = y*n
M = N*(N+1)//2 - x*n
if 0 < M <= N:
ans.append((N, M))
if not ans:
print("Impossible")
else:
print("\n".join([str(a)+" "+str(b) for a, b in ans])) | [
"[email protected]"
] | |
97db4c0934c93d276c97cbd3a46d2f0cca960220 | b144c5142226de4e6254e0044a1ca0fcd4c8bbc6 | /ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocols/matchfields_39df33112bb1cfa56367ea58e168f287.py | b28dfcdf950a34519daeb7518998011e1a36711d | [
"MIT"
] | permissive | iwanb/ixnetwork_restpy | fa8b885ea7a4179048ef2636c37ef7d3f6692e31 | c2cb68fee9f2cc2f86660760e9e07bd06c0013c2 | refs/heads/master | 2021-01-02T17:27:37.096268 | 2020-02-11T09:28:15 | 2020-02-11T09:28:15 | 239,721,780 | 0 | 0 | NOASSERTION | 2020-02-11T09:20:22 | 2020-02-11T09:20:21 | null | UTF-8 | Python | false | false | 15,566 | py | # MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class MatchFields(Base):
"""NOT DEFINED
The MatchFields class encapsulates a required matchFields resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = 'matchFields'
def __init__(self, parent):
super(MatchFields, self).__init__(parent)
@property
def ArpDestinationIpv4Address(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('arpDestinationIpv4Address')
@ArpDestinationIpv4Address.setter
def ArpDestinationIpv4Address(self, value):
self._set_attribute('arpDestinationIpv4Address', value)
@property
def ArpOpcode(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('arpOpcode')
@ArpOpcode.setter
def ArpOpcode(self, value):
self._set_attribute('arpOpcode', value)
@property
def ArpSourceHardwareAddress(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('arpSourceHardwareAddress')
@ArpSourceHardwareAddress.setter
def ArpSourceHardwareAddress(self, value):
self._set_attribute('arpSourceHardwareAddress', value)
@property
def ArpSourceIpv4Address(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('arpSourceIpv4Address')
@ArpSourceIpv4Address.setter
def ArpSourceIpv4Address(self, value):
self._set_attribute('arpSourceIpv4Address', value)
@property
def ArpTargetHardwareAddress(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('arpTargetHardwareAddress')
@ArpTargetHardwareAddress.setter
def ArpTargetHardwareAddress(self, value):
self._set_attribute('arpTargetHardwareAddress', value)
@property
def EthernetDestination(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ethernetDestination')
@EthernetDestination.setter
def EthernetDestination(self, value):
self._set_attribute('ethernetDestination', value)
@property
def EthernetSource(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ethernetSource')
@EthernetSource.setter
def EthernetSource(self, value):
self._set_attribute('ethernetSource', value)
@property
def EthernetType(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ethernetType')
@EthernetType.setter
def EthernetType(self, value):
self._set_attribute('ethernetType', value)
@property
def Experimenter(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('experimenter')
@Experimenter.setter
def Experimenter(self, value):
self._set_attribute('experimenter', value)
@property
def IcmpCode(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('icmpCode')
@IcmpCode.setter
def IcmpCode(self, value):
self._set_attribute('icmpCode', value)
@property
def IcmpType(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('icmpType')
@IcmpType.setter
def IcmpType(self, value):
self._set_attribute('icmpType', value)
@property
def Icmpv6Code(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('icmpv6Code')
@Icmpv6Code.setter
def Icmpv6Code(self, value):
self._set_attribute('icmpv6Code', value)
@property
def Icmpv6Type(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('icmpv6Type')
@Icmpv6Type.setter
def Icmpv6Type(self, value):
self._set_attribute('icmpv6Type', value)
@property
def InPort(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('inPort')
@InPort.setter
def InPort(self, value):
self._set_attribute('inPort', value)
@property
def IpDscp(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ipDscp')
@IpDscp.setter
def IpDscp(self, value):
self._set_attribute('ipDscp', value)
@property
def IpEcn(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ipEcn')
@IpEcn.setter
def IpEcn(self, value):
self._set_attribute('ipEcn', value)
@property
def IpProtocol(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ipProtocol')
@IpProtocol.setter
def IpProtocol(self, value):
self._set_attribute('ipProtocol', value)
@property
def Ipv4Destination(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ipv4Destination')
@Ipv4Destination.setter
def Ipv4Destination(self, value):
self._set_attribute('ipv4Destination', value)
@property
def Ipv4Source(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ipv4Source')
@Ipv4Source.setter
def Ipv4Source(self, value):
self._set_attribute('ipv4Source', value)
@property
def Ipv6Destination(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ipv6Destination')
@Ipv6Destination.setter
def Ipv6Destination(self, value):
self._set_attribute('ipv6Destination', value)
@property
def Ipv6ExtHeader(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ipv6ExtHeader')
@Ipv6ExtHeader.setter
def Ipv6ExtHeader(self, value):
self._set_attribute('ipv6ExtHeader', value)
@property
def Ipv6FlowLabel(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ipv6FlowLabel')
@Ipv6FlowLabel.setter
def Ipv6FlowLabel(self, value):
self._set_attribute('ipv6FlowLabel', value)
@property
def Ipv6NdSll(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ipv6NdSll')
@Ipv6NdSll.setter
def Ipv6NdSll(self, value):
self._set_attribute('ipv6NdSll', value)
@property
def Ipv6NdTarget(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ipv6NdTarget')
@Ipv6NdTarget.setter
def Ipv6NdTarget(self, value):
self._set_attribute('ipv6NdTarget', value)
@property
def Ipv6NdTll(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ipv6NdTll')
@Ipv6NdTll.setter
def Ipv6NdTll(self, value):
self._set_attribute('ipv6NdTll', value)
@property
def Ipv6Source(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('ipv6Source')
@Ipv6Source.setter
def Ipv6Source(self, value):
self._set_attribute('ipv6Source', value)
@property
def Metadata(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('metadata')
@Metadata.setter
def Metadata(self, value):
self._set_attribute('metadata', value)
@property
def MplsBos(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('mplsBos')
@MplsBos.setter
def MplsBos(self, value):
self._set_attribute('mplsBos', value)
@property
def MplsLabel(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('mplsLabel')
@MplsLabel.setter
def MplsLabel(self, value):
self._set_attribute('mplsLabel', value)
@property
def MplsTc(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('mplsTc')
@MplsTc.setter
def MplsTc(self, value):
self._set_attribute('mplsTc', value)
@property
def PbbIsid(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('pbbIsid')
@PbbIsid.setter
def PbbIsid(self, value):
self._set_attribute('pbbIsid', value)
@property
def PhysicalInPort(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('physicalInPort')
@PhysicalInPort.setter
def PhysicalInPort(self, value):
self._set_attribute('physicalInPort', value)
@property
def SctpDestination(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('sctpDestination')
@SctpDestination.setter
def SctpDestination(self, value):
self._set_attribute('sctpDestination', value)
@property
def SctpSource(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('sctpSource')
@SctpSource.setter
def SctpSource(self, value):
self._set_attribute('sctpSource', value)
@property
def TcpDestination(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('tcpDestination')
@TcpDestination.setter
def TcpDestination(self, value):
self._set_attribute('tcpDestination', value)
@property
def TcpSource(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('tcpSource')
@TcpSource.setter
def TcpSource(self, value):
self._set_attribute('tcpSource', value)
@property
def TunnelId(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('tunnelId')
@TunnelId.setter
def TunnelId(self, value):
self._set_attribute('tunnelId', value)
@property
def UdpDestination(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('udpDestination')
@UdpDestination.setter
def UdpDestination(self, value):
self._set_attribute('udpDestination', value)
@property
def UdpSource(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('udpSource')
@UdpSource.setter
def UdpSource(self, value):
self._set_attribute('udpSource', value)
@property
def VlanId(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('vlanId')
@VlanId.setter
def VlanId(self, value):
self._set_attribute('vlanId', value)
@property
def VlanPriority(self):
"""NOT DEFINED
Returns:
bool
"""
return self._get_attribute('vlanPriority')
@VlanPriority.setter
def VlanPriority(self, value):
self._set_attribute('vlanPriority', value)
def update(self, ArpDestinationIpv4Address=None, ArpOpcode=None, ArpSourceHardwareAddress=None, ArpSourceIpv4Address=None, ArpTargetHardwareAddress=None, EthernetDestination=None, EthernetSource=None, EthernetType=None, Experimenter=None, IcmpCode=None, IcmpType=None, Icmpv6Code=None, Icmpv6Type=None, InPort=None, IpDscp=None, IpEcn=None, IpProtocol=None, Ipv4Destination=None, Ipv4Source=None, Ipv6Destination=None, Ipv6ExtHeader=None, Ipv6FlowLabel=None, Ipv6NdSll=None, Ipv6NdTarget=None, Ipv6NdTll=None, Ipv6Source=None, Metadata=None, MplsBos=None, MplsLabel=None, MplsTc=None, PbbIsid=None, PhysicalInPort=None, SctpDestination=None, SctpSource=None, TcpDestination=None, TcpSource=None, TunnelId=None, UdpDestination=None, UdpSource=None, VlanId=None, VlanPriority=None):
"""Updates a child instance of matchFields on the server.
Args:
ArpDestinationIpv4Address (bool): NOT DEFINED
ArpOpcode (bool): NOT DEFINED
ArpSourceHardwareAddress (bool): NOT DEFINED
ArpSourceIpv4Address (bool): NOT DEFINED
ArpTargetHardwareAddress (bool): NOT DEFINED
EthernetDestination (bool): NOT DEFINED
EthernetSource (bool): NOT DEFINED
EthernetType (bool): NOT DEFINED
Experimenter (bool): NOT DEFINED
IcmpCode (bool): NOT DEFINED
IcmpType (bool): NOT DEFINED
Icmpv6Code (bool): NOT DEFINED
Icmpv6Type (bool): NOT DEFINED
InPort (bool): NOT DEFINED
IpDscp (bool): NOT DEFINED
IpEcn (bool): NOT DEFINED
IpProtocol (bool): NOT DEFINED
Ipv4Destination (bool): NOT DEFINED
Ipv4Source (bool): NOT DEFINED
Ipv6Destination (bool): NOT DEFINED
Ipv6ExtHeader (bool): NOT DEFINED
Ipv6FlowLabel (bool): NOT DEFINED
Ipv6NdSll (bool): NOT DEFINED
Ipv6NdTarget (bool): NOT DEFINED
Ipv6NdTll (bool): NOT DEFINED
Ipv6Source (bool): NOT DEFINED
Metadata (bool): NOT DEFINED
MplsBos (bool): NOT DEFINED
MplsLabel (bool): NOT DEFINED
MplsTc (bool): NOT DEFINED
PbbIsid (bool): NOT DEFINED
PhysicalInPort (bool): NOT DEFINED
SctpDestination (bool): NOT DEFINED
SctpSource (bool): NOT DEFINED
TcpDestination (bool): NOT DEFINED
TcpSource (bool): NOT DEFINED
TunnelId (bool): NOT DEFINED
UdpDestination (bool): NOT DEFINED
UdpSource (bool): NOT DEFINED
VlanId (bool): NOT DEFINED
VlanPriority (bool): NOT DEFINED
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
self._update(locals())
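# Illustrative call (sketch only): the keyword names follow the generated
# properties above, but the traversal that yields this matchFields object is
# hypothetical and depends on the surrounding IxNetwork session API.
#   match_fields.update(EthernetSource=True, EthernetDestination=True,
#                       VlanId=True)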
| [
"[email protected]"
] | |
16fe148351f93eee112d2d7bab5ba1c951af710b | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /ncLp4ZXvz4x4oEHYh_13.py | 08497f878c8c64402f937ecba818958c517619e3 | [] | no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 674 | py | """
Given two unique integer lists `a` and `b`, and an integer target value `v`,
create a function to determine whether there is a pair of numbers that add up
to the target value `v`, where one number comes from one list `a` and the
other comes from the second list `b`.
Return `True` if there is a pair that adds up to the target value and `False`
otherwise.
### Examples
sum_of_two([1, 2], [4, 5, 6], 5) ➞ True
sum_of_two([1, 2], [4, 5, 6], 8) ➞ True
sum_of_two([1, 2], [4, 5, 6], 3) ➞ False
sum_of_two([1, 2], [4, 5, 6], 9) ➞ False
### Notes
N/A
"""
def sum_of_two(a, b, v):
	return any(i + j == v for i in a for j in b)
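# Not part of the original solution: a linear-time sketch of the same check.
# Build a set from one list and test complements against it; the helper name
# below is new.
def sum_of_two_fast(a, b, v):
	seen = set(b)
	return any(v - i in seen for i in a)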
| [
"[email protected]"
] | |
3986b9709cf9dcf81efc3c9876fd534eb9896f84 | bdda6f43c4c72f4819208f6f5e0480e570a5d09c | /fintech_24659/settings.py | 694d25428e56f459066372422eb99ca7150a2f9c | [] | no_license | crowdbotics-apps/fintech-24659 | b5fcbdee03f47f297678201a6e82dcc4b4cd06bb | 4a384d5e4eaf0621680417e8b7f0246e317b35db | refs/heads/master | 2023-03-04T10:23:50.654065 | 2021-02-22T13:19:15 | 2021-02-22T13:19:15 | 341,206,661 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,772 | py | """
Django settings for fintech_24659 project.
Generated by 'django-admin startproject' using Django 2.2.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.2/ref/settings/
"""
import os
import environ
import logging
env = environ.Env()
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = env.bool("DEBUG", default=False)
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = env.str("SECRET_KEY")
ALLOWED_HOSTS = env.list("HOST", default=["*"])
SITE_ID = 1
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("SECURE_REDIRECT", default=False)
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites'
]
LOCAL_APPS = [
'home',
'modules',
'users.apps.UsersConfig',
]
THIRD_PARTY_APPS = [
'rest_framework',
'rest_framework.authtoken',
'rest_auth',
'rest_auth.registration',
'bootstrap4',
'allauth',
'allauth.account',
'allauth.socialaccount',
'allauth.socialaccount.providers.google',
'django_extensions',
'drf_yasg',
'storages',
]
INSTALLED_APPS += LOCAL_APPS + THIRD_PARTY_APPS
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'fintech_24659.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'fintech_24659.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
if env.str("DATABASE_URL", default=None):
DATABASES = {
'default': env.db()
}
# Password validation
# https://docs.djangoproject.com/en/2.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.2/howto/static-files/
STATIC_URL = '/static/'
MIDDLEWARE += ['whitenoise.middleware.WhiteNoiseMiddleware']
AUTHENTICATION_BACKENDS = (
'django.contrib.auth.backends.ModelBackend',
'allauth.account.auth_backends.AuthenticationBackend'
)
STATIC_ROOT = os.path.join(BASE_DIR, "staticfiles")
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
# allauth / users
ACCOUNT_EMAIL_REQUIRED = True
ACCOUNT_AUTHENTICATION_METHOD = 'email'
ACCOUNT_USERNAME_REQUIRED = False
ACCOUNT_EMAIL_VERIFICATION = "optional"
ACCOUNT_CONFIRM_EMAIL_ON_GET = True
ACCOUNT_LOGIN_ON_EMAIL_CONFIRMATION = True
ACCOUNT_UNIQUE_EMAIL = True
LOGIN_REDIRECT_URL = "users:redirect"
ACCOUNT_ADAPTER = "users.adapters.AccountAdapter"
SOCIALACCOUNT_ADAPTER = "users.adapters.SocialAccountAdapter"
ACCOUNT_ALLOW_REGISTRATION = env.bool("ACCOUNT_ALLOW_REGISTRATION", True)
SOCIALACCOUNT_ALLOW_REGISTRATION = env.bool("SOCIALACCOUNT_ALLOW_REGISTRATION", True)
REST_AUTH_SERIALIZERS = {
# Replace password reset serializer to fix 500 error
"PASSWORD_RESET_SERIALIZER": "home.api.v1.serializers.PasswordSerializer",
}
REST_AUTH_REGISTER_SERIALIZERS = {
# Use custom serializer that has no username and matches web signup
"REGISTER_SERIALIZER": "home.api.v1.serializers.SignupSerializer",
}
# Custom user model
AUTH_USER_MODEL = "users.User"
EMAIL_HOST = env.str("EMAIL_HOST", "smtp.sendgrid.net")
EMAIL_HOST_USER = env.str("SENDGRID_USERNAME", "")
EMAIL_HOST_PASSWORD = env.str("SENDGRID_PASSWORD", "")
EMAIL_PORT = 587
EMAIL_USE_TLS = True
# AWS S3 config
AWS_ACCESS_KEY_ID = env.str("AWS_ACCESS_KEY_ID", "")
AWS_SECRET_ACCESS_KEY = env.str("AWS_SECRET_ACCESS_KEY", "")
AWS_STORAGE_BUCKET_NAME = env.str("AWS_STORAGE_BUCKET_NAME", "")
AWS_STORAGE_REGION = env.str("AWS_STORAGE_REGION", "")
USE_S3 = (
AWS_ACCESS_KEY_ID and
AWS_SECRET_ACCESS_KEY and
AWS_STORAGE_BUCKET_NAME and
AWS_STORAGE_REGION
)
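# A minimal illustrative environment for enabling S3 storage (placeholder
# values, not real credentials or a real bucket):
#   AWS_ACCESS_KEY_ID=AKIAEXAMPLEKEY
#   AWS_SECRET_ACCESS_KEY=example-secret
#   AWS_STORAGE_BUCKET_NAME=fintech-24659-media
#   AWS_STORAGE_REGION=us-east-1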
if USE_S3:
AWS_S3_CUSTOM_DOMAIN = env.str("AWS_S3_CUSTOM_DOMAIN", "")
AWS_S3_OBJECT_PARAMETERS = {"CacheControl": "max-age=86400"}
AWS_DEFAULT_ACL = env.str("AWS_DEFAULT_ACL", "public-read")
AWS_MEDIA_LOCATION = env.str("AWS_MEDIA_LOCATION", "media")
AWS_AUTO_CREATE_BUCKET = env.bool("AWS_AUTO_CREATE_BUCKET", True)
DEFAULT_FILE_STORAGE = env.str(
"DEFAULT_FILE_STORAGE", "home.storage_backends.MediaStorage"
)
MEDIA_URL = '/mediafiles/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'mediafiles')
# Swagger settings for api docs
SWAGGER_SETTINGS = {
"DEFAULT_INFO": f"{ROOT_URLCONF}.api_info",
}
if DEBUG or not (EMAIL_HOST_USER and EMAIL_HOST_PASSWORD):
# output email to console instead of sending
if not DEBUG:
logging.warning("You should setup `SENDGRID_USERNAME` and `SENDGRID_PASSWORD` env vars to send emails.")
EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
| [
"[email protected]"
] | |
21326e3552bdb8a7d4600a442d82b4a5930e105e | 5dcdc5720f59280e6e22e78534dc565e9b299048 | /数学/Angles of a Clock.py | 6883e79b5bbaa586ecf7db879518fbfeaa58e64d | [] | no_license | Stella2019/10-24 | 4fc944ba397d8f9494f4fc7ceead3065b4572a55 | d62572831235e8e608f259aa7b3608ae6752a64a | refs/heads/main | 2023-01-01T12:46:06.021044 | 2020-10-24T20:53:12 | 2020-10-24T20:53:12 | 306,967,300 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 232 | py | def calcAngle(h, m):
hour_angle = (360 / (12 * 60.0)) * (h * 60 + m)
min_angle = 360 / 60.0 * m
angle = abs(hour_angle - min_angle)
return min(angle, 360 - angle)
print(calcAngle(3, 15))
# 7.50
print(calcAngle(3, 00))
# 90 | [
"[email protected]"
] | |
bbc4ce153225199aee5e7a81923cc97c398f5eec | d47b841f7e64d83cebbe63a25bac47adc495a760 | /cfbd/models/scoreboard_game.py | 5454322ed36376d7222b319f2c6aa0cf161cf8c2 | [] | no_license | CiscoNeville/cfbd-python | 810029240de30a2b7a205cbc3bb009599481206c | 5775ff7ce7464e881f1940a7c0a534b0c26c1ce8 | refs/heads/master | 2023-09-04T18:27:23.773119 | 2021-11-19T01:49:07 | 2021-11-19T01:49:07 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,431 | py | # coding: utf-8
"""
College Football Data API
This is an API for accessing all sorts of college football data. Please note that API keys should be supplied with \"Bearer \" prepended (e.g. \"Bearer your_key\"). API keys can be acquired from the CollegeFootballData.com website. # noqa: E501
OpenAPI spec version: 4.2.2
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
from cfbd.configuration import Configuration
class ScoreboardGame(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'id': 'int',
'start_date': 'str',
'tv': 'str',
'neutral_site': 'bool',
'conference_game': 'bool',
'status': 'str',
'period': 'int',
'clock': 'str',
'venue': 'object',
'home_team': 'object',
'away_team': 'object',
'weather': 'object',
'betting': 'object'
}
attribute_map = {
'id': 'id',
'start_date': 'startDate',
'tv': 'tv',
'neutral_site': 'neutralSite',
'conference_game': 'conferenceGame',
'status': 'status',
'period': 'period',
'clock': 'clock',
'venue': 'venue',
'home_team': 'homeTeam',
'away_team': 'awayTeam',
'weather': 'weather',
'betting': 'betting'
}
def __init__(self, id=None, start_date=None, tv=None, neutral_site=None, conference_game=None, status=None, period=None, clock=None, venue=None, home_team=None, away_team=None, weather=None, betting=None, _configuration=None): # noqa: E501
"""ScoreboardGame - a model defined in Swagger""" # noqa: E501
if _configuration is None:
_configuration = Configuration()
self._configuration = _configuration
self._id = None
self._start_date = None
self._tv = None
self._neutral_site = None
self._conference_game = None
self._status = None
self._period = None
self._clock = None
self._venue = None
self._home_team = None
self._away_team = None
self._weather = None
self._betting = None
self.discriminator = None
if id is not None:
self.id = id
if start_date is not None:
self.start_date = start_date
if tv is not None:
self.tv = tv
if neutral_site is not None:
self.neutral_site = neutral_site
if conference_game is not None:
self.conference_game = conference_game
if status is not None:
self.status = status
if period is not None:
self.period = period
if clock is not None:
self.clock = clock
if venue is not None:
self.venue = venue
if home_team is not None:
self.home_team = home_team
if away_team is not None:
self.away_team = away_team
if weather is not None:
self.weather = weather
if betting is not None:
self.betting = betting
@property
def id(self):
"""Gets the id of this ScoreboardGame. # noqa: E501
:return: The id of this ScoreboardGame. # noqa: E501
:rtype: int
"""
return self._id
@id.setter
def id(self, id):
"""Sets the id of this ScoreboardGame.
:param id: The id of this ScoreboardGame. # noqa: E501
:type: int
"""
self._id = id
@property
def start_date(self):
"""Gets the start_date of this ScoreboardGame. # noqa: E501
:return: The start_date of this ScoreboardGame. # noqa: E501
:rtype: str
"""
return self._start_date
@start_date.setter
def start_date(self, start_date):
"""Sets the start_date of this ScoreboardGame.
:param start_date: The start_date of this ScoreboardGame. # noqa: E501
:type: str
"""
self._start_date = start_date
@property
def tv(self):
"""Gets the tv of this ScoreboardGame. # noqa: E501
:return: The tv of this ScoreboardGame. # noqa: E501
:rtype: str
"""
return self._tv
@tv.setter
def tv(self, tv):
"""Sets the tv of this ScoreboardGame.
:param tv: The tv of this ScoreboardGame. # noqa: E501
:type: str
"""
self._tv = tv
@property
def neutral_site(self):
"""Gets the neutral_site of this ScoreboardGame. # noqa: E501
:return: The neutral_site of this ScoreboardGame. # noqa: E501
:rtype: bool
"""
return self._neutral_site
@neutral_site.setter
def neutral_site(self, neutral_site):
"""Sets the neutral_site of this ScoreboardGame.
:param neutral_site: The neutral_site of this ScoreboardGame. # noqa: E501
:type: bool
"""
self._neutral_site = neutral_site
@property
def conference_game(self):
"""Gets the conference_game of this ScoreboardGame. # noqa: E501
:return: The conference_game of this ScoreboardGame. # noqa: E501
:rtype: bool
"""
return self._conference_game
@conference_game.setter
def conference_game(self, conference_game):
"""Sets the conference_game of this ScoreboardGame.
:param conference_game: The conference_game of this ScoreboardGame. # noqa: E501
:type: bool
"""
self._conference_game = conference_game
@property
def status(self):
"""Gets the status of this ScoreboardGame. # noqa: E501
:return: The status of this ScoreboardGame. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this ScoreboardGame.
:param status: The status of this ScoreboardGame. # noqa: E501
:type: str
"""
self._status = status
@property
def period(self):
"""Gets the period of this ScoreboardGame. # noqa: E501
:return: The period of this ScoreboardGame. # noqa: E501
:rtype: int
"""
return self._period
@period.setter
def period(self, period):
"""Sets the period of this ScoreboardGame.
:param period: The period of this ScoreboardGame. # noqa: E501
:type: int
"""
self._period = period
@property
def clock(self):
"""Gets the clock of this ScoreboardGame. # noqa: E501
:return: The clock of this ScoreboardGame. # noqa: E501
:rtype: str
"""
return self._clock
@clock.setter
def clock(self, clock):
"""Sets the clock of this ScoreboardGame.
:param clock: The clock of this ScoreboardGame. # noqa: E501
:type: str
"""
self._clock = clock
@property
def venue(self):
"""Gets the venue of this ScoreboardGame. # noqa: E501
:return: The venue of this ScoreboardGame. # noqa: E501
:rtype: object
"""
return self._venue
@venue.setter
def venue(self, venue):
"""Sets the venue of this ScoreboardGame.
:param venue: The venue of this ScoreboardGame. # noqa: E501
:type: object
"""
self._venue = venue
@property
def home_team(self):
"""Gets the home_team of this ScoreboardGame. # noqa: E501
:return: The home_team of this ScoreboardGame. # noqa: E501
:rtype: object
"""
return self._home_team
@home_team.setter
def home_team(self, home_team):
"""Sets the home_team of this ScoreboardGame.
:param home_team: The home_team of this ScoreboardGame. # noqa: E501
:type: object
"""
self._home_team = home_team
@property
def away_team(self):
"""Gets the away_team of this ScoreboardGame. # noqa: E501
:return: The away_team of this ScoreboardGame. # noqa: E501
:rtype: object
"""
return self._away_team
@away_team.setter
def away_team(self, away_team):
"""Sets the away_team of this ScoreboardGame.
:param away_team: The away_team of this ScoreboardGame. # noqa: E501
:type: object
"""
self._away_team = away_team
@property
def weather(self):
"""Gets the weather of this ScoreboardGame. # noqa: E501
:return: The weather of this ScoreboardGame. # noqa: E501
:rtype: object
"""
return self._weather
@weather.setter
def weather(self, weather):
"""Sets the weather of this ScoreboardGame.
:param weather: The weather of this ScoreboardGame. # noqa: E501
:type: object
"""
self._weather = weather
@property
def betting(self):
"""Gets the betting of this ScoreboardGame. # noqa: E501
:return: The betting of this ScoreboardGame. # noqa: E501
:rtype: object
"""
return self._betting
@betting.setter
def betting(self, betting):
"""Sets the betting of this ScoreboardGame.
:param betting: The betting of this ScoreboardGame. # noqa: E501
:type: object
"""
self._betting = betting
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(ScoreboardGame, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ScoreboardGame):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, ScoreboardGame):
return True
return self.to_dict() != other.to_dict()
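# Illustrative usage (not part of the generated client); the field values
# below are made up.
if __name__ == "__main__":
    game = ScoreboardGame(id=401234567, status="in_progress", period=2,
                          clock="07:41", tv="ESPN")
    print(game.to_dict())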
| [
"[email protected]"
] | |
b82cce8fe7e0e430c2cb38d742aab8c0b680be5f | a46d135ba8fd7bd40f0b7d7a96c72be446025719 | /packages/python/plotly/plotly/validators/bar/_yperiod0.py | 225b424a1703b373d93e4772e32dd4b730f629cb | [
"MIT"
] | permissive | hugovk/plotly.py | 5e763fe96f225d964c4fcd1dea79dbefa50b4692 | cfad7862594b35965c0e000813bd7805e8494a5b | refs/heads/master | 2022-05-10T12:17:38.797994 | 2021-12-21T03:49:19 | 2021-12-21T03:49:19 | 234,146,634 | 0 | 0 | MIT | 2020-01-15T18:33:43 | 2020-01-15T18:33:41 | null | UTF-8 | Python | false | false | 392 | py | import _plotly_utils.basevalidators
class Yperiod0Validator(_plotly_utils.basevalidators.AnyValidator):
def __init__(self, plotly_name="yperiod0", parent_name="bar", **kwargs):
super(Yperiod0Validator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
**kwargs
)
| [
"[email protected]"
] | |
082d4dfee16d6f707373ba16b2d564dda78ec509 | d61a1050874abc9883d4787e4fd52c47dada55cb | /faster_rcnn_mxnet/faster_rcnn/core/tester.py | fa9ef4f6d61d87c6a8f85fa6f973ae31a2c3d149 | [] | no_license | larsoncs/focal-loss | 84d842ca13b94549c5ee473c6dcb91a744a9eae7 | f238924eabc566e98d3c72ad1c9c40f72922bc3f | refs/heads/master | 2021-04-06T10:04:10.655371 | 2017-12-20T04:30:15 | 2017-12-20T04:30:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,332 | py | # --------------------------------------------------------
# Deformable Convolutional Networks
# Copyright (c) 2016 by Contributors
# Copyright (c) 2017 Microsoft
# Licensed under The Apache-2.0 License [see LICENSE for details]
# Modified by Yuwen Xiong
# --------------------------------------------------------
import cPickle
import os
import time
import mxnet as mx
import numpy as np
from module import MutableModule
from utils import image
from bbox.bbox_transform import bbox_pred, clip_boxes
from nms.nms import py_nms_wrapper, cpu_nms_wrapper, gpu_nms_wrapper
from utils.PrefetchingIter import PrefetchingIter
class Predictor(object):
def __init__(self, symbol, data_names, label_names,
context=mx.cpu(), max_data_shapes=None,
provide_data=None, provide_label=None,
arg_params=None, aux_params=None):
self._mod = MutableModule(symbol, data_names, label_names,
context=context, max_data_shapes=max_data_shapes)
self._mod.bind(provide_data, provide_label, for_training=False)
self._mod.init_params(arg_params=arg_params, aux_params=aux_params)
def predict(self, data_batch):
self._mod.forward(data_batch)
# [dict(zip(self._mod.output_names, _)) for _ in zip(*self._mod.get_outputs(merge_multi_context=False))]
return [dict(zip(self._mod.output_names, _)) for _ in zip(*self._mod.get_outputs(merge_multi_context=False))]
def im_proposal(predictor, data_batch, data_names, scales):
output_all = predictor.predict(data_batch)
data_dict_all = [dict(zip(data_names, data_batch.data[i])) for i in xrange(len(data_batch.data))]
scores_all = []
boxes_all = []
for output, data_dict, scale in zip(output_all, data_dict_all, scales):
# drop the batch index
boxes = output['rois_output'].asnumpy()[:, 1:]
scores = output['rois_score'].asnumpy()
# transform to original scale
boxes = boxes / scale
scores_all.append(scores)
boxes_all.append(boxes)
return scores_all, boxes_all, data_dict_all
def generate_proposals(predictor, test_data, imdb, cfg, vis=False, thresh=0.):
"""
    Generate detection results using RPN.
:param predictor: Predictor
:param test_data: data iterator, must be non-shuffled
:param imdb: image database
:param vis: controls visualization
:param thresh: thresh for valid detections
:return: list of detected boxes
"""
assert vis or not test_data.shuffle
data_names = [k[0] for k in test_data.provide_data[0]]
if not isinstance(test_data, PrefetchingIter):
test_data = PrefetchingIter(test_data)
idx = 0
t = time.time()
imdb_boxes = list()
original_boxes = list()
for im_info, data_batch in test_data:
t1 = time.time() - t
t = time.time()
scales = [iim_info[0, 2] for iim_info in im_info]
scores_all, boxes_all, data_dict_all = im_proposal(predictor, data_batch, data_names, scales)
t2 = time.time() - t
t = time.time()
for delta, (scores, boxes, data_dict, scale) in enumerate(zip(scores_all, boxes_all, data_dict_all, scales)):
# assemble proposals
dets = np.hstack((boxes, scores))
original_boxes.append(dets)
# filter proposals
keep = np.where(dets[:, 4:] > thresh)[0]
dets = dets[keep, :]
imdb_boxes.append(dets)
if vis:
vis_all_detection(data_dict['data'].asnumpy(), [dets], ['obj'], scale, cfg)
print 'generating %d/%d' % (idx + 1, imdb.num_images), 'proposal %d' % (dets.shape[0]), \
'data %.4fs net %.4fs' % (t1, t2 / test_data.batch_size)
idx += 1
assert len(imdb_boxes) == imdb.num_images, 'calculations not complete'
# save results
rpn_folder = os.path.join(imdb.result_path, 'rpn_data')
if not os.path.exists(rpn_folder):
os.mkdir(rpn_folder)
rpn_file = os.path.join(rpn_folder, imdb.name + '_rpn.pkl')
with open(rpn_file, 'wb') as f:
cPickle.dump(imdb_boxes, f, cPickle.HIGHEST_PROTOCOL)
if thresh > 0:
full_rpn_file = os.path.join(rpn_folder, imdb.name + '_full_rpn.pkl')
with open(full_rpn_file, 'wb') as f:
cPickle.dump(original_boxes, f, cPickle.HIGHEST_PROTOCOL)
print 'wrote rpn proposals to {}'.format(rpn_file)
return imdb_boxes
def im_detect(predictor, data_batch, data_names, scales, cfg):
output_all = predictor.predict(data_batch)
data_dict_all = [dict(zip(data_names, idata)) for idata in data_batch.data]
scores_all = []
pred_boxes_all = []
for output, data_dict, scale in zip(output_all, data_dict_all, scales):
if cfg.TEST.HAS_RPN:
rois = output['rois_output'].asnumpy()[:, 1:]
else:
rois = data_dict['rois'].asnumpy().reshape((-1, 5))[:, 1:]
im_shape = data_dict['data'].shape
# save output
scores = output['cls_prob_reshape_output'].asnumpy()[0]
bbox_deltas = output['bbox_pred_reshape_output'].asnumpy()[0]
# post processing
pred_boxes = bbox_pred(rois, bbox_deltas)
pred_boxes = clip_boxes(pred_boxes, im_shape[-2:])
# we used scaled image & roi to train, so it is necessary to transform them back
pred_boxes = pred_boxes / scale
scores_all.append(scores)
pred_boxes_all.append(pred_boxes)
return scores_all, pred_boxes_all, data_dict_all
def pred_eval(predictor, test_data, imdb, cfg, vis=False, thresh=1e-3, logger=None, ignore_cache=True):
"""
    Wrapper for calculating offline validation for faster data analysis.
    In this example, all thresholds are set by hand.
:param predictor: Predictor
    :param test_data: data iterator, must be non-shuffled
:param imdb: image database
:param vis: controls visualization
:param thresh: valid detection threshold
:return:
"""
print "$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$$"
det_file = os.path.join(imdb.result_path, imdb.name + '_detections.pkl')
# if os.path.exists(det_file) and not ignore_cache:
# with open(det_file, 'rb') as fid:
# all_boxes = cPickle.load(fid)
# info_str = imdb.evaluate_detections(all_boxes)
# if logger:
# logger.info('evaluate detections: \n{}'.format(info_str))
# return
assert vis or not test_data.shuffle
data_names = [k[0] for k in test_data.provide_data[0]]
if not isinstance(test_data, PrefetchingIter):
test_data = PrefetchingIter(test_data)
nms = py_nms_wrapper(cfg.TEST.NMS)
# limit detections to max_per_image over all classes
max_per_image = cfg.TEST.max_per_image
num_images = imdb.num_images
# all detections are collected into:
# all_boxes[cls][image] = N x 5 array of detections in
# (x1, y1, x2, y2, score)
all_boxes = [[[] for _ in range(num_images)]
for _ in range(imdb.num_classes)]
idx = 0
data_time, net_time, post_time = 0.0, 0.0, 0.0
t = time.time()
for im_info, data_batch in test_data:
t1 = time.time() - t
t = time.time()
scales = [iim_info[0, 2] for iim_info in im_info]
scores_all, boxes_all, data_dict_all = im_detect(predictor, data_batch, data_names, scales, cfg)
t2 = time.time() - t
t = time.time()
for delta, (scores, boxes, data_dict) in enumerate(zip(scores_all, boxes_all, data_dict_all)):
for j in range(1, imdb.num_classes):
indexes = np.where(scores[:, j] > thresh)[0]
cls_scores = scores[indexes, j, np.newaxis]
cls_boxes = boxes[indexes, 4:8] if cfg.CLASS_AGNOSTIC else boxes[indexes, j * 4:(j + 1) * 4]
cls_dets = np.hstack((cls_boxes, cls_scores))
keep = nms(cls_dets)
all_boxes[j][idx+delta] = cls_dets[keep, :]
if max_per_image > 0:
image_scores = np.hstack([all_boxes[j][idx+delta][:, -1]
for j in range(1, imdb.num_classes)])
if len(image_scores) > max_per_image:
image_thresh = np.sort(image_scores)[-max_per_image]
for j in range(1, imdb.num_classes):
keep = np.where(all_boxes[j][idx+delta][:, -1] >= image_thresh)[0]
all_boxes[j][idx+delta] = all_boxes[j][idx+delta][keep, :]
if vis:
boxes_this_image = [[]] + [all_boxes[j][idx+delta] for j in range(1, imdb.num_classes)]
vis_all_detection(data_dict['data'].asnumpy(), boxes_this_image, imdb.classes, scales[delta], cfg)
idx += test_data.batch_size
t3 = time.time() - t
t = time.time()
data_time += t1
net_time += t2
post_time += t3
print 'testing {}/{} data {:.4f}s net {:.4f}s post {:.4f}s'.format(idx, imdb.num_images, data_time / idx * test_data.batch_size, net_time / idx * test_data.batch_size, post_time / idx * test_data.batch_size)
if logger:
logger.info('testing {}/{} data {:.4f}s net {:.4f}s post {:.4f}s'.format(idx, imdb.num_images, data_time / idx * test_data.batch_size, net_time / idx * test_data.batch_size, post_time / idx * test_data.batch_size))
with open(det_file, 'wb') as f:
cPickle.dump(all_boxes, f, protocol=cPickle.HIGHEST_PROTOCOL)
info_str = imdb.evaluate_detections(all_boxes)
if logger:
logger.info('evaluate detections: \n{}'.format(info_str))
def vis_all_detection(im_array, detections, class_names, scale, cfg, threshold=1e-3):
"""
visualize all detections in one image
:param im_array: [b=1 c h w] in rgb
:param detections: [ numpy.ndarray([[x1 y1 x2 y2 score]]) for j in classes ]
:param class_names: list of names in imdb
:param scale: visualize the scaled image
:return:
"""
import matplotlib.pyplot as plt
import random
im = image.transform_inverse(im_array, cfg.network.PIXEL_MEANS)
plt.imshow(im)
for j, name in enumerate(class_names):
if name == '__background__':
continue
color = (random.random(), random.random(), random.random()) # generate a random color
dets = detections[j]
for det in dets:
bbox = det[:4] * scale
score = det[-1]
if score < threshold:
continue
rect = plt.Rectangle((bbox[0], bbox[1]),
bbox[2] - bbox[0],
bbox[3] - bbox[1], fill=False,
edgecolor=color, linewidth=3.5)
plt.gca().add_patch(rect)
plt.gca().text(bbox[0], bbox[1] - 2,
'{:s} {:.3f}'.format(name, score),
bbox=dict(facecolor=color, alpha=0.5), fontsize=12, color='white')
plt.show()
def draw_all_detection(im_array, detections, class_names, scale, cfg, threshold=1e-1):
"""
visualize all detections in one image
:param im_array: [b=1 c h w] in rgb
:param detections: [ numpy.ndarray([[x1 y1 x2 y2 score]]) for j in classes ]
:param class_names: list of names in imdb
:param scale: visualize the scaled image
:return:
"""
import cv2
import random
color_white = (255, 255, 255)
im = image.transform_inverse(im_array, cfg.network.PIXEL_MEANS)
# change to bgr
im = cv2.cvtColor(im, cv2.COLOR_RGB2BGR)
for j, name in enumerate(class_names):
if name == '__background__':
continue
color = (random.randint(0, 256), random.randint(0, 256), random.randint(0, 256)) # generate a random color
dets = detections[j]
for det in dets:
bbox = det[:4] * scale
score = det[-1]
if score < threshold:
continue
bbox = map(int, bbox)
cv2.rectangle(im, (bbox[0], bbox[1]), (bbox[2], bbox[3]), color=color, thickness=2)
cv2.putText(im, '%s %.3f' % (class_names[j], score), (bbox[0], bbox[1] + 10),
color=color_white, fontFace=cv2.FONT_HERSHEY_COMPLEX, fontScale=0.5)
return im
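# Illustrative evaluation flow (sketch only; symbol construction, the test
# data iterator and the loaded arg/aux params are project-specific and
# omitted here):
#   predictor = Predictor(sym, data_names, label_names, context=mx.gpu(0),
#                         provide_data=test_data.provide_data,
#                         provide_label=test_data.provide_label,
#                         arg_params=arg_params, aux_params=aux_params)
#   pred_eval(predictor, test_data, imdb, cfg, vis=False)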
| [
"[email protected]"
] | |
6eeba5437fbc07d0f6fd5a93ffba6c90b5d28a83 | 1751ea577fb63c0e34e469193c0de0519fa5d32b | /manage.py | 43213755618f352dfdc6090a6f0056cb9895fc95 | [] | no_license | zakuro9715/uragf | a987f0ae51c15496e48214eaffc5440a1dbc641e | 060658d2dc5a5a15bdd5f2301a60d050b38ddf3d | refs/heads/master | 2021-01-10T00:57:47.417420 | 2016-04-03T05:26:13 | 2016-04-03T05:26:13 | 36,533,479 | 0 | 2 | null | 2015-06-23T23:30:42 | 2015-05-29T23:01:38 | Python | UTF-8 | Python | false | false | 248 | py | #!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "uragf.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| [
"[email protected]"
] | |
97a31da16586f6ec9bbf6358ba911523bc468f8d | 02d0714edfef5a2d3630d7659c553c157e291e52 | /tempest/api/compute/security_groups/test_security_groups_negative.py | 120d327e3b83cbae71e3e514138e91218808ccef | [
"Apache-2.0"
] | permissive | atulbangar09/tempest | f07dced592481a7ec71a9c7469b7d50d30cdc171 | 9f5644ce2784cd882e86ac89236f8f8f828d7c43 | refs/heads/master | 2023-02-06T10:26:40.112917 | 2020-01-21T16:17:16 | 2020-01-21T16:17:16 | 234,520,093 | 0 | 0 | Apache-2.0 | 2020-01-17T09:52:53 | 2020-01-17T09:52:52 | null | UTF-8 | Python | false | false | 9,953 | py | # Copyright 2013 Huawei Technologies Co.,LTD.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from tempest_lib import decorators
from tempest_lib import exceptions as lib_exc
import testtools
from tempest.api.compute.security_groups import base
from tempest.common.utils import data_utils
from tempest import config
from tempest import test
CONF = config.CONF
class SecurityGroupsNegativeTestJSON(base.BaseSecurityGroupsTest):
@classmethod
def setup_clients(cls):
super(SecurityGroupsNegativeTestJSON, cls).setup_clients()
cls.client = cls.security_groups_client
@classmethod
def resource_setup(cls):
super(SecurityGroupsNegativeTestJSON, cls).resource_setup()
cls.neutron_available = CONF.service_available.neutron
def _generate_a_non_existent_security_group_id(self):
security_group_id = []
body = self.client.list_security_groups()['security_groups']
for i in range(len(body)):
security_group_id.append(body[i]['id'])
# Generate a non-existent security group id
while True:
non_exist_id = data_utils.rand_int_id(start=999)
if self.neutron_available:
non_exist_id = data_utils.rand_uuid()
if non_exist_id not in security_group_id:
break
return non_exist_id
@test.attr(type=['negative'])
@test.idempotent_id('673eaec1-9b3e-48ed-bdf1-2786c1b9661c')
@test.services('network')
def test_security_group_get_nonexistent_group(self):
        # Negative test: should not be able to GET the details
        # of a non-existent Security Group
non_exist_id = self._generate_a_non_existent_security_group_id()
self.assertRaises(lib_exc.NotFound, self.client.show_security_group,
non_exist_id)
@decorators.skip_because(bug="1161411",
condition=CONF.service_available.neutron)
@test.attr(type=['negative'])
@test.idempotent_id('1759c3cb-b0fc-44b7-86ce-c99236be911d')
@test.services('network')
def test_security_group_create_with_invalid_group_name(self):
        # Negative test: Security Group should not be created with a group
        # name that is an empty string, whitespace only, or longer than 255 chars
s_description = data_utils.rand_name('description')
# Create Security Group with empty string as group name
self.assertRaises(lib_exc.BadRequest,
self.client.create_security_group,
name="", description=s_description)
# Create Security Group with white space in group name
self.assertRaises(lib_exc.BadRequest,
self.client.create_security_group,
name=" ", description=s_description)
# Create Security Group with group name longer than 255 chars
s_name = 'securitygroup-'.ljust(260, '0')
self.assertRaises(lib_exc.BadRequest,
self.client.create_security_group,
name=s_name, description=s_description)
@decorators.skip_because(bug="1161411",
condition=CONF.service_available.neutron)
@test.attr(type=['negative'])
@test.idempotent_id('777b6f14-aca9-4758-9e84-38783cfa58bc')
@test.services('network')
def test_security_group_create_with_invalid_group_description(self):
# Negative test: Security Group should not be created with description
# longer than 255 chars. Empty description is allowed by the API
# reference, however.
s_name = data_utils.rand_name('securitygroup')
# Create Security Group with group description longer than 255 chars
s_description = 'description-'.ljust(260, '0')
self.assertRaises(lib_exc.BadRequest,
self.client.create_security_group,
name=s_name, description=s_description)
@test.idempotent_id('9fdb4abc-6b66-4b27-b89c-eb215a956168')
@testtools.skipIf(CONF.service_available.neutron,
"Neutron allows duplicate names for security groups")
@test.attr(type=['negative'])
@test.services('network')
def test_security_group_create_with_duplicate_name(self):
        # Negative test: Security Group with a duplicate name should not
        # be created
s_name = data_utils.rand_name('securitygroup')
s_description = data_utils.rand_name('description')
self.create_security_group(name=s_name, description=s_description)
# Now try the Security Group with the same 'Name'
self.assertRaises(lib_exc.BadRequest,
self.client.create_security_group,
name=s_name, description=s_description)
@test.attr(type=['negative'])
@test.idempotent_id('36a1629f-c6da-4a26-b8b8-55e7e5d5cd58')
@test.services('network')
def test_delete_the_default_security_group(self):
        # Negative test: deletion of the "default" Security Group should fail
default_security_group_id = None
body = self.client.list_security_groups()['security_groups']
for i in range(len(body)):
if body[i]['name'] == 'default':
default_security_group_id = body[i]['id']
break
# Deleting the "default" Security Group
self.assertRaises(lib_exc.BadRequest,
self.client.delete_security_group,
default_security_group_id)
@test.attr(type=['negative'])
@test.idempotent_id('6727c00b-214c-4f9e-9a52-017ac3e98411')
@test.services('network')
def test_delete_nonexistent_security_group(self):
        # Negative test: deletion of a non-existent Security Group should fail
non_exist_id = self._generate_a_non_existent_security_group_id()
self.assertRaises(lib_exc.NotFound,
self.client.delete_security_group, non_exist_id)
@test.attr(type=['negative'])
@test.idempotent_id('1438f330-8fa4-4aeb-8a94-37c250106d7f')
@test.services('network')
def test_delete_security_group_without_passing_id(self):
        # Negative test: deletion of a Security Group without passing an ID
        # should fail
self.assertRaises(lib_exc.NotFound,
self.client.delete_security_group, '')
@test.idempotent_id('00579617-fe04-4e1c-9d08-ca7467d2e34b')
@testtools.skipIf(CONF.service_available.neutron,
"Neutron does not check the security group ID")
@test.attr(type=['negative'])
@test.services('network')
def test_update_security_group_with_invalid_sg_id(self):
# Update security_group with invalid sg_id should fail
s_name = data_utils.rand_name('sg')
s_description = data_utils.rand_name('description')
# Create a non int sg_id
sg_id_invalid = data_utils.rand_name('sg')
self.assertRaises(lib_exc.BadRequest,
self.client.update_security_group, sg_id_invalid,
name=s_name, description=s_description)
@test.idempotent_id('cda8d8b4-59f8-4087-821d-20cf5a03b3b1')
@testtools.skipIf(CONF.service_available.neutron,
"Neutron does not check the security group name")
@test.attr(type=['negative'])
@test.services('network')
def test_update_security_group_with_invalid_sg_name(self):
# Update security_group with invalid sg_name should fail
securitygroup = self.create_security_group()
self.assertIn('id', securitygroup)
securitygroup_id = securitygroup['id']
# Update Security Group with group name longer than 255 chars
s_new_name = 'securitygroup-'.ljust(260, '0')
self.assertRaises(lib_exc.BadRequest,
self.client.update_security_group,
securitygroup_id, name=s_new_name)
@test.idempotent_id('97d12b1c-a610-4194-93f1-ba859e718b45')
@testtools.skipIf(CONF.service_available.neutron,
"Neutron does not check the security group description")
@test.attr(type=['negative'])
@test.services('network')
def test_update_security_group_with_invalid_sg_des(self):
# Update security_group with invalid sg_des should fail
securitygroup = self.create_security_group()
self.assertIn('id', securitygroup)
securitygroup_id = securitygroup['id']
# Update Security Group with group description longer than 255 chars
s_new_des = 'des-'.ljust(260, '0')
self.assertRaises(lib_exc.BadRequest,
self.client.update_security_group,
securitygroup_id, description=s_new_des)
@test.attr(type=['negative'])
@test.idempotent_id('27edee9c-873d-4da6-a68a-3c256efebe8f')
@test.services('network')
def test_update_non_existent_security_group(self):
# Update a non-existent Security Group should Fail
non_exist_id = self._generate_a_non_existent_security_group_id()
s_name = data_utils.rand_name('sg')
s_description = data_utils.rand_name('description')
self.assertRaises(lib_exc.NotFound,
self.client.update_security_group,
non_exist_id, name=s_name,
description=s_description)
| [
"[email protected]"
] | |
522c6512806730f37abd8e337751cf53e361a3fb | 0566cf76b456518875edecece15e763a36a4795f | /scrapers/1channelmovie_com.py | 9fdd83bce3433ea161b1b921f96d70c7f22a427f | [] | no_license | theclonedude/Scraping_BeautifulSoup_phantomjs | 684b1f7a993e0d2555daa7a5455cf19bd29b0b1b | faf653feae46c21a72d13b2123cdebdb2f7c05d8 | refs/heads/master | 2023-03-16T19:36:14.867361 | 2018-06-14T14:21:02 | 2018-06-14T14:21:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,172 | py | import re
from sandcrawler.scraper import ScraperBase, SimpleScraperBase, ScraperFetchException
class OneChannelMovieCom(SimpleScraperBase):
BASE_URL = 'http://www.1channelmovie.com'
OTHERS_URLS = ['http://1channelmovie.com', 'http://www.1channelmovie.com']
# it seems we can merge this with PrimewireAg scraper
def setup(self):
self.register_scraper_type(ScraperBase.SCRAPER_TYPE_OSP)
self.search_term_language = "eng"
self.register_media(ScraperBase.MEDIA_TYPE_TV)
self.register_media(ScraperBase.MEDIA_TYPE_FILM)
self.register_url(
ScraperBase.URL_TYPE_SEARCH,
self.BASE_URL,
)
self.register_url(
ScraperBase.URL_TYPE_LISTING,
self.BASE_URL)
def _fetch_next_button(self, soup):
link = soup.select_one('div.pagination a[rel="next"]')
if link:
return link.href
else:
return None
def _get_search_url(self, search_term, media_type):
return self.BASE_URL + '/search/%s/' % \
self.util.quote(search_term)
def search(self, search_term, media_type, **extra):
search_url = self._get_search_url(search_term, media_type)
for soup in self.soup_each([search_url]):
self._parse_search_page(soup)
def _parse_search_page(self, soup):
results = soup.select('div.index_item a')
if not results or len(results) == 0:
return self.submit_search_no_results()
for link in results:
# Skip extra, not useful links.
if re.match('Watch (.*) for FREE', link['title']):
continue
self.submit_search_result(
link_url=link['href'],
link_title=link['title']
)
next_button = self._fetch_next_button(soup)
if next_button and self.can_fetch_next():
soup = self.get_soup(next_button)
self._parse_search_page(soup)
def parse(self, parse_url, **extra):
for soup in self.soup_each([parse_url, ]):
# Movie pages have the versions linked directly off the main page.
self._parse_versionlinks(soup)
# TV you need to go a page deep (ie each episode)
for link in soup.select('div.tv_episode_item a'):
try:
episode_soup = self.get_soup(link['href'])
self._parse_versionlinks(episode_soup)
except Exception as e:
self.log.exception(e)
def _parse_versionlinks(self, soup):
for link in soup.select('span.movie_version_link a'):
# Follow the link to get the 'real' url.
url = link['href']
if 'marketing' in url:
continue
if not url.startswith('http'):
url = self.BASE_URL + url
try:
followed_link = self.get(url)
except Exception:
self.log.warning('Failed to follow link.')
else:
self.submit_parse_result(index_page_title=soup.title.text.strip(),
link_url=followed_link.url,
link_name=link['title']
)
class VodlyTo(OneChannelMovieCom):
BASE_URL = 'http://vodly.cr'
OTHER_URLS = ['http://vodly.to', ]
SINGLE_RESULTS_PAGE = True
#def setup(self):
# raise NotImplementedError('The website is with "Be right back" message on the front page')
def get(self, url, **kwargs):
return super(self.__class__, self).get(
url, allowed_errors_codes=[404, 403], **kwargs)
def _get_search_url(self, search_term, media_type):
return self.BASE_URL + '/movies/filter?genre=&year=&actor={}&director=&submit='.format(search_term)
def _parse_search_page(self, soup):
info_box = soup.select_one('h3[class="comment-reply-title"]')
if info_box and info_box.text.find("No movies were found based on the above search") > -1:
return self.submit_search_no_results()
found = 0
for link in soup.select('div.item-img a'):
if link:
self.submit_search_result(
link_url=link.href,
link_title=link.text,
image=self.util.find_image_src_or_none(link, 'img')
)
found = 1
if found == 0:
self.submit_search_no_results()
def _follow_link(self, link):
soup = self.get_soup(link)
result = soup.select_one('div.video-section a')
return result and result.href or None
def parse(self, parse_url, **extra):
for soup in self.soup_each([parse_url, ]):
title = soup.select_one('h1').text
for link in soup.select('a.external_link'):
url = self._follow_link(link.href)
if url:
self.submit_parse_result(
link_url=url,
link_title=title,
) | [
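# Illustrative invocation (sketch only; construction and scheduling are
# normally handled by the surrounding sandcrawler framework):
#   scraper = OneChannelMovieCom()
#   scraper.search("example title", ScraperBase.MEDIA_TYPE_FILM)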
"[email protected]"
] | |
75ade63013c6ce66d590705e4b19781f46ef7b7e | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03573/s700443478.py | 585b517bf86ef42cb4ad1f8379936f4ffc195509 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 94 | py | X=list(map(int,input().split()))
Y=sorted(X)
if Y[0]==Y[1]:
    print(Y[2])
else:
    print(Y[0])
"[email protected]"
] | |
fa5e3a6fc50b302b03a1dc690503f0ab53d00c0e | bb45e66a1b438cb9bb8eb2cdcd54d82287338fdf | /pyiem/nws/products/lsr.py | b4e889ea5e7190bcf1cbdd8fe991683969be9c86 | [] | no_license | aarande/pyIEM | 685c50b6af3e2cc936e434d7152e67f4ffc1e0ed | e9467710e6908ca147ebe88c2ee3559484503fe2 | refs/heads/master | 2021-01-21T00:02:22.123066 | 2015-02-27T15:27:45 | 2015-02-27T15:27:45 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,383 | py | '''
Local Storm Report
'''
# Stand Library Imports
import datetime
import re
# Third party
import pytz
from shapely.geometry import Point as ShapelyPoint
SPLITTER = re.compile(r"(^[0-9].+?\n^[0-9].+?\n)((?:.*?\n)+?)(?=^[0-9]|$)",
re.MULTILINE)
from pyiem.nws.product import TextProduct, TextProductException
from pyiem import reference
from pyiem.nws.lsr import LSR
class LSRProductException(TextProductException):
''' Something we can raise when bad things happen! '''
pass
class LSRProduct(TextProduct):
''' Represents a text product of the LSR variety '''
def __init__(self, text, utcnow=None):
''' constructor '''
self.lsrs = []
self.duplicates = 0
TextProduct.__init__(self, text, utcnow=utcnow)
def get_temporal_domain(self):
''' Return the min and max timestamps of lsrs '''
valids = []
for lsr in self.lsrs:
valids.append( lsr.valid )
if len(valids) == 0:
return None, None
return min(valids), max(valids)
def is_summary(self):
''' Returns is this LSR is a summary or not '''
return self.unixtext.find("...SUMMARY") > 0
def get_url(self, baseuri):
''' Get the URL of this product '''
min_time, max_time = self.get_temporal_domain()
wfo = self.source[1:]
return "%s#%s/%s/%s" % (baseuri, wfo,
min_time.strftime("%Y%m%d%H%M"),
max_time.strftime("%Y%m%d%H%M") )
def get_jabbers(self, uri):
''' return a text and html variant for Jabber stuff '''
res = []
wfo = self.source[1:]
url = self.get_url(uri)
for mylsr in self.lsrs:
if mylsr.duplicate:
continue
time_fmt = "%-I:%M %p %Z"
url = "%s#%s/%s/%s" % (uri, mylsr.wfo,
mylsr.utcvalid.strftime("%Y%m%d%H%M"),
mylsr.utcvalid.strftime("%Y%m%d%H%M") )
if mylsr.valid.day != self.utcnow.day:
time_fmt = "%-d %b, %-I:%M %p %Z"
xtra = {
'product_id': self.get_product_id(),
'channels': "LSR%s,LSR.ALL,LSR.%s" % (mylsr.wfo,
mylsr.typetext.replace(" ", "_")),
'geometry': 'POINT(%s %s)' % (mylsr.get_lon(), mylsr.get_lat()),
'ptype' : mylsr.get_dbtype(),
'valid' : mylsr.utcvalid.strftime("%Y%m%dT%H:%M:00"),
'category' : 'LSR',
'twitter' : "%s %s" % (mylsr.tweet(), url),
'lat': str(mylsr.get_lat()),
'long': str(mylsr.get_lon()),
}
html = ("<p>%s [%s Co, %s] %s <a href=\"%s\">reports %s</a> at "
+"%s -- %s</p>") % (
_mylowercase(mylsr.city), mylsr.county.title(), mylsr.state, mylsr.source,
url, mylsr.mag_string(),
mylsr.valid.strftime(time_fmt), mylsr.remark)
plain = "%s [%s Co, %s] %s reports %s at %s -- %s %s" % (
_mylowercase(mylsr.city), mylsr.county.title(),
mylsr.state, mylsr.source,
mylsr.mag_string(),
mylsr.valid.strftime(time_fmt), mylsr.remark, url)
res.append( [plain, html, xtra])
        if self.is_summary():
            # Recompute the product-level URL here: the loop above reassigns
            # ``url`` to the last individual report's link.
            url = self.get_url(uri)
            extra_text = ""
if self.duplicates > 0:
extra_text = (", %s out of %s reports were previously "
+"sent and not repeated here.") % (self.duplicates,
len(self.lsrs))
text = "%s: %s issues Summary Local Storm Report %s %s" % (
wfo, wfo, extra_text, url)
html = ("<p>%s issues "
+"<a href='%s'>Summary Local Storm Report</a>%s</p>") % (
wfo, url, extra_text)
xtra = {
'product_id': self.get_product_id(),
'channels': 'LSR%s' % (wfo,),
}
res.append([text, html, xtra] )
return res
def _mylowercase(text):
''' Specialized lowercase function '''
tokens = text.split()
for i,t in enumerate(tokens):
if len(t) > 3:
tokens[i] = t.title()
elif t in ['N', 'NNE', 'NNW', 'NE',
'E', 'ENE', 'ESE', 'SE',
'S', 'SSE', 'SSW', 'SW',
'W', 'WSW', 'WNW', 'NW']:
continue
return " ".join(tokens)
def parse_lsr(text):
''' Emit a LSR object based on this text!
0914 PM HAIL SHAW 33.60N 90.77W
04/29/2005 1.00 INCH BOLIVAR MS EMERGENCY MNGR
'''
lines = text.split("\n")
if len(lines) < 2:
raise LSRProductException("LSR text is too short |%s|" % (
text.replace("\n", "<NL>"),))
lsr = LSR()
lsr.text = text
tokens = lines[0].split()
h12 = tokens[0][:-2]
mm = tokens[0][-2:]
ampm = tokens[1]
dstr = "%s:%s %s %s" % (h12, mm, ampm, lines[1][:10])
lsr.valid = datetime.datetime.strptime(dstr, "%I:%M %p %m/%d/%Y")
lsr.typetext = lines[0][12:29].strip().upper()
lsr.city = lines[0][29:53].strip()
tokens = lines[0][53:].strip().split()
lat = float(tokens[0][:-1])
lon = 0 - float(tokens[1][:-1])
lsr.geometry = ShapelyPoint((lon,lat))
lsr.consume_magnitude( lines[1][12:29].strip() )
lsr.county = lines[1][29:48].strip()
lsr.state = lines[1][48:50]
lsr.source = lines[1][53:].strip()
if len(lines) > 2:
meat = " ".join( lines[2:] ).strip()
lsr.remark = " ".join( meat.split())
return lsr
def parser(text, utcnow=None, ugc_provider=None, nwsli_provider=None):
''' Helper function that actually converts the raw text and emits an
LSRProduct instance or returns an exception'''
prod = LSRProduct(text, utcnow)
for match in SPLITTER.finditer(prod.unixtext):
lsr = parse_lsr("".join(match.groups()))
lsr.wfo = prod.source[1:]
lsr.assign_timezone( prod.tz, prod.z )
prod.lsrs.append( lsr )
return prod | [
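# Illustrative usage (sketch; the file name is hypothetical and the text
# must be a complete raw product, including the WMO header):
#   text = open("LSRDMX.txt").read()
#   prod = parser(text)
#   for lsr in prod.lsrs:
#       print lsr.valid, lsr.typetext, lsr.city, lsr.mag_string()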
"[email protected]"
] | |
8083e994eaa7d834c3bfa85f4db0f2406e2291d2 | 709bd5f2ecc69a340da85f6aed67af4d0603177e | /saleor/account/backends/google.py | 6c8e205b957de89d66aa8f067b3271926f386c40 | [
"BSD-3-Clause"
] | permissive | Kenstogram/opensale | 41c869ee004d195bd191a1a28bf582cc6fbb3c00 | 5102f461fa90f2eeb13b9a0a94ef9cb86bd3a3ba | refs/heads/master | 2022-12-15T02:48:48.810025 | 2020-03-10T02:55:10 | 2020-03-10T02:55:10 | 163,656,395 | 8 | 0 | BSD-3-Clause | 2022-12-08T01:31:09 | 2018-12-31T09:30:41 | Python | UTF-8 | Python | false | false | 222 | py | from social_core.backends.google import GoogleOAuth2
from . import BaseBackend
from ...site import AuthenticationBackends
class CustomGoogleOAuth2(BaseBackend, GoogleOAuth2):
DB_NAME = AuthenticationBackends.GOOGLE
| [
"[email protected]"
] | |
56c555025b131f114b3d96bcf46ab5d8b4e5c909 | 4252102a1946b2ba06d3fa914891ec7f73570287 | /pylearn2/scripts/jobman/tester.py | 6c7685167c84cb7e7757a0b616da9bcc0868a95a | [] | no_license | lpigou/chalearn2014 | 21d487f314c4836dd1631943e20f7ab908226771 | 73b99cdbdb609fecff3cf85e500c1f1bfd589930 | refs/heads/master | 2020-05-17T00:08:11.764642 | 2014-09-24T14:42:00 | 2014-09-24T14:42:00 | 24,418,815 | 2 | 3 | null | null | null | null | UTF-8 | Python | false | false | 3,032 | py | """
This is an example script that inserts pylearn2 YAML code into a jobman database.
The code below defines a yaml template string in state.yaml_template,
and the values of its hyper-parameters in state.hyper_parameters, and
run the code that is located in state.extract_results on this model
using jobman.
Actually, we add the job here and it can be launched later as usual
(please check how to start jobs using jobman from the jobman tutorial
website)
"""
from jobman.tools import DD, flatten
from jobman import api0, sql
from pylearn2.scripts.jobman import experiment
def result_extractor(train_obj):
"""
    This is a user-specific function that jobman uses to extract results.
    The returned dictionary will be saved in state.results.
"""
import numpy
channels = train_obj.model.monitor.channels
train_cost = channels['sgd_cost(ExhaustiveSGD[X])']
best_epoch = numpy.argmin(train_cost.val_record)
best_rec_error = train_cost.val_record[best_epoch]
batch_num = train_cost.batch_record[best_epoch]
return dict(
best_epoch=best_epoch,
train_rec_error=best_rec_error,
batch_num=batch_num)
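# Rough sketch (an assumption for illustration, not jobman's literal
# internals) of how the hook registered below in state.extract_results is
# applied once training finishes:
#
#   train_obj = ...  # Train object built from state.yaml_template filled in
#                    # with state.hyper_parameters
#   state.results = result_extractor(train_obj)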
if __name__ == '__main__':
db = api0.open_db('sqlite:///test.db?table=test_jobman_pylearn2')
state = DD()
state.yaml_template = '''
!obj:pylearn2.train.Train {
"dataset": !obj:pylearn2.datasets.npy_npz.NpyDataset &dataset {
"file" : "%(file)s"
},
"model": !obj:pylearn2.autoencoder.ContractiveAutoencoder {
"nvis" : %(nvis)d,
"nhid" : %(nhid)d,
"irange" : 0.05,
"act_enc": "sigmoid", #for some reason only sigmoid function works
"act_dec": "sigmoid",
},
"algorithm": !obj:pylearn2.training_algorithms.sgd.SGD {
"learning_rate" : %(learning_rate)f,
"batch_size" : %(batch_size)d,
"monitoring_batches" : 5,
"monitoring_dataset" : *dataset,
"cost" : !obj:pylearn2.costs.cost.SumOfCosts {
"costs": [
[1.0, !obj:pylearn2.costs.autoencoder.MeanBinaryCrossEntropy {} ],
[%(coefficient)f, !obj:pylearn2.costs.cost.MethodCost { method: 'contraction_penalty' } ]
]
},
"termination_criterion" : %(term_crit)s,
}
}
'''
state.hyper_parameters = {
"file": "${PYLEARN2_DATA_PATH}/UTLC/pca/sylvester_train_x_pca32.npy",
"nvis": 32,
"nhid": 6,
"learning_rate": 0.1,
"batch_size": 10,
"coefficient": 0.5,
"term_crit": {
"__builder__": "pylearn2.training_algorithms.sgd.EpochCounter",
"max_epochs": 2
}
}
state.extract_results = "pylearn2.scripts.jobman.tester.result_extractor"
sql.insert_job(
experiment.train_experiment,
flatten(state),
db,
force_dup=True)
| [
"[email protected]"
] | |
77f509fce29adca2f0ee33c911a594f1f01a20a5 | f2e503885666f35f9c50c9cff411c3a47fb81093 | /andelabs1.py | fd8af25851ab59ffa321b1e06b42dd65a50dd245 | [] | no_license | SamwelOpiyo/andelabs | 87c7a6b0ae078afd0a4b620dc4f0a7ba898e006c | fa7b6fe14f3942d2ee47f81e6a78a375dd7e2210 | refs/heads/master | 2021-01-09T20:43:23.986700 | 2017-01-22T19:45:19 | 2017-01-22T19:45:19 | 65,727,292 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 456 | py | def prime():
    integer = int(raw_input("Enter an Integer:"))
    prime_list = list()
    if integer == 2:
        prime_list.append(integer)
    elif integer == 3:
        prime_list.append(2)
        prime_list.append(integer)
    else:
        prime_list.append(2)
        prime_list.append(3)
        for k in range(4, integer + 1):
            # trial division (replaces an undefined prime_check call):
            # k is prime when no d in [2, sqrt(k)] divides it
            if all(k % d != 0 for d in range(2, int(k ** 0.5) + 1)):
                prime_list.append(k)
    return prime_list
print prime()
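# Example: entering 10 prints [2, 3, 5, 7]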
| [
"[email protected]"
] | |
58aad0ee9098bd5db56320c38d09085fce097091 | 9023909d2776e708755f98d5485c4cffb3a56000 | /oneflow/compatible_single_client_python/nn/optimizer/adamw.py | a4f8cddbc181f472ebc8eeb4a1f3cbfcd18a3c89 | [
"Apache-2.0"
] | permissive | sailfish009/oneflow | f6cf95afe67e284d9f79f1a941e7251dfc58b0f7 | 4780aae50ab389472bd0b76c4333e7e0a1a56ef7 | refs/heads/master | 2023-06-24T02:06:40.957297 | 2021-07-26T09:35:29 | 2021-07-26T09:35:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,090 | py | """
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from typing import List, Dict, Callable, Union, Iterator, Tuple
import collections
from oneflow.compatible import single_client as flow
from oneflow.compatible.single_client.python.oneflow_export import (
oneflow_export,
experimental_api,
)
from oneflow.compatible.single_client.python.nn.parameter import Parameter
from oneflow.compatible.single_client.python.nn.optimizer.optimizer import (
ParamGroup,
Optimizer,
)
@oneflow_export("optim.AdamW")
@experimental_api
class AdamW(Optimizer):
r"""Implements AdamW algorithm.
The original Adam algorithm was proposed in `Adam: A Method for Stochastic Optimization`_.
The AdamW variant was proposed in `Decoupled Weight Decay Regularization`_.
    This is the optimizer implementing the Adam-weight-decay algorithm, which
    decouples the weight decay term from the gradient-based update.
    (For more details please refer to `Adam-weight-decay <https://www.fast.ai/2018/07/02/adam-weight-decay/>`_.)
the equation of parameters updating is:
.. math::
& V_t = \beta_1*V_{t-1} + (1-\beta_1)*grad
& S_t = \beta_2*S_{t-1} + (1-\beta_2)*{grad} \odot {grad}
& \hat{g} = learning\_rate*(\frac{{V_t}}{\sqrt{{S_t}}+\epsilon}+\lambda*param_{old})
& param_{new} = param_{old} - \hat{g}
Args:
params (iterable): iterable of parameters to optimize or dicts defining
parameter groups
lr (float, optional): learning rate (default: 1e-3)
betas (Tuple[float, float], optional): coefficients used for computing
running averages of gradient and its square (default: (0.9, 0.999))
eps (float, optional): term added to the denominator to improve
numerical stability (default: 1e-8)
weight_decay (float, optional): weight decay (L2 penalty) (In the equation is λ, default: 0)
scale (float, optional): the scale factor of loss (default: 1.0)
.. _Adam\: A Method for Stochastic Optimization:
https://arxiv.org/abs/1412.6980
.. _Decoupled Weight Decay Regularization:
https://arxiv.org/abs/1711.05101
"""
def __init__(
self,
parameters: Union[Iterator[Parameter], List[Dict]],
lr: float = 1e-3,
betas: Tuple[float, float] = (0.9, 0.999),
eps: float = 1e-8,
weight_decay: float = 0,
amsgrad: bool = False,
scale: float = 1.0,
):
super().__init__()
assert lr >= 0.0, f"Invalid learning rate: {lr}"
assert eps >= 0.0, f"Invalid epsilon value: {eps}"
assert (
betas[0] >= 0.0 and betas[0] < 1.0
), f"Invalid beta parameter at index 0: {betas[0]}"
assert (
betas[1] >= 0.0 and betas[1] < 1.0
), f"Invalid beta parameter at index 1: {betas[1]}"
assert weight_decay >= 0.0, f"Invalid weight_decay value: {weight_decay}"
assert scale > 0.0, f"Invalid scale factor: {scale}"
assert amsgrad is False, "Not support AMSGrad now!"
self._default_options["lr"] = lr
self._default_options["eps"] = eps
self._default_options["betas"] = betas
self._default_options["weight_decay"] = weight_decay
self._default_options["amsgrad"] = amsgrad
self._default_options["scale"] = scale
# Add parameters
if isinstance(parameters, collections.abc.Iterator):
self.param_groups.append(ParamGroup(parameters, self._default_options))
else: # List[Dict]
for param in parameters:
self.param_groups.append(ParamGroup(param, self._default_options))
for param_group in self.param_groups:
for param in param_group.parameters:
assert param.is_leaf, "parameters must be leaf tensor"
self._state[param] = dict()
self._state[param]["exp_avg"] = flow.experimental.zeros_like(param)
self._state[param]["exp_avg_sq"] = flow.experimental.zeros_like(param)
self._op = (
flow.builtin_op("adam_update")
.Input("model")
.Input("model_diff")
.Input("m")
.Input("v")
.Attr("l1", 0.0)
.Attr("l2", 0.0)
.Build()
)
def step(self, closure: Callable = None):
"""Performs a single optimization step.
Args:
closure (callable, optional): A closure that reevaluates the model
and returns the loss.
"""
with flow.no_grad():
loss = None
if closure is not None:
loss = closure()
for param_group in self.param_groups:
kwargs = {
"learning_rate_val": param_group["lr"],
"scale": param_group["scale"],
"weight_decay": param_group["weight_decay"],
"beta1": param_group["betas"][0],
"beta2": param_group["betas"][1],
"epsilon": param_group["eps"],
}
for param in param_group.parameters:
if param.grad is None:
continue
m_tensor = self._state[param]["exp_avg"]
v_tensor = self._state[param]["exp_avg_sq"]
self._op(
param, param.grad, m_tensor, v_tensor, **kwargs,
)
self._state["step"] = self._state["step"] + 1
return loss
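# Minimal usage sketch (an illustration only -- ``model`` and the forward
# pass are hypothetical; the class itself is registered above through
# oneflow_export as ``optim.AdamW``):
#
#   optimizer = flow.optim.AdamW(model.parameters(), lr=1e-3,
#                                weight_decay=1e-2)
#   loss = model(batch).sum()  # hypothetical forward pass
#   loss.backward()
#   optimizer.step()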
| [
"[email protected]"
] | |
b24e15bf9e9d28c0fd5e65f95e1533732b603aff | 8c95c48d48a5a6a351de57d90b56eb6e2642914c | /TransformGenotypes_ClassesTop.py | f75e5fd761662d53e65396980603dbd5c1f2af52 | [
"curl",
"Apache-2.0"
] | permissive | janaobsteter/Genotype_CODES | e24dafcf00476a9e0cc989b3c822bd13a391f76f | 8adf70660ebff4dd106c666db02cdba8b8ce4f97 | refs/heads/master | 2021-08-09T14:50:50.721598 | 2021-01-27T14:23:16 | 2021-01-27T14:23:16 | 63,061,123 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 16,607 | py | # -*- coding: utf-8 -*-
#This is a script to add newly genotyped individuals and downloaded GeneSeek zip file (Final Reports)
#to the existing database of the latest genotypes
#Pipeline:
#1) define dictionaries: chips, chip -> genotype package -> animal ids,
#   and genotype package -> download date
#2) create temp directory within breed_TEMP/DownloadDate
#3) create directory if not existing, unzip file
#4) for each genotype package: unzip FinalReport and SNP_Map, fix spurious
#   strings within the files, adjust the peddar.param file,
#   run pedda_row to transform FinalReports to PLINK PED and MAP formats,
#   and write names to dictionaries
import os
os.chdir("/home/jana/Genotipi/Genotipi_CODES/")
import sys
import zipfile
import shutil
from collections import defaultdict
import csv
import GenFiles
import commands
import tempfile
import pandas as pd
def remove_from_zip(zipfname, *filenames):
tempdir = tempfile.mkdtemp()
try:
tempname = os.path.join(tempdir, 'new.zip')
with zipfile.ZipFile(zipfname, 'r') as zipread:
with zipfile.ZipFile(tempname, 'w') as zipwrite:
for item in zipread.infolist():
if item.filename not in filenames:
data = zipread.read(item.filename)
zipwrite.writestr(item, data)
shutil.move(tempname, zipfname)
finally:
shutil.rmtree(tempdir)
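# Example (hypothetical file names): rewrite package.zip in place without a
# stale member, as done further below before appending the corrected report:
#   remove_from_zip('package.zip', 'old_FinalReport.txt')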
########################################################
#set directories and file names
########################################################
# date='09082018'
# pasma="Rjava"
# AlleleFormat="top"
# zip_file="we_mr_19042018_IDB191.zip"
# merge_ask='N'
#Ask the user for the current date (date of download) and breed
date = raw_input("Enter the date (today): ")
pasma = raw_input("Enter the breed [Rjava/Crnobela/Lisasta]: ")
AlleleFormat=raw_input("Enter the desired allele coding [top / forward / ab]: ")
zip_file = raw_input("Enter the name of the downloaded zip file: ")
merge_ask=raw_input("Do you want to merge newly downloaded genotypes to the Latest Genotypes files (by chip)? [Y/N] ")
#ask what action does the user want to perform
#action = raw_input("Do you want to extract SNPs for parental verification [Y/N] ")
action = 'N'
if action == 'Y':
PVSNPs = input("How many SNPs would you like to use for parental verification? ")
#ask whether you want to remove original zip
rmOriginalZip=raw_input('Remove original zip? [Y/N] ')
#create directory path to hold current temp genotype files within Genotipi_DATA and breed directory
tempDir = "/home/jana/Genotipi/Genotipi_DATA/" + pasma + "_TEMP/Genotipi_" + str(date) + "/"
#PEDDAROW directory
peddarow="/home/jana/Genotipi/TransformGeno/SNPchimpRepo/source_codes/PEDDA_ROW/"
#Zip latest
Zip_lat="/home/jana/Genotipi/Genotipi_DATA/Genotipi_latest/" + pasma + "/Top/ZipGenoFiles/"
#Zip_lat="/home/jana/Genotipi/Genotipi_DATA/Genotipi_latest/Rjava/Zip/"
PLINKDIR = '/home/jana/Genotipi/Genotipi_DATA/Genotipi_latest/' + pasma + '/Top/'
#PLINKDIR="/home/jana/Genotipi/Genotipi_DATA/Genotipi_latest/" + pasma + "/Top/"
#Genotipi_latest directory - sploh ni v uporabi kasneje v skriptu
#Gen_lat = "/home/jana/Genotipi/Genotipi_DATA/Genotipi_latest/"+pasma+"/"
#PLINKDIR = "/run/user/1000/gvfs/smb-share:server=kis-h2.si,share=kisdfs/ZIV/vol1/ZIV/VSI/JanaO/Genotipi/TopPLINK/"
#path to Zanardi
ZanDir="/home/jana/Genotipi/TransformGeno/Zanardi/"
CodeDir = "/home/jana/Genotipi/TransformGeno/"
DownloadDir = "/home/jana/Downloads/"
#File with a list of 800 SNPs for parentage verification
SNP800="/home/jana/Genotipi/ParentalVerification_SNPSNP/Names_800SNPs.txt"
#file with IDs and seq for the animals
Breed_IDSeq="/home/jana/Genotipi/TransformGeno/" + pasma + "_seq_ID.csv"
#SNP coding
SNPSifrant="/home/jana/Genotipi/ParentalVerification_SNPSNP/Sifrant_SNP.csv"
#name of the file
zipPackage = zip_file
#########################################################################################################
##########################################################################################################
##########################################################################################################
#create dictionaries
##########################################################################################################
##########################################################################################################
#create a dictionary of the number of SNPs and corresponding chip names
chips = GenFiles.chips
SNP800Sifrant_Dict = GenFiles.SNP800Sifrant_Dict
GenoFile = defaultdict(set)
SampleIDs = defaultdict(list)
PedFiles = defaultdict(list)
MapFiles = defaultdict(list)
PedFilesQC = defaultdict(list)
MapFilesQC = defaultdict(list)
AllInfo = []
#dictionary to hold download date of the genotype package
DateDownloaded = defaultdict(list)
DateGenotyped = defaultdict(list)
#list to hold the SNP800 files produced in the run
SNP800_Peds=[]
SNP800_Maps=[]
#read in animal ID / Seq / DateOfBirth / SexCode table
#create a dictionary
Breed_IDSeq_Dict = defaultdict()
with open(Breed_IDSeq, 'rb') as IDSeq:
reader = csv.reader(IDSeq, delimiter=',')
for line in reader:
Breed_IDSeq_Dict[line[0]] = line[1:]
############################################################################################################
#############################################################################################################
#create a directory with the current date for temp genotype manipulation
if not os.path.exists(tempDir):
os.makedirs(tempDir)
#change current working directory to the created directory
os.chdir(tempDir)
shutil.copy(DownloadDir + "/" + zipPackage, tempDir)
#zipPackages = (filter(lambda x: x.endswith('.zip'), os.listdir(tempDir)))
#now you have all the files in the same format (the six files zipped)
#extract the FinalReport from each of them and SNPMap
#try:
# zipPackages.remove(zip_file) #remove original zipfile (zipfolder) from the list
#except:
# pass
onePackage=GenFiles.genZipPackage(zipPackage)
onePackage.extractFinalReport()
onePackage.extractSNPMap()
onePackage.extractSampleMap()
print(onePackage.name)
print(onePackage.snpmapname)
print(onePackage.samplemapname)
#check for error IDs and replace the previously identified erroneous IDs
replaceIDs = open(CodeDir + "/ErrorIDs_genotipi.txt").read().strip().split("\n")
errorIDs = onePackage.extractErrorNames() #extract Sample Names if they exist - they shouldn't be in the file
#only keep this if it actually corrects the IDs properly!!!!!!!!!!!!!
if errorIDs:
print (onePackage.name, errorIDs)
for i in errorIDs:
        os.system('sed -i "s|' +str(i[0])+ '|' + i[1] + '|g" ' + onePackage.name+"_FinalReport.txt") #errorIDs are tuples, replace first element with the second
os.system('sed -i "s|' +str(i[0])+ '|' + i[1] + '|g" '+onePackage.name+'_Sample_Map.txt')
###############
for i in replaceIDs:
    os.system('sed -i "s|' +i[0]+ '|' + i[1] + '|g" ' + onePackage.name+"_FinalReport.txt") #replace first element with the second
os.system('sed -i "s|' +i[0]+ '|' + i[1] + '|g" '+onePackage.name + '_Sample_Map.txt')
print("Successfully updated FinalReport and SampleMap.")
#copy peddar.param and the pedda_row.py script to the current directory
shutil.copy((peddarow+"/peddar.param"), "peddar.param")
shutil.copy((peddarow+"/pedda_row.py"), "pedda_row.py")
#replace strings with shell command
os.system('sed -i "s|test_FinalReport.txt|'+ onePackage.name+"_FinalReport.txt" + '|g" peddar.param') #insert FinalReport name into peddar.param
os.system('sed -i "s|Dominant |Dominant_|g" ' + onePackage.name+"_FinalReport.txt") #problem Dominant Red with a space
os.system('sed -i "s|Dominant |Dominant_|g" ' + onePackage.name+'_SNP_Map.txt') ##problem Dominant Red with a space
os.system('sed -i "s/test_outputfile/"'+onePackage.name+'"/g" peddar.param') #insert OutPut name into peddar.param
os.system('sed -i "s/test_SNPMap.txt/"'+onePackage.name+'_SNP_Map.txt'+'"/g" peddar.param') #insert SNPMap name into peddar.param
os.system('sed -i "s/AlleleFormat/"'+AlleleFormat+'"/g" peddar.param') #insert desired AlleleFormat name into peddar.param
os.system('sed -i "s/TEST/"'+pasma+'"/g" peddar.param')
os.system("python2.7 pedda_row.py") #transform into ped and map file
# #ABFORMAT
# shutil.copy((peddarow+"/peddar.param"), "peddar.param")
# shutil.copy((peddarow+"/pedda_row.py"), "pedda_row.py")
# #replace strings with shell command
# os.system('sed -i "s|test_FinalReport.txt|'+ onePackage.name+"_FinalReport.txt" + '|g" peddar.param') #insert FinalReport name into peddar.param
# os.system('sed -i "s|Dominant |Dominant_|g" ' + onePackage.name+"_FinalReport.txt") #problem Dominant Red with a space
# os.system('sed -i "s|Dominant |Dominant_|g" ' + onePackage.name+'_SNP_Map.txt') ##problem Dominant Red with a space
# os.system('sed -i "s/test_outputfile/"'+onePackage.name+"_AB"+'"/g" peddar.param') #insert OutPut name into peddar.param
# os.system('sed -i "s/test_SNPMap.txt/"'+onePackage.name+'_SNP_Map.txt'+'"/g" peddar.param') #insert SNPMap name into peddar.param
# os.system('sed -i "s/AlleleFormat/"'+"ab"+'"/g" peddar.param') #insert desired AlleleFormat name into peddar.param
# os.system('sed -i "s/TEST/"'+pasma+'"/g" peddar.param')
# os.system("python2.7 pedda_row.py") #transform into ped and map file
#create a new zip file with corrected error names
#shutil.move(onePackage.name+'_Sample_Map.txt', 'Sample_Map.txt') #rename extracted SampleMap
with zipfile.ZipFile(onePackage.name+'_FinalReport.zip', 'w', zipfile.ZIP_DEFLATED) as myzip:
myzip.write(onePackage.name+'_FinalReport.txt') #create new FinalReport zip
with zipfile.ZipFile('Sample_Map.zip', 'w', zipfile.ZIP_DEFLATED) as myzip:
myzip.write(onePackage.name+'_Sample_Map.txt') #create new Sample_Map.zip
remove_from_zip(onePackage.zipname, onePackage.finalreportname)
remove_from_zip(onePackage.zipname, onePackage.samplemapname)
shutil.move(onePackage.name+'_FinalReport.zip', onePackage.finalreportname)
shutil.move('Sample_Map.zip', onePackage.samplemapname)
with zipfile.ZipFile(onePackage.zipname, 'a', zipfile.ZIP_DEFLATED) as z:
z.write(onePackage.finalreportname)
with zipfile.ZipFile(onePackage.zipname, 'a', zipfile.ZIP_DEFLATED) as z:
z.write(onePackage.samplemapname)
#make pedfile a GenFiles pedFile object
try:
pedfile=GenFiles.pedFile(onePackage.name + '.ped')
mapfile=GenFiles.mapFile(onePackage.name + '.map')
except:
raise Exception("No .ped file!!!")
#Perform QC!
os.system("bash " + CodeDir + "/1_QC_FileArgs.sh " + pedfile.name + " " + pedfile.chip)
PedFilesQC[pedfile.chip].append(tempDir+pedfile.name + "_" + pedfile.chip + "_CleanIndsMarkers.ped")
MapFilesQC[pedfile.chip].append(tempDir+pedfile.name + "_" + pedfile.chip + "_CleanIndsMarkers.map")
#add file to the dictionary of chip files
PedFiles[pedfile.chip].append(tempDir+pedfile.pedname)
MapFiles[pedfile.chip].append(tempDir+mapfile.mapname)
GenoFile[pedfile.chip].add(pedfile.name)
DateDownloaded[date] += (pedfile.name)
DateGenotyped[onePackage.genodate] += [(x, pedfile.chip) for x in (pedfile.samples)]
AllInfo += [(x, pedfile.chip, pedfile.name, onePackage.genodate) for x in (pedfile.samples)]
for i in pedfile.samples:
if i in Breed_IDSeq_Dict:
SampleIDs[i] = [i, Breed_IDSeq_Dict.get(i)[0], onePackage.genodate, pedfile.chip, date]
else:
print "Sample ID " + i + " in " + pedfile.name +" not found!!!"
################################################################################################
###############################################################################################
#END OF THE LOOP
#merge produced SNP800 files
#merge ped files if merge_ask = Y
#create table for govedo
#############################################################################################
###############################################################################################
print "The number of genotyped animals is {}.".format(len(SampleIDs))
print "The number of genotype packages (different date of genotyping) is {}.".format(len(DateGenotyped))
print "The number of different genotyping chips is {0}: {1}.".format(len(PedFiles), PedFiles.keys())
#Perform QC!!!
# #create a table of individuals for govedo
# #columns are seq, chip, date genotyped
GenotypedInd = pd.DataFrame.from_dict(SampleIDs, orient='index', dtype=None)
GenotypedInd.columns = ['ID', 'ZIV_ID_SEQ','GenoDate','Chip','DownloadDate']
imiss = pd.read_table(tempDir+pedfile.name + "_" + pedfile.chip + ".imiss", sep="\s+")[["IID", "F_MISS"]]
imiss.columns = ['ID', "F_MISS"]
Tabela = pd.merge(GenotypedInd, imiss, on="ID")
Tabela.to_csv(path_or_buf = tempDir+str(onePackage.genodate)+'GovedoInd.csv', sep=",", index=False )
print("Created table for Govedo.")
#
if merge_ask == "Y":
#merge is outside the loop
#merge all the chips needed updating
for i in PedFiles:
if not os.path.exists(PLINKDIR+str(i)):
os.makedirs(PLINKDIR+str(i))
for pedfile, mapfile in zip (PedFiles[i], MapFiles[i]):
shutil.copy(pedfile, PLINKDIR+str(i))
shutil.copy(mapfile, PLINKDIR+str(i))
os.chdir(PLINKDIR+str(i))
shutil.copy("/home/jana/Genotipi/Genotipi_CODES/PARAMFILE.txt", PLINKDIR+i)
pedToMerge = ",".join(PedFiles[i]).strip("'")
mapToMerge = ",".join(MapFiles[i]).strip("'")
if not os.path.isfile(PLINKDIR+i+'/PLINK_MERGED.ped'):
mergeChipCommand = "plink --file {0} --cow --merge-list {1} --recode --out PLINK_MERGED".format((PedFiles[i][0].strip(".ped")), 'MergeChip.txt')
with open('MergeChip.txt', 'w') as csvfile:
writer = csv.writer(csvfile, delimiter=" ")
[writer.writerow(r) for r in zip(PedFiles[i][1:], MapFiles[i][1:])] #leave the first one out - that goes in the plink command line
if os.path.isfile(PLINKDIR+i+'/PLINK_MERGED.ped'):
mergeChipCommand = "plink --file PLINK_MERGED --cow --merge-list {0} --recode --out PLINK_MERGED".format('MergeChip.txt')
with open('MergeChip.txt', 'w') as csvfile:
writer = csv.writer(csvfile, delimiter=" ")
[writer.writerow(r) for r in zip(PedFiles[i], MapFiles[i])]
status, output = commands.getstatusoutput(mergeChipCommand) #merge with plink
if status == 0:
print "Successfully merged " + str(i) + " " + PLINKDIR + " " + i
else:
print "Merging went wrong, error: " + str(status)
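        # MergeChip.txt above uses plink's merge-list format: one fileset per
        # line, "<name>.ped <name>.map" separated by a space; the first
        # fileset is passed directly on the plink command line instead.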
for chip in PedFiles:
PedFiles[chip] = [i.replace("ZipGenoFiles", "ZipGenoFiles/") for i in PedFiles[chip]]
for chip in MapFiles:
MapFiles[chip] = [i.replace("ZipGenoFiles", "ZipGenoFiles/") for i in MapFiles[chip]]
#MERGE FOR QC-ed data!!!!
#for i in PedFiles:
# if not os.path.exists(PLINKDIR+str(i)):
# os.makedirs(PLINKDIR+str(i))
# for pedfile, mapfile in zip (PedFilesQC[i], MapFilesQC[i]):
# shutil.copy(pedfile, PLINKDIR+str(i))
# shutil.copy(mapfile, PLINKDIR+str(i))
# os.chdir(PLINKDIR+str(i))
# shutil.copy("/home/jana/Genotipi/Genotipi_CODES/PARAMFILE.txt", PLINKDIR+i)
# pedToMerge = ",".join(PedFilesQC[i]).strip("'")
# mapToMerge = ",".join(MapFilesQC[i]).strip("'")
# if not os.path.isfile(PLINKDIR+i+'/PLINK_MERGED_' + i + '_CleanIndsMarkers.ped'):
# mergeChipCommand = "plink --file {0} --cow --merge-list {1} --recode --out {2}".format((PedFilesQC[i][0].strip(".ped")), 'MergeChip.txt', "PLINK_MERGED_" + i + "_CleanIndsMarkers")
# with open('MergeChip.txt', 'w') as csvfile:
# writer = csv.writer(csvfile, delimiter=" ")
# [writer.writerow(r) for r in zip(PedFilesQC[i][1:], MapFilesQC[i][1:])] #leave the first one out - that goes in the plink command line
# if os.path.isfile(PLINKDIR+i+'/PLINK_MERGED_' + i + '_CleanIndsMarkers.ped'):
# mergeChipCommand = 'plink --file PLINK_MERGED_{0}_CleanIndsMarkers --cow --merge-list {1} --recode --out PLINK_MERGED_{0}_CleanIndsMarkers'.format(i, 'MergeChip.txt')
# with open('MergeChip.txt', 'w') as csvfile:
# writer = csv.writer(csvfile, delimiter=" ")
# [writer.writerow(r) for r in zip(PedFilesQC[i], MapFilesQC[i])]
#
# status, output = commands.getstatusoutput(mergeChipCommand) #merge with plink
#
# if status == 0:
# print "Successfully merged " + str(i) + " " + PLINKDIR + " " + i + "_CleanIndsMarkers"
# else:
# print "Merging went wrong, error: " + str(status)
| [
"[email protected]"
] | |
b6eb79f493fa5f99d269963e332bfdb0f53613f0 | f8c3c677ba536fbf5a37ac4343c1f3f3acd4d9b6 | /ICA_SDK/test/test_subscription.py | b9856eade3bbf9f75b638633d6bce99d475b5031 | [] | no_license | jsialar/integrated_IAP_SDK | 5e6999b0a9beabe4dfc4f2b6c8b0f45b1b2f33eb | c9ff7685ef0a27dc4af512adcff914f55ead0edd | refs/heads/main | 2023-08-25T04:16:27.219027 | 2021-10-26T16:06:09 | 2021-10-26T16:06:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,819 | py | # coding: utf-8
"""
IAP Services
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
The version of the OpenAPI document: v1
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import unittest
import datetime
import ICA_SDK
from ICA_SDK.models.subscription import Subscription # noqa: E501
from ICA_SDK.rest import ApiException
class TestSubscription(unittest.TestCase):
"""Subscription unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def make_instance(self, include_optional):
"""Test Subscription
        include_optional is a boolean, when False only required
params are included, when True both required and
optional params are included """
# model = ICA_SDK.models.subscription.Subscription() # noqa: E501
if include_optional :
return Subscription(
id = '0',
urn = '0',
type = '0',
actions = [
'0'
],
filter_expression = '0',
name = '0',
description = '0',
delivery_target = ICA_SDK.models.delivery_target.DeliveryTarget(
aws_sns_topic = ICA_SDK.models.delivery_target_aws_sns_topic.DeliveryTargetAwsSnsTopic(
topic_arn = '0', ),
aws_sqs_queue = ICA_SDK.models.delivery_target_aws_sqs_queue.DeliveryTargetAwsSqsQueue(
queue_url = '0', ),
workflow_run_launch = ICA_SDK.models.delivery_target_workflow_run_launch.DeliveryTargetWorkflowRunLaunch(
id = '0',
version = '0',
name = '0',
input = ICA_SDK.models.input.input(), ), ),
match_identities = [
'0'
],
acl = [
'0'
],
tenant_id = '0',
created_by_user_id = '0',
time_created = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
deleted_by_user_id = '0',
time_deleted = datetime.datetime.strptime('2013-10-20 19:20:30.00', '%Y-%m-%d %H:%M:%S.%f'),
is_deleted = True
)
else :
return Subscription(
)
def testSubscription(self):
"""Test Subscription"""
inst_req_only = self.make_instance(include_optional=False)
inst_req_and_optional = self.make_instance(include_optional=True)
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
2a254ef86f045c6f89a9e764f306c7e7d2475cca | 51f887286aa3bd2c3dbe4c616ad306ce08976441 | /pybind/nos/v7_2_0/hide_virtual_ip_holder/chassis/__init__.py | fed935303c48d8ee6197a3b1b604d38e4c2f9692 | [
"Apache-2.0"
] | permissive | b2220333/pybind | a8c06460fd66a97a78c243bf144488eb88d7732a | 44c467e71b2b425be63867aba6e6fa28b2cfe7fb | refs/heads/master | 2020-03-18T09:09:29.574226 | 2018-04-03T20:09:50 | 2018-04-03T20:09:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 13,753 | py |
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import oper_address
class chassis(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-chassis - based on the path /hide-virtual-ip-holder/chassis. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__virtual_ip','__virtual_ipv6','__oper_address',)
_yang_name = 'chassis'
_rest_name = 'chassis'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__oper_address = YANGDynClass(base=oper_address.oper_address, is_container='container', presence=False, yang_name="oper-address", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-chassis', defining_module='brocade-chassis', yang_type='container', is_config=True)
self.__virtual_ipv6 = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))'}), is_leaf=True, yang_name="virtual-ipv6", rest_name="virtual-ipv6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Chassis Virtual IPv6 address'}}, namespace='urn:brocade.com:mgmt:brocade-chassis', defining_module='brocade-chassis', yang_type='common-def:ipv6-address-prefix', is_config=True)
self.__virtual_ip = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([1-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.)(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){2}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([1-9])|([1-2][0-9])|(3[0-1]))'}), is_leaf=True, yang_name="virtual-ip", rest_name="virtual-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Chassis Virtual IPv4 address'}}, namespace='urn:brocade.com:mgmt:brocade-chassis', defining_module='brocade-chassis', yang_type='common-def:ipv4-prefix-mask', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'hide-virtual-ip-holder', u'chassis']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'chassis']
def _get_virtual_ip(self):
"""
Getter method for virtual_ip, mapped from YANG variable /hide_virtual_ip_holder/chassis/virtual_ip (common-def:ipv4-prefix-mask)
"""
return self.__virtual_ip
def _set_virtual_ip(self, v, load=False):
"""
Setter method for virtual_ip, mapped from YANG variable /hide_virtual_ip_holder/chassis/virtual_ip (common-def:ipv4-prefix-mask)
If this variable is read-only (config: false) in the
source YANG file, then _set_virtual_ip is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_virtual_ip() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([1-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.)(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){2}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([1-9])|([1-2][0-9])|(3[0-1]))'}), is_leaf=True, yang_name="virtual-ip", rest_name="virtual-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Chassis Virtual IPv4 address'}}, namespace='urn:brocade.com:mgmt:brocade-chassis', defining_module='brocade-chassis', yang_type='common-def:ipv4-prefix-mask', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """virtual_ip must be of a type compatible with common-def:ipv4-prefix-mask""",
'defined-type': "common-def:ipv4-prefix-mask",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([1-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.)(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){2}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([1-9])|([1-2][0-9])|(3[0-1]))'}), is_leaf=True, yang_name="virtual-ip", rest_name="virtual-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Chassis Virtual IPv4 address'}}, namespace='urn:brocade.com:mgmt:brocade-chassis', defining_module='brocade-chassis', yang_type='common-def:ipv4-prefix-mask', is_config=True)""",
})
self.__virtual_ip = t
if hasattr(self, '_set'):
self._set()
def _unset_virtual_ip(self):
self.__virtual_ip = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'(([1-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.)(([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])\\.){2}([0-9]|[1-9][0-9]|1[0-9][0-9]|2[0-4][0-9]|25[0-5])/(([1-9])|([1-2][0-9])|(3[0-1]))'}), is_leaf=True, yang_name="virtual-ip", rest_name="virtual-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Chassis Virtual IPv4 address'}}, namespace='urn:brocade.com:mgmt:brocade-chassis', defining_module='brocade-chassis', yang_type='common-def:ipv4-prefix-mask', is_config=True)
def _get_virtual_ipv6(self):
"""
Getter method for virtual_ipv6, mapped from YANG variable /hide_virtual_ip_holder/chassis/virtual_ipv6 (common-def:ipv6-address-prefix)
"""
return self.__virtual_ipv6
def _set_virtual_ipv6(self, v, load=False):
"""
Setter method for virtual_ipv6, mapped from YANG variable /hide_virtual_ip_holder/chassis/virtual_ipv6 (common-def:ipv6-address-prefix)
If this variable is read-only (config: false) in the
source YANG file, then _set_virtual_ipv6 is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_virtual_ipv6() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))'}), is_leaf=True, yang_name="virtual-ipv6", rest_name="virtual-ipv6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Chassis Virtual IPv6 address'}}, namespace='urn:brocade.com:mgmt:brocade-chassis', defining_module='brocade-chassis', yang_type='common-def:ipv6-address-prefix', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """virtual_ipv6 must be of a type compatible with common-def:ipv6-address-prefix""",
'defined-type': "common-def:ipv6-address-prefix",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))'}), is_leaf=True, yang_name="virtual-ipv6", rest_name="virtual-ipv6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Chassis Virtual IPv6 address'}}, namespace='urn:brocade.com:mgmt:brocade-chassis', defining_module='brocade-chassis', yang_type='common-def:ipv6-address-prefix', is_config=True)""",
})
self.__virtual_ipv6 = t
if hasattr(self, '_set'):
self._set()
def _unset_virtual_ipv6(self):
self.__virtual_ipv6 = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'pattern': u'((:|[0-9a-fA-F]{0,4}):)([0-9a-fA-F]{0,4}:){0,5}((([0-9a-fA-F]{0,4}:)?(:|[0-9a-fA-F]{0,4}))|(((25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])\\.){3}(25[0-5]|2[0-4][0-9]|[01]?[0-9]?[0-9])))(/(([0-9])|([0-9]{2})|(1[0-1][0-9])|(12[0-8])))'}), is_leaf=True, yang_name="virtual-ipv6", rest_name="virtual-ipv6", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Chassis Virtual IPv6 address'}}, namespace='urn:brocade.com:mgmt:brocade-chassis', defining_module='brocade-chassis', yang_type='common-def:ipv6-address-prefix', is_config=True)
def _get_oper_address(self):
"""
Getter method for oper_address, mapped from YANG variable /hide_virtual_ip_holder/chassis/oper_address (container)
"""
return self.__oper_address
def _set_oper_address(self, v, load=False):
"""
Setter method for oper_address, mapped from YANG variable /hide_virtual_ip_holder/chassis/oper_address (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_oper_address is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_oper_address() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=oper_address.oper_address, is_container='container', presence=False, yang_name="oper-address", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-chassis', defining_module='brocade-chassis', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """oper_address must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=oper_address.oper_address, is_container='container', presence=False, yang_name="oper-address", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-chassis', defining_module='brocade-chassis', yang_type='container', is_config=True)""",
})
self.__oper_address = t
if hasattr(self, '_set'):
self._set()
def _unset_oper_address(self):
self.__oper_address = YANGDynClass(base=oper_address.oper_address, is_container='container', presence=False, yang_name="oper-address", rest_name="", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-drop-node-name': None}}, namespace='urn:brocade.com:mgmt:brocade-chassis', defining_module='brocade-chassis', yang_type='container', is_config=True)
virtual_ip = __builtin__.property(_get_virtual_ip, _set_virtual_ip)
virtual_ipv6 = __builtin__.property(_get_virtual_ipv6, _set_virtual_ipv6)
oper_address = __builtin__.property(_get_oper_address, _set_oper_address)
_pyangbind_elements = {'virtual_ip': virtual_ip, 'virtual_ipv6': virtual_ipv6, 'oper_address': oper_address, }
| [
"[email protected]"
] | |
a89de6c602a550c947a03e246638a7a409ca8899 | 878a3094430bb914717d641a4f4b06574e872518 | /hm_00_python/hm_0x_编辑用.py | e3e42e7d8c6ae0ec59b41a29d979116092a16cdf | [] | no_license | 2020668/python2019 | 3f33eea85fdd3f2866d867859d5694abb71effe9 | f8a98389fa09f95e72914afa4935afc5c68eaccd | refs/heads/master | 2020-06-07T23:36:17.871376 | 2019-08-29T09:45:10 | 2019-08-29T09:45:10 | 193,116,002 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 208 | py |
def sum_2_num(num1, num2):
    """Sum two numbers"""
    return num1 + num2

# Call the function and capture the return value in the result variable
result = sum_2_num(10, 20)
print("The result is: %d" % result)
| [
"[email protected]"
] | |
a361e33242f19d9a4224e3176f42124b93cac1ef | daf7f2ba83c57819e85356906919309933af4526 | /util/validate-outputs.py | 246d094723a6c9142614278f5d233e876eb43baa | [
"MIT"
] | permissive | ihmwg/python-ihm | 158514e6148fa6f2651548077cf386d4bbdf1483 | ab685eaabd537a46184172cffe7a2f057343d390 | refs/heads/main | 2023-09-01T16:15:00.148446 | 2023-08-31T00:45:21 | 2023-08-31T00:45:21 | 123,497,102 | 17 | 8 | MIT | 2023-07-10T19:00:38 | 2018-03-01T21:57:40 | Python | UTF-8 | Python | false | false | 976 | py | #!/usr/bin/python3
"""Check the output of each example for validity against the PDBx
and IHM dictionaries.
This should be periodically rechecked in case the PDBx and IHM dictionaries
are updated.
"""
import sys
import os
import subprocess
import ihm.dictionary
import urllib.request
with urllib.request.urlopen(
'http://mmcif.wwpdb.org/dictionaries/ascii/mmcif_pdbx_v50.dic') as fh:
d_pdbx = ihm.dictionary.read(fh)
with urllib.request.urlopen(
'http://mmcif.wwpdb.org/dictionaries/ascii/mmcif_ihm.dic') as fh:
d_ihm = ihm.dictionary.read(fh)
pdbx_ihm = d_pdbx + d_ihm
for script in ('simple-docking.py', 'ligands_water.py',
'non_standard_residues.py'):
print(script)
subprocess.check_call([sys.executable, '../examples/' + script])
with open('output.cif') as fh:
try:
pdbx_ihm.validate(fh)
except ihm.dictionary.ValidatorError as exc:
print(exc)
os.unlink('output.cif')
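# Typical invocation (the relative '../examples/' paths above assume this
# util/ directory is the working directory):
#   python3 validate-outputs.py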
| [
"[email protected]"
] | |
4a318f935f352e1c80115d618d78044b026f739e | 0133d8d56ee611a0c65ef80693ae263692557b96 | /spira/yevon/geometry/ports/port_list.py | 31f160cc38564e31df0c0739c6945d3478e93dc1 | [
"MIT"
] | permissive | JCoetzee123/spira | e77380df2e79333b0c48953faae2d3dae50a8d27 | dae08feba1578ecc8745b45109f4fb7bef374546 | refs/heads/master | 2021-06-25T23:32:52.289382 | 2019-07-17T13:25:50 | 2019-07-17T13:25:50 | 198,605,222 | 1 | 0 | MIT | 2019-07-24T09:42:07 | 2019-07-24T09:42:06 | null | UTF-8 | Python | false | false | 8,221 | py | from spira.core.typed_list import TypedList
from spira.core.transformable import Transformable
from spira.core.parameters.variables import FloatParameter
from spira.core.parameters.descriptor import ParameterDescriptor
from spira.core.parameters.restrictions import RestrictType
from spira.yevon.geometry.ports.base import __Port__
__all__ = ['PortList', 'PortListParameter']
class PortList(TypedList, Transformable):
__item_type__ = __Port__
# port_angle_decision = FloatParameter(default=0.0)
port_angle_decision = FloatParameter(default=90.0)
def __repr__(self):
        if len(self._list) == 0:
            return 'PortList is empty'
        return '\n'.join('{}'.format(k) for k in enumerate(self._list))
def __str__(self):
return self.__repr__()
def __getitem__(self, key):
from spira.yevon.geometry.ports.base import __Port__
if isinstance(key, int):
return self._list[key]
elif isinstance(key, str):
for p in self._list:
if p.name == key:
return p
elif issubclass(type(key), __Port__):
for p in self._list:
if p == key:
return p
else:
return self.get_port_from_label(key)
def __contains__(self, item):
for p in self._list:
# if p.name == item.name:
if p == item:
return True
return False
def __delitem__(self, key):
for i in range(0, len(self._list)):
if self._list[i] is key:
return list.__delitem__(self._list, i)
def __sub__(self, other):
pass
def __or__(self, other):
pass
    def union(self, other):
        # delegate to __or__ (still a stub above)
        return self.__or__(other)

    def intersection(self, other):
        # NOTE: relies on __and__, which this class does not define yet
        return self.__and__(other)

    def difference(self, other):
        return self.__sub__(other)
def update_layercopy(self, layer):
P = self.__class__()
for p in self._list:
p.edgelayer = layer
P.append(p)
return P
def flat_copy(self, level=-1):
el = PortList()
for e in self._list:
el += e.flat_copy(level)
return el
def move(self, position):
for c in self._list:
c.move(position)
return self
def movecopy(self, position):
T = self.__class__()
for c in self._list:
T.append(c.movecopy(position))
return T
def transform_copy(self, transformation):
T = self.__class__()
for c in self._list:
T.append(c.transform_copy(transformation))
return T
def transform(self, transformation):
for c in self._list:
c.transform(transformation)
return self
def invert(self):
for c in self._list:
c.invert()
return self
def invertcopy(self):
L = self.__class__()
for c in self._list:
L += c.invertcopy()
return L
def x_sorted(self):
return self.__class__(sorted(self._list, key=lambda f: f.position[0]))
def x_sorted_backward(self):
return self.__class__(sorted(self._list, key=lambda f: (-f.position[0])))
def y_sorted(self):
return self.__class__(sorted(self._list, key=lambda f: f.position[1]))
def y_sorted_backward(self):
return self.__class__(sorted(self._list, key=lambda f: (-f.position[1])))
def sorted_in_direction(self, direction):
if direction == NORTH:
return self.y_sorted()
elif direction == SOUTH:
return self.y_sorted_backward()
elif direction == EAST:
return self.x_sorted()
elif direction == WEST:
return self.x_sorted_backward()
else:
raise AttributeError("Direction should be NORTH, EAST, SOUTH or WEST")
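    # NOTE: NORTH / EAST / SOUTH / WEST above are assumed to be imported from
    # spira's direction constants elsewhere; they are not defined in this file.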
def angle_sorted(self, reference_angle=0.0):
""" sorts ports by angle, using angles between the reference_angle and reference_angle+360 """
return self.__class__(sorted(self._list, key=lambda f: ((f.orientation - reference_angle) % 360.0)))
def angle_sorted_backward(self, reference_angle=0.0):
""" sorts ports by angle, using angles between the reference_angle and reference_angle+360 """
return self.__class__(sorted(self._list, key=lambda f: (-(f.orientation - reference_angle) % 360.0)))
def get_names(self):
names = []
for p in self._list:
names.append(p.name)
return names
def get_ports_within_angles(self, start_angle, end_angle):
pl = self.__class__()
aspread = (end_angle - start_angle) % 360.0
sa = start_angle % 360.0
ea = sa + aspread
for p in self._list:
a = (p.orientation - sa) % 360.0
if a <= aspread: pl.append(p)
return pl
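    # Worked example: get_ports_within_angles(315, 45) spans the 90 degree
    # window across 0 -- aspread = (45 - 315) % 360 = 90 and sa = 315, so a
    # port at orientation 0 gives a = (0 - 315) % 360 = 45 <= 90 and is kept,
    # while a port at orientation 90 gives a = 135 and is dropped.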
def get_ports_on_process(self, process):
pl = self.__class__()
for p in self._list:
if p.process == process:
pl.append(p)
return pl
def get_ports_by_purpose(self, purpose):
pl = self.__class__()
for p in self._list:
if p.purpose == purpose:
pl.append(p)
return pl
def get_ports_by_type(self, port_type):
pl = self.__class__()
if port_type == 'D':
for p in self._list:
if p.name[0] == 'D':
                    pl.append(p)
return pl
@property
def west_ports(self):
start_angle = 180.0 - 0.5 * self.port_angle_decision
end_angle = 180.0 + 0.5 * self.port_angle_decision
return self.get_ports_within_angles(start_angle, end_angle)
@property
def east_ports(self):
start_angle = -0.5 * self.port_angle_decision
end_angle = +0.5 * self.port_angle_decision
return self.get_ports_within_angles(start_angle, end_angle)
@property
def north_ports(self):
start_angle = 90.0 - 0.5 * self.port_angle_decision
end_angle = 90.0 + 0.5 * self.port_angle_decision
return self.get_ports_within_angles(start_angle, end_angle)
@property
def south_ports(self):
start_angle = 270.0 - 0.5 * self.port_angle_decision
end_angle = 270.0 + 0.5 * self.port_angle_decision
return self.get_ports_within_angles(start_angle, end_angle)
@property
def unlock(self):
""" Unlock the edge and convert it to a port. """
for i, p in enumerate(self._list):
name = p.name.replace('E', 'P')
self._list[i] = p.copy(name=name)
return self
class PortListParameter(ParameterDescriptor):
from spira.yevon.geometry.ports.port_list import PortList
__type__ = PortList
def __init__(self, default=[], **kwargs):
kwargs['default'] = self.__type__(default)
kwargs['restrictions'] = RestrictType([self.__type__])
super().__init__(**kwargs)
def __repr__(self):
return ''
def __str__(self):
return ''
def call_param_function(self, obj):
f = self.get_param_function(obj)
value = f(self.__type__())
if value is None:
value = self.__type__()
self.__cache_parameter_value__(obj, value)
new_value = self.__get_parameter_value__(obj)
return new_value
def __cache_parameter_value__(self, obj, ports):
if isinstance(ports, self.__type__):
super().__cache_parameter_value__(obj, ports)
elif isinstance(ports, list):
super().__cache_parameter_value__(obj, self.__type__(ports))
else:
raise TypeError("Invalid type in setting value of PortListParameter: " + str(type(ports)))
def __set__(self, obj, ports):
if isinstance(ports, self.__type__):
self.__externally_set_parameter_value__(obj, ports)
elif isinstance(ports, list):
self.__externally_set_parameter_value__(obj, self.__type__(ports))
else:
raise TypeError("Invalid type in setting value of PortListParameter: " + str(type(ports)))
return
| [
"[email protected]"
] | |
21e7a9d8d8ebbd2404e408c669c67da0b5559eb7 | b0cdab54c5e81681125c01801148c287605ee8d0 | /speciality/urls.py | 74bbfe233256fb4a38f2fe7a7aa9036aae4aa4f5 | [] | no_license | lpd76/rdavid2 | 5528746749acc51d4d0f5efd77886929798e2569 | 18aa5120fe4ba0ea44f611dd52b008db52641f17 | refs/heads/master | 2020-04-13T20:47:58.141579 | 2019-01-17T16:51:31 | 2019-01-17T16:51:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 133 | py | from django.urls import path
from . import views
urlpatterns = [
path('index', views.speciality_list, name='speciality_list'),
] | [
"[email protected]"
] | |
dc82be647e361b442c5007d4a567670023744ff1 | b420377a638dc9a5d8c09ebc39b0448d47ddb74e | /ddd-django/polls_app/base_site/views.py | 6fe6e69eeec0dc6328f6242d5cb592395cd53429 | [] | no_license | shimakaze-git/drf-sample | d4e4e8e4d380f0b77e807d4bbf4e3f0d98ee6bcd | 4294cd5adeea0ef51d3b7eee6a154d23dd089afc | refs/heads/master | 2022-05-02T20:19:09.901257 | 2019-09-15T12:46:51 | 2019-09-15T12:46:51 | 205,698,781 | 0 | 0 | null | 2022-04-22T22:29:32 | 2019-09-01T15:52:14 | Python | UTF-8 | Python | false | false | 626 | py | from django.shortcuts import render
from django.http import HttpResponse
from django.views import View
class IndexView(View):
template_name = 'index.html'
def get(self, request):
context = {}
# return HttpResponse('test')
return render(request, self.template_name, context)
def post(self, request, *args, **kwargs):
print(request.POST)
context = {}
# return HttpResponse('test')
return render(request, self.template_name, context)
def index(request):
context = {}
# return HttpResponse('test')
return render(request, 'index.html', context)
| [
"[email protected]"
] | |
c355a985943e570ffb642a4967e78a28c1b18b0d | aca7ba7e5f801f89374ac4be3544ceb49889e59f | /data/diseases/importPgkbDis.py | 81aac7a5322e0da4f0bbb60386ea2f6e8c024c63 | [] | no_license | strbean/pubMunch | 582e47eadaeb5e204960c21d84d2eaf5d10b9726 | c81d9935505779508df8e99577dd71cc104ea4ee | refs/heads/master | 2020-03-21T10:22:57.685056 | 2018-06-24T05:05:37 | 2018-06-24T05:05:37 | 138,448,203 | 0 | 0 | null | 2018-06-24T02:43:32 | 2018-06-24T02:43:32 | null | UTF-8 | Python | false | false | 258 | py | for line in open("diseases.tsv"):
if line.startswith("PharmGKB"):
continue
fields = line.split("\t")
name = fields[1]
syns = fields[2].split('","')
syns = [s.strip('",') for s in syns]
print "%s\t%s" % (name, "|".join(syns))
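# Illustrative input row (hypothetical accession id; tab-separated, with the
# synonym field double-quoted and comma-joined):
#   PA12345<TAB>Asthma<TAB>"Asthma, Bronchial","Bronchial Asthma"
# which the loop above prints as:
#   Asthma<TAB>Asthma, Bronchial|Bronchial Asthma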
| [
"[email protected]"
] | |
9fbbcf8154c05f7064a216d1d47c4fefd91bb5af | fa114e6fa2c642613ac67960911a21b91bfa5089 | /Home-Services/store/migrations/0001_initial.py | fa40f81506df6858bf11007e732078db786f4829 | [] | no_license | AbdurRahman111/Home-Work-Services | 122c533d440da82199a3d4b647cd0feadb582d54 | 7cb8495d90f980264f97606da120662c7cf56d47 | refs/heads/master | 2023-04-06T22:53:34.590306 | 2021-04-12T08:23:49 | 2021-04-12T08:23:49 | 357,113,994 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,280 | py | # Generated by Django 3.1.4 on 2021-02-28 08:30
import datetime
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Category',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=20)),
],
),
migrations.CreateModel(
name='Customer',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=50)),
('last_name', models.CharField(max_length=50)),
('phone', models.CharField(max_length=15)),
('email', models.EmailField(max_length=254)),
('password', models.CharField(max_length=500)),
('aadhar', models.CharField(max_length=100)),
('address', models.CharField(max_length=100)),
('area', models.CharField(max_length=100)),
('landmark', models.CharField(max_length=100)),
('pincode', models.CharField(max_length=100)),
],
),
migrations.CreateModel(
name='Feedback',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('Appointment_id', models.CharField(max_length=200)),
('Service_provider_name', models.CharField(max_length=200)),
('Customer_name', models.CharField(max_length=200)),
('address', models.CharField(max_length=200)),
('Booking_date', models.CharField(max_length=200)),
('Completion_date', models.CharField(max_length=200)),
('Total_cost', models.CharField(max_length=50)),
('Review_Admin', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='ServiceProvider',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('first_name', models.CharField(max_length=50)),
('last_name', models.CharField(max_length=50)),
('phone', models.CharField(max_length=15)),
('email', models.EmailField(max_length=254)),
('password', models.CharField(max_length=500)),
('service_type', models.CharField(max_length=200)),
('aadhar', models.CharField(max_length=100)),
('address', models.CharField(max_length=100)),
('area', models.CharField(max_length=100)),
('landmark', models.CharField(max_length=100)),
('pincode', models.CharField(max_length=10)),
('image', models.ImageField(upload_to='media/profiles/')),
('date', models.DateTimeField(auto_now_add=True)),
],
),
migrations.CreateModel(
name='Product',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=200)),
('price', models.IntegerField(default=0)),
('description', models.CharField(blank=True, default='', max_length=200, null=True)),
('image', models.ImageField(upload_to='media/products/')),
('complition_time', models.IntegerField(default=1)),
('category', models.ForeignKey(default=1, on_delete=django.db.models.deletion.CASCADE, to='store.category')),
('service_provider', models.ForeignKey(default=10, on_delete=django.db.models.deletion.CASCADE, to='store.serviceprovider')),
],
),
migrations.CreateModel(
name='Order',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('quantity', models.IntegerField(default=1)),
('price', models.IntegerField()),
('Service_date', models.CharField(blank=True, default='', max_length=50)),
('Time_slots', models.CharField(blank=True, default='', max_length=50)),
('date', models.DateField(default=datetime.datetime.today)),
('complition_date', models.DateField(default=datetime.datetime.today)),
('Accept_this_order', models.BooleanField(default=False)),
('Cancel_this_order', models.BooleanField(default=False)),
('customer', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.customer')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='store.product')),
('service_provider', models.ForeignKey(default=10, on_delete=django.db.models.deletion.CASCADE, to='store.serviceprovider')),
],
),
]
| [
"[email protected]"
] | |
1bd5242816fc5adefb64d1509e8712a63499e48a | 1c0509a06cec726735048f00f63d2529f5e43ce6 | /code_gasoline_france/analysis/analysis_dispersion/graphs/graphs_macro_trends.py | 433290f30b21187244f88206ba00b34d93d9613b | [] | no_license | etiennecha/master_code | e99c62e93aa052a66d4cdd3f3e3aa25a3aec4880 | 48821f6c854a1c6aa05cf81b653b3b757212b6f8 | refs/heads/master | 2021-01-23T14:35:45.904595 | 2018-03-11T18:57:38 | 2018-03-11T18:57:38 | 16,312,906 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,465 | py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
import add_to_path
from add_to_path import path_data
from generic_master_price import *
from generic_master_info import *
path_dir_built = os.path.join(path_data,
u'data_gasoline',
u'data_built',
u'data_scraped_2011_2014')
path_dir_built_csv = os.path.join(path_dir_built, u'data_csv')
path_dir_built_dis = os.path.join(path_data,
u'data_gasoline',
u'data_built',
u'data_dispersion')
path_dir_built_dis_json = os.path.join(path_dir_built_dis, 'data_json')
path_dir_built_dis_csv = os.path.join(path_dir_built_dis, 'data_csv')
path_dir_built_dis_graphs = os.path.join(path_dir_built_dis, 'data_graphs')
path_dir_built_other = os.path.join(path_data,
u'data_gasoline',
u'data_built',
u'data_other')
path_dir_built_other_csv = os.path.join(path_dir_built_other, 'data_csv')
pd.set_option('float_format', '{:,.3f}'.format)
format_float_int = lambda x: '{:10,.0f}'.format(x)
format_float_float = lambda x: '{:10,.2f}'.format(x)
from pylab import *
rcParams['figure.figsize'] = 16, 6
## french date format
#import locale
#locale.setlocale(locale.LC_ALL, 'fra_fra')
dir_graphs = 'color'
str_ylabel = 'Price (euro/liter)'
# #########
# LOAD DATA
# #########
# DF STATION INFO
df_info = pd.read_csv(os.path.join(path_dir_built_csv,
'df_station_info_final.csv'),
encoding = 'utf-8',
dtype = {'id_station' : str,
'adr_zip' : str,
'adr_dpt' : str,
'ci_1' : str,
'ci_ardt_1' :str,
'ci_2' : str,
'ci_ardt_2' : str,
'dpt' : str},
parse_dates = [u'day_%s' %i for i in range(4)]) # fix
df_info.set_index('id_station', inplace = True)
df_info = df_info[df_info['highway'] != 1]
# DF PRICES
df_prices_ht = pd.read_csv(os.path.join(path_dir_built_csv,
'df_prices_ht_final.csv'),
parse_dates = ['date'])
df_prices_ht.set_index('date', inplace = True)
df_prices_ttc = pd.read_csv(os.path.join(path_dir_built_csv,
'df_prices_ttc_final.csv'),
parse_dates = ['date'])
df_prices_ttc.set_index('date', inplace = True)
# DF QUOTATIONS (WHOLESALE GAS PRICES)
df_quotations = pd.read_csv(os.path.join(path_dir_built_other_csv,
'df_quotations.csv'),
encoding = 'utf-8',
parse_dates = ['date'])
df_quotations.set_index('date', inplace = True)
# REFINE GROUP TYPE
# beginning: ELF + need to use future info
# (todo: add TA with no detected margin chge?)
df_info.loc[((df_info['brand_0'] == 'ELF') |\
(df_info['brand_last'] == 'ESSO_EXPRESS')),
'group_type'] = 'DIS'
df_info.loc[(df_info['brand_last'].isin(['ELF',
'ESSO_EXPRESS',
'TOTAL_ACCESS'])),
'group_type_last'] = 'DIS'
## Further GMS refining
#ls_hypers = ['AUCHAN', 'CARREFOUR', 'GEANT', 'LECLERC', 'CORA',
# 'INTERMARCHE', 'SYSTEMEU']
#df_info.loc[(df_info['brand_0'].isin(ls_hypers)),
# 'group_type'] = 'HYP'
#df_info.loc[(df_info['brand_last'].isin(ls_hypers)),
# 'group_type_last'] = 'HYP'
# ###############################
# GRAPHS: MACRO TRENDS
# ###############################
ls_sup_dis_ids = df_info[(df_info['group_type_last'] == 'SUP') |
((df_info['group_type'] == 'DIS') &\
(df_info['group_type_last'] == 'DIS'))].index
ls_oil_ind_ids = df_info[(df_info['group_type_last'] == 'OIL') |
(df_info['group_type_last'] == 'IND')].index
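# 1 barrel = 158.987 liters, so the division below converts the Brent
# quotation from euro per barrel ('EB') to euro per liter ('EL')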
df_quotations['UFIP Brent R5 EL'] = df_quotations['UFIP Brent R5 EB'] / 158.987
#df_quotations[['UFIP Brent R5 EL', 'Europe Brent FOB EL']].plot()
#plt.show()
df_macro = pd.DataFrame(df_prices_ht.mean(1).values,
columns = [u'All gas stations'],
index = df_prices_ht.index)
df_macro['Brent'] = df_quotations['UFIP Brent R5 EL']
df_macro[u'Supermarket & Discount'] = df_prices_ht[ls_sup_dis_ids].mean(1)
df_macro[u'Oil & Independent'] = df_prices_ht[ls_oil_ind_ids].mean(1)
# Column order determines legend
df_macro = df_macro[[u'Brent',
u'All gas stations',
u'Supermarket & Discount',
u'Oil & Independent']]
df_macro['Brent'] = df_macro['Brent'].fillna(method = 'bfill')
fig = plt.figure()
ax1 = fig.add_subplot(111)
ls_l = []
for col, ls, alpha, color in zip(df_macro.columns,
['-', '-', '-', '-'],
[1, 1, 1, 1],
['b', 'g', 'r', 'c']):
ls_l.append(ax1.plot(df_macro.index,
df_macro[col].values,
c = color, ls = ls, alpha = alpha,
label = col))
lns = ls_l[0] + ls_l[1] + ls_l[2] + ls_l[3]
labs = [l.get_label() for l in lns]
ax1.legend(lns, labs, loc=0)
ax1.grid()
# Show ticks only on left and bottom axis, out of graph
ax1.yaxis.set_ticks_position('left')
ax1.xaxis.set_ticks_position('bottom')
ax1.get_yaxis().set_tick_params(which='both', direction='out')
ax1.get_xaxis().set_tick_params(which='both', direction='out')
plt.xlabel('')
plt.ylabel(str_ylabel)
plt.tight_layout()
plt.savefig(os.path.join(path_dir_built_dis_graphs,
dir_graphs,
'macro_trends.png'),
bbox_inches='tight')
plt.close()
# #################
# GRAPH PRICE CHGES
# #################
zero = 1e-10
df_chges = df_prices_ttc - df_prices_ttc.shift(1)
#df_chges = df_chges.ix['2012-01-01':'2012-12-31']
se_neg_chges = df_chges[df_chges < - zero].count(1)
se_pos_chges = df_chges[df_chges > zero].count(1)
fig = plt.figure()
ax = plt.subplot(111)
b0 = ax.bar(se_neg_chges.index,
(-se_neg_chges).values,
lw=0,
alpha = 0.5,
color = 'b')
b1 = ax.bar(se_pos_chges.index,
se_pos_chges.values,
lw=0,
alpha = 0.5,
color = 'g')
ax.legend((b1[0], b0[0]), ('Price increases', 'Price decreases'))
# make it symmetric
ax.set_ylim(-7000, 7000)
ax.set_yticks((-7000, -5000, -3000, -1000, 0, 1000, 3000, 5000, 7000))
# abs value: number of price changes
ax.set_yticklabels([u'{:.0f}'.format(x) for x in np.abs(ax.get_yticks())])
ax.grid()
# Show ticks only on left and bottom axis, out of graph
ax.yaxis.set_ticks_position('left')
ax.xaxis.set_ticks_position('bottom')
ax.get_yaxis().set_tick_params(which='both', direction='out')
ax.get_xaxis().set_tick_params(which='both', direction='out')
plt.ylabel(u'Nb price changes')
plt.tight_layout()
plt.savefig(os.path.join(path_dir_built_dis_graphs,
dir_graphs,
'macro_vol_price_chges.png'),
bbox_inches='tight')
plt.close()
| [
"[email protected]"
] | |
43a5269c653fb02480325cb8c86fd6ac270b6181 | ce661026009d622db924080d85ab529f1cae6b60 | /codingbat.com/not_string.py | 3a13b969ff38b7af6b31a1ef6b1b887e7e4db819 | [] | no_license | predavlad/projecteuler | d54f5d85ab0133b19b54b4168990b90f09a0184c | 58e1637733bb7e01e44bfac707353ecfe84d9b19 | refs/heads/master | 2021-01-23T15:29:26.257019 | 2019-02-09T10:11:23 | 2019-02-09T10:11:23 | 12,952,194 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 208 | py | def not_string(str):
if str[0:3] == 'not':
return str
return "not " + str
assert not_string('candy') == 'not candy'
assert not_string('x') == 'not x'
assert not_string('not bad') == 'not bad' | [
"[email protected]"
] | |
cb6b8fbf448bffc9b9d83e18ac9a48867102116a | 465ee73eb52b22b4fae19c76c37c88fa97d9249a | /chasm_webservice/chasm_webservice.py | c76bc2913465e3447bb6f5854a4d188b0231e94e | [
"MIT"
] | permissive | fw1121/galaxy_tools | a8ed090c2f38fba78cd45c4cb1164a8ec0ca14c1 | 0dc830fb100213bc8b2275519206fcdaaf500ec5 | refs/heads/master | 2020-03-30T09:17:00.855200 | 2014-12-18T11:16:57 | 2014-12-18T11:16:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,644 | py | #!/usr/bin/python
"""
The MIT License (MIT)
Copyright (c) 2014 Saket Choudhary, <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the 'Software'), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import sys
import requests
import argparse
import time
from functools import wraps
import json
import zipfile
import tempfile
import ntpath
import shutil
import xlrd
import csv
import os
sheet_map = {0: 'Variant_Analysis.csv',
1: 'Amino_Acid_Level_Analysis.csv', 2: 'Gene_Level_Analysis.csv'}
def retry(ExceptionToCheck, tries=40000, delay=3, backoff=2, logger=None):
'''Retry calling the decorated function using an exponential backoff.
http://www.saltycrane.com/blog/2009/11/trying-out-retry-decorator-python/
original from: http://wiki.python.org/moin/PythonDecoratorLibrary#Retry
:param ExceptionToCheck: the exception to check. may be a tuple of
exceptions to check
:type ExceptionToCheck: Exception or tuple
:param tries: number of times to try (not retry) before giving up
:type tries: int
:param delay: initial delay between retries in seconds
:type delay: int
:param backoff: backoff multiplier e.g. value of 2 will double the delay
each retry
:type backoff: int
:param logger: logger to use. If None, print
:type logger: logging.Logger instance
'''
def deco_retry(f):
@wraps(f)
def f_retry(*args, **kwargs):
mtries, mdelay = tries, delay
while mtries > 1:
try:
return f(*args, **kwargs)
except ExceptionToCheck, e:
#msg = '%s, Retrying in %d seconds...' % (str(e), mdelay)
msg = 'Retrying in %d seconds...' % (mdelay)
if logger:
logger.warning(msg)
else:
print msg
time.sleep(mdelay)
mtries -= 1
mdelay *= backoff
return f(*args, **kwargs)
return f_retry # true decorator
return deco_retry
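# Illustrative only -- a minimal sketch of applying the decorator above;
# flaky_fetch and its URL are hypothetical, not part of this module:
#   @retry(requests.exceptions.ConnectionError, tries=4, delay=1, backoff=2)
#   def flaky_fetch():
#       return requests.get('http://example.com')
# With those arguments the waits between the four attempts are 1, 2 and 4
# seconds.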
CANCERTYPES = ['Bladder', 'Blood-Lymphocyte', 'Blood-Myeloid',
'Brain-Cerebellum', 'Brain-Glioblastoma_Multiforme',
'Brain-Lower_Grade_Glioma', 'Breast', 'Cervix',
'Colon', 'Head_and_Neck', 'Kidney-Chromophobe',
'Kidney-Clear_Cell', 'Kidney-Papiallary_Cell',
'Liver-Nonviral', 'Liver-Viral', 'Lung-Adenocarcinoma',
'Lung-Squamous_Cell', 'Melanoma', 'Other', 'Ovary',
'Pancreas', 'Prostate-Adenocarcinoma', 'Rectum',
'Skin', 'Stomach', 'Thyroid', 'Uterus']
__URL__ = 'http://www.cravat.us/rest/service/submit'
def stop_err(msg):
sys.stderr.write('%s\n' % msg)
sys.exit()
class CHASMWeb:
def __init__(self,
mutationbox=None, filepath=None,
is_hg_18=None, analysis_type=None,
analysis_program=None, chosendb=None,
cancer_type=None, email=None,
annotate_genes=None, text_reports=None,
mupit_out=None):
self.mutationbox = mutationbox
self.filepath = filepath
self.is_hg_18 = is_hg_18
self.analysis_type = analysis_type
self.analysis_program = analysis_program
self.chosendb = chosendb
        self.email = email
        self.annotate_genes = annotate_genes
        self.cancer_type = cancer_type
self.text_reports = text_reports
self.mupit_input = mupit_out
def make_request(self):
data = {
            'mutations': self.mutationbox,
'hg18': self.is_hg_18,
'analysistype': self.analysis_type,
'analysisitem': self.analysis_program,
'chasmclassifier': self.cancer_type,
'geneannotation': self.annotate_genes,
'email': self.email,
'tsvreport': 'on', # self.text_reports,
'mupitinput': self.mupit_input,
}
stripped_data = {}
for key, value in data.iteritems():
if value is True:
value = 'on'
if value is not None and value is not False:
stripped_data[key] = value
if not self.mutationbox:
file_payload = {'inputfile': open(self.filepath)}
request = requests.post(
__URL__, data=stripped_data, files=file_payload)
else:
request = requests.post(
__URL__, data=stripped_data, files=dict(foo='bar'))
print request.text
job_id = json.loads(request.text)['jobid']
return job_id
@retry(requests.exceptions.HTTPError)
def zip_exists(self, job_id):
print job_id
url = 'http://www.cravat.us/results/%s/%s.zip' % (job_id, job_id)
zip_download_request = requests.request('GET', url)
if zip_download_request.status_code == 404:
raise requests.HTTPError()
else:
return url
def download_zip(self, url, job_id):
self.tmp_dir = tempfile.mkdtemp()
r = requests.get(url, stream=True)
if r.status_code == 200:
self.path = os.path.join(self.tmp_dir, job_id + '.zip')
with open(self.path, 'wb') as f:
for chunk in r.iter_content(128):
f.write(chunk)
else:
self.path = None
return self.path
def move_files(self, file_map):
fh = open(self.path, 'rb')
zip_files = zipfile.ZipFile(fh)
for name in zip_files.namelist():
filename = ntpath.basename(name)
extension = ntpath.splitext(filename)[-1]
source_file = zip_files.open(name)
if extension == '.txt':
target_file = open(file_map['error.txt'], 'wb')
elif filename != 'SnvGet Feature Description.xls' and extension != '.xls':
                target_file = open(file_map[filename], 'wb')
else:
target_file = None
if target_file:
with source_file, target_file:
shutil.copyfileobj(source_file, target_file)
if filename == 'SnvGet Feature Description.xls':
                # xlrd cannot open a ZipExtFile passed positionally; hand it
                # the raw bytes via file_contents instead
                wb = xlrd.open_workbook(file_contents=source_file.read())
                for sheet_name in wb.sheet_names():
                    sh = wb.sheet_by_name(sheet_name)
                    name_shortened = sheet_name.strip().replace(' ', '_') + '.csv'
                    with open(name_shortened, 'wb') as f:
                        c = csv.writer(f)
                        for r in range(sh.nrows):
                            c.writerow(sh.row_values(r))
shutil.rmtree(self.tmp_dir)
fh.close()
def main(params):
parser = argparse.ArgumentParser()
parser.add_argument('-i', '--input',
type=str, dest='mutationbox',
help='Input variants')
parser.add_argument('--path', type=str,
dest='input_file_location',
help='Input file location')
parser.add_argument('--hg18', dest='hg18',
action='store_true')
parser.add_argument('--analysis_type', dest='analysis_type',
type=str,
choices=['driver', 'functional',
'geneannotationonly'],
default='driver')
parser.add_argument('--chosendb', dest='chosendb',
type=str, nargs='*',
choices=['CHASM', 'SnvGet'],
default='CHASM')
parser.add_argument('--cancertype', dest='cancer_type',
type=str, choices=CANCERTYPES,
required=True)
parser.add_argument('--email', dest='email',
required=True, type=str)
parser.add_argument('--annotate', dest='annotate',
action='store_true', default=None)
parser.add_argument('--tsv_report', dest='tsv_report',
action='store_true', default=None)
parser.add_argument('--mupit_out', dest='mupit_out',
action='store_true', default=None)
parser.add_argument('--gene_analysis_out', dest='gene_analysis_out',
type=str, required=True)
parser.add_argument('--variant_analysis_out',
dest='variant_analysis_out',
type=str, required=True)
parser.add_argument('--amino_acid_level_analysis_out',
dest='amino_acid_level_analysis_out',
type=str, required=True,)
parser.add_argument('--codon_level_analysis_out',
dest='codon_level_analysis_out',
type=str, required=True,)
parser.add_argument('--error_file', dest='error_file_out',
type=str, required=True)
parser.add_argument('--snv_box_out', dest='snv_box_out',
type=str, required=False)
parser.add_argument('--snv_features', dest='snv_features_out',
type=str, required=False)
args = parser.parse_args(params)
chasm_web = CHASMWeb(mutationbox=args.mutationbox,
filepath=args.input_file_location,
is_hg_18=args.hg18,
analysis_type=args.analysis_type,
chosendb=args.chosendb,
cancer_type=args.cancer_type,
email=args.email,
annotate_genes=args.annotate,
text_reports=args.tsv_report,
mupit_out=args.mupit_out)
job_id = chasm_web.make_request()
file_map = {'Amino_Acid_Level_Analysis.Result.tsv': args.amino_acid_level_analysis_out,
'SNVBox.tsv': args.snv_box_out,
'Variant_Analysis.Result.tsv': args.variant_analysis_out,
'Gene_Level_Analysis.Result.tsv': args.gene_analysis_out,
'SnvGet Feature Description.xls': args.snv_features_out,
'error.txt': args.error_file_out,
'Codon_Level_Analysis.Result.tsv': args.codon_level_analysis_out,
}
url = chasm_web.zip_exists(job_id)
download = chasm_web.download_zip(url, job_id)
if download:
chasm_web.move_files(file_map=file_map)
else:
stop_err('Unable to download from the server')
if __name__ == '__main__':
main(sys.argv[1:])
| [
"[email protected]"
] | |
c137abb7879efd8cba96903b886443f16417a424 | e1292fb9f2b359f71fbc54a4eb6ae4cf0c1ff51d | /machines/towermain/temperature.py | 0f2911f2ee9401ba811d95d931d6a99320f76880 | [] | no_license | JNRiedel/PyExpLabSys | 879d5c6bf552e89134629f0c6ca011af67937c3d | b69daaa9c932b9264d9f731cc3f2091f31f5d36e | refs/heads/master | 2020-04-08T08:45:17.466865 | 2014-08-22T11:06:24 | 2014-08-22T11:06:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,371 | py | # -*- coding: utf-8 -*-
"""This script read the sample temperature from an Omega CNi3244_C24
temperature control unit and makes it available on a data socket. Furthermore,
it also log significant temperature points to the database.
"""
import time
from PyExpLabSys.drivers.omega import CNi3244_C24
from PyExpLabSys.common.sockets import DateDataSocket
from PyExpLabSys.common.loggers import ContinuousLogger
from PyExpLabSys.common.utilities import get_logger
LOGGER = get_logger('temperature', level='INFO', file_log=True,
file_name='temperature_log')
TEMPERATURE_CHANGE_THRESHOLD = 0.3
TIMEOUT = 600
SHORT_NAME = 'tts'
NAME = 'tower_temperature_sample'
def main_measure_loop(cni, socket, db_logger):
"""The main measuring loop"""
last_temp = -100000
last_time = 0
while True:
# Current values
now = time.time()
current = cni.read_temperature()
# The read_tempearture returns None if no thermocouple is connected
if current is not None:
# Set point on socket
socket.set_point_now(SHORT_NAME, current)
# Log if required
if now - last_time > TIMEOUT or\
abs(current - last_temp) > TEMPERATURE_CHANGE_THRESHOLD:
db_logger.enqueue_point_now('tower_temperature_sample',
current)
LOGGER.info('Value {} sent'.format(current))
last_time = now
last_temp = current
def main():
LOGGER.info('main started')
cni = CNi3244_C24(0)
socket = DateDataSocket([SHORT_NAME], timeouts=1.0)
socket.start()
db_logger = ContinuousLogger(
table='dateplots_tower', username='N/A', password='N/A',
measurement_codenames=[NAME],
dsn='servcinf'
)
db_logger.start()
time.sleep(0.1)
# Main part
try:
main_measure_loop(cni, socket, db_logger)
except KeyboardInterrupt:
LOGGER.info('Keyboard Interrupt. Shutting down!')
db_logger.stop()
cni.close()
socket.stop()
if __name__ == '__main__':
try:
main()
# This nasty little except on all exception makes sure that exception are
# logged
except Exception as e:
LOGGER.exception(e)
        raise  # bare raise preserves the original traceback
raw_input("Press enter to exit") | [
"[email protected]"
] | |
929c36a16c6510056a32e3a51fdc66da8a0b0bae | 47deebe6fefedb01fdce5d4e82f58bb08f8e1e92 | /python core/Lesson_35/own_filter.py | 88b22438ef99e205d34e4d28385a56a6dc930970 | [] | no_license | developeryuldashev/python-core | 5bb162603bdb5782acf05e3fb25ca5dd6347067a | 08fca77c9cfde69d93a7875b3fb65b98f3dabd78 | refs/heads/main | 2023-08-21T03:33:12.160133 | 2021-10-19T04:56:53 | 2021-10-19T04:56:53 | 393,383,696 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 464 | py | from telegram.ext import MessageFilter
class HandlePython(MessageFilter):
name='Filters.python'
def filter(self, message):
return 'python' in message
python=HandlePython()
class FilterAwesome(MessageFilter):
def filter(self, message):
return 'Dilshod' in message.text
filter_awesome=FilterAwesome()
class FilterRaxmat(MessageFilter):
def raxmat(self,message):
return 'raxmat' in message.text
filter_raxmat=FilterRaxmat() | [
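# Usage sketch (assumes a python-telegram-bot v13-style dispatcher named `dp`
# and a callback named `reply` defined elsewhere -- both hypothetical):
#   from telegram.ext import MessageHandler
#   dp.add_handler(MessageHandler(filter_awesome, reply))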
"[email protected]"
] | |
ef244246907dc513e7d6480d145bca83363884b1 | 48e124e97cc776feb0ad6d17b9ef1dfa24e2e474 | /sdk/python/pulumi_azure_native/sql/v20210201preview/get_sync_agent.py | c041588b469bf583ea5fe3162b8e9d913380dff9 | [
"BSD-3-Clause",
"Apache-2.0"
] | permissive | bpkgoud/pulumi-azure-native | 0817502630062efbc35134410c4a784b61a4736d | a3215fe1b87fba69294f248017b1591767c2b96c | refs/heads/master | 2023-08-29T22:39:49.984212 | 2021-11-15T12:43:41 | 2021-11-15T12:43:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,746 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
__all__ = [
'GetSyncAgentResult',
'AwaitableGetSyncAgentResult',
'get_sync_agent',
'get_sync_agent_output',
]
@pulumi.output_type
class GetSyncAgentResult:
"""
An Azure SQL Database sync agent.
"""
def __init__(__self__, expiry_time=None, id=None, is_up_to_date=None, last_alive_time=None, name=None, state=None, sync_database_id=None, type=None, version=None):
if expiry_time and not isinstance(expiry_time, str):
raise TypeError("Expected argument 'expiry_time' to be a str")
pulumi.set(__self__, "expiry_time", expiry_time)
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if is_up_to_date and not isinstance(is_up_to_date, bool):
raise TypeError("Expected argument 'is_up_to_date' to be a bool")
pulumi.set(__self__, "is_up_to_date", is_up_to_date)
if last_alive_time and not isinstance(last_alive_time, str):
raise TypeError("Expected argument 'last_alive_time' to be a str")
pulumi.set(__self__, "last_alive_time", last_alive_time)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if state and not isinstance(state, str):
raise TypeError("Expected argument 'state' to be a str")
pulumi.set(__self__, "state", state)
if sync_database_id and not isinstance(sync_database_id, str):
raise TypeError("Expected argument 'sync_database_id' to be a str")
pulumi.set(__self__, "sync_database_id", sync_database_id)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
if version and not isinstance(version, str):
raise TypeError("Expected argument 'version' to be a str")
pulumi.set(__self__, "version", version)
@property
@pulumi.getter(name="expiryTime")
def expiry_time(self) -> str:
"""
Expiration time of the sync agent version.
"""
return pulumi.get(self, "expiry_time")
@property
@pulumi.getter
def id(self) -> str:
"""
Resource ID.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter(name="isUpToDate")
def is_up_to_date(self) -> bool:
"""
If the sync agent version is up to date.
"""
return pulumi.get(self, "is_up_to_date")
@property
@pulumi.getter(name="lastAliveTime")
def last_alive_time(self) -> str:
"""
Last alive time of the sync agent.
"""
return pulumi.get(self, "last_alive_time")
@property
@pulumi.getter
def name(self) -> str:
"""
Resource name.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def state(self) -> str:
"""
State of the sync agent.
"""
return pulumi.get(self, "state")
@property
@pulumi.getter(name="syncDatabaseId")
def sync_database_id(self) -> Optional[str]:
"""
ARM resource id of the sync database in the sync agent.
"""
return pulumi.get(self, "sync_database_id")
@property
@pulumi.getter
def type(self) -> str:
"""
Resource type.
"""
return pulumi.get(self, "type")
@property
@pulumi.getter
def version(self) -> str:
"""
Version of the sync agent.
"""
return pulumi.get(self, "version")
class AwaitableGetSyncAgentResult(GetSyncAgentResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetSyncAgentResult(
expiry_time=self.expiry_time,
id=self.id,
is_up_to_date=self.is_up_to_date,
last_alive_time=self.last_alive_time,
name=self.name,
state=self.state,
sync_database_id=self.sync_database_id,
type=self.type,
version=self.version)
def get_sync_agent(resource_group_name: Optional[str] = None,
server_name: Optional[str] = None,
sync_agent_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSyncAgentResult:
"""
An Azure SQL Database sync agent.
:param str resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
:param str server_name: The name of the server on which the sync agent is hosted.
:param str sync_agent_name: The name of the sync agent.
"""
__args__ = dict()
__args__['resourceGroupName'] = resource_group_name
__args__['serverName'] = server_name
__args__['syncAgentName'] = sync_agent_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:sql/v20210201preview:getSyncAgent', __args__, opts=opts, typ=GetSyncAgentResult).value
return AwaitableGetSyncAgentResult(
expiry_time=__ret__.expiry_time,
id=__ret__.id,
is_up_to_date=__ret__.is_up_to_date,
last_alive_time=__ret__.last_alive_time,
name=__ret__.name,
state=__ret__.state,
sync_database_id=__ret__.sync_database_id,
type=__ret__.type,
version=__ret__.version)
@_utilities.lift_output_func(get_sync_agent)
def get_sync_agent_output(resource_group_name: Optional[pulumi.Input[str]] = None,
server_name: Optional[pulumi.Input[str]] = None,
sync_agent_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetSyncAgentResult]:
"""
An Azure SQL Database sync agent.
:param str resource_group_name: The name of the resource group that contains the resource. You can obtain this value from the Azure Resource Manager API or the portal.
:param str server_name: The name of the server on which the sync agent is hosted.
:param str sync_agent_name: The name of the sync agent.
"""
...
| [
"[email protected]"
] | |
79cd366aeeb329a3b587ec905738a51e0b4b27ca | 9f495456202ecbfdcbc17aae96f8db47116f7adf | /myenv/bin/django-admin.py | ea04fb36ad23374794a12481f9eeae5847dd1e0f | [] | no_license | nknaveenkumar760/pythontutorial | 8dfae178e5ffa1942722a3754bd1b0c1fc99aa3b | 22df07acad252040c6b9b68c935fef5add9cf974 | refs/heads/master | 2022-02-19T18:06:40.386333 | 2019-08-02T08:12:11 | 2019-08-02T08:12:11 | 171,852,495 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 159 | py | #!/home/ubuntu/pythontutorial/myenv/bin/python3
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
| [
"[email protected]"
] | |
72536ffc2001ee0a6739dd5cbc5f200e014063a0 | 5ed2d0e107e4cdcd8129f418fdc40f1f50267514 | /Medium/WordBreak/test.py | b3be093b1de892e18dfc3f8c3a1b7047621616fd | [] | no_license | tliu57/Leetcode | 6cdc3caa460a75c804870f6615653f335fc97de1 | c480697d174d33219b513a0b670bc82b17c91ce1 | refs/heads/master | 2020-05-21T03:14:07.399407 | 2018-07-08T18:50:01 | 2018-07-08T18:50:01 | 31,505,035 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 322 | py | class Solution(object):
    def wordBreak(self, s, wordDict):
        # dp[i] is truthy iff the prefix s[:i] can be segmented into words
        dp = [0 for i in range(len(s)+1)]
        dp[0] = 1  # the empty prefix is trivially segmentable
        for i in range(1, len(s)+1):
            for j in range(0, i):
                # s[:i] breaks as a segmentable prefix s[:j] plus a word s[j:i]
                dp[i] = dp[j] and (s[j:i] in wordDict)
                if dp[i]:
                    break
        return dp[len(s)]
sol = Solution()
print sol.wordBreak("leetcode", ["leet", "code"])
| [
"[email protected]"
] | |
43db92d549c4350589805bb14e755300ffbd27b8 | 29345337bf86edc938f3b5652702d551bfc3f11a | /python/src/main/python/pyalink/alink/tests/examples/operator/batch/test_fpgrowth.py | 320e2b8f49396dfb6cd99a9decade08889f1534a | [
"Apache-2.0"
] | permissive | vacaly/Alink | 32b71ac4572ae3509d343e3d1ff31a4da2321b6d | edb543ee05260a1dd314b11384d918fa1622d9c1 | refs/heads/master | 2023-07-21T03:29:07.612507 | 2023-07-12T12:41:31 | 2023-07-12T12:41:31 | 283,079,072 | 0 | 0 | Apache-2.0 | 2020-07-28T02:46:14 | 2020-07-28T02:46:13 | null | UTF-8 | Python | false | false | 733 | py | import unittest
import numpy as np
import pandas as pd
from pyalink.alink import *
class TestFpGrowth(unittest.TestCase):
def test_fpgrowth(self):
data = np.array([
["A,B,C,D"],
["B,C,E"],
["A,B,C,E"],
["B,D,E"],
["A,B,C,D"],
])
df_data = pd.DataFrame({
"items": data[:, 0],
})
data = dataframeToOperator(df_data, schemaStr='items string', op_type="batch")
fpGrowth = FpGrowthBatchOp() \
.setItemsCol("items") \
.setMinSupportPercent(0.4) \
.setMinConfidence(0.6)
fpGrowth.linkFrom(data)
fpGrowth.print()
fpGrowth.getSideOutput(0).print()
| [
"[email protected]"
] | |
5bdcfff19eab4786af18070910c55548fcd426dc | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02909/s681576776.py | 241c49b560159f84603785465ffd994ee3525f21 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 253 | py | import sys
readline = sys.stdin.readline
MOD = 10 ** 9 + 7
INF = float('INF')
sys.setrecursionlimit(10 ** 5)
def main():
S = input()
p = ["Sunny", "Cloudy", "Rainy"]
print(p[(p.index(S) + 1) % 3])
if __name__ == '__main__':
main()
| [
"[email protected]"
] | |
c96f6aeeabc58d40d4d44ece2ada2adcf6232c01 | 66fc3d58e94e8340a0d825501776a1dea37c0198 | /share/clf/process_clf_test_frames.py | aa63846e8c287609b0713bda9dd25814f6949612 | [
"CC-BY-4.0",
"BSD-3-Clause"
] | permissive | AcademySoftwareFoundation/OpenColorIO | dad370b54be147ae94f18ed6414d53bd76e9ef74 | 96f528fdfb7f9fb24388e33f6a968d29a3909cf8 | refs/heads/main | 2023-08-29T08:51:45.625957 | 2023-08-29T01:42:37 | 2023-08-29T01:42:37 | 775,131 | 843 | 236 | BSD-3-Clause | 2023-09-14T02:56:01 | 2010-07-14T18:22:06 | C++ | UTF-8 | Python | false | false | 3,582 | py | # SPDX-License-Identifier: BSD-3-Clause
# Copyright Contributors to the OpenColorIO Project.
# Use OpenColorIO to apply the CLF files from the CLF test kit to the CLF target image
# and produce a directory of processed OpenEXR images at the specified location.
# Run the script with "-h" for usage information.
# This script is python 2.7 and python 3 compatible.
import os
import argparse
def process_frames( options ):
dst_path = options.dst_dir
use_gpu = options.gpu
opt_level = options.opt
# Check the arguments are as expected.
if not os.path.exists( dst_path ):
os.mkdir( dst_path )
if not os.path.isdir( dst_path ):
raise ValueError( "Destination path must be a directory: " + dst_path )
# Get the path to the CLF target image, relative to the path of this script.
script_path = os.path.abspath( __file__ )
parts = script_path.split( os.sep )
ocio_base_path = os.path.join( os.sep, *parts[0:-3] )
src_image = os.path.join( ocio_base_path, 'share', 'clf', 'CLF_testimage.exr' )
# Get the path to the CLF files, relative to the path of this script.
clf_path = os.path.join( ocio_base_path, 'tests', 'data', 'files', 'clf' )
# Set the optimization level. None or lossless avoids the fast SSE log/exponent.
# (Note that the decimal value is available by simply printing the enum in Python.)
if (opt_level == 'none') or (opt_level is None):
        # As the default for this script, use no optimization rather than OCIO's default optimization
# in order to apply the operators exactly as they appear in the CLF file with no attempt
# to speed up the processing.
print( 'Optimization level: None' )
os.environ["OCIO_OPTIMIZATION_FLAGS"] = "0"
elif opt_level == 'lossless':
print( 'Optimization level: Lossless' )
os.environ["OCIO_OPTIMIZATION_FLAGS"] = "144457667"
elif opt_level == 'default':
print( 'Optimization level: Default' )
else:
raise ValueError( 'Unexpected --opt argument.' )
# TODO: Add an option to turn on only SSE without removing any ops.
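    # For reference, a sketch of how such a decimal flag value can be looked
    # up (assumes the PyOpenColorIO v2 bindings; the exact enum spelling is an
    # assumption -- verify against your OCIO install):
    #   import PyOpenColorIO as ocio
    #   print(int(ocio.OptimizationFlags.OPTIMIZATION_LOSSLESS))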
if use_gpu:
print( 'Processing on the GPU\n' )
cmd_base = 'ocioconvert --gpu --lut %s %s %s'
else:
print( 'Processing on the CPU\n' )
cmd_base = 'ocioconvert --lut %s %s %s'
# Iterate over each legal CLF file in the suite.
for f in os.listdir( clf_path ):
fname, ext = os.path.splitext( f )
if ext == '.clf':
# Build the full path to the file.
p = os.path.join( clf_path, f )
# Build the name of the destination image.
dst_image = os.path.join( dst_path, fname + '.exr' )
# Build the command.
cmd = cmd_base % (p, src_image, dst_image)
print('================='); print( cmd )
# Process the image.
os.system( cmd )
if __name__ == '__main__':
    import sys
parser = argparse.ArgumentParser(description='Process CLF test images using OCIO.')
parser.add_argument('dst_dir',
help='Path to a directory where the result images will be stored.')
parser.add_argument('--gpu', action='store_true',
help='Process using the GPU rather than the CPU.')
parser.add_argument('--opt', choices=['none','lossless','default'],
help='Specify the OCIO optimization level. If not specified, "none" will be used.')
options = parser.parse_args(sys.argv[1:])
process_frames(options)
| [
"[email protected]"
] | |
8a5f6c810c7991adc091d743259b6ed996eb5cd7 | e49d49b54f2d5cd25674050419f991477363c46b | /myshop/myshop/settings.py | 0a395ec79e6d309382afe3967821b0a84079f336 | [] | no_license | akashgiricse/online-shop | 0d87323e016f5c8a48020abd1d973987abd86035 | 94438876b8b2dd530ec05cb3c8df1b6a049fa514 | refs/heads/master | 2020-04-10T12:00:02.644277 | 2018-12-24T17:27:06 | 2018-12-24T17:27:06 | 161,008,658 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,310 | py | """
Django settings for myshop project.
Generated by 'django-admin startproject' using Django 2.0.5.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
import os
from decouple import config, Csv
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = config('SECRET_KEY')
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = config('DEBUG', default=True, cast=bool)
ALLOWED_HOSTS = config('ALLOWED_HOSTS', cast=Csv())
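# Example .env consumed by python-decouple for the settings above (placeholder
# values only):
#   SECRET_KEY=replace-me
#   DEBUG=True
#   ALLOWED_HOSTS=127.0.0.1,localhost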
# Application definition
INSTALLED_APPS = [
'shop.apps.ShopConfig',
'cart.apps.CartConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'myshop.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'myshop.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')
CART_SESSION_ID = 'cart'
| [
"[email protected]"
] | |
f80bb33e5349f60b61a5f79be62824f534c30071 | 52a0e6c0be7eebfde22c886ff7b8e605a27428ce | /src/looped/__init__.py | 22239d5ad5b39afa66288653d476cd7ead07ba1e | [] | no_license | vied12/LoopED | 42d5663c36d7b0026196faa152a75576993fb30d | 4126a2b655f68c20bd1029a4d69597dd877c18cb | refs/heads/master | 2022-07-01T04:42:02.150799 | 2017-07-26T12:55:39 | 2017-07-26T12:55:39 | 84,230,616 | 1 | 1 | null | 2022-06-17T21:14:14 | 2017-03-07T18:05:26 | CSS | UTF-8 | Python | false | false | 204 | py | from .Led import create_led
from .animations import Jump, Tuner, Intro, Metronome
from .gamepads import WebGamePad
__all__ = [
    'create_led',
    'Jump',
    'WebGamePad',
    'Tuner',
    'Intro',
    'Metronome',
]
| [
"[email protected]"
] | |
dec105911efbe7e1fcfffd57f3a39b128a4b12f8 | 4bd9d17a8697c0e87ce8b35e6189913d9f7dd1db | /legtool/gait/test_leg_ik.py | 2ce98243e4217c2bdb3a010821e25e54a5c1ce3f | [
"Apache-2.0"
] | permissive | jpieper/legtool | c72fe8a77871720f217634bb07602c709ba52583 | ab3946051bd16817b61d3073ce7be8bd27af90d0 | refs/heads/master | 2020-05-21T11:34:15.762189 | 2014-12-01T02:15:46 | 2014-12-01T02:15:46 | 21,541,872 | 10 | 9 | null | null | null | null | UTF-8 | Python | false | false | 4,133 | py | # Copyright 2014 Josh Pieper, [email protected].
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from . import leg_ik
def get_lizard_config():
config = leg_ik.Configuration()
config.coxa_length_mm = 50
config.femur_length_mm = 40
config.tibia_length_mm = 30
config.coxa_min_deg = -90
config.coxa_idle_deg = 0
config.coxa_max_deg = 90
config.femur_min_deg = -90
config.femur_idle_deg = 0
config.femur_max_deg = 90
config.tibia_min_deg = -90
config.tibia_idle_deg = 0
config.tibia_max_deg = 90
config.coxa_ident = 3
config.femur_ident = 4
config.tibia_ident = 5
return config
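# Geometry note: with this config a fully extended leg spans
# coxa + femur + tibia = 50 + 40 + 30 = 120 mm; the tests below probe points
# around the idle stance at (0, 90, -30), where all three joint angles are 0.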
def test_lizard_3dof():
config = get_lizard_config()
point = leg_ik.Point3D(0, 90, -30)
result = leg_ik.lizard_3dof_ik(point, config)
assert abs(result.coxa_deg) < 0.01
assert abs(result.femur_deg) < 0.01
assert abs(result.tibia_deg) < 0.01
point = leg_ik.Point3D(0, 90, -25)
result = leg_ik.lizard_3dof_ik(point, config)
assert abs(result.coxa_deg) < 0.01
assert abs(result.femur_deg - 7.18) < 0.01
assert abs(result.tibia_deg + 6.58) < 0.01
point = leg_ik.Point3D(0, 90, -35)
result = leg_ik.lizard_3dof_ik(point, config)
assert abs(result.coxa_deg) < 0.01
assert abs(result.femur_deg + 7.18) < 0.01
assert abs(result.tibia_deg - 7.78) < 0.01
point = leg_ik.Point3D(0, 95, -30)
result = leg_ik.lizard_3dof_ik(point, config)
assert abs(result.coxa_deg) < 0.01
assert abs(result.femur_deg + 0.60) < 0.01
assert abs(result.tibia_deg - 10.20) < 0.01
# Now test some with coxa.
point = leg_ik.Point3D(20, 87.75, -30)
result = leg_ik.lizard_3dof_ik(point, config)
assert abs(result.coxa_deg - 12.84) < 0.01
assert abs(result.femur_deg) < 0.01
assert abs(result.tibia_deg) < 0.01
point = leg_ik.Point3D(20, 87.75, -25)
result = leg_ik.lizard_3dof_ik(point, config)
assert abs(result.coxa_deg - 12.84) < 0.01
assert abs(result.femur_deg - 7.18) < 0.01
assert abs(result.tibia_deg + 6.58) < 0.01
command_dict = result.command_dict()
assert sorted(command_dict.keys()) == [3, 4, 5]
assert command_dict[3] == result.coxa_deg
assert command_dict[4] == result.femur_deg
assert command_dict[5] == result.tibia_deg
# Try adding in some idle to the coxa.
config.coxa_idle_deg = 3.0
result = leg_ik.lizard_3dof_ik(point, config)
assert abs(result.coxa_deg - 15.84) < 0.01
assert abs(result.femur_deg - 7.18) < 0.01
assert abs(result.tibia_deg + 6.58) < 0.01
# And some idle to femur.
config.femur_idle_deg = 4.0
result = leg_ik.lizard_3dof_ik(point, config)
assert abs(result.coxa_deg - 15.84) < 0.01
assert abs(result.femur_deg - 11.18) < 0.01
assert abs(result.tibia_deg + 6.58) < 0.01
# And some idle to tibia.
config.tibia_idle_deg = 5.0
result = leg_ik.lizard_3dof_ik(point, config)
assert abs(result.coxa_deg - 15.84) < 0.01
assert abs(result.femur_deg - 11.18) < 0.01
assert abs(result.tibia_deg + 1.58) < 0.01
# Now try setting the max coxa low enough that we should get None.
config.coxa_max_deg = 15.0
result = leg_ik.lizard_3dof_ik(point, config)
assert result is None
config.coxa_max_deg = 90.0
result = leg_ik.lizard_3dof_ik(point, config)
assert result is not None
# And set the tibia max deg low enough to get None.
config.femur_max_deg = 10.0
result = leg_ik.lizard_3dof_ik(point, config)
assert result is None
# We'll assume the other bounds (min, and tibia) are correct for
# now.
| [
"[email protected]"
] | |
dd539a81c74353ac014e0b9b7bf8636a6b5d7ef4 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p04020/s011481880.py | cd16b5ea063aa01e01e9a52b7814e2636bd9e260 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 413 | py | n = int(input())
a = [int(input()) for _ in range(n)]
lsts = []
temp = []
for aa in a:
if aa == 0:
if temp:
lsts.append(temp)
temp = []
else:
temp.append(aa)
if temp: lsts.append(temp)
ans = 0
for lst in lsts:
for i, aa in enumerate(lst):
ans += aa // 2
if i != len(lst)-1:
lst[i+1] -= aa % 2
ans += aa % 2
print(ans) | [
"[email protected]"
] | |
78ea63e50440ee9e3830cc8df7eb1b07d9a104d5 | ce76b3ef70b885d7c354b6ddb8447d111548e0f1 | /want_part/know_great_company.py | 48b6a068bf3763ad3f82345d0496999bf868095a | [] | no_license | JingkaiTang/github-play | 9bdca4115eee94a7b5e4ae9d3d6052514729ff21 | 51b550425a91a97480714fe9bc63cb5112f6f729 | refs/heads/master | 2021-01-20T20:18:21.249162 | 2016-08-19T07:20:12 | 2016-08-19T07:20:12 | 60,834,519 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 176 | py |
#! /usr/bin/env python
def say_thing(str_arg):
world(str_arg)
print('week')
def world(str_arg):
print(str_arg)
if __name__ == '__main__':
say_thing('life')
| [
"[email protected]"
] |