blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 3
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
112
| license_type
stringclasses 2
values | repo_name
stringlengths 5
115
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringclasses 777
values | visit_date
timestamp[us]date 2015-08-06 10:31:46
2023-09-06 10:44:38
| revision_date
timestamp[us]date 1970-01-01 02:38:32
2037-05-03 13:00:00
| committer_date
timestamp[us]date 1970-01-01 02:38:32
2023-09-06 01:08:06
| github_id
int64 4.92k
681M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 22
values | gha_event_created_at
timestamp[us]date 2012-06-04 01:52:49
2023-09-14 21:59:50
⌀ | gha_created_at
timestamp[us]date 2008-05-22 07:58:19
2023-08-21 12:35:19
⌀ | gha_language
stringclasses 149
values | src_encoding
stringclasses 26
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 3
10.2M
| extension
stringclasses 188
values | content
stringlengths 3
10.2M
| authors
listlengths 1
1
| author_id
stringlengths 1
132
|
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c1fa7ddb3cb26e3ba1673fd853a21c0bcecf5b52 | 865ee6eb8ee52c8056fbf406059c5481f365de6e | /openresty-win32-build/thirdparty/x86/pgsql/pgAdmin 4/venv/Lib/site-packages/typing.py | 693945365d671e3b1c522b0d19caa4eba538a5c8 | [
"MIT",
"LicenseRef-scancode-ssleay",
"BSD-3-Clause",
"LicenseRef-scancode-openssl",
"LicenseRef-scancode-ssleay-windows",
"LicenseRef-scancode-pcre",
"LicenseRef-scancode-public-domain",
"Zlib",
"BSD-2-Clause"
]
| permissive | nneesshh/openresty-oss | 76e119081ea06bc82b184f96d531cc756b716c9d | bfbb9d7526020eda1788a0ed24f2be3c8be5c1c3 | refs/heads/master | 2022-12-12T21:39:48.917622 | 2019-05-31T03:14:18 | 2019-05-31T03:14:18 | 184,213,410 | 1 | 0 | MIT | 2022-12-06T17:28:59 | 2019-04-30T07:28:45 | C | UTF-8 | Python | false | false | 71,396 | py | from __future__ import absolute_import, unicode_literals
import abc
from abc import abstractmethod, abstractproperty
import collections
import functools
import re as stdlib_re # Avoid confusion with the re we export.
import sys
import types
import copy
try:
import collections.abc as collections_abc
except ImportError:
import collections as collections_abc # Fallback for PY3.2.
# Please keep __all__ alphabetized within each category.
__all__ = [
# Super-special typing primitives.
'Any',
'Callable',
'ClassVar',
'Generic',
'Optional',
'Tuple',
'Type',
'TypeVar',
'Union',
# ABCs (from collections.abc).
'AbstractSet', # collections.abc.Set.
'GenericMeta', # subclass of abc.ABCMeta and a metaclass
# for 'Generic' and ABCs below.
'ByteString',
'Container',
'ContextManager',
'Hashable',
'ItemsView',
'Iterable',
'Iterator',
'KeysView',
'Mapping',
'MappingView',
'MutableMapping',
'MutableSequence',
'MutableSet',
'Sequence',
'Sized',
'ValuesView',
# Structural checks, a.k.a. protocols.
'Reversible',
'SupportsAbs',
'SupportsComplex',
'SupportsFloat',
'SupportsInt',
# Concrete collection types.
'Counter',
'Deque',
'Dict',
'DefaultDict',
'List',
'Set',
'FrozenSet',
'NamedTuple', # Not really a type.
'Generator',
# One-off things.
'AnyStr',
'cast',
'get_type_hints',
'NewType',
'no_type_check',
'no_type_check_decorator',
'overload',
'Text',
'TYPE_CHECKING',
]
# The pseudo-submodules 're' and 'io' are part of the public
# namespace, but excluded from __all__ because they might stomp on
# legitimate imports of those modules.
def _qualname(x):
if sys.version_info[:2] >= (3, 3):
return x.__qualname__
else:
# Fall back to just name.
return x.__name__
def _trim_name(nm):
whitelist = ('_TypeAlias', '_ForwardRef', '_TypingBase', '_FinalTypingBase')
if nm.startswith('_') and nm not in whitelist:
nm = nm[1:]
return nm
class TypingMeta(type):
"""Metaclass for most types defined in typing module
(not a part of public API).
This also defines a dummy constructor (all the work for most typing
constructs is done in __new__) and a nicer repr().
"""
_is_protocol = False
def __new__(cls, name, bases, namespace):
return super(TypingMeta, cls).__new__(cls, str(name), bases, namespace)
@classmethod
def assert_no_subclassing(cls, bases):
for base in bases:
if isinstance(base, cls):
raise TypeError("Cannot subclass %s" %
(', '.join(map(_type_repr, bases)) or '()'))
def __init__(self, *args, **kwds):
pass
def _eval_type(self, globalns, localns):
"""Override this in subclasses to interpret forward references.
For example, List['C'] is internally stored as
List[_ForwardRef('C')], which should evaluate to List[C],
where C is an object found in globalns or localns (searching
localns first, of course).
"""
return self
def _get_type_vars(self, tvars):
pass
def __repr__(self):
qname = _trim_name(_qualname(self))
return '%s.%s' % (self.__module__, qname)
class _TypingBase(object):
"""Internal indicator of special typing constructs."""
__metaclass__ = TypingMeta
__slots__ = ('__weakref__',)
def __init__(self, *args, **kwds):
pass
def __new__(cls, *args, **kwds):
"""Constructor.
This only exists to give a better error message in case
someone tries to subclass a special typing object (not a good idea).
"""
if (len(args) == 3 and
isinstance(args[0], str) and
isinstance(args[1], tuple)):
# Close enough.
raise TypeError("Cannot subclass %r" % cls)
return super(_TypingBase, cls).__new__(cls)
# Things that are not classes also need these.
def _eval_type(self, globalns, localns):
return self
def _get_type_vars(self, tvars):
pass
def __repr__(self):
cls = type(self)
qname = _trim_name(_qualname(cls))
return '%s.%s' % (cls.__module__, qname)
def __call__(self, *args, **kwds):
raise TypeError("Cannot instantiate %r" % type(self))
class _FinalTypingBase(_TypingBase):
"""Internal mix-in class to prevent instantiation.
Prevents instantiation unless _root=True is given in class call.
It is used to create pseudo-singleton instances Any, Union, Optional, etc.
"""
__slots__ = ()
def __new__(cls, *args, **kwds):
self = super(_FinalTypingBase, cls).__new__(cls, *args, **kwds)
if '_root' in kwds and kwds['_root'] is True:
return self
raise TypeError("Cannot instantiate %r" % cls)
def __reduce__(self):
return _trim_name(type(self).__name__)
class _ForwardRef(_TypingBase):
"""Internal wrapper to hold a forward reference."""
__slots__ = ('__forward_arg__', '__forward_code__',
'__forward_evaluated__', '__forward_value__')
def __init__(self, arg):
super(_ForwardRef, self).__init__(arg)
if not isinstance(arg, basestring):
raise TypeError('Forward reference must be a string -- got %r' % (arg,))
try:
code = compile(arg, '<string>', 'eval')
except SyntaxError:
raise SyntaxError('Forward reference must be an expression -- got %r' %
(arg,))
self.__forward_arg__ = arg
self.__forward_code__ = code
self.__forward_evaluated__ = False
self.__forward_value__ = None
def _eval_type(self, globalns, localns):
if not self.__forward_evaluated__ or localns is not globalns:
if globalns is None and localns is None:
globalns = localns = {}
elif globalns is None:
globalns = localns
elif localns is None:
localns = globalns
self.__forward_value__ = _type_check(
eval(self.__forward_code__, globalns, localns),
"Forward references must evaluate to types.")
self.__forward_evaluated__ = True
return self.__forward_value__
def __eq__(self, other):
if not isinstance(other, _ForwardRef):
return NotImplemented
return (self.__forward_arg__ == other.__forward_arg__ and
self.__forward_value__ == other.__forward_value__)
def __hash__(self):
return hash((self.__forward_arg__, self.__forward_value__))
def __instancecheck__(self, obj):
raise TypeError("Forward references cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("Forward references cannot be used with issubclass().")
def __repr__(self):
return '_ForwardRef(%r)' % (self.__forward_arg__,)
class _TypeAlias(_TypingBase):
"""Internal helper class for defining generic variants of concrete types.
Note that this is not a type; let's call it a pseudo-type. It cannot
be used in instance and subclass checks in parameterized form, i.e.
``isinstance(42, Match[str])`` raises ``TypeError`` instead of returning
``False``.
"""
__slots__ = ('name', 'type_var', 'impl_type', 'type_checker')
def __init__(self, name, type_var, impl_type, type_checker):
"""Initializer.
Args:
name: The name, e.g. 'Pattern'.
type_var: The type parameter, e.g. AnyStr, or the
specific type, e.g. str.
impl_type: The implementation type.
type_checker: Function that takes an impl_type instance.
and returns a value that should be a type_var instance.
"""
assert isinstance(name, basestring), repr(name)
assert isinstance(impl_type, type), repr(impl_type)
assert not isinstance(impl_type, TypingMeta), repr(impl_type)
assert isinstance(type_var, (type, _TypingBase)), repr(type_var)
self.name = name
self.type_var = type_var
self.impl_type = impl_type
self.type_checker = type_checker
def __repr__(self):
return "%s[%s]" % (self.name, _type_repr(self.type_var))
def __getitem__(self, parameter):
if not isinstance(self.type_var, TypeVar):
raise TypeError("%s cannot be further parameterized." % self)
if self.type_var.__constraints__ and isinstance(parameter, type):
if not issubclass(parameter, self.type_var.__constraints__):
raise TypeError("%s is not a valid substitution for %s." %
(parameter, self.type_var))
if isinstance(parameter, TypeVar) and parameter is not self.type_var:
raise TypeError("%s cannot be re-parameterized." % self)
return self.__class__(self.name, parameter,
self.impl_type, self.type_checker)
def __eq__(self, other):
if not isinstance(other, _TypeAlias):
return NotImplemented
return self.name == other.name and self.type_var == other.type_var
def __hash__(self):
return hash((self.name, self.type_var))
def __instancecheck__(self, obj):
if not isinstance(self.type_var, TypeVar):
raise TypeError("Parameterized type aliases cannot be used "
"with isinstance().")
return isinstance(obj, self.impl_type)
def __subclasscheck__(self, cls):
if not isinstance(self.type_var, TypeVar):
raise TypeError("Parameterized type aliases cannot be used "
"with issubclass().")
return issubclass(cls, self.impl_type)
def _get_type_vars(types, tvars):
for t in types:
if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
t._get_type_vars(tvars)
def _type_vars(types):
tvars = []
_get_type_vars(types, tvars)
return tuple(tvars)
def _eval_type(t, globalns, localns):
if isinstance(t, TypingMeta) or isinstance(t, _TypingBase):
return t._eval_type(globalns, localns)
return t
def _type_check(arg, msg):
"""Check that the argument is a type, and return it (internal helper).
As a special case, accept None and return type(None) instead.
Also, _TypeAlias instances (e.g. Match, Pattern) are acceptable.
The msg argument is a human-readable error message, e.g.
"Union[arg, ...]: arg should be a type."
We append the repr() of the actual value (truncated to 100 chars).
"""
if arg is None:
return type(None)
if isinstance(arg, basestring):
arg = _ForwardRef(arg)
if (
isinstance(arg, _TypingBase) and type(arg).__name__ == '_ClassVar' or
not isinstance(arg, (type, _TypingBase)) and not callable(arg)
):
raise TypeError(msg + " Got %.100r." % (arg,))
# Bare Union etc. are not valid as type arguments
if (
type(arg).__name__ in ('_Union', '_Optional') and
not getattr(arg, '__origin__', None) or
isinstance(arg, TypingMeta) and arg._gorg in (Generic, _Protocol)
):
raise TypeError("Plain %s is not valid as type argument" % arg)
return arg
def _type_repr(obj):
"""Return the repr() of an object, special-casing types (internal helper).
If obj is a type, we return a shorter version than the default
type.__repr__, based on the module and qualified name, which is
typically enough to uniquely identify a type. For everything
else, we fall back on repr(obj).
"""
if isinstance(obj, type) and not isinstance(obj, TypingMeta):
if obj.__module__ == '__builtin__':
return _qualname(obj)
return '%s.%s' % (obj.__module__, _qualname(obj))
if obj is Ellipsis:
return('...')
if isinstance(obj, types.FunctionType):
return obj.__name__
return repr(obj)
class ClassVarMeta(TypingMeta):
"""Metaclass for _ClassVar"""
def __new__(cls, name, bases, namespace):
cls.assert_no_subclassing(bases)
self = super(ClassVarMeta, cls).__new__(cls, name, bases, namespace)
return self
class _ClassVar(_FinalTypingBase):
"""Special type construct to mark class variables.
An annotation wrapped in ClassVar indicates that a given
attribute is intended to be used as a class variable and
should not be set on instances of that class. Usage::
class Starship:
stats = {} # type: ClassVar[Dict[str, int]] # class variable
damage = 10 # type: int # instance variable
ClassVar accepts only types and cannot be further subscribed.
Note that ClassVar is not a class itself, and should not
be used with isinstance() or issubclass().
"""
__metaclass__ = ClassVarMeta
__slots__ = ('__type__',)
def __init__(self, tp=None, _root=False):
self.__type__ = tp
def __getitem__(self, item):
cls = type(self)
if self.__type__ is None:
return cls(_type_check(item,
'{} accepts only types.'.format(cls.__name__[1:])),
_root=True)
raise TypeError('{} cannot be further subscripted'
.format(cls.__name__[1:]))
def _eval_type(self, globalns, localns):
return type(self)(_eval_type(self.__type__, globalns, localns),
_root=True)
def __repr__(self):
r = super(_ClassVar, self).__repr__()
if self.__type__ is not None:
r += '[{}]'.format(_type_repr(self.__type__))
return r
def __hash__(self):
return hash((type(self).__name__, self.__type__))
def __eq__(self, other):
if not isinstance(other, _ClassVar):
return NotImplemented
if self.__type__ is not None:
return self.__type__ == other.__type__
return self is other
ClassVar = _ClassVar(_root=True)
class AnyMeta(TypingMeta):
"""Metaclass for Any."""
def __new__(cls, name, bases, namespace):
cls.assert_no_subclassing(bases)
self = super(AnyMeta, cls).__new__(cls, name, bases, namespace)
return self
class _Any(_FinalTypingBase):
"""Special type indicating an unconstrained type.
- Any is compatible with every type.
- Any assumed to have all methods.
- All values assumed to be instances of Any.
Note that all the above statements are true from the point of view of
static type checkers. At runtime, Any should not be used with instance
or class checks.
"""
__metaclass__ = AnyMeta
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError("Any cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("Any cannot be used with issubclass().")
Any = _Any(_root=True)
class NoReturnMeta(TypingMeta):
"""Metaclass for NoReturn."""
def __new__(cls, name, bases, namespace):
cls.assert_no_subclassing(bases)
self = super(NoReturnMeta, cls).__new__(cls, name, bases, namespace)
return self
class _NoReturn(_FinalTypingBase):
"""Special type indicating functions that never return.
Example::
from typing import NoReturn
def stop() -> NoReturn:
raise Exception('no way')
This type is invalid in other positions, e.g., ``List[NoReturn]``
will fail in static type checkers.
"""
__metaclass__ = NoReturnMeta
__slots__ = ()
def __instancecheck__(self, obj):
raise TypeError("NoReturn cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("NoReturn cannot be used with issubclass().")
NoReturn = _NoReturn(_root=True)
class TypeVarMeta(TypingMeta):
def __new__(cls, name, bases, namespace):
cls.assert_no_subclassing(bases)
return super(TypeVarMeta, cls).__new__(cls, name, bases, namespace)
class TypeVar(_TypingBase):
"""Type variable.
Usage::
T = TypeVar('T') # Can be anything
A = TypeVar('A', str, bytes) # Must be str or bytes
Type variables exist primarily for the benefit of static type
checkers. They serve as the parameters for generic types as well
as for generic function definitions. See class Generic for more
information on generic types. Generic functions work as follows:
def repeat(x: T, n: int) -> List[T]:
'''Return a list containing n references to x.'''
return [x]*n
def longest(x: A, y: A) -> A:
'''Return the longest of two strings.'''
return x if len(x) >= len(y) else y
The latter example's signature is essentially the overloading
of (str, str) -> str and (bytes, bytes) -> bytes. Also note
that if the arguments are instances of some subclass of str,
the return type is still plain str.
At runtime, isinstance(x, T) and issubclass(C, T) will raise TypeError.
Type variables defined with covariant=True or contravariant=True
can be used do declare covariant or contravariant generic types.
See PEP 484 for more details. By default generic types are invariant
in all type variables.
Type variables can be introspected. e.g.:
T.__name__ == 'T'
T.__constraints__ == ()
T.__covariant__ == False
T.__contravariant__ = False
A.__constraints__ == (str, bytes)
"""
__metaclass__ = TypeVarMeta
__slots__ = ('__name__', '__bound__', '__constraints__',
'__covariant__', '__contravariant__')
def __init__(self, name, *constraints, **kwargs):
super(TypeVar, self).__init__(name, *constraints, **kwargs)
bound = kwargs.get('bound', None)
covariant = kwargs.get('covariant', False)
contravariant = kwargs.get('contravariant', False)
self.__name__ = name
if covariant and contravariant:
raise ValueError("Bivariant types are not supported.")
self.__covariant__ = bool(covariant)
self.__contravariant__ = bool(contravariant)
if constraints and bound is not None:
raise TypeError("Constraints cannot be combined with bound=...")
if constraints and len(constraints) == 1:
raise TypeError("A single constraint is not allowed")
msg = "TypeVar(name, constraint, ...): constraints must be types."
self.__constraints__ = tuple(_type_check(t, msg) for t in constraints)
if bound:
self.__bound__ = _type_check(bound, "Bound must be a type.")
else:
self.__bound__ = None
def _get_type_vars(self, tvars):
if self not in tvars:
tvars.append(self)
def __repr__(self):
if self.__covariant__:
prefix = '+'
elif self.__contravariant__:
prefix = '-'
else:
prefix = '~'
return prefix + self.__name__
def __instancecheck__(self, instance):
raise TypeError("Type variables cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("Type variables cannot be used with issubclass().")
# Some unconstrained type variables. These are used by the container types.
# (These are not for export.)
T = TypeVar('T') # Any type.
KT = TypeVar('KT') # Key type.
VT = TypeVar('VT') # Value type.
T_co = TypeVar('T_co', covariant=True) # Any type covariant containers.
V_co = TypeVar('V_co', covariant=True) # Any type covariant containers.
VT_co = TypeVar('VT_co', covariant=True) # Value type covariant containers.
T_contra = TypeVar('T_contra', contravariant=True) # Ditto contravariant.
# A useful type variable with constraints. This represents string types.
# (This one *is* for export!)
AnyStr = TypeVar('AnyStr', bytes, unicode)
def _replace_arg(arg, tvars, args):
"""An internal helper function: replace arg if it is a type variable
found in tvars with corresponding substitution from args or
with corresponding substitution sub-tree if arg is a generic type.
"""
if tvars is None:
tvars = []
if hasattr(arg, '_subs_tree') and isinstance(arg, (GenericMeta, _TypingBase)):
return arg._subs_tree(tvars, args)
if isinstance(arg, TypeVar):
for i, tvar in enumerate(tvars):
if arg == tvar:
return args[i]
return arg
# Special typing constructs Union, Optional, Generic, Callable and Tuple
# use three special attributes for internal bookkeeping of generic types:
# * __parameters__ is a tuple of unique free type parameters of a generic
# type, for example, Dict[T, T].__parameters__ == (T,);
# * __origin__ keeps a reference to a type that was subscripted,
# e.g., Union[T, int].__origin__ == Union;
# * __args__ is a tuple of all arguments used in subscripting,
# e.g., Dict[T, int].__args__ == (T, int).
def _subs_tree(cls, tvars=None, args=None):
"""An internal helper function: calculate substitution tree
for generic cls after replacing its type parameters with
substitutions in tvars -> args (if any).
Repeat the same following __origin__'s.
Return a list of arguments with all possible substitutions
performed. Arguments that are generic classes themselves are represented
as tuples (so that no new classes are created by this function).
For example: _subs_tree(List[Tuple[int, T]][str]) == [(Tuple, int, str)]
"""
if cls.__origin__ is None:
return cls
# Make of chain of origins (i.e. cls -> cls.__origin__)
current = cls.__origin__
orig_chain = []
while current.__origin__ is not None:
orig_chain.append(current)
current = current.__origin__
# Replace type variables in __args__ if asked ...
tree_args = []
for arg in cls.__args__:
tree_args.append(_replace_arg(arg, tvars, args))
# ... then continue replacing down the origin chain.
for ocls in orig_chain:
new_tree_args = []
for arg in ocls.__args__:
new_tree_args.append(_replace_arg(arg, ocls.__parameters__, tree_args))
tree_args = new_tree_args
return tree_args
def _remove_dups_flatten(parameters):
"""An internal helper for Union creation and substitution: flatten Union's
among parameters, then remove duplicates and strict subclasses.
"""
# Flatten out Union[Union[...], ...].
params = []
for p in parameters:
if isinstance(p, _Union) and p.__origin__ is Union:
params.extend(p.__args__)
elif isinstance(p, tuple) and len(p) > 0 and p[0] is Union:
params.extend(p[1:])
else:
params.append(p)
# Weed out strict duplicates, preserving the first of each occurrence.
all_params = set(params)
if len(all_params) < len(params):
new_params = []
for t in params:
if t in all_params:
new_params.append(t)
all_params.remove(t)
params = new_params
assert not all_params, all_params
# Weed out subclasses.
# E.g. Union[int, Employee, Manager] == Union[int, Employee].
# If object is present it will be sole survivor among proper classes.
# Never discard type variables.
# (In particular, Union[str, AnyStr] != AnyStr.)
all_params = set(params)
for t1 in params:
if not isinstance(t1, type):
continue
if any(isinstance(t2, type) and issubclass(t1, t2)
for t2 in all_params - {t1}
if not (isinstance(t2, GenericMeta) and
t2.__origin__ is not None)):
all_params.remove(t1)
return tuple(t for t in params if t in all_params)
def _check_generic(cls, parameters):
# Check correct count for parameters of a generic cls (internal helper).
if not cls.__parameters__:
raise TypeError("%s is not a generic class" % repr(cls))
alen = len(parameters)
elen = len(cls.__parameters__)
if alen != elen:
raise TypeError("Too %s parameters for %s; actual %s, expected %s" %
("many" if alen > elen else "few", repr(cls), alen, elen))
_cleanups = []
def _tp_cache(func):
maxsize = 128
cache = {}
_cleanups.append(cache.clear)
@functools.wraps(func)
def inner(*args):
key = args
try:
return cache[key]
except TypeError:
# Assume it's an unhashable argument.
return func(*args)
except KeyError:
value = func(*args)
if len(cache) >= maxsize:
# If the cache grows too much, just start over.
cache.clear()
cache[key] = value
return value
return inner
class UnionMeta(TypingMeta):
"""Metaclass for Union."""
def __new__(cls, name, bases, namespace):
cls.assert_no_subclassing(bases)
return super(UnionMeta, cls).__new__(cls, name, bases, namespace)
class _Union(_FinalTypingBase):
"""Union type; Union[X, Y] means either X or Y.
To define a union, use e.g. Union[int, str]. Details:
- The arguments must be types and there must be at least one.
- None as an argument is a special case and is replaced by
type(None).
- Unions of unions are flattened, e.g.::
Union[Union[int, str], float] == Union[int, str, float]
- Unions of a single argument vanish, e.g.::
Union[int] == int # The constructor actually returns int
- Redundant arguments are skipped, e.g.::
Union[int, str, int] == Union[int, str]
- When comparing unions, the argument order is ignored, e.g.::
Union[int, str] == Union[str, int]
- When two arguments have a subclass relationship, the least
derived argument is kept, e.g.::
class Employee: pass
class Manager(Employee): pass
Union[int, Employee, Manager] == Union[int, Employee]
Union[Manager, int, Employee] == Union[int, Employee]
Union[Employee, Manager] == Employee
- Similar for object::
Union[int, object] == object
- You cannot subclass or instantiate a union.
- You can use Optional[X] as a shorthand for Union[X, None].
"""
__metaclass__ = UnionMeta
__slots__ = ('__parameters__', '__args__', '__origin__', '__tree_hash__')
def __new__(cls, parameters=None, origin=None, *args, **kwds):
self = super(_Union, cls).__new__(cls, parameters, origin, *args, **kwds)
if origin is None:
self.__parameters__ = None
self.__args__ = None
self.__origin__ = None
self.__tree_hash__ = hash(frozenset(('Union',)))
return self
if not isinstance(parameters, tuple):
raise TypeError("Expected parameters=<tuple>")
if origin is Union:
parameters = _remove_dups_flatten(parameters)
# It's not a union if there's only one type left.
if len(parameters) == 1:
return parameters[0]
self.__parameters__ = _type_vars(parameters)
self.__args__ = parameters
self.__origin__ = origin
# Pre-calculate the __hash__ on instantiation.
# This improves speed for complex substitutions.
subs_tree = self._subs_tree()
if isinstance(subs_tree, tuple):
self.__tree_hash__ = hash(frozenset(subs_tree))
else:
self.__tree_hash__ = hash(subs_tree)
return self
def _eval_type(self, globalns, localns):
if self.__args__ is None:
return self
ev_args = tuple(_eval_type(t, globalns, localns) for t in self.__args__)
ev_origin = _eval_type(self.__origin__, globalns, localns)
if ev_args == self.__args__ and ev_origin == self.__origin__:
# Everything is already evaluated.
return self
return self.__class__(ev_args, ev_origin, _root=True)
def _get_type_vars(self, tvars):
if self.__origin__ and self.__parameters__:
_get_type_vars(self.__parameters__, tvars)
def __repr__(self):
if self.__origin__ is None:
return super(_Union, self).__repr__()
tree = self._subs_tree()
if not isinstance(tree, tuple):
return repr(tree)
return tree[0]._tree_repr(tree)
def _tree_repr(self, tree):
arg_list = []
for arg in tree[1:]:
if not isinstance(arg, tuple):
arg_list.append(_type_repr(arg))
else:
arg_list.append(arg[0]._tree_repr(arg))
return super(_Union, self).__repr__() + '[%s]' % ', '.join(arg_list)
@_tp_cache
def __getitem__(self, parameters):
if parameters == ():
raise TypeError("Cannot take a Union of no types.")
if not isinstance(parameters, tuple):
parameters = (parameters,)
if self.__origin__ is None:
msg = "Union[arg, ...]: each arg must be a type."
else:
msg = "Parameters to generic types must be types."
parameters = tuple(_type_check(p, msg) for p in parameters)
if self is not Union:
_check_generic(self, parameters)
return self.__class__(parameters, origin=self, _root=True)
def _subs_tree(self, tvars=None, args=None):
if self is Union:
return Union # Nothing to substitute
tree_args = _subs_tree(self, tvars, args)
tree_args = _remove_dups_flatten(tree_args)
if len(tree_args) == 1:
return tree_args[0] # Union of a single type is that type
return (Union,) + tree_args
def __eq__(self, other):
if isinstance(other, _Union):
return self.__tree_hash__ == other.__tree_hash__
elif self is not Union:
return self._subs_tree() == other
else:
return self is other
def __hash__(self):
return self.__tree_hash__
def __instancecheck__(self, obj):
raise TypeError("Unions cannot be used with isinstance().")
def __subclasscheck__(self, cls):
raise TypeError("Unions cannot be used with issubclass().")
Union = _Union(_root=True)
class OptionalMeta(TypingMeta):
"""Metaclass for Optional."""
def __new__(cls, name, bases, namespace):
cls.assert_no_subclassing(bases)
return super(OptionalMeta, cls).__new__(cls, name, bases, namespace)
class _Optional(_FinalTypingBase):
"""Optional type.
Optional[X] is equivalent to Union[X, None].
"""
__metaclass__ = OptionalMeta
__slots__ = ()
@_tp_cache
def __getitem__(self, arg):
arg = _type_check(arg, "Optional[t] requires a single type.")
return Union[arg, type(None)]
Optional = _Optional(_root=True)
def _next_in_mro(cls):
"""Helper for Generic.__new__.
Returns the class after the last occurrence of Generic or
Generic[...] in cls.__mro__.
"""
next_in_mro = object
# Look for the last occurrence of Generic or Generic[...].
for i, c in enumerate(cls.__mro__[:-1]):
if isinstance(c, GenericMeta) and c._gorg is Generic:
next_in_mro = cls.__mro__[i + 1]
return next_in_mro
def _make_subclasshook(cls):
"""Construct a __subclasshook__ callable that incorporates
the associated __extra__ class in subclass checks performed
against cls.
"""
if isinstance(cls.__extra__, abc.ABCMeta):
# The logic mirrors that of ABCMeta.__subclasscheck__.
# Registered classes need not be checked here because
# cls and its extra share the same _abc_registry.
def __extrahook__(cls, subclass):
res = cls.__extra__.__subclasshook__(subclass)
if res is not NotImplemented:
return res
if cls.__extra__ in getattr(subclass, '__mro__', ()):
return True
for scls in cls.__extra__.__subclasses__():
if isinstance(scls, GenericMeta):
continue
if issubclass(subclass, scls):
return True
return NotImplemented
else:
# For non-ABC extras we'll just call issubclass().
def __extrahook__(cls, subclass):
if cls.__extra__ and issubclass(subclass, cls.__extra__):
return True
return NotImplemented
return classmethod(__extrahook__)
class GenericMeta(TypingMeta, abc.ABCMeta):
"""Metaclass for generic types.
This is a metaclass for typing.Generic and generic ABCs defined in
typing module. User defined subclasses of GenericMeta can override
__new__ and invoke super().__new__. Note that GenericMeta.__new__
has strict rules on what is allowed in its bases argument:
* plain Generic is disallowed in bases;
* Generic[...] should appear in bases at most once;
* if Generic[...] is present, then it should list all type variables
that appear in other bases.
In addition, type of all generic bases is erased, e.g., C[int] is
stripped to plain C.
"""
def __new__(cls, name, bases, namespace,
tvars=None, args=None, origin=None, extra=None, orig_bases=None):
"""Create a new generic class. GenericMeta.__new__ accepts
keyword arguments that are used for internal bookkeeping, therefore
an override should pass unused keyword arguments to super().
"""
if tvars is not None:
# Called from __getitem__() below.
assert origin is not None
assert all(isinstance(t, TypeVar) for t in tvars), tvars
else:
# Called from class statement.
assert tvars is None, tvars
assert args is None, args
assert origin is None, origin
# Get the full set of tvars from the bases.
tvars = _type_vars(bases)
# Look for Generic[T1, ..., Tn].
# If found, tvars must be a subset of it.
# If not found, tvars is it.
# Also check for and reject plain Generic,
# and reject multiple Generic[...].
gvars = None
for base in bases:
if base is Generic:
raise TypeError("Cannot inherit from plain Generic")
if (isinstance(base, GenericMeta) and
base.__origin__ is Generic):
if gvars is not None:
raise TypeError(
"Cannot inherit from Generic[...] multiple types.")
gvars = base.__parameters__
if gvars is None:
gvars = tvars
else:
tvarset = set(tvars)
gvarset = set(gvars)
if not tvarset <= gvarset:
raise TypeError(
"Some type variables (%s) "
"are not listed in Generic[%s]" %
(", ".join(str(t) for t in tvars if t not in gvarset),
", ".join(str(g) for g in gvars)))
tvars = gvars
initial_bases = bases
if extra is None:
extra = namespace.get('__extra__')
if extra is not None and type(extra) is abc.ABCMeta and extra not in bases:
bases = (extra,) + bases
bases = tuple(b._gorg if isinstance(b, GenericMeta) else b for b in bases)
# remove bare Generic from bases if there are other generic bases
if any(isinstance(b, GenericMeta) and b is not Generic for b in bases):
bases = tuple(b for b in bases if b is not Generic)
namespace.update({'__origin__': origin, '__extra__': extra})
self = super(GenericMeta, cls).__new__(cls, name, bases, namespace)
super(GenericMeta, self).__setattr__('_gorg',
self if not origin else origin._gorg)
self.__parameters__ = tvars
# Be prepared that GenericMeta will be subclassed by TupleMeta
# and CallableMeta, those two allow ..., (), or [] in __args___.
self.__args__ = tuple(Ellipsis if a is _TypingEllipsis else
() if a is _TypingEmpty else
a for a in args) if args else None
# Speed hack (https://github.com/python/typing/issues/196).
self.__next_in_mro__ = _next_in_mro(self)
# Preserve base classes on subclassing (__bases__ are type erased now).
if orig_bases is None:
self.__orig_bases__ = initial_bases
# This allows unparameterized generic collections to be used
# with issubclass() and isinstance() in the same way as their
# collections.abc counterparts (e.g., isinstance([], Iterable)).
if (
'__subclasshook__' not in namespace and extra or
# allow overriding
getattr(self.__subclasshook__, '__name__', '') == '__extrahook__'
):
self.__subclasshook__ = _make_subclasshook(self)
if origin and hasattr(origin, '__qualname__'): # Fix for Python 3.2.
self.__qualname__ = origin.__qualname__
self.__tree_hash__ = (hash(self._subs_tree()) if origin else
super(GenericMeta, self).__hash__())
return self
def __init__(self, *args, **kwargs):
super(GenericMeta, self).__init__(*args, **kwargs)
if isinstance(self.__extra__, abc.ABCMeta):
self._abc_registry = self.__extra__._abc_registry
self._abc_cache = self.__extra__._abc_cache
elif self.__origin__ is not None:
self._abc_registry = self.__origin__._abc_registry
self._abc_cache = self.__origin__._abc_cache
# _abc_negative_cache and _abc_negative_cache_version
# realised as descriptors, since GenClass[t1, t2, ...] always
# share subclass info with GenClass.
# This is an important memory optimization.
@property
def _abc_negative_cache(self):
if isinstance(self.__extra__, abc.ABCMeta):
return self.__extra__._abc_negative_cache
return self._gorg._abc_generic_negative_cache
@_abc_negative_cache.setter
def _abc_negative_cache(self, value):
if self.__origin__ is None:
if isinstance(self.__extra__, abc.ABCMeta):
self.__extra__._abc_negative_cache = value
else:
self._abc_generic_negative_cache = value
@property
def _abc_negative_cache_version(self):
if isinstance(self.__extra__, abc.ABCMeta):
return self.__extra__._abc_negative_cache_version
return self._gorg._abc_generic_negative_cache_version
@_abc_negative_cache_version.setter
def _abc_negative_cache_version(self, value):
if self.__origin__ is None:
if isinstance(self.__extra__, abc.ABCMeta):
self.__extra__._abc_negative_cache_version = value
else:
self._abc_generic_negative_cache_version = value
def _get_type_vars(self, tvars):
if self.__origin__ and self.__parameters__:
_get_type_vars(self.__parameters__, tvars)
def _eval_type(self, globalns, localns):
ev_origin = (self.__origin__._eval_type(globalns, localns)
if self.__origin__ else None)
ev_args = tuple(_eval_type(a, globalns, localns) for a
in self.__args__) if self.__args__ else None
if ev_origin == self.__origin__ and ev_args == self.__args__:
return self
return self.__class__(self.__name__,
self.__bases__,
dict(self.__dict__),
tvars=_type_vars(ev_args) if ev_args else None,
args=ev_args,
origin=ev_origin,
extra=self.__extra__,
orig_bases=self.__orig_bases__)
def __repr__(self):
if self.__origin__ is None:
return super(GenericMeta, self).__repr__()
return self._tree_repr(self._subs_tree())
def _tree_repr(self, tree):
arg_list = []
for arg in tree[1:]:
if arg == ():
arg_list.append('()')
elif not isinstance(arg, tuple):
arg_list.append(_type_repr(arg))
else:
arg_list.append(arg[0]._tree_repr(arg))
return super(GenericMeta, self).__repr__() + '[%s]' % ', '.join(arg_list)
def _subs_tree(self, tvars=None, args=None):
if self.__origin__ is None:
return self
tree_args = _subs_tree(self, tvars, args)
return (self._gorg,) + tuple(tree_args)
def __eq__(self, other):
if not isinstance(other, GenericMeta):
return NotImplemented
if self.__origin__ is None or other.__origin__ is None:
return self is other
return self.__tree_hash__ == other.__tree_hash__
def __hash__(self):
return self.__tree_hash__
@_tp_cache
def __getitem__(self, params):
if not isinstance(params, tuple):
params = (params,)
if not params and self._gorg is not Tuple:
raise TypeError(
"Parameter list to %s[...] cannot be empty" % _qualname(self))
msg = "Parameters to generic types must be types."
params = tuple(_type_check(p, msg) for p in params)
if self is Generic:
# Generic can only be subscripted with unique type variables.
if not all(isinstance(p, TypeVar) for p in params):
raise TypeError(
"Parameters to Generic[...] must all be type variables")
if len(set(params)) != len(params):
raise TypeError(
"Parameters to Generic[...] must all be unique")
tvars = params
args = params
elif self in (Tuple, Callable):
tvars = _type_vars(params)
args = params
elif self is _Protocol:
# _Protocol is internal, don't check anything.
tvars = params
args = params
elif self.__origin__ in (Generic, _Protocol):
# Can't subscript Generic[...] or _Protocol[...].
raise TypeError("Cannot subscript already-subscripted %s" %
repr(self))
else:
# Subscripting a regular Generic subclass.
_check_generic(self, params)
tvars = _type_vars(params)
args = params
prepend = (self,) if self.__origin__ is None else ()
return self.__class__(self.__name__,
prepend + self.__bases__,
dict(self.__dict__),
tvars=tvars,
args=args,
origin=self,
extra=self.__extra__,
orig_bases=self.__orig_bases__)
def __subclasscheck__(self, cls):
if self.__origin__ is not None:
# This should only be modules within the standard
# library. singledispatch is the only exception, because
# it's a Python 2 backport of functools.singledispatch.
if sys._getframe(1).f_globals['__name__'] not in ['abc', 'functools',
'singledispatch']:
raise TypeError("Parameterized generics cannot be used with class "
"or instance checks")
return False
if self is Generic:
raise TypeError("Class %r cannot be used with class "
"or instance checks" % self)
return super(GenericMeta, self).__subclasscheck__(cls)
def __instancecheck__(self, instance):
# Since we extend ABC.__subclasscheck__ and
# ABC.__instancecheck__ inlines the cache checking done by the
# latter, we must extend __instancecheck__ too. For simplicity
# we just skip the cache check -- instance checks for generic
# classes are supposed to be rare anyways.
if not isinstance(instance, type):
return issubclass(instance.__class__, self)
return False
def __setattr__(self, attr, value):
# We consider all the subscripted genrics as proxies for original class
if (
attr.startswith('__') and attr.endswith('__') or
attr.startswith('_abc_')
):
super(GenericMeta, self).__setattr__(attr, value)
else:
super(GenericMeta, self._gorg).__setattr__(attr, value)
def _copy_generic(self):
"""Hack to work around https://bugs.python.org/issue11480 on Python 2"""
return self.__class__(self.__name__, self.__bases__, dict(self.__dict__),
self.__parameters__, self.__args__, self.__origin__,
self.__extra__, self.__orig_bases__)
copy._copy_dispatch[GenericMeta] = _copy_generic
# Prevent checks for Generic to crash when defining Generic.
Generic = None
def _generic_new(base_cls, cls, *args, **kwds):
# Assure type is erased on instantiation,
# but attempt to store it in __orig_class__
if cls.__origin__ is None:
return base_cls.__new__(cls)
else:
origin = cls._gorg
obj = base_cls.__new__(origin)
try:
obj.__orig_class__ = cls
except AttributeError:
pass
obj.__init__(*args, **kwds)
return obj
class Generic(object):
"""Abstract base class for generic types.
A generic type is typically declared by inheriting from
this class parameterized with one or more type variables.
For example, a generic mapping type might be defined as::
class Mapping(Generic[KT, VT]):
def __getitem__(self, key: KT) -> VT:
...
# Etc.
This class can then be used as follows::
def lookup_name(mapping: Mapping[KT, VT], key: KT, default: VT) -> VT:
try:
return mapping[key]
except KeyError:
return default
"""
__metaclass__ = GenericMeta
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is Generic:
raise TypeError("Type Generic cannot be instantiated; "
"it can be used only as a base class")
return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
class _TypingEmpty(object):
"""Internal placeholder for () or []. Used by TupleMeta and CallableMeta
to allow empty list/tuple in specific places, without allowing them
to sneak in where prohibited.
"""
class _TypingEllipsis(object):
"""Internal placeholder for ... (ellipsis)."""
class TupleMeta(GenericMeta):
"""Metaclass for Tuple (internal)."""
@_tp_cache
def __getitem__(self, parameters):
if self.__origin__ is not None or self._gorg is not Tuple:
# Normal generic rules apply if this is not the first subscription
# or a subscription of a subclass.
return super(TupleMeta, self).__getitem__(parameters)
if parameters == ():
return super(TupleMeta, self).__getitem__((_TypingEmpty,))
if not isinstance(parameters, tuple):
parameters = (parameters,)
if len(parameters) == 2 and parameters[1] is Ellipsis:
msg = "Tuple[t, ...]: t must be a type."
p = _type_check(parameters[0], msg)
return super(TupleMeta, self).__getitem__((p, _TypingEllipsis))
msg = "Tuple[t0, t1, ...]: each t must be a type."
parameters = tuple(_type_check(p, msg) for p in parameters)
return super(TupleMeta, self).__getitem__(parameters)
def __instancecheck__(self, obj):
if self.__args__ is None:
return isinstance(obj, tuple)
raise TypeError("Parameterized Tuple cannot be used "
"with isinstance().")
def __subclasscheck__(self, cls):
if self.__args__ is None:
return issubclass(cls, tuple)
raise TypeError("Parameterized Tuple cannot be used "
"with issubclass().")
copy._copy_dispatch[TupleMeta] = _copy_generic
class Tuple(tuple):
"""Tuple type; Tuple[X, Y] is the cross-product type of X and Y.
Example: Tuple[T1, T2] is a tuple of two elements corresponding
to type variables T1 and T2. Tuple[int, float, str] is a tuple
of an int, a float and a string.
To specify a variable-length tuple of homogeneous type, use Tuple[T, ...].
"""
__metaclass__ = TupleMeta
__extra__ = tuple
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is Tuple:
raise TypeError("Type Tuple cannot be instantiated; "
"use tuple() instead")
return _generic_new(tuple, cls, *args, **kwds)
class CallableMeta(GenericMeta):
""" Metaclass for Callable."""
def __repr__(self):
if self.__origin__ is None:
return super(CallableMeta, self).__repr__()
return self._tree_repr(self._subs_tree())
def _tree_repr(self, tree):
if self._gorg is not Callable:
return super(CallableMeta, self)._tree_repr(tree)
# For actual Callable (not its subclass) we override
# super(CallableMeta, self)._tree_repr() for nice formatting.
arg_list = []
for arg in tree[1:]:
if not isinstance(arg, tuple):
arg_list.append(_type_repr(arg))
else:
arg_list.append(arg[0]._tree_repr(arg))
if arg_list[0] == '...':
return repr(tree[0]) + '[..., %s]' % arg_list[1]
return (repr(tree[0]) +
'[[%s], %s]' % (', '.join(arg_list[:-1]), arg_list[-1]))
def __getitem__(self, parameters):
"""A thin wrapper around __getitem_inner__ to provide the latter
with hashable arguments to improve speed.
"""
if self.__origin__ is not None or self._gorg is not Callable:
return super(CallableMeta, self).__getitem__(parameters)
if not isinstance(parameters, tuple) or len(parameters) != 2:
raise TypeError("Callable must be used as "
"Callable[[arg, ...], result].")
args, result = parameters
if args is Ellipsis:
parameters = (Ellipsis, result)
else:
if not isinstance(args, list):
raise TypeError("Callable[args, result]: args must be a list."
" Got %.100r." % (args,))
parameters = (tuple(args), result)
return self.__getitem_inner__(parameters)
@_tp_cache
def __getitem_inner__(self, parameters):
args, result = parameters
msg = "Callable[args, result]: result must be a type."
result = _type_check(result, msg)
if args is Ellipsis:
return super(CallableMeta, self).__getitem__((_TypingEllipsis, result))
msg = "Callable[[arg, ...], result]: each arg must be a type."
args = tuple(_type_check(arg, msg) for arg in args)
parameters = args + (result,)
return super(CallableMeta, self).__getitem__(parameters)
copy._copy_dispatch[CallableMeta] = _copy_generic
class Callable(object):
"""Callable type; Callable[[int], str] is a function of (int) -> str.
The subscription syntax must always be used with exactly two
values: the argument list and the return type. The argument list
must be a list of types or ellipsis; the return type must be a single type.
There is no syntax to indicate optional or keyword arguments,
such function types are rarely used as callback types.
"""
__metaclass__ = CallableMeta
__extra__ = collections_abc.Callable
__slots__ = ()
def __new__(cls, *args, **kwds):
if cls._gorg is Callable:
raise TypeError("Type Callable cannot be instantiated; "
"use a non-abstract subclass instead")
return _generic_new(cls.__next_in_mro__, cls, *args, **kwds)
def cast(typ, val):
"""Cast a value to a type.
This returns the value unchanged. To the type checker this
signals that the return value has the designated type, but at
runtime we intentionally don't check anything (we want this
to be as fast as possible).
"""
return val
def _get_defaults(func):
"""Internal helper to extract the default arguments, by name."""
code = func.__code__
pos_count = code.co_argcount
arg_names = code.co_varnames
arg_names = arg_names[:pos_count]
defaults = func.__defaults__ or ()
kwdefaults = func.__kwdefaults__
res = dict(kwdefaults) if kwdefaults else {}
pos_offset = pos_count - len(defaults)
for name, value in zip(arg_names[pos_offset:], defaults):
assert name not in res
res[name] = value
return res
def get_type_hints(obj, globalns=None, localns=None):
"""In Python 2 this is not supported and always returns None."""
return None
def no_type_check(arg):
"""Decorator to indicate that annotations are not type hints.
The argument must be a class or function; if it is a class, it
applies recursively to all methods and classes defined in that class
(but not to methods defined in its superclasses or subclasses).
This mutates the function(s) or class(es) in place.
"""
if isinstance(arg, type):
arg_attrs = arg.__dict__.copy()
for attr, val in arg.__dict__.items():
if val in arg.__bases__ + (arg,):
arg_attrs.pop(attr)
for obj in arg_attrs.values():
if isinstance(obj, types.FunctionType):
obj.__no_type_check__ = True
if isinstance(obj, type):
no_type_check(obj)
try:
arg.__no_type_check__ = True
except TypeError: # built-in classes
pass
return arg
def no_type_check_decorator(decorator):
"""Decorator to give another decorator the @no_type_check effect.
This wraps the decorator with something that wraps the decorated
function in @no_type_check.
"""
@functools.wraps(decorator)
def wrapped_decorator(*args, **kwds):
func = decorator(*args, **kwds)
func = no_type_check(func)
return func
return wrapped_decorator
def _overload_dummy(*args, **kwds):
"""Helper for @overload to raise when called."""
raise NotImplementedError(
"You should not call an overloaded function. "
"A series of @overload-decorated functions "
"outside a stub module should always be followed "
"by an implementation that is not @overload-ed.")
def overload(func):
"""Decorator for overloaded functions/methods.
In a stub file, place two or more stub definitions for the same
function in a row, each decorated with @overload. For example:
@overload
def utf8(value: None) -> None: ...
@overload
def utf8(value: bytes) -> bytes: ...
@overload
def utf8(value: str) -> bytes: ...
In a non-stub file (i.e. a regular .py file), do the same but
follow it with an implementation. The implementation should *not*
be decorated with @overload. For example:
@overload
def utf8(value: None) -> None: ...
@overload
def utf8(value: bytes) -> bytes: ...
@overload
def utf8(value: str) -> bytes: ...
def utf8(value):
# implementation goes here
"""
return _overload_dummy
class _ProtocolMeta(GenericMeta):
"""Internal metaclass for _Protocol.
This exists so _Protocol classes can be generic without deriving
from Generic.
"""
def __instancecheck__(self, obj):
if _Protocol not in self.__bases__:
return super(_ProtocolMeta, self).__instancecheck__(obj)
raise TypeError("Protocols cannot be used with isinstance().")
def __subclasscheck__(self, cls):
if not self._is_protocol:
# No structural checks since this isn't a protocol.
return NotImplemented
if self is _Protocol:
# Every class is a subclass of the empty protocol.
return True
# Find all attributes defined in the protocol.
attrs = self._get_protocol_attrs()
for attr in attrs:
if not any(attr in d.__dict__ for d in cls.__mro__):
return False
return True
def _get_protocol_attrs(self):
# Get all Protocol base classes.
protocol_bases = []
for c in self.__mro__:
if getattr(c, '_is_protocol', False) and c.__name__ != '_Protocol':
protocol_bases.append(c)
# Get attributes included in protocol.
attrs = set()
for base in protocol_bases:
for attr in base.__dict__.keys():
# Include attributes not defined in any non-protocol bases.
for c in self.__mro__:
if (c is not base and attr in c.__dict__ and
not getattr(c, '_is_protocol', False)):
break
else:
if (not attr.startswith('_abc_') and
attr != '__abstractmethods__' and
attr != '_is_protocol' and
attr != '_gorg' and
attr != '__dict__' and
attr != '__args__' and
attr != '__slots__' and
attr != '_get_protocol_attrs' and
attr != '__next_in_mro__' and
attr != '__parameters__' and
attr != '__origin__' and
attr != '__orig_bases__' and
attr != '__extra__' and
attr != '__tree_hash__' and
attr != '__module__'):
attrs.add(attr)
return attrs
class _Protocol(object):
"""Internal base class for protocol classes.
This implements a simple-minded structural issubclass check
(similar but more general than the one-offs in collections.abc
such as Hashable).
"""
__metaclass__ = _ProtocolMeta
__slots__ = ()
_is_protocol = True
# Various ABCs mimicking those in collections.abc.
# A few are simply re-exported for completeness.
Hashable = collections_abc.Hashable # Not generic.
class Iterable(Generic[T_co]):
__slots__ = ()
__extra__ = collections_abc.Iterable
class Iterator(Iterable[T_co]):
__slots__ = ()
__extra__ = collections_abc.Iterator
class SupportsInt(_Protocol):
__slots__ = ()
@abstractmethod
def __int__(self):
pass
class SupportsFloat(_Protocol):
__slots__ = ()
@abstractmethod
def __float__(self):
pass
class SupportsComplex(_Protocol):
__slots__ = ()
@abstractmethod
def __complex__(self):
pass
class SupportsAbs(_Protocol[T_co]):
__slots__ = ()
@abstractmethod
def __abs__(self):
pass
if hasattr(collections_abc, 'Reversible'):
class Reversible(Iterable[T_co]):
__slots__ = ()
__extra__ = collections_abc.Reversible
else:
class Reversible(_Protocol[T_co]):
__slots__ = ()
@abstractmethod
def __reversed__(self):
pass
Sized = collections_abc.Sized # Not generic.
class Container(Generic[T_co]):
__slots__ = ()
__extra__ = collections_abc.Container
# Callable was defined earlier.
class AbstractSet(Sized, Iterable[T_co], Container[T_co]):
__slots__ = ()
__extra__ = collections_abc.Set
class MutableSet(AbstractSet[T]):
__slots__ = ()
__extra__ = collections_abc.MutableSet
# NOTE: It is only covariant in the value type.
class Mapping(Sized, Iterable[KT], Container[KT], Generic[KT, VT_co]):
__slots__ = ()
__extra__ = collections_abc.Mapping
class MutableMapping(Mapping[KT, VT]):
__slots__ = ()
__extra__ = collections_abc.MutableMapping
if hasattr(collections_abc, 'Reversible'):
class Sequence(Sized, Reversible[T_co], Container[T_co]):
__slots__ = ()
__extra__ = collections_abc.Sequence
else:
class Sequence(Sized, Iterable[T_co], Container[T_co]):
__slots__ = ()
__extra__ = collections_abc.Sequence
class MutableSequence(Sequence[T]):
__slots__ = ()
__extra__ = collections_abc.MutableSequence
class ByteString(Sequence[int]):
pass
ByteString.register(str)
ByteString.register(bytearray)
class List(list, MutableSequence[T]):
__slots__ = ()
__extra__ = list
def __new__(cls, *args, **kwds):
if cls._gorg is List:
raise TypeError("Type List cannot be instantiated; "
"use list() instead")
return _generic_new(list, cls, *args, **kwds)
class Deque(collections.deque, MutableSequence[T]):
__slots__ = ()
__extra__ = collections.deque
def __new__(cls, *args, **kwds):
if cls._gorg is Deque:
return collections.deque(*args, **kwds)
return _generic_new(collections.deque, cls, *args, **kwds)
class Set(set, MutableSet[T]):
__slots__ = ()
__extra__ = set
def __new__(cls, *args, **kwds):
if cls._gorg is Set:
raise TypeError("Type Set cannot be instantiated; "
"use set() instead")
return _generic_new(set, cls, *args, **kwds)
class FrozenSet(frozenset, AbstractSet[T_co]):
__slots__ = ()
__extra__ = frozenset
def __new__(cls, *args, **kwds):
if cls._gorg is FrozenSet:
raise TypeError("Type FrozenSet cannot be instantiated; "
"use frozenset() instead")
return _generic_new(frozenset, cls, *args, **kwds)
class MappingView(Sized, Iterable[T_co]):
__slots__ = ()
__extra__ = collections_abc.MappingView
class KeysView(MappingView[KT], AbstractSet[KT]):
__slots__ = ()
__extra__ = collections_abc.KeysView
class ItemsView(MappingView[Tuple[KT, VT_co]],
AbstractSet[Tuple[KT, VT_co]],
Generic[KT, VT_co]):
__slots__ = ()
__extra__ = collections_abc.ItemsView
class ValuesView(MappingView[VT_co]):
__slots__ = ()
__extra__ = collections_abc.ValuesView
class ContextManager(Generic[T_co]):
__slots__ = ()
def __enter__(self):
return self
@abc.abstractmethod
def __exit__(self, exc_type, exc_value, traceback):
return None
@classmethod
def __subclasshook__(cls, C):
if cls is ContextManager:
# In Python 3.6+, it is possible to set a method to None to
# explicitly indicate that the class does not implement an ABC
# (https://bugs.python.org/issue25958), but we do not support
# that pattern here because this fallback class is only used
# in Python 3.5 and earlier.
if (any("__enter__" in B.__dict__ for B in C.__mro__) and
any("__exit__" in B.__dict__ for B in C.__mro__)):
return True
return NotImplemented
class Dict(dict, MutableMapping[KT, VT]):
__slots__ = ()
__extra__ = dict
def __new__(cls, *args, **kwds):
if cls._gorg is Dict:
raise TypeError("Type Dict cannot be instantiated; "
"use dict() instead")
return _generic_new(dict, cls, *args, **kwds)
class DefaultDict(collections.defaultdict, MutableMapping[KT, VT]):
__slots__ = ()
__extra__ = collections.defaultdict
def __new__(cls, *args, **kwds):
if cls._gorg is DefaultDict:
return collections.defaultdict(*args, **kwds)
return _generic_new(collections.defaultdict, cls, *args, **kwds)
class Counter(collections.Counter, Dict[T, int]):
__slots__ = ()
__extra__ = collections.Counter
def __new__(cls, *args, **kwds):
if cls._gorg is Counter:
return collections.Counter(*args, **kwds)
return _generic_new(collections.Counter, cls, *args, **kwds)
# Determine what base class to use for Generator.
if hasattr(collections_abc, 'Generator'):
# Sufficiently recent versions of 3.5 have a Generator ABC.
_G_base = collections_abc.Generator
else:
# Fall back on the exact type.
_G_base = types.GeneratorType
class Generator(Iterator[T_co], Generic[T_co, T_contra, V_co]):
__slots__ = ()
__extra__ = _G_base
def __new__(cls, *args, **kwds):
if cls._gorg is Generator:
raise TypeError("Type Generator cannot be instantiated; "
"create a subclass instead")
return _generic_new(_G_base, cls, *args, **kwds)
# Internal type variable used for Type[].
CT_co = TypeVar('CT_co', covariant=True, bound=type)
# This is not a real generic class. Don't use outside annotations.
class Type(Generic[CT_co]):
"""A special construct usable to annotate class objects.
For example, suppose we have the following classes::
class User: ... # Abstract base for User classes
class BasicUser(User): ...
class ProUser(User): ...
class TeamUser(User): ...
And a function that takes a class argument that's a subclass of
User and returns an instance of the corresponding class::
U = TypeVar('U', bound=User)
def new_user(user_class: Type[U]) -> U:
user = user_class()
# (Here we could write the user object to a database)
return user
joe = new_user(BasicUser)
At this point the type checker knows that joe has type BasicUser.
"""
__slots__ = ()
__extra__ = type
def NamedTuple(typename, fields):
"""Typed version of namedtuple.
Usage::
Employee = typing.NamedTuple('Employee', [('name', str), ('id', int)])
This is equivalent to::
Employee = collections.namedtuple('Employee', ['name', 'id'])
The resulting class has one extra attribute: _field_types,
giving a dict mapping field names to types. (The field names
are in the _fields attribute, which is part of the namedtuple
API.)
"""
fields = [(n, t) for n, t in fields]
cls = collections.namedtuple(typename, [n for n, t in fields])
cls._field_types = dict(fields)
# Set the module to the caller's module (otherwise it'd be 'typing').
try:
cls.__module__ = sys._getframe(1).f_globals.get('__name__', '__main__')
except (AttributeError, ValueError):
pass
return cls
def NewType(name, tp):
"""NewType creates simple unique types with almost zero
runtime overhead. NewType(name, tp) is considered a subtype of tp
by static type checkers. At runtime, NewType(name, tp) returns
a dummy function that simply returns its argument. Usage::
UserId = NewType('UserId', int)
def name_by_id(user_id):
# type: (UserId) -> str
...
UserId('user') # Fails type check
name_by_id(42) # Fails type check
name_by_id(UserId(42)) # OK
num = UserId(5) + 1 # type: int
"""
def new_type(x):
return x
# Some versions of Python 2 complain because of making all strings unicode
new_type.__name__ = str(name)
new_type.__supertype__ = tp
return new_type
# Python-version-specific alias (Python 2: unicode; Python 3: str)
Text = unicode
# Constant that's True when type checking, but False here.
TYPE_CHECKING = False
class IO(Generic[AnyStr]):
"""Generic base class for TextIO and BinaryIO.
This is an abstract, generic version of the return of open().
NOTE: This does not distinguish between the different possible
classes (text vs. binary, read vs. write vs. read/write,
append-only, unbuffered). The TextIO and BinaryIO subclasses
below capture the distinctions between text vs. binary, which is
pervasive in the interface; however we currently do not offer a
way to track the other distinctions in the type system.
"""
__slots__ = ()
@abstractproperty
def mode(self):
pass
@abstractproperty
def name(self):
pass
@abstractmethod
def close(self):
pass
@abstractmethod
def closed(self):
pass
@abstractmethod
def fileno(self):
pass
@abstractmethod
def flush(self):
pass
@abstractmethod
def isatty(self):
pass
@abstractmethod
def read(self, n=-1):
pass
@abstractmethod
def readable(self):
pass
@abstractmethod
def readline(self, limit=-1):
pass
@abstractmethod
def readlines(self, hint=-1):
pass
@abstractmethod
def seek(self, offset, whence=0):
pass
@abstractmethod
def seekable(self):
pass
@abstractmethod
def tell(self):
pass
@abstractmethod
def truncate(self, size=None):
pass
@abstractmethod
def writable(self):
pass
@abstractmethod
def write(self, s):
pass
@abstractmethod
def writelines(self, lines):
pass
@abstractmethod
def __enter__(self):
pass
@abstractmethod
def __exit__(self, type, value, traceback):
pass
class BinaryIO(IO[bytes]):
"""Typed version of the return of open() in binary mode."""
__slots__ = ()
@abstractmethod
def write(self, s):
pass
@abstractmethod
def __enter__(self):
pass
class TextIO(IO[unicode]):
"""Typed version of the return of open() in text mode."""
__slots__ = ()
@abstractproperty
def buffer(self):
pass
@abstractproperty
def encoding(self):
pass
@abstractproperty
def errors(self):
pass
@abstractproperty
def line_buffering(self):
pass
@abstractproperty
def newlines(self):
pass
@abstractmethod
def __enter__(self):
pass
class io(object):
"""Wrapper namespace for IO generic classes."""
__all__ = ['IO', 'TextIO', 'BinaryIO']
IO = IO
TextIO = TextIO
BinaryIO = BinaryIO
io.__name__ = __name__ + b'.io'
sys.modules[io.__name__] = io
Pattern = _TypeAlias('Pattern', AnyStr, type(stdlib_re.compile('')),
lambda p: p.pattern)
Match = _TypeAlias('Match', AnyStr, type(stdlib_re.match('', '')),
lambda m: m.re.pattern)
class re(object):
"""Wrapper namespace for re type aliases."""
__all__ = ['Pattern', 'Match']
Pattern = Pattern
Match = Match
re.__name__ = __name__ + b'.re'
sys.modules[re.__name__] = re
| [
"[email protected]"
]
| |
e5f55207dd9a043e94437287cbd5b94a341aeb9a | 5ec3dc6d172d758f9f547686b68cbbe903ab3161 | /test/no_running_jobs_test.py | 7740a6977d3edacfdbd71a677bae1499bce61a23 | []
| no_license | dixudx/jenkinsflow | ea8bdf4b8abdfb06ab6e05f5c5a83a1c0744f849 | 2c07f8fc2951d9167dcd08ae2e1f6a8afc32f7f5 | refs/heads/master | 2020-12-26T00:25:27.092813 | 2015-05-19T19:41:47 | 2015-05-19T19:41:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,964 | py | # Copyright (c) 2012 - 2015 Lars Hupfeldt Nielsen, Hupfeldt IT
# All rights reserved. This work is under a BSD license, see LICENSE.TXT.
from pytest import raises
from jenkinsflow.flow import serial, JobNotIdleException
from jenkinsflow.mocked import hyperspeed
from .cfg import ApiType
from .framework import api_select
from .framework.utils import assert_lines_in
def test_no_running_jobs(capsys):
with api_select.api(__file__, login=True) as api:
api.flow_job()
api.job('j1', exec_time=50, max_fails=0, expect_invocations=1, expect_order=None, invocation_delay=0, unknown_result=True)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix) as ctrl1:
ctrl1.invoke_unchecked('j1')
sout, _ = capsys.readouterr()
assert_lines_in(sout, "unchecked job: 'jenkinsflow_test__no_running_jobs__j1' UNKNOWN - RUNNING")
# Make sure job has actually started before entering new flow
hyperspeed.sleep(1)
with raises(JobNotIdleException) as exinfo:
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix) as ctrl1:
ctrl1.invoke('j1')
assert "job: 'jenkinsflow_test__no_running_jobs__j1' is in state RUNNING. It must be IDLE." in exinfo.value.message
def test_no_running_jobs_unchecked(capsys):
with api_select.api(__file__, login=True) as api:
api.flow_job()
api.job('j1', exec_time=50, max_fails=0, expect_invocations=1, expect_order=None, invocation_delay=0, unknown_result=True)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix) as ctrl1:
ctrl1.invoke_unchecked('j1')
sout, _ = capsys.readouterr()
assert_lines_in(sout, "unchecked job: 'jenkinsflow_test__no_running_jobs_unchecked__j1' UNKNOWN - RUNNING")
hyperspeed.sleep(1)
with raises(JobNotIdleException) as exinfo:
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix) as ctrl1:
ctrl1.invoke_unchecked('j1')
assert "unchecked job: 'jenkinsflow_test__no_running_jobs_unchecked__j1' is in state RUNNING. It must be IDLE." in exinfo.value.message
def test_no_running_jobs_jobs_allowed():
with api_select.api(__file__, login=True) as api:
api.flow_job()
exp_invocations = 2 if api.api_type != ApiType.MOCK else 1
unknown_result = False if api.api_type != ApiType.MOCK else True
api.job('j1', exec_time=20, max_fails=0, expect_invocations=exp_invocations, expect_order=None,
invocation_delay=0, unknown_result=unknown_result)
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix) as ctrl1:
ctrl1.invoke_unchecked('j1')
hyperspeed.sleep(1)
# TODO
if api.api_type != ApiType.MOCK:
with serial(api, timeout=70, job_name_prefix=api.job_name_prefix, require_idle=False) as ctrl1:
ctrl1.invoke('j1')
| [
"[email protected]"
]
| |
7d3fc3ee1fbadfbfdeae383c58c42296cb0e2128 | 73b158f51285300c1d3456b7af9163939ee206f2 | /DevOps/sprint03/t00_lambda/expression.py | 0ddfdb9345aef1a2a08f49f12d1afab8728d3beb | []
| no_license | nnocturnnn/DevOps | 2e332b3552a5b294b36d2af7de854aa18f2da46f | 173c75938e65be8fbbb5c02c3d655d09df9a2931 | refs/heads/master | 2023-06-11T07:21:14.097930 | 2021-06-30T13:58:15 | 2021-06-30T13:58:15 | 352,070,911 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 141 | py |
n = int(input('n: '))
a = int(input('a: '))
b = int(input('b: '))
result = lambda a, b, n : n % a == 0 and n % b == 0
print(result(a,b,n)) | [
"[email protected]"
]
| |
14d21ba34e1b8337cb0439ea712b203c5317703c | 2a8abd5d6acdc260aff3639bce35ca1e688869e9 | /telestream_cloud_qc_sdk/telestream_cloud_qc/models/audio_loudness_itest.py | 387597f5a62e6090eec6343a0872aa4c534e52f5 | [
"MIT"
]
| permissive | Telestream/telestream-cloud-python-sdk | 57dd2f0422c83531e213f48d87bc0c71f58b5872 | ce0ad503299661a0f622661359367173c06889fc | refs/heads/master | 2021-01-18T02:17:44.258254 | 2020-04-09T11:36:07 | 2020-04-09T11:36:07 | 49,494,916 | 0 | 0 | MIT | 2018-01-22T10:07:49 | 2016-01-12T11:10:56 | Python | UTF-8 | Python | false | false | 7,273 | py | # coding: utf-8
"""
Qc API
Qc API # noqa: E501
The version of the OpenAPI document: 3.0.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import pprint
import re # noqa: F401
import six
from telestream_cloud_qc.configuration import Configuration
class AudioLoudnessItest(object):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
'loudness_level': 'float',
'loudness_tolerance': 'float',
'mode': 'LoudnessMode',
'channels': 'Channels',
'reject_on_error': 'bool',
'do_correction': 'bool'
}
attribute_map = {
'loudness_level': 'loudness_level',
'loudness_tolerance': 'loudness_tolerance',
'mode': 'mode',
'channels': 'channels',
'reject_on_error': 'reject_on_error',
'do_correction': 'do_correction'
}
def __init__(self, loudness_level=None, loudness_tolerance=None, mode=None, channels=None, reject_on_error=None, do_correction=None, local_vars_configuration=None): # noqa: E501
"""AudioLoudnessItest - a model defined in OpenAPI""" # noqa: E501
if local_vars_configuration is None:
local_vars_configuration = Configuration()
self.local_vars_configuration = local_vars_configuration
self._loudness_level = None
self._loudness_tolerance = None
self._mode = None
self._channels = None
self._reject_on_error = None
self._do_correction = None
self.discriminator = None
if loudness_level is not None:
self.loudness_level = loudness_level
if loudness_tolerance is not None:
self.loudness_tolerance = loudness_tolerance
if mode is not None:
self.mode = mode
if channels is not None:
self.channels = channels
if reject_on_error is not None:
self.reject_on_error = reject_on_error
if do_correction is not None:
self.do_correction = do_correction
@property
def loudness_level(self):
"""Gets the loudness_level of this AudioLoudnessItest. # noqa: E501
:return: The loudness_level of this AudioLoudnessItest. # noqa: E501
:rtype: float
"""
return self._loudness_level
@loudness_level.setter
def loudness_level(self, loudness_level):
"""Sets the loudness_level of this AudioLoudnessItest.
:param loudness_level: The loudness_level of this AudioLoudnessItest. # noqa: E501
:type: float
"""
self._loudness_level = loudness_level
@property
def loudness_tolerance(self):
"""Gets the loudness_tolerance of this AudioLoudnessItest. # noqa: E501
:return: The loudness_tolerance of this AudioLoudnessItest. # noqa: E501
:rtype: float
"""
return self._loudness_tolerance
@loudness_tolerance.setter
def loudness_tolerance(self, loudness_tolerance):
"""Sets the loudness_tolerance of this AudioLoudnessItest.
:param loudness_tolerance: The loudness_tolerance of this AudioLoudnessItest. # noqa: E501
:type: float
"""
self._loudness_tolerance = loudness_tolerance
@property
def mode(self):
"""Gets the mode of this AudioLoudnessItest. # noqa: E501
:return: The mode of this AudioLoudnessItest. # noqa: E501
:rtype: LoudnessMode
"""
return self._mode
@mode.setter
def mode(self, mode):
"""Sets the mode of this AudioLoudnessItest.
:param mode: The mode of this AudioLoudnessItest. # noqa: E501
:type: LoudnessMode
"""
self._mode = mode
@property
def channels(self):
"""Gets the channels of this AudioLoudnessItest. # noqa: E501
:return: The channels of this AudioLoudnessItest. # noqa: E501
:rtype: Channels
"""
return self._channels
@channels.setter
def channels(self, channels):
"""Sets the channels of this AudioLoudnessItest.
:param channels: The channels of this AudioLoudnessItest. # noqa: E501
:type: Channels
"""
self._channels = channels
@property
def reject_on_error(self):
"""Gets the reject_on_error of this AudioLoudnessItest. # noqa: E501
:return: The reject_on_error of this AudioLoudnessItest. # noqa: E501
:rtype: bool
"""
return self._reject_on_error
@reject_on_error.setter
def reject_on_error(self, reject_on_error):
"""Sets the reject_on_error of this AudioLoudnessItest.
:param reject_on_error: The reject_on_error of this AudioLoudnessItest. # noqa: E501
:type: bool
"""
self._reject_on_error = reject_on_error
@property
def do_correction(self):
"""Gets the do_correction of this AudioLoudnessItest. # noqa: E501
:return: The do_correction of this AudioLoudnessItest. # noqa: E501
:rtype: bool
"""
return self._do_correction
@do_correction.setter
def do_correction(self, do_correction):
"""Sets the do_correction of this AudioLoudnessItest.
:param do_correction: The do_correction of this AudioLoudnessItest. # noqa: E501
:type: bool
"""
self._do_correction = do_correction
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, AudioLoudnessItest):
return False
return self.to_dict() == other.to_dict()
def __ne__(self, other):
"""Returns true if both objects are not equal"""
if not isinstance(other, AudioLoudnessItest):
return True
return self.to_dict() != other.to_dict()
| [
"[email protected]"
]
| |
e4cf3d7b27e696c7ad9997e4ac9f817cb0f3f306 | 9cb5521f247a47963e229a021ece638c2dd6c2ea | /4_genEquTileGrid.py | d60a85541e4020968cedc4af1a3bb8f74fdbd1ed | [
"MIT"
]
| permissive | crpurcell/CORNISH-S-PIPELINE | f97a677bb6047d6291daac184b9992c66a54c98e | 32ebaf694bbd1a46a68fa875f8557243280bfe10 | refs/heads/master | 2021-01-12T02:48:31.985664 | 2017-06-09T05:54:14 | 2017-06-09T05:54:14 | 78,108,522 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,072 | py | #!/usr/bin/env python
#=============================================================================#
# #
# NAME: 4_genEquTileGrid.py #
# #
# USAGE: ./4_genEquTileGrid.py #
# #
# PURPOSE: Read the pointing coordinates from the mosaic files and lay down #
# grid of Equatorial tiles for image testing purposes. #
# #
# MODIFIED: 05-Jun-2017 by C. Purcell #
# #
#=============================================================================#
# Hardcoded paths
dataRootDir = "../DATA"
# CORNISH-South border out to edge of data in Galactic coordinates
# Make this large enough to account for data out to the primary beam FWHM
bMax_deg = +1.2
bMin_deg = -1.2
lMax_deg = 350.2
lMin_deg = 294.8
# Tile parameters
imSize_px = [2000, 2000] # pixels [x, y] tile size
pixScale_deg = [0.60/3600, 0.60/3600] # pixel scale [dx, dy]
overlap_deg = [60.0/3600, 60.0/3600] # overlap between tiles [x, y] (deg)
#-----------------------------------------------------------------------------#
import os
import sys
import copy
import glob
import re
import math as m
import numpy as np
from pyslalib import slalib
import pylab as pl
import matplotlib as mpl
import matplotlib.pyplot as plt
from matplotlib.ticker import MaxNLocator
from matplotlib.patches import Ellipse, RegularPolygon, Polygon, Patch
from matplotlib.collections import PatchCollection
import sqlite3
from Imports.util_ATCA_PIPE import sort_nicely
# Constants
C = 2.998e8
#-----------------------------------------------------------------------------#
def main():
# Create a polygon describing the Galactic border of the survey
# Oversample each edge and combine into an ordered set of vertices
lBorLst_deg = np.linspace(lMin_deg, lMax_deg, 5500).tolist()
bBorLst_deg = np.linspace(bMin_deg, bMax_deg, 220).tolist()
borderPolyGalLst = zip(lBorLst_deg, [bMin_deg]*len(lBorLst_deg))
borderPolyGalLst += zip([lMax_deg]*len(bBorLst_deg), bBorLst_deg,)[1:]
borderPolyGalLst += zip(lBorLst_deg[::-1], [bMax_deg]*len(lBorLst_deg))[1:]
borderPolyGalLst += zip([lMin_deg]*len(bBorLst_deg), bBorLst_deg[::-1])
borderPolyGalArr = np.array(borderPolyGalLst)
lRange_deg = lMax_deg - lMin_deg
bRange_deg = bMax_deg - bMin_deg
# Convert the Galactic polygon vertices into Equatorial coordinates and
# determine the maximum and minimum RA and Dec. limits
borderPolyEquLst = []
for e in borderPolyGalLst:
ra_rad, dec_rad = slalib.sla_galeq(m.radians(e[0]), m.radians(e[1]))
borderPolyEquLst.append( (m.degrees(ra_rad), m.degrees(dec_rad)) )
borderPolyEquArr = np.array(borderPolyEquLst)
raMax_deg = np.max(borderPolyEquArr[:,0])
raMin_deg = np.min(borderPolyEquArr[:,0])
decMax_deg = np.max(borderPolyEquArr[:,1])
decMin_deg = np.min(borderPolyEquArr[:,1])
raRange_deg = raMax_deg - raMin_deg
decRange_deg = decMax_deg - decMin_deg
# Calculate the constant Dec (y) increment between tile centres
yIncr_deg = imSize_px[1] * pixScale_deg[1] - overlap_deg[1]
#------------------------------------------------------------------------#
# NOTE:
# Start at the bottom-left of the Equ grid and advance along a Dec. line
# setting down tiles. Starting coordinate = decMin_deg, raMin_deg.
# Note: Placing tiles on lines of constant Dec does not take into account
# the curvature of the Dec lines as we approach the equatorial pole,
# however, it should be good enough if the overlap between the tiles is
# enough and the cos(Dec) factor is calculated at the most negative Dec.
#------------------------------------------------------------------------#
raCentTileLst_deg = []
decCentTileLst_deg = []
vertexTileEquLst_deg = []
vertexTileGalLst_deg = []
# Loop through Dec rows until decMax reached
i = 0
while True:
# Calculate the Dec at the centre top and bottom of the current row
decTileCent_deg = decMin_deg + (yIncr_deg - 2 * overlap_deg[1]) * i
decTileTop_deg = decTileCent_deg - yIncr_deg/2.0
decTileBot_deg = decTileCent_deg + yIncr_deg/2.0
# Calculate the RA increment for this row
cosDecCent = m.cos(m.radians(decTileCent_deg))
cosDecTop = m.cos(m.radians(decTileTop_deg))
cosDecBot = m.cos(m.radians(decTileBot_deg))
cosDec = min(cosDecCent, cosDecTop, cosDecBot)
xIncr_deg = (imSize_px[0] * pixScale_deg[0] - 2*overlap_deg[0])/cosDec
i += 1
# Loop through the RAs until raMax reached
j = 0
while True:
# Calculate RA for this tile
raTileCent_deg = raMin_deg + xIncr_deg * j
raCentTileLst_deg.append(raTileCent_deg)
decCentTileLst_deg.append(decTileCent_deg)
j += 1
# Calculate the tile corner coorinates in Equ
xIncrTop_deg = (imSize_px[0] * pixScale_deg[0])/cosDecTop
xIncrBot_deg = (imSize_px[0] * pixScale_deg[0])/cosDecBot
x1y2 = (raTileCent_deg + xIncrTop_deg / 2.0, decTileTop_deg)
x2y2 = (raTileCent_deg - xIncrTop_deg / 2.0, decTileTop_deg)
x2y1 = (raTileCent_deg - xIncrBot_deg / 2.0, decTileBot_deg)
x1y1 = (raTileCent_deg + xIncrBot_deg / 2.0, decTileBot_deg)
vertexTileEquLst_deg.append(np.array([x1y1,x1y2,x2y2,x2y1]))
# Calculate the tile corner coordinates in Gal
lV_rad, bV_rad = slalib.sla_eqgal(m.radians(x1y2[0]),
m.radians(x1y2[1]))
x1y2 = (m.degrees(lV_rad), m.degrees(bV_rad))
lV_rad, bV_rad = slalib.sla_eqgal(m.radians(x2y2[0]),
m.radians(x2y2[1]))
x2y2 = (m.degrees(lV_rad), m.degrees(bV_rad))
lV_rad, bV_rad = slalib.sla_eqgal(m.radians(x2y1[0]),
m.radians(x2y1[1]))
x2y1 = (m.degrees(lV_rad), m.degrees(bV_rad))
lV_rad, bV_rad = slalib.sla_eqgal(m.radians(x1y1[0]),
m.radians(x1y1[1]))
x1y1 = (m.degrees(lV_rad), m.degrees(bV_rad))
vertexTileGalLst_deg.append(np.array([x1y1,x1y2,x2y2,x2y1]))
# End of RA While loop
if raTileCent_deg>=raMax_deg:
break
# End of Dec While loop
if decTileCent_deg>=decMax_deg:
break
# Convert the tile centre coordinates to Galactic
lCentTileLst_deg = []
bCentTileLst_deg = []
for i in range(len(raCentTileLst_deg)):
l_rad, b_rad = slalib.sla_eqgal(m.radians(raCentTileLst_deg[i]),
m.radians(decCentTileLst_deg[i]))
lCentTileLst_deg.append(m.degrees(l_rad))
bCentTileLst_deg.append(m.degrees(b_rad))
# Filter both Equ and Gal lists for tiles outside the survey area
# Must iterate from highest index when using 'pop' function
for i in range(len(raCentTileLst_deg)-1, -1, -1):
if not (lCentTileLst_deg[i]>=lMin_deg and
lCentTileLst_deg[i]<=lMax_deg and
bCentTileLst_deg[i]>=bMin_deg and
bCentTileLst_deg[i]<=bMax_deg):
lCentTileLst_deg.pop(i)
bCentTileLst_deg.pop(i)
raCentTileLst_deg.pop(i)
decCentTileLst_deg.pop(i)
vertexTileEquLst_deg.pop(i)
vertexTileGalLst_deg.pop(i)
# Sort the list of tiles into increasing RA
multiLst = zip(raCentTileLst_deg,
decCentTileLst_deg,
lCentTileLst_deg,
bCentTileLst_deg)
multiLst.sort()
(raCentTileLst_deg,
decCentTileLst_deg,
lCentTileLst_deg,
bCentTileLst_deg) = zip(*multiLst)
# Create the remaining columns (ID, pixel-scale, num-pixels
tileIDLst = range(1, len(lCentTileLst_deg)+1)
pixScaleXLst_asec = [pixScale_deg[0]*3600.0] * len(lCentTileLst_deg)
pixScaleYLst_asec = [pixScale_deg[1]*3600.0] * len(lCentTileLst_deg)
nPixXLst = [imSize_px[0]]* len(lCentTileLst_deg)
nPixYLst = [imSize_px[1]]* len(lCentTileLst_deg)
#------------------------------------------------------------------------#
# Upload the tile parameters into each database file in the data directory
dbFileLst = glob.glob(dataRootDir + '/*.sqlite')
# Loop through the database files
for dbFile in dbFileLst:
print ">> Writing tile_coords table to %s ..." % dbFile
# Connect to the database
conn = sqlite3.connect(dbFile)
cursor = conn.cursor()
# Drop old tile_coords table and create a new one
sql = "DROP TABLE IF EXISTS tile_coords"
cursor.execute(sql)
sql = """
CREATE TABLE tile_coords (
tileID INTEGER PRIMARY KEY,
RA_deg DOUBLE,
Dec_deg DOUBLE,
l_deg DOUBLE,
b_deg DOUBLE,
pixScaleX_asec DOUBLE,
pixscaleY_asec DOUBLE,
nPixX INTEGER,
nPixY INTEGER );
"""
cursor.execute(sql)
# Insert the entries into the table
for i in range(len(raCentTileLst_deg)):
sql = """
INSERT INTO tile_coords
(tileID,
RA_deg,
Dec_deg,
l_deg,
b_deg,
pixScaleX_asec,
pixscaleY_asec,
nPixX,
nPixY)
"""
sql += 'VALUES(?, ?, ?, ?, ?, ?, ?, ?, ?) '
vals = [tileIDLst[i],
raCentTileLst_deg[i],
decCentTileLst_deg[i],
lCentTileLst_deg[i],
bCentTileLst_deg[i],
pixScaleXLst_asec[i],
pixScaleYLst_asec[i],
nPixXLst[i],
nPixYLst[i]]
cursor.execute(sql, vals)
# Commit changed to the database and close connection
conn.commit()
cursor.close()
conn.close()
#------------------------------------------------------------------------#
#------------------------------------------------------------------------#
# Plot the tile centres over the survey border
fig = plt.figure(figsize=(18.0, 10.0))
# EQUATORIAL PLOT -------------------------------------------------------#
ax1 = fig.add_axes([0.08, 0.4, 0.88, 0.58])
# Plot the tile centres and vertices
ax1.scatter(np.array(raCentTileLst_deg)/15.0, decCentTileLst_deg, s=2,
zorder=2)
squares = []
for vertex in vertexTileEquLst_deg:
vertex = np.array(vertex)
vertex[:,0]/=15.0
square = Polygon(xy=np.array(vertex), closed=True)
squares.append(square)
s = PatchCollection(squares, alpha=1.0, edgecolor='black',
facecolor='none', zorder=3)
ax1.add_collection(s)
# Plot the border and format the axis
ax1.plot(borderPolyEquArr[:,0]/15.0, borderPolyEquArr[:,1])
ax1.yaxis.grid(True, which='major')
ax1.xaxis.grid(True, which='major')
ax1.set_xlim((raMax_deg+0.01*raRange_deg)/15,
(raMin_deg-0.01*raRange_deg)/15)
ax1.set_ylim(decMin_deg-0.05*decRange_deg, decMax_deg+0.05*decRange_deg)
ax1.set_aspect(1.0/15.0/cosDec)
ax1.set_ylabel('Dec. (deg)')
ax1.set_xlabel('R.A. (hrs)')
# Annotate the Equatorial plot with tile numbers
# for i in range(len(raCentTileLst_deg)):
# ax1.annotate(str(tileIDLst[i]),
# xy=(raCentTileLst_deg[i]/15.0, decCentTileLst_deg[i]),
# horizontalalignment='center',
# verticalalignment='center',
# fontsize=6,
# textcoords='data',
# clip_on=True, backgroundcolor='w')
# GALACTIC PLOT ---------------------------------------------------------#
ax2 = fig.add_axes([0.08, 0.05, 0.88, 0.30])
ax2.plot(borderPolyGalArr[:,0], borderPolyGalArr[:,1])
# Plot the tile centres and vertices
#ax2.scatter(np.array(lCentTileLst_deg), bCentTileLst_deg, s=2,
# zorder=2)
squares = []
for vertex in vertexTileGalLst_deg:
square = Polygon(xy=np.array(vertex), closed=True)
squares.append(square)
s = PatchCollection(squares, alpha=1.0, edgecolor='black',
facecolor='none', zorder=3)
ax2.add_collection(s)
# Plot the border and format the axis
ax2.yaxis.set_major_locator(MaxNLocator(4))
ax2.yaxis.grid(True, which='major')
ax2.xaxis.grid(True, which='major')
ax2.set_xlim(lMax_deg+0.02*lRange_deg, lMin_deg-0.02*lRange_deg)
ax2.set_ylim(bMin_deg-0.19*bRange_deg, bMax_deg+0.19*bRange_deg)
ax2.set_aspect(1.0)
ax2.set_ylabel('Glong. (deg)')
ax2.set_xlabel('Glat. (deg)')
# Annotate the Galactic plot with tile numbers
for i in range(len(lCentTileLst_deg)):
ax2.annotate(str(tileIDLst[i]),
xy=(lCentTileLst_deg[i], bCentTileLst_deg[i]),
horizontalalignment='center',
verticalalignment='center',
fontsize=8,
textcoords='data',
clip_on=True)
fig.show()
fig.savefig('tile_layout.pdf')
print "Press <RETURN> to exit ..."
raw_input()
#-----------------------------------------------------------------------------#
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
937175e4b5291adf7936b6fa829b3bbd28c7c340 | 3af2998fe7bc3c48fbe6eae476f7e0ec5bfc0ca6 | /control_flow/while_loop.py | 503c2959186fb69a16e04e59916b7b694844032c | []
| no_license | east825/python-inference-playground | 22acb8f2c71eb07e13293a9fec1d67a6f5aa25cf | f60387604a1c535ad30b7f3f44acf08cbd7d88c7 | refs/heads/master | 2020-06-01T01:29:00.399986 | 2014-04-15T12:28:18 | 2014-04-15T12:28:18 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 310 | py | while False:
if True:
a1 = 42
break
else:
a1 = 'spam'
print(a1)
while False:
if True:
a2 = 42
else:
a2 = 'spam'
print(a2)
a3 = 42
while False:
if True:
a3 = 'spam'
print(a3)
while False:
if True:
a4 = 42
else:
a4 = 'spam'
print(a4) | [
"[email protected]"
]
| |
b4571590ec6a3e3ec47fcc2114275054d35df44f | d1ddb9e9e75d42986eba239550364cff3d8f5203 | /google-cloud-sdk/lib/surface/container/builds/describe.py | c3386e7ed0435df473c1cbd30730d4657cc15fba | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | bopopescu/searchparty | 8ecd702af0d610a7ad3a8df9c4d448f76f46c450 | afdc2805cb1b77bd5ac9fdd1a76217f4841f0ea6 | refs/heads/master | 2022-11-19T14:44:55.421926 | 2017-07-28T14:55:43 | 2017-07-28T14:55:43 | 282,495,798 | 0 | 0 | Apache-2.0 | 2020-07-25T17:48:53 | 2020-07-25T17:48:52 | null | UTF-8 | Python | false | false | 2,193 | py | # Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Describe build command."""
from googlecloudsdk.api_lib.cloudbuild import cloudbuild_util
from googlecloudsdk.calliope import base
from googlecloudsdk.core import properties
from googlecloudsdk.core import resources
class Describe(base.DescribeCommand):
"""Get information about a particular build."""
@staticmethod
def Args(parser):
"""Register flags for this command.
Args:
parser: An argparse.ArgumentParser-like object. It is mocked out in order
to capture some information, but behaves like an ArgumentParser.
"""
parser.add_argument(
'build',
completion_resource='cloudbuild.projects.builds',
list_command_path='container builds list --uri',
help=('The build to describe. The ID of the build is printed at the '
'end of the build submission process, or in the ID column when '
'listing builds.'),
)
def Run(self, args):
"""This is what gets called when the user runs this command.
Args:
args: an argparse namespace. All the arguments that were provided to this
command invocation.
Returns:
Some value that we want to have printed later.
"""
client = cloudbuild_util.GetClientInstance()
build_ref = resources.REGISTRY.Parse(
args.build,
params={'projectId': properties.VALUES.core.project.GetOrFail},
collection='cloudbuild.projects.builds')
return client.projects_builds.Get(
client.MESSAGES_MODULE.CloudbuildProjectsBuildsGetRequest(
projectId=build_ref.projectId, id=build_ref.id))
| [
"[email protected]"
]
| |
86cfe8a6f28681768008e205860dc50ea646a073 | 76af5f63e173850a461dd104d696a3ad86958b6d | /ObjectDetectionDeps/Generate_Labelmap.py | 543168988a48df229884cc695dd2deda73776def | []
| no_license | Danny-Dasilva/Tensorflow_Object_Detection | 599b76d86918b1425a8d9e35d6dc5644224e6692 | b0386dfac730b516594d511849560ff59a2bf979 | refs/heads/master | 2022-03-30T11:34:08.595899 | 2020-02-08T16:35:20 | 2020-02-08T16:35:20 | 197,986,897 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 756 | py |
import os
path = os.environ['IMAGEPATH'] + '/Train_labels.csv'
import csv
col = []
with open(path, newline='') as csvfile:
reader = csv.DictReader(csvfile)
for row in reader:
#print(set(row['class']))
col.append(row['class'])
classes = list(set(col))
print(classes)
count = 0
pbtxt_label = open("labelmap.pbtxt","w")
for label in classes:
count += 1
pbtxt_label.write("item {\n")
pbtxt_label.write(" id: %s\n" % (count))
pbtxt_label.write(" name: '%s'\n" % (label))
pbtxt_label.write("}\n")
pbtxt_label.write("\n")
count = 0
txt_label = open("labels.txt","w")
for label in classes:
txt_label.write("%s %s\n" % (count, label))
count += 1
pbtxt_label.close()
txt_label.close() | [
"[email protected]"
]
| |
8f608eb54cc99e4c496150d0edecc71a52d4e030 | 3cda2dc11e1b7b96641f61a77b3afde4b93ac43f | /nni/experiment/config/utils/__init__.py | c4b8b586d0953435188171ce60154e6e190380ee | [
"MIT"
]
| permissive | Eurus-Holmes/nni | 6da51c352e721f0241c7fd26fa70a8d7c99ef537 | b84d25bec15ece54bf1703b1acb15d9f8919f656 | refs/heads/master | 2023-08-23T10:45:54.879054 | 2023-08-07T02:39:54 | 2023-08-07T02:39:54 | 163,079,164 | 3 | 2 | MIT | 2023-08-07T12:35:54 | 2018-12-25T12:04:16 | Python | UTF-8 | Python | false | false | 237 | py | # Copyright (c) Microsoft Corporation.
# Licensed under the MIT license.
"""
Utility functions for experiment config classes.
Check "public.py" to see which functions you can utilize.
"""
from .public import *
from .internal import *
| [
"[email protected]"
]
| |
891869c00f24639fa8d33f4d0a3dea0f62cc2f18 | 5b1eb22194cb2f4c9df63765f78a6998a6ad3de2 | /src/helpsys.py | ad173e7ce6e17082de7d532ab17840bb10f89ca2 | []
| no_license | Jawmo/akriosmud | 85c2ecd520fd15ba86d0210b018055146b9e2192 | aac434919586f5590f089e8e87e0f2e946a80aa9 | refs/heads/master | 2020-10-01T23:26:47.290481 | 2019-12-09T01:11:05 | 2019-12-09T01:11:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,804 | py | #! usr/bin/env python
# Project: Akrios
# Filename: helpsys.py
#
# File Description: Module to handle the help system.
#
# By: Jubelo
from collections import namedtuple
import glob
import logging
import json
import os
import olc
import world
log = logging.getLogger(__name__)
WRITE_NEW_FILE_VERSION = False
# Define some named tuples for various Help file values
Section = namedtuple("Section", "name")
sections = {"player": Section("player"),
"administrative": Section("administrative"),
"builder": Section("builder"),
"deity": Section("deity")}
class Help(olc.Editable):
CLASS_NAME = "__Help__"
FILE_VERSION = 1
def __init__(self, path):
super().__init__()
self.path = path
self.json_version = Help.FILE_VERSION
self.json_class_name = Help.CLASS_NAME
self.builder = None
self.creator = ""
self.viewable = ""
self.keywords = []
self.topics = ""
self.section = ""
self.description = ""
self.commands = {"viewable": ("string", ["true", "false"]),
"creator": ("string", None),
"keywords": ("list", None),
"topics": ("string", None),
"section": ("string", sections),
"description": ("description", None)}
if os.path.exists(path):
self.load()
def to_json(self):
if self.json_version == 1:
jsonable = {"json_version": self.json_version,
"json_class_name": self.json_class_name,
"creator": self.creator,
"viewable": self.viewable,
"keywords": self.keywords,
"topics": self.topics,
"section": self.section,
"description": self.description}
return json.dumps(jsonable, sort_keys=True, indent=4)
def load(self):
log.debug(f"Loading help file: {self.path}")
if self.path.endswith("json"):
with open(self.path, "r") as thefile:
help_file_dict = json.loads(thefile.read())
for eachkey, eachvalue in help_file_dict.items():
setattr(self, eachkey, eachvalue)
def save(self):
with open(f"{self.path}", "w") as thefile:
thefile.write(self.to_json())
def display(self):
return (f"{{BCreator{{x: {self.creator}\n"
f"{{BViewable{{x: {self.viewable}\n"
f"{{BKeywords{{x: {', '.join(self.keywords)}\n"
f"{{BTopics{{x: {self.topics}\n"
f"{{BSection{{x: {self.section}\n"
f" {{y{', '.join(sections)}\n"
f"{{BDescription{{x:\n\r"
f"{self.description[:190]}|...{{x\n\r")
helpfiles = {}
def init():
log.info("Initializing all help files.")
allhelps = glob.glob(os.path.join(world.helpDir, "*.json"))
for singlehelp in allhelps:
thehelp = Help(singlehelp)
for keyword in thehelp.keywords:
helpfiles[keyword] = thehelp
if WRITE_NEW_FILE_VERSION:
thehelp.save()
def reload():
helpfiles = {}
init()
def get_help(key, server=False):
key = key.lower()
if key:
if key in helpfiles:
if helpfiles[key].viewable.lower() == "true" or server:
return helpfiles[key].description
else:
log.warning(f"MISSING HELP FILE: {key}")
return "We do not appear to have a help file for that topic. "\
"We have however logged the attempt and will look into creating "\
"a help file for that topic as soon as possible.\n\r"
| [
"[email protected]"
]
| |
6da9faa43719b34fe1f5824aa6c271c993fb4534 | 7ad0ea6e17c6505c419f70b956a06f36b734779b | /BizchoolLab/project/urls.py | e8a928037905962c68a0c309904a81b359a0d1ac | []
| no_license | yoongyo/BizLab | 34cb2e6386030fb091853d90065063367ae32521 | dfe5f1e69d6a711e96f0f456f36ecfbccf010892 | refs/heads/master | 2020-04-12T13:08:55.782891 | 2018-12-20T02:09:54 | 2018-12-20T02:09:54 | 162,513,450 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 355 | py | from django.urls import path, re_path
from . import views
urlpatterns = [
re_path(r'^new/$', views.project_new, name="project_new"),
re_path(r'^$', views.project_list, name="project_list"),
re_path(r'^(?P<pk>\d+)/$', views.project_detail, name="project_detail"),
re_path(r'^(?P<pk>\d+)/Edit/$', views.project_edit, name="project_edit"),
] | [
"[email protected]"
]
| |
d90fcac0e12cd0f321dbfa11976d0074cb2a681c | 15f321878face2af9317363c5f6de1e5ddd9b749 | /solutions_python/Problem_75/200.py | fffe0758681ce42de20ca4fef4e35391db502cce | []
| no_license | dr-dos-ok/Code_Jam_Webscraper | c06fd59870842664cd79c41eb460a09553e1c80a | 26a35bf114a3aa30fc4c677ef069d95f41665cc0 | refs/heads/master | 2020-04-06T08:17:40.938460 | 2018-10-14T10:12:47 | 2018-10-14T10:12:47 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,091 | py |
def checkCase(data):
elements=[]
nonbase=[]
opposed=[]
for i in xrange(0, int(data[0])):
nonbase.append((data[i+1][0],data[i+1][1],data[i+1][2]))
data=data[int(data[0])+1:]
for i in xrange(0, int(data[0])):
opposed.append((data[i+1][0],data[i+1][1]))
data=data[-1]
for cmd in data:
try:
if len(elements) > 0:
for n in nonbase:
if (n[0] == elements[-1] and cmd == n[1]) or (n[1] == elements[-1] and cmd == n[0]):
elements[-1]=n[2]
1/0
for o in opposed:
if (o[0] in elements and cmd == o[1]) or (o[1] in elements and cmd == o[0]):
elements=[]
1/0
elements.append(cmd)
except:
pass
return str(elements).replace("'","")
data=open("B-large.in","r").read()
data=data.splitlines()[1:]
out=open("out.txt","w")
for c in xrange(0, len(data)):
tmp=data[c].split(" ")
out.write("Case #%i: %s\n"%(c+1,checkCase(tmp)))
out.close() | [
"[email protected]"
]
| |
f2536c6d3f382ecd5d7c0ab7aa19a39a61db1aff | 453d2e699d218fdb3bc1e535a707988194ac6717 | /lib/opengl/postproc/base.py | e38f2f67f6835325a7416c113e3b01a34d8e7a81 | [
"MIT"
]
| permissive | defgsus/thegame | d54ffcd343c7e1805d2c11e24cd38b02243e73d4 | 38a627d9108f1418b94b08831fd640dd87fbba83 | refs/heads/master | 2023-07-23T06:32:40.297591 | 2022-04-11T12:02:32 | 2022-04-11T12:02:32 | 127,875,178 | 1 | 0 | MIT | 2023-07-06T22:07:07 | 2018-04-03T08:21:31 | Python | UTF-8 | Python | false | false | 1,264 | py | from ..RenderSettings import RenderSettings
from ..RenderNode import RenderNode
from ..ScreenQuad import ScreenQuad
from ..core.Shader import Shader
class PostProcNode(RenderNode):
def __init__(self, name):
super().__init__(name)
self.quad = ScreenQuad(name="pp-quad-%s" % self.name)
self.do_compile = True
def release(self):
self.quad.release()
def render(self, rs: RenderSettings, pass_num: int):
if self.do_compile:
self.quad.set_shader_code(self.get_code())
self.do_compile = False
self.quad.drawable.shader.set_uniform("u_tex1", 0)
self.quad.drawable.shader.set_uniform("u_tex2", 1)
self.quad.drawable.shader.set_uniform("u_tex3", 2)
self.quad.drawable.shader.set_uniform("u_tex4", 3)
self.quad.drawable.shader.set_uniform("u_time", rs.time)
self.update_uniforms(self.quad.drawable.shader, rs, pass_num=pass_num)
self.quad.draw(rs.render_width, rs.render_height)
#self.quad.draw_centered(rs.render_width, rs.render_height, rs.render_width, rs.render_height)
def get_code(self):
raise NotImplementedError
def update_uniforms(self, shader: Shader, rs: RenderSettings, pass_num: int):
pass
| [
"[email protected]"
]
| |
038243668ac16b39e17fbc3ecc4dfe6eb39856d0 | 2031771d8c226806a0b35c3579af990dd0747e64 | /pyobjc-framework-Photos/PyObjCTest/test_phphotolibrary.py | 7174dd32bc9163fff070ec2f446bfa8aa62aa0cf | [
"MIT"
]
| permissive | GreatFruitOmsk/pyobjc-mirror | a146b5363a5e39181f09761087fd854127c07c86 | 4f4cf0e4416ea67240633077e5665f5ed9724140 | refs/heads/master | 2018-12-22T12:38:52.382389 | 2018-11-12T09:54:18 | 2018-11-12T09:54:18 | 109,211,701 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,072 | py | from PyObjCTools.TestSupport import *
import sys
if sys.maxsize > 2 ** 32:
import Photos
class TestPHPhotoLibrary (TestCase):
def testConstants(self):
self.assertEqual(Photos.PHAuthorizationStatusNotDetermined, 0)
self.assertEqual(Photos.PHAuthorizationStatusRestricted, 1)
self.assertEqual(Photos.PHAuthorizationStatusDenied, 2)
self.assertEqual(Photos.PHAuthorizationStatusAuthorized, 3)
@min_sdk_level('10.13')
def testProtocols(self):
objc.protocolNamed('PHPhotoLibraryChangeObserver')
@min_os_level('10.13')
def testMethods(self):
self.assertArgIsBlock(Photos.PHPhotoLibrary.requestAuthorization_, 0, b'v' + objc._C_NSInteger)
self.assertArgIsBlock(Photos.PHPhotoLibrary.performChanges_completionHandler_, 1, b'vZ@')
self.assertArgIsOut(Photos.PHPhotoLibrary.performChangesAndWait_error_, 1)
self.assertResultIsBOOL(Photos.PHPhotoLibrary.performChangesAndWait_error_)
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
30fb72f40582c23a9f9dd19a02d75877810dce08 | 44b9fa8d1113299d327d087da73febf26bef61e7 | /WINDOW_openMDAO/AEP/FastAEP/farm_energy/wake_model_mean_new/wake_overlap.py | ff871ca75c2e2612402c55bb235094bbeda88a94 | []
| no_license | sebasanper/WINDOW_openMDAO | 828e6d38546e706d23e4920b1c6e857c6be10825 | 3779fa8380874bc2cd7380df90339b37806a6a60 | refs/heads/master | 2023-04-12T22:09:42.951295 | 2023-04-05T08:54:15 | 2023-04-05T08:54:15 | 107,442,976 | 3 | 9 | null | 2019-01-20T16:32:42 | 2017-10-18T17:47:04 | HTML | UTF-8 | Python | false | false | 789 | py | from math import sqrt
def root_sum_square(array_deficits):
# This is one model, root sum square of individual wind speed deficits.
total_deficit = sqrt(sum([deficit ** 2.0 for deficit in array_deficits]))
return total_deficit
def multiplied(array_deficits):
total_deficit = 1.0
for element in array_deficits:
total_deficit *= element
return total_deficit
def summed(array_deficits):
total_deficit = sum(array_deficits)
if total_deficit > 1.0:
total_deficit = 1.0
return total_deficit
def maximum(array_deficits):
return max(array_deficits)
if __name__ == '__main__':
deficits = [0.3, 0.4]
# print root_sum_square(deficits)
# print multiplied(deficits)
# print summed(deficits)
# print maximum(deficits)
| [
"[email protected]"
]
| |
a3058160dea228fc765e45cdcec942bd35ec57a9 | 148072ce210ca4754ea4a37d83057e2cf2fdc5a1 | /src/core/w3af/w3af/core/data/db/tests/test_dbms.py | cf0aed0578e3412ae13b214eeeea0442098cd14d | []
| no_license | ycc1746582381/webfuzzer | 8d42fceb55c8682d6c18416b8e7b23f5e430c45f | 0d9aa35c3218dc58f81c429cae0196e4c8b7d51b | refs/heads/master | 2021-06-14T18:46:59.470232 | 2017-03-14T08:49:27 | 2017-03-14T08:49:27 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,053 | py | # -*- coding: UTF-8 -*-
"""
Copyright 2013 Andres Riancho
This file is part of w3af, http://w3af.org/ .
w3af is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation version 2 of the License.
w3af is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with w3af; if not, write to the Free Software
Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
"""
import unittest
import string
import os
from itertools import repeat, starmap
from random import choice
from w3af.core.data.db.dbms import SQLiteDBMS, get_default_temp_db_instance
from w3af.core.controllers.exceptions import DBException, NoSuchTableException
from w3af.core.controllers.misc.temp_dir import (get_temp_dir,
create_temp_dir,
remove_temp_dir)
def get_temp_filename():
temp_dir = get_temp_dir()
fname = ''.join(starmap(choice, repeat((string.letters,), 18)))
filename = os.path.join(temp_dir, fname + '.w3af.temp_db')
return filename
class TestDBMS(unittest.TestCase):
def setUp(self):
create_temp_dir()
def tearDown(self):
remove_temp_dir()
def test_open_error(self):
invalid_filename = '/'
self.assertRaises(DBException, SQLiteDBMS, invalid_filename)
def test_simple_db(self):
db = SQLiteDBMS(get_temp_filename())
db.create_table('TEST', set([('id', 'INT'), ('data', 'TEXT')])).result()
db.execute('INSERT INTO TEST VALUES (1,"a")').result()
self.assertIn(('1', 'a'), db.select('SELECT * from TEST'))
self.assertEqual(('1', 'a'), db.select_one('SELECT * from TEST'))
def test_select_non_exist_table(self):
db = SQLiteDBMS(get_temp_filename())
self.assertRaises(NoSuchTableException, db.select, 'SELECT * from TEST')
def test_default_db(self):
db = get_default_temp_db_instance()
db.create_table('TEST', set([('id', 'INT'), ('data', 'TEXT')])).result()
db.execute('INSERT INTO TEST VALUES (1,"a")').result()
self.assertIn(('1', 'a'), db.select('SELECT * from TEST'))
self.assertEqual(('1', 'a'), db.select_one('SELECT * from TEST'))
def test_simple_db_with_pk(self):
db = SQLiteDBMS(get_temp_filename())
fr = db.create_table('TEST', [('id', 'INT'), ('data', 'TEXT')], ['id'])
fr.result()
self.assertEqual([], db.select('SELECT * from TEST'))
def test_drop_table(self):
db = SQLiteDBMS(get_temp_filename())
fr = db.create_table('TEST', [('id', 'INT'), ('data', 'TEXT')], ['id'])
fr.result()
db.drop_table('TEST').result()
self.assertRaises(DBException, db.drop_table('TEST').result)
def test_simple_db_with_index(self):
db = SQLiteDBMS(get_temp_filename())
fr = db.create_table('TEST', [('id', 'INT'), ('data', 'TEXT')], ['id'])
fr.result()
db.create_index('TEST', ['data']).result()
self.assertRaises(DBException,
db.create_index('TEST', ['data']).result)
def test_table_exists(self):
db = SQLiteDBMS(get_temp_filename())
self.assertFalse(db.table_exists('TEST'))
db = SQLiteDBMS(get_temp_filename())
db.create_table('TEST', [('id', 'INT'), ('data', 'TEXT')], ['id'])
self.assertTrue(db.table_exists('TEST'))
def test_close_twice(self):
db = SQLiteDBMS(get_temp_filename())
db.close()
db.close()
class TestDefaultDB(unittest.TestCase):
def test_get_default_temp_db_instance(self):
self.assertEqual(id(get_default_temp_db_instance()),
id(get_default_temp_db_instance()))
| [
"[email protected]"
]
| |
bd5007f5ea485e97bda7e458055eb62fbd663a8a | 8520c991dc543f5f4e1efe59ab401824173bb985 | /457-circular-array-loop/solution.py | e31ba44f932755442e14be18a59b1756d31061c2 | []
| no_license | katryo/leetcode | d44f70f2853c4f5ea9a462d022feb0f5436c2236 | 0da45559271d3dba687858b8945b3e361ecc813c | refs/heads/master | 2020-03-24T12:04:53.859047 | 2020-02-18T04:27:55 | 2020-02-18T04:27:55 | 142,703,107 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,668 | py | class Solution(object):
def circularArrayLoop(self, nums):
"""
:type nums: List[int]
:rtype: bool
"""
def next(idx):
return (idx + nums[idx]) % len(nums)
for i in range(len(nums)):
if nums[i] == 0:
continue
pslow = i
pfast = next(pslow)
npfast = next(pfast)
while nums[i] * nums[pfast] > 0 and nums[i] * nums[next(pfast)] > 0:
if pfast == pslow:
if next(pslow) == pslow:
break
return True
pfast = next(next(pfast))
pslow = next(pslow)
j = i
while nums[j] != 0:
nums[j] = 0
nxt = next(j)
j = nxt
return False
# You are given an array of positive and negative integers.
# If a number n at an index is positive, then move forward n steps.
# Conversely, if it's negative (-n), move backward n steps.
# Assume the first element of the array is forward next to the last element,
# and the last element is backward next to the first element. Determine if there is a loop in this array.
# A loop starts and ends at a particular index with more than 1 element along the loop.
# The loop must be "forward" or "backward'.
if __name__ == '__main__':
s = Solution()
print(s.circularArrayLoop([3, 1, 2]))
print(s.circularArrayLoop([-1]))
print(s.circularArrayLoop([2, -1, 1, -2, -2]))
print(s.circularArrayLoop([-2, 1, -1, -2, -2]))
print(s.circularArrayLoop([2, -1, 1, 2, 2]))
print(s.circularArrayLoop([-1, 2]))
| [
"[email protected]"
]
| |
b728b7a1c74922c4b5ecc77fd20377d3924e6d66 | 0821d92db624dada6bc50887f6e435ef1e1c03e2 | /norm/common.py | f8233b282b34e20c3f2abe8c3bf385be4388f6bb | [
"MIT"
]
| permissive | jcollie/norm | a29a3052705e805ba240232aec1fd6aac59897ba | db303b28e4184cae08228d92868f9409c013096a | refs/heads/master | 2021-01-18T04:19:45.679791 | 2013-07-22T22:34:09 | 2013-07-22T22:34:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,735 | py | # Copyright (c) Matt Haggard.
# See LICENSE for details.
from zope.interface import implements
from twisted.internet import defer
from collections import deque, defaultdict
from norm.interface import IAsyncCursor, IRunner, IPool
class BlockingCursor(object):
"""
I wrap a single DB-API2 db cursor in an asynchronous api.
"""
implements(IAsyncCursor)
def __init__(self, cursor):
self.cursor = cursor
def execute(self, sql, params=()):
return defer.maybeDeferred(self.cursor.execute, sql, params)
def fetchone(self):
return defer.maybeDeferred(self.cursor.fetchone)
def fetchall(self):
return defer.maybeDeferred(self.cursor.fetchall)
def lastRowId(self):
return defer.succeed(self.cursor.lastrowid)
def close(self):
return defer.maybeDeferred(self.cursor.close)
class BlockingRunner(object):
"""
I wrap a single DB-API2 db connection in an asynchronous api.
"""
implements(IRunner)
cursorFactory = BlockingCursor
def __init__(self, conn):
"""
@param conn: A synchronous database connection.
"""
self.conn = conn
def runQuery(self, qry, params=()):
return self.runInteraction(self._runQuery, qry, params)
def _runQuery(self, cursor, qry, params):
d = cursor.execute(qry, params)
d.addCallback(lambda _: cursor.fetchall())
return d
def runOperation(self, qry, params=()):
return self.runInteraction(self._runOperation, qry, params)
def _runOperation(self, cursor, qry, params):
return cursor.execute(qry, params)
def runInteraction(self, function, *args, **kwargs):
cursor = self.cursorFactory(self.conn.cursor())
d = defer.maybeDeferred(function, cursor, *args, **kwargs)
d.addCallback(self._commit)
d.addErrback(self._rollback)
return d
def _commit(self, result):
self.conn.commit()
return result
def _rollback(self, result):
self.conn.rollback()
return result
def close(self):
return defer.maybeDeferred(self.conn.close)
class ConnectionPool(object):
implements(IRunner)
db_scheme = None
def __init__(self, pool=None):
self.pool = pool or NextAvailablePool()
def add(self, conn):
self.pool.add(conn)
def runInteraction(self, function, *args, **kwargs):
return self._runWithConn('runInteraction', function, *args, **kwargs)
def runQuery(self, *args, **kwargs):
return self._runWithConn('runQuery', *args, **kwargs)
def runOperation(self, *args, **kwargs):
return self._runWithConn('runOperation', *args, **kwargs)
def _finish(self, result, conn):
self.pool.done(conn)
return result
def _runWithConn(self, name, *args, **kwargs):
d = self.pool.get()
d.addCallback(self._startRunWithConn, name, *args, **kwargs)
return d
def _startRunWithConn(self, conn, name, *args, **kwargs):
m = getattr(conn, name)
d = m(*args, **kwargs)
return d.addBoth(self._finish, conn)
def close(self):
dlist = []
for item in self.pool.list():
dlist.append(defer.maybeDeferred(item.close))
return defer.gatherResults(dlist)
class NextAvailablePool(object):
"""
I give you the next available object in the pool.
"""
implements(IPool)
def __init__(self):
self._options = deque()
self._all_options = []
self._pending = deque()
self._pending_removal = defaultdict(lambda:[])
def add(self, option):
self._options.append(option)
self._all_options.append(option)
self._fulfillNextPending()
def remove(self, option):
try:
self._options.remove(option)
self._all_options.remove(option)
return defer.succeed(option)
except ValueError:
d = defer.Deferred()
self._pending_removal[option].append(d)
return d
def get(self):
d = defer.Deferred()
self._pending.append(d)
self._fulfillNextPending()
return d
def _fulfillNextPending(self):
if self._pending and self._options:
self._pending.popleft().callback(self._options.popleft())
def done(self, option):
if option in self._pending_removal:
dlist = self._pending_removal.pop(option)
map(lambda d: d.callback(option), dlist)
return
self._options.append(option)
self._fulfillNextPending()
def list(self):
return self._all_options
| [
"[email protected]"
]
| |
53f105e9a16c218d5698c35ab3d888d4d9d69c58 | 9baa9f1bedf7bc973f26ab37c9b3046824b80ca7 | /venv-bck/bin/easy_install | f306fde1138489c4a226dd5e0a062fb6a8fad8e7 | []
| no_license | shakthydoss/suriyan | 58774fc5de1de0a9f9975c2ee3a98900e0a5dff4 | 8e39eb2e65cc6c6551fc165b422b46d598cc54b8 | refs/heads/master | 2020-04-12T05:36:59.957153 | 2017-01-08T06:12:13 | 2017-01-08T06:12:13 | 59,631,349 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 256 | #!/Users/saksekar/suriyan/venv/bin/python
# -*- coding: utf-8 -*-
import sys
import re
from setuptools.command.easy_install import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
]
| ||
311069543284b2bc146f63a4419a6b1c1c2286b8 | 08607218396a0269a90e8b4e6d099a5e99e39a8b | /database/schemes/easyTest/script/testCase/U商城项目/U商城管理端/站点设置/友情链接/worm_1482819508/友情链接.py | a743fe0cfcbaa179d5cb2864b7ab079e770d7400 | [
"MIT"
]
| permissive | TonnaMajesty/test | 4a07297557669f98eeb9f94b177a02a4af6f1af0 | 68b24d1f3e8b4d6154c9d896a7fa3e2f99b49a6f | refs/heads/master | 2021-01-19T22:52:18.309061 | 2017-03-06T10:51:05 | 2017-03-06T10:51:05 | 83,779,681 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,227 | py | # coding=utf-8
from time import sleep, time
from SRC.common.decorator import codeException_dec
from SRC.unittest.case import TestCase
from script.common import utils
class EasyCase(TestCase):
def __init__(self, webDriver, paramsList):
# 请不要修改该方法
super(EasyCase, self).__init__(webDriver, paramsList)
@codeException_dec('3')
def runTest(self):
driver = self.getDriver()
param = self.param
tool = utils
'''
##################################################################
浏览器驱动:driver
例如:
driver.get('http://www.demo.com')
driver.find_element_by_id("kw","输入框").send_keys("Remote")
driver.find_elements_by_id("su","查找")[0].click()
参数化:param
说明:
需要进行参数化的数据,用param.id 替换,id为参数化配置文件中的id值
自定义工具模块:tool 文件所在路径script/common/utils.py
开发人员可根据需要自行添加新的函数
例如:
获取一个随机生成的字符串:number=tool.randomStr(6)
##################################################################
该方法内进行测试用例的编写
'''
# driver.find_element_by_xpath('/html/body/div[1]/div/div[1]/ul[7]/li[1]/upmark').click(); # 点击站点设置
driver.find_element_by_xpath('/html/body/div[1]/div/div[1]/ul[7]/li[10]/a').click() # 点击友情链接
driver.find_element_by_xpath('/html/body/div[1]/div/div[2]/div/div[2]/div/a').click() # 点击新增
driver.find_element_by_xpath('/html/body/div[1]/div/div[2]/div/div[3]/div[1]/div[1]/div/input').send_keys(u'你想去哪?') # 输入链接名称
driver.find_element_by_xpath('/html/body/div[1]/div/div[2]/div/div[3]/div[1]/div[2]/div/input').send_keys('demo.upmall.yonyouup.com') # 输入链接URL
driver.find_element_by_xpath('/html/body/div[1]/div/div[2]/div/div[3]/div[1]/div[5]/div/a').click() # 点击上传
driver.find_element_by_xpath('/html/body/div[1]/div/div[2]/div/div[3]/div[1]/div[5]/div/input').send_keys('E:\\tupian\\hhhhhh.jpg') # 上传图片
#os.system("E:\\pythonScript\\autoit\\guanbi.au4.exe") # 调用guanbi.exe程序关闭windows窗口
driver.find_element_by_xpath('/html/body/div[1]/div/div[2]/div/div[3]/div[2]/div/button[2]').click() # 点击确定
#driver.find_elements_by_xpath('//a[@class="colorblue"]')[0].click(); # 点击编辑
driver.find_element_by_css_selector("body > div.container.corp-page.ng-scope > div > div.col-xs-10.corp-content > div > div:nth-child(3) > div > table > tbody > tr:nth-child(1) > td.text-center > a:nth-child(1)").click() # 点击编辑
driver.find_element_by_xpath('/html/body/div[1]/div/div[2]/div/div[3]/div[2]/div/button[2]').click() # 点击确定
#driver.find_elements_by_xpath('//a[@class="colorblue"]')[1].click(); # 点击删除
driver.find_element_by_css_selector("body > div.container.corp-page.ng-scope > div > div.col-xs-10.corp-content > div > div:nth-child(3) > div > table > tbody > tr:nth-child(1) > td.text-center > a:nth-child(2)").click() # 点击删除
driver.find_element_by_css_selector("body > div.modal.fade.ng-isolate-scope.in > div > div > div.modal-footer.ng-scope > button:nth-child(1)").click() # 点击确定
sleep(3) | [
"[email protected]"
]
| |
d98f3ed569fc29eb0c4c57187c6023849f737681 | d22df6ab8afcf030b92c6813b11cb4f6f1eb2174 | /etude_de_base/ufwi-administration-suite-ufwi-conf/ufwi_conf/backend/components/exim/exim.py | 9518e7893bad6b61379c43510a5f543ed459473c | []
| no_license | maximerobin/Ufwi | 67dbee6c90b21b5a6a1815e3853b9ec8e10747b7 | a516e52535534262fce127d96812b7ded4171707 | refs/heads/master | 2021-01-22T23:16:40.064512 | 2012-03-28T11:12:04 | 2012-03-28T11:12:04 | 3,254,471 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,976 | py | #coding: utf-8
"""
Copyright (C) 2008-2011 EdenWall Technologies
Written by Michael Scherer <m.scherer AT inl.fr>
This file is part of NuFirewall.
NuFirewall is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, version 3 of the License.
NuFirewall is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with NuFirewall. If not, see <http://www.gnu.org/licenses/>
"""
from __future__ import with_statement
import subprocess # TODO remove
from twisted.internet.threads import deferToThread
from twisted.internet.defer import inlineCallbacks, returnValue
from error import NuConfError, MAIL_BAD_CONFIGURATION
from ufwi_rpcd.common import tr
from ufwi_rpcd.backend.exceptions import ConfigError
from ufwi_rpcd.common import EDENWALL
from ufwi_rpcd.core.context import Context
from ufwi_conf.backend.unix_service import (
ConfigServiceComponent,
runCommandAndCheck,
)
from ufwi_conf.common.antispam_cfg import AntispamConf
from ufwi_conf.common.contact_cfg import ContactConf
from ufwi_conf.common.mail_cfg import MailConf
#generated file
GENFILE_HUBBED_HOSTS = '/etc/exim4/hubbed_hosts'
# * relay_domain_in
#Static file
GENFILE_LOCAL_ACL_CONF = '/etc/exim4/local_acl.conf'
#
GENFILE_UPDATE_EXIM4_CONF_CONF = '/etc/exim4/update-exim4.conf.conf'
# * smarthost
# * dc_relay_domain: can be empty
# * dc_relay_nets: can be empty
#Static file
GENFILE_MACRODEFS = '/etc/exim4/conf.d/main/01_exim4-config_listmacrosdefs-local'
# references the 2 following local_acl_antispam.conf & local_acl_antivirus.conf
GENFILE_LOCAL_ACL_ANTISPAM = '/etc/exim4/local_acl_antispam.conf'
#generated file
GENFILE_LOCAL_ACL_ANTIVIRUS = '/etc/exim4/local_acl_antivirus.conf'
# * use_antivirus
GENFILE_MAILNAME = '/etc/mailname'
GEN_FILES = (
GENFILE_HUBBED_HOSTS,
GENFILE_LOCAL_ACL_CONF,
GENFILE_MACRODEFS,
GENFILE_LOCAL_ACL_ANTISPAM,
GENFILE_LOCAL_ACL_ANTIVIRUS,
GENFILE_MAILNAME,
GENFILE_UPDATE_EXIM4_CONF_CONF,
)
class EximComponent(ConfigServiceComponent):
"""
Manage the basic configuration of a exim mail server
"""
NAME = "exim"
VERSION = "1.0"
PIDFILE = "/var/run/exim4/exim.pid"
EXE_NAME = "exim4"
INIT_SCRIPT = 'exim4'
REQUIRES = ('config', 'ufwi_conf', 'hosts', 'hostname')
if EDENWALL:
REQUIRES += ('antispam', )
# not used
CONFIG = {}
CONFIG_DEPENDS = frozenset(('antivirus', 'antispam', 'hostname', 'hosts'))
ACLS = {
'antispam': set(('getAntispamConfig',)),
'antivirus': set(('use',)),
'CORE': set(('hasComponent',)),
'hostname': set(('getShortHostname',)),
'hosts': set(('getFqdn',)),
}
ROLES = {
'conf_read': set(('getMailConfig', 'status')),
'conf_write': set(('setMailConfig',)),
}
check_relay_host = ConfigServiceComponent.check_ip_or_domain
check_virus_scan = ConfigServiceComponent.check_boolean
def __init__(self):
self.config = None
ConfigServiceComponent.__init__(self)
def init(self, core):
ConfigServiceComponent.init(self, core)
for genfile in GEN_FILES:
self.addConfFile(genfile, 'root:root', '0644')
def read_config(self, responsible, *args, **kwargs):
self.config = MailConf.defaultConf()
try:
serialized = self.core.config_manager.get(self.NAME)
except ConfigError:
self.debug("Not configured, defaults loaded.")
return
config = MailConf.deserialize(serialized)
valid, error = config.isValidWithMsg()
if valid:
self.config = config
else:
self.error(
"Component %s read incorrect values. Message was: %s" % (self.NAME, error)
)
def save_config(self, message, context):
serialized = self.config.serialize()
with self.core.config_manager.begin(self, context) as cm:
try:
cm.delete(self.NAME)
except:
pass
cm.set(self.NAME, serialized)
cm.commit(message)
def should_run(self, responsible):
return True
@inlineCallbacks
def genConfigFiles(self, responsible):
templates_variables = {}
for attr in MailConf.ATTRS:
templates_variables[attr] = getattr(self.config, attr)
context = Context.fromComponent(self)
fqdn = yield self.core.callService(context, 'hosts', 'getFqdn')
responsible.feedback(tr("Default FQDN is %(FQDN)s"), FQDN=fqdn)
hostname = yield self.core.callService(
context, 'hostname', 'getShortHostname'
)
responsible.feedback(
tr("Default hostname is %(HOSTNAME)s"), HOSTNAME=hostname
)
templates_variables.update({'fqdn': fqdn, 'hostname': hostname})
templates_variables.update(self._getrelayed())
yield self.addAntispamConfig(context, templates_variables, responsible)
self.generate_configfile(templates_variables)
yield self.updateConf(responsible)
def updateConf(self, responsible):
yield deferToThread(runCommandAndCheck, self,
("/usr/sbin/update-exim4.conf",))
@inlineCallbacks
def addAntispamConfig(self, context, templates_variables, responsible):
try:
serialized_antispam_cfg = yield self.core.callService(context,
'antispam', 'getAntispamConfig')
antispam_cfg = AntispamConf.deserialize(serialized_antispam_cfg)
except Exception, err:
self.writeError(err)
responsible.feedback(tr("Unreadable antispam configuration"))
use_antispam = False
else:
use_antispam = antispam_cfg.use_antispam
if not use_antispam:
templates_variables['use_antispam'] = False
responsible.feedback(tr("Not configured as an antispam system."))
return
templates_variables['use_antispam'] = True
responsible.feedback(tr("Configuring as an antispam system."))
mark_spam_level = float(antispam_cfg.mark_spam_level)
responsible.feedback(tr("Spam mark level: %(LEVEL)s"), LEVEL=mark_spam_level)
templates_variables['mark_spam_level'] = int(10 * mark_spam_level)
deny_spam_level = float(antispam_cfg.deny_spam_level)
responsible.feedback(tr("Spam rejection level: %(LEVEL)s"), LEVEL=deny_spam_level)
templates_variables['deny_spam_level'] = int(10 * deny_spam_level)
def service_getrelayed(self, context):
"""
pre-format relay_domains var
"""
return self._getrelayed()
def _getrelayed(self):
dc_relay_domains = self.config.relay_domain_in
dc_relay_nets = self.config.relay_net_out
if not dc_relay_domains:
dc_relay_domains = ''
else:
dc_relay_domains = \
"'%s'" % ":".join((unicode(domain) for domain in dc_relay_domains))
if not dc_relay_nets:
dc_relay_nets = ''
else:
dc_relay_nets = \
"'%s'" % ":".join((net.strNormal() for net in dc_relay_nets))
return {
'dc_relay_domains': dc_relay_domains,
'dc_relay_nets': dc_relay_nets
}
def get_ports(self):
ports = [ {'proto':'tcp', 'port': 25} ]
return ports
# services
def service_getMailConfig(self, context):
return self.config.serialize()
def service_setMailConfig(self, context, serialized, message):
config = MailConf.deserialize(serialized)
if config.getReceivedSerialVersion() != 1:
raise NuConfError(
MAIL_BAD_CONFIGURATION,
"Incompatible version: %s" % config.getReceivedSerialVersion()
)
valid, error = config.isValidWithMsg()
if not valid:
raise NuConfError(
MAIL_BAD_CONFIGURATION,
"'%s' failed : '%s'" % (valid, error)
)
self.config = config
self.save_config(message, context)
serialized = self.core.config_manager.get(self.NAME)
defer = self.core.callService(context, 'CORE', 'hasComponent', 'antivirus')
defer.addCallback(self._use_antivirus, context)
defer.addErrback(self.writeError)
return defer
def _use_antivirus(self, has_component, context):
if has_component:
defer = self.core.callService(context, 'antivirus', 'use', self.NAME, self.config.use_antivirus)
return defer
else:
self.debug('antivirus component not available')
# Not used yet
#def service_searchLogs(self, context, string):
# """
# Search the logs for the specified string
# """
# return deferToThread(self.search_log, string)
#def search_log(self, string):
# return subprocess.Popen(["/usr/sbin/exigrep", string, '/var/log/exim4/mainlog'], stdout=subprocess.PIPE).communicate()[0]
#def service_searchMailQueue(self, context, string):
# """
# Search the current mail queue for the specified string
# """
# return deferToThread(self.search_queue, string)
#def search_queue(self, string):
# return subprocess.Popen(["/usr/sbin/exiqgrep",string], stdout=subprocess.PIPE).communicate()[0]
def service_restart(self, context):
self.manage_clamav(context)
return ConfigServiceComponent.service_restart(self, context)
def manage_clamav(self, context):
if self.old_clamav_config == self.CONFIG['virus_scan']:
return
if self.old_clamav_config:
self.core.callServiceSync(context, "Clamav", "decrementUsageCount")
else:
self.core.callServiceSync(context, "Clamav", "incrementUsageCount")
self.old_clamav_config = self.CONFIG['virus_scan']
def service_start(self, context):
if self.CONFIG['virus_scan']:
self.core.callServiceSync(context, "Clamav", "decrementUsageCount")
self.old_clamav_config = self.CONFIG['virus_scan']
return ConfigServiceComponent.service_start(self, context)
def service_stop(self, context):
if self.old_clamav_config:
self.core.callServiceSync(context, "Clamav", "decrementUsageCount")
return ConfigServiceComponent.service_stop(self, context)
#@inlineCallbacks
#def service_status(self, context):
# ret = yield self.core.callService(context, 'contact', 'status')
# ret = (self.NAME, ret[1])
# returnValue(ret)
| [
"[email protected]"
]
| |
49c9b831d7494a17b8b9e2e2a8847fe9fb7f86e6 | f928edfc876d715159521589a22485d9de45cc89 | /import_hourly_csv_to_mariadb_09.py | 666d2da36d65dde7c7db69c75b28ea5fa5820375 | []
| no_license | guitar79/AirKorea_Python | cd06432740e0b292ca6ad3cde7144717967f5190 | 8077eaa0b6c444d575a25c7f7b992477a36c8294 | refs/heads/master | 2020-08-05T01:05:47.209200 | 2019-10-28T03:36:14 | 2019-10-28T03:36:14 | 212,342,300 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,349 | py | '''
-*- coding: utf-8 -*-
Auther [email protected]
'''
#import numpy as np
import os
import pymysql
from datetime import datetime
#import warning
#import time
start_time=str(datetime.now())
#mariaDB info
db_host = '10.114.0.121'
db_user = 'modis'
db_pass = 'rudrlrhkgkrrh'
db_name = 'AirKorea'
tb_name = 'hourly_vc'
#base directory
drbase = '/media/guitar79/8T/RS_data/Remote_Sensing/2017RNE/airkorea/csv1/'
#db connect
conn= pymysql.connect(host=db_host, user=db_user, password=db_pass, db=db_name,\
charset='utf8mb4', local_infile=1, cursorclass=pymysql.cursors.DictCursor)
cur = conn.cursor()
cur.execute("SET SQL_MODE = \"NO_AUTO_VALUE_ON_ZERO\";\
SET time_zone = \"+00:00\";")
cur.execute("DROP TABLE IF EXISTS `%s`;" %(tb_name))
cur.execute("DROP TABLE IF EXISTS `Obs_info`;")
cur.execute("CREATE TABLE IF NOT EXISTS `Obs_info` (\
`Ocode` int(6) NOT NULL,\
`Oname` varchar(12) NOT NULL,\
`Region` varchar(20) NOT NULL,\
`Address` varchar(500) DEFAULT NULL,\
`Lat` float DEFAULT NULL,\
`Lon` float DEFAULT NULL,\
`Alt` float DEFAULT NULL,\
`Remarks` char(255) DEFAULT NULL,\
PRIMARY KEY (`Ocode`))\
ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;")
cur.execute("CREATE TABLE IF NOT EXISTS `%s` (\
`Region` varchar(20) DEFAULT NULL,\
`Ocode` int(6) NOT NULL,\
`Oname` varchar(12) DEFAULT NULL,\
`Otime` int(12) NOT NULL,\
`SO2` float DEFAULT NULL,\
`CO` float DEFAULT NULL,\
`O3` float DEFAULT NULL,\
`NO2` float DEFAULT NULL,\
`PM10` int(4) DEFAULT NULL,\
`PM25` int(4) DEFAULT NULL,\
`Address` varchar(200) DEFAULT NULL,\
`id` int(11) NOT NULL AUTO_INCREMENT PRIMARY KEY,\
CONSTRAINT FK_Ocode FOREIGN KEY (`Ocode`) REFERENCES Obs_info(`Ocode`)\
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;"\
%(tb_name))
'''
cur.execute("CREATE TABLE IF NOT EXISTS `%s` (\
`Ocode` int(6) NOT NULL,\
`Otime` int(12) NOT NULL,\
`SO2` float DEFAULT NULL,\
`CO` float DEFAULT NULL,\
`O3` float DEFAULT NULL,\
`NO2` float DEFAULT NULL,\
`PM10` int(4) DEFAULT NULL,\
`PM25` int(4) DEFAULT NULL,\
`id` int(11) NOT NULL AUTO_INCREMENT,\
PRIMARY KEY (`id`),\
CONSTRAINT FK_Ocode FOREIGN KEY (`Ocode`) REFERENCES Obs_info(`Ocode`)\
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4;"\
%(tb_name))
'''
cur.execute("ALTER TABLE `%s`\
MODIFY `id` int(11) NOT NULL AUTO_INCREMENT;" %(tb_name))
#delete all data in the table
print("TRUNCATE TABLE %s;" %(tb_name))
cur.execute("TRUNCATE TABLE %s;" %(tb_name))
conn.commit()
#log file
insert_log = open(drbase+'hourly_import_result.log', 'a')
error_log = open(drbase+'hourly_import_error.log', 'a')
for i in sorted(os.listdir(drbase),reverse=True):
#read csv files
if i[-4:] == '.csv':
print(i)
try :
print("LOAD DATA LOCAL \
INFILE '%s%s' \
INTO TABLE %s.%s \
FIELDS TERMINATED BY '\|' \
ENCLOSED BY '\"' \
LINES TERMINATED BY '\\n'\
IGNORE 1 LINES \
(`Region`, `Ocode`, `Oname`, `Otime`, \
`SO2`, `CO`, `O3`, `NO2`, `PM10`, `PM25`, `Address`);"\
%(drbase,i,db_name,tb_name))
cur.execute("LOAD DATA LOCAL \
INFILE '%s%s' \
INTO TABLE %s.%s \
FIELDS TERMINATED BY '\|' \
ENCLOSED BY '\"' \
LINES TERMINATED BY '\\n'\
IGNORE 1 LINES \
(`Region`, `Ocode`, `Oname`, `Otime`, \
`SO2`, `CO`, `O3`, `NO2`, `PM10`, `PM25`, `Address`);"\
%(drbase,i,db_name,tb_name))
conn.commit()
insert_log.write(drbase+i+" is inserted to the %s - %s\n"\
%(tb_name, datetime.now()))
except :
print(drbase+i+" is error : %s - %s\n"\
%(tb_name, datetime.now()))
error_log.write(drbase+i+" is error : %s - %s\n"\
%(tb_name, datetime.now()))
insert_log.close()
error_log.close()
print("CHECK TABLE %s.%s;" %(db_name, tb_name))
cur.execute("CHECK TABLE %s.%s;" %(db_name, tb_name))
conn.commit()
print("ALTER TABLE %s.%s ENGINE = InnoDB;" %(db_name, tb_name))
cur.execute("ALTER TABLE %s.%s ENGINE = InnoDB;" %(db_name, tb_name))
conn.commit()
print("OPTIMIZE TABLE %s.%s;" %(db_name, tb_name))
cur.execute("OPTIMIZE TABLE %s.%s;" %(db_name, tb_name))
conn.commit()
'''
print("FLUSH TABLE %s.%s;" %(db_name, tb_name))
cur.execute("FLUSH TABLE %s.%s;" %(db_name, tb_name))
conn.commit()
'''
cur.close()
end_time = str(datetime.now())
print("start : "+ start_time+" end: "+end_time)
'''
http://localhost/phpMyAdmin/sql.php?db=AirKorea&table=houly_vc&back=tbl_operations.php&goto=tbl_operations.php&sql_query=ALTER+TABLE+%60houly_vc%60+ENGINE+%3D+InnoDB%3B&token=746c2350251eec3ab8bef717286d7272
'''
| [
"[email protected]"
]
| |
6d5d2be5a463e58fc1862feabe2bcc443fce727b | f07391f481150ad07cd5652a7b09cf1cd60d345f | /cmsplugin_container/cms_plugins.py | ee489a8a5b52902cedc985968117762177b4c1a3 | []
| no_license | django-cms-plugins/django-cmsplugin-container | 39dc956d1b7aa29132c0c841aa1d187da779e568 | c35d7111a6bd2c73de3d5df6a673497214df8e76 | refs/heads/master | 2021-01-21T15:07:12.658207 | 2013-07-23T14:56:19 | 2013-07-23T14:56:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,735 | py | #-*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from cms.plugin_pool import plugin_pool
from cms.plugin_base import CMSPluginBase
from cms.models import CMSPlugin
from cmsplugin_container.models import Container, Grid
from cmsplugin_container.forms import ContainerForm
class ContainerPlugin(CMSPluginBase):
model = Container
module = _("C")
name = _("Multi Columns")
render_template = "cms/plugins/container.html"
allow_children = True
child_classes = ["ColumnPlugin"]
form = ContainerForm
def render(self, context, instance, placeholder):
context.update({
'instance': instance,
'placeholder':placeholder,
})
return context
def save_model(self, request, obj, form, change):
response = super(MultiColumnPlugin, self).save_model(request, obj, form, change)
for x in xrange(int(form.cleaned_data['create'])):
col = Column(parent=obj, placeholder=obj.placeholder, language=obj.language, width=form.cleaned_data['create_width'], position=CMSPlugin.objects.filter(parent=obj).count(), plugin_type=ColumnPlugin.__name__)
col.save()
return response
class ColumnPlugin(CMSPluginBase):
model = Column
module = _("Multi Columns")
name = _("Column")
render_template = "cms/plugins/column.html"
#frontend_edit_template = 'cms/plugins/column_edit.html'
allow_children = True
def render(self, context, instance, placeholder):
context.update({
'instance': instance,
'placeholder':placeholder,
})
return context
plugin_pool.register_plugin(MultiColumnPlugin)
plugin_pool.register_plugin(ColumnPlugin)
| [
"[email protected]"
]
| |
ae1e8444b7e83511169be63c369f1ce2d53da1bd | f9462f3768fa058bd895a56b151da694664ce588 | /examples/713_no-op.py | 1a3dcf7281703f179d38d40bd7d138b5afd82c90 | [
"MIT"
]
| permissive | ryanpennings/workshop_swinburne_2021 | 16a9a7e2c7134832f8f714b7b430376f1b67dfb2 | 820ef4e36e73ac950f40e1846739087180af2e1c | refs/heads/main | 2023-05-31T16:35:16.535310 | 2021-06-17T06:24:51 | 2021-06-17T06:24:51 | 377,373,107 | 0 | 0 | MIT | 2021-06-17T06:24:51 | 2021-06-16T04:45:02 | null | UTF-8 | Python | false | false | 418 | py | import compas_rrc as rrc
if __name__ == '__main__':
# Create Ros Client
ros = rrc.RosClient()
ros.run()
# Create ABB Client
abb = rrc.AbbClient(ros, '/rob1')
print('Connected.')
# No operation
done = abb.send_and_wait(rrc.Noop())
# Print feedback
print('Feedback = ', done)
# End of Code
print('Finished')
# Close client
ros.close()
ros.terminate()
| [
"[email protected]"
]
| |
0c3cedf2685c67f2eb9d33bc6c35662dcaa91c7a | fb408595c1edee0be293302c6d7bfc0c77d37c46 | /CODEFORCE/AprilFools2019/d.py | 432b7019cabe3980ec625ddae3e2873b4e70eb90 | []
| no_license | as950118/Algorithm | 39ad25519fd0e42b90ddf3797a61239862ad79b5 | 739a7d4b569057cdb6b6faa74254512b83d02bb1 | refs/heads/master | 2023-07-21T12:38:00.653579 | 2023-07-19T06:57:17 | 2023-07-19T06:57:17 | 125,176,176 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25 | py | input()
input()
print(4)
| [
"[email protected]"
]
| |
817e5a809cd9b42f85bc7b98f88cae0525ecf649 | 4bd555bc662b8182a2e7644976bfdb00ed5e1ebe | /PythonistaAppTemplate/PythonistaKit.framework/pylib/zipfile.py | 9a21f7db76f5b8bd0c14c61af5223034a5ba37a7 | []
| no_license | fhelmli/homeNOWG2 | a103df1ef97194dec9501dbda87ec1f7c111fb4a | e794fd87b296544542fd9dc7ac94c981c6312419 | refs/heads/master | 2020-04-04T13:40:20.417769 | 2019-01-30T21:41:04 | 2019-01-30T21:41:04 | 155,970,686 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 65,550 | py | #\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
#\input texinfo
"""
Read and write ZIP files.
"""
import struct, os, time, sys, shutil
import binascii, cStringIO, stat
import io
import re
import string
try:
import zlib # We may need its compression method
crc32 = zlib.crc32
except ImportError:
zlib = None
crc32 = binascii.crc32
__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile",
"ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile" ]
class BadZipfile(Exception):
pass
class LargeZipFile(Exception):
"""
Raised when writing a zipfile, the zipfile requires ZIP64 extensions
and those extensions are disabled.
"""
error = BadZipfile # The exception raised by this module
ZIP64_LIMIT = (1 << 31) - 1
ZIP_FILECOUNT_LIMIT = 1 << 16
ZIP_MAX_COMMENT = (1 << 16) - 1
# constants for Zip file compression methods
ZIP_STORED = 0
ZIP_DEFLATED = 8
# Other ZIP compression methods not supported
# Below are some formats and associated data for reading/writing headers using
# the struct module. The names and structures of headers/records are those used
# in the PKWARE description of the ZIP file format:
# http://www.pkware.com/documents/casestudies/APPNOTE.TXT
# (URL valid as of January 2008)
# The "end of central directory" structure, magic number, size, and indices
# (section V.I in the format document)
structEndArchive = "<4s4H2LH"
stringEndArchive = "PK\005\006"
sizeEndCentDir = struct.calcsize(structEndArchive)
_ECD_SIGNATURE = 0
_ECD_DISK_NUMBER = 1
_ECD_DISK_START = 2
_ECD_ENTRIES_THIS_DISK = 3
_ECD_ENTRIES_TOTAL = 4
_ECD_SIZE = 5
_ECD_OFFSET = 6
_ECD_COMMENT_SIZE = 7
# These last two indices are not part of the structure as defined in the
# spec, but they are used internally by this module as a convenience
_ECD_COMMENT = 8
_ECD_LOCATION = 9
# The "central directory" structure, magic number, size, and indices
# of entries in the structure (section V.F in the format document)
structCentralDir = "<4s4B4HL2L5H2L"
stringCentralDir = "PK\001\002"
sizeCentralDir = struct.calcsize(structCentralDir)
# indexes of entries in the central directory structure
_CD_SIGNATURE = 0
_CD_CREATE_VERSION = 1
_CD_CREATE_SYSTEM = 2
_CD_EXTRACT_VERSION = 3
_CD_EXTRACT_SYSTEM = 4
_CD_FLAG_BITS = 5
_CD_COMPRESS_TYPE = 6
_CD_TIME = 7
_CD_DATE = 8
_CD_CRC = 9
_CD_COMPRESSED_SIZE = 10
_CD_UNCOMPRESSED_SIZE = 11
_CD_FILENAME_LENGTH = 12
_CD_EXTRA_FIELD_LENGTH = 13
_CD_COMMENT_LENGTH = 14
_CD_DISK_NUMBER_START = 15
_CD_INTERNAL_FILE_ATTRIBUTES = 16
_CD_EXTERNAL_FILE_ATTRIBUTES = 17
_CD_LOCAL_HEADER_OFFSET = 18
# The "local file header" structure, magic number, size, and indices
# (section V.A in the format document)
structFileHeader = "<4s2B4HL2L2H"
stringFileHeader = "PK\003\004"
sizeFileHeader = struct.calcsize(structFileHeader)
_FH_SIGNATURE = 0
_FH_EXTRACT_VERSION = 1
_FH_EXTRACT_SYSTEM = 2
_FH_GENERAL_PURPOSE_FLAG_BITS = 3
_FH_COMPRESSION_METHOD = 4
_FH_LAST_MOD_TIME = 5
_FH_LAST_MOD_DATE = 6
_FH_CRC = 7
_FH_COMPRESSED_SIZE = 8
_FH_UNCOMPRESSED_SIZE = 9
_FH_FILENAME_LENGTH = 10
_FH_EXTRA_FIELD_LENGTH = 11
# The "Zip64 end of central directory locator" structure, magic number, and size
structEndArchive64Locator = "<4sLQL"
stringEndArchive64Locator = "PK\x06\x07"
sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator)
# The "Zip64 end of central directory" record, magic number, size, and indices
# (section V.G in the format document)
structEndArchive64 = "<4sQ2H2L4Q"
stringEndArchive64 = "PK\x06\x06"
sizeEndCentDir64 = struct.calcsize(structEndArchive64)
_CD64_SIGNATURE = 0
_CD64_DIRECTORY_RECSIZE = 1
_CD64_CREATE_VERSION = 2
_CD64_EXTRACT_VERSION = 3
_CD64_DISK_NUMBER = 4
_CD64_DISK_NUMBER_START = 5
_CD64_NUMBER_ENTRIES_THIS_DISK = 6
_CD64_NUMBER_ENTRIES_TOTAL = 7
_CD64_DIRECTORY_SIZE = 8
_CD64_OFFSET_START_CENTDIR = 9
def _check_zipfile(fp):
try:
if _EndRecData(fp):
return True # file has correct magic number
except IOError:
pass
return False
def is_zipfile(filename):
"""Quickly see if a file is a ZIP file by checking the magic number.
The filename argument may be a file or file-like object too.
"""
result = False
try:
if hasattr(filename, "read"):
result = _check_zipfile(fp=filename)
else:
with open(filename, "rb") as fp:
result = _check_zipfile(fp)
except IOError:
pass
return result
def _EndRecData64(fpin, offset, endrec):
"""
Read the ZIP64 end-of-archive records and use that to update endrec
"""
try:
fpin.seek(offset - sizeEndCentDir64Locator, 2)
except IOError:
# If the seek fails, the file is not large enough to contain a ZIP64
# end-of-archive record, so just return the end record we were given.
return endrec
data = fpin.read(sizeEndCentDir64Locator)
if len(data) != sizeEndCentDir64Locator:
return endrec
sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
if sig != stringEndArchive64Locator:
return endrec
if diskno != 0 or disks != 1:
raise BadZipfile("zipfiles that span multiple disks are not supported")
# Assume no 'zip64 extensible data'
fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2)
data = fpin.read(sizeEndCentDir64)
if len(data) != sizeEndCentDir64:
return endrec
sig, sz, create_version, read_version, disk_num, disk_dir, \
dircount, dircount2, dirsize, diroffset = \
struct.unpack(structEndArchive64, data)
if sig != stringEndArchive64:
return endrec
# Update the original endrec using data from the ZIP64 record
endrec[_ECD_SIGNATURE] = sig
endrec[_ECD_DISK_NUMBER] = disk_num
endrec[_ECD_DISK_START] = disk_dir
endrec[_ECD_ENTRIES_THIS_DISK] = dircount
endrec[_ECD_ENTRIES_TOTAL] = dircount2
endrec[_ECD_SIZE] = dirsize
endrec[_ECD_OFFSET] = diroffset
return endrec
def _EndRecData(fpin):
"""Return data from the "End of Central Directory" record, or None.
The data is a list of the nine items in the ZIP "End of central dir"
record followed by a tenth item, the file seek offset of this record."""
# Determine file size
fpin.seek(0, 2)
filesize = fpin.tell()
# Check to see if this is ZIP file with no archive comment (the
# "end of central directory" structure should be the last item in the
# file if this is the case).
try:
fpin.seek(-sizeEndCentDir, 2)
except IOError:
return None
data = fpin.read()
if (len(data) == sizeEndCentDir and
data[0:4] == stringEndArchive and
data[-2:] == b"\000\000"):
# the signature is correct and there's no comment, unpack structure
endrec = struct.unpack(structEndArchive, data)
endrec=list(endrec)
# Append a blank comment and record start offset
endrec.append("")
endrec.append(filesize - sizeEndCentDir)
# Try to read the "Zip64 end of central directory" structure
return _EndRecData64(fpin, -sizeEndCentDir, endrec)
# Either this is not a ZIP file, or it is a ZIP file with an archive
# comment. Search the end of the file for the "end of central directory"
# record signature. The comment is the last item in the ZIP file and may be
# up to 64K long. It is assumed that the "end of central directory" magic
# number does not appear in the comment.
maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0)
fpin.seek(maxCommentStart, 0)
data = fpin.read()
start = data.rfind(stringEndArchive)
if start >= 0:
# found the magic number; attempt to unpack and interpret
recData = data[start:start+sizeEndCentDir]
if len(recData) != sizeEndCentDir:
# Zip file is corrupted.
return None
endrec = list(struct.unpack(structEndArchive, recData))
commentSize = endrec[_ECD_COMMENT_SIZE] #as claimed by the zip file
comment = data[start+sizeEndCentDir:start+sizeEndCentDir+commentSize]
endrec.append(comment)
endrec.append(maxCommentStart + start)
# Try to read the "Zip64 end of central directory" structure
return _EndRecData64(fpin, maxCommentStart + start - filesize,
endrec)
# Unable to find a valid end of central directory structure
return None
class ZipInfo (object):
"""Class with attributes describing each file in the ZIP archive."""
__slots__ = (
'orig_filename',
'filename',
'date_time',
'compress_type',
'comment',
'extra',
'create_system',
'create_version',
'extract_version',
'reserved',
'flag_bits',
'volume',
'internal_attr',
'external_attr',
'header_offset',
'CRC',
'compress_size',
'file_size',
'_raw_time',
)
def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
self.orig_filename = filename # Original file name in archive
# Terminate the file name at the first null byte. Null bytes in file
# names are used as tricks by viruses in archives.
null_byte = filename.find(chr(0))
if null_byte >= 0:
filename = filename[0:null_byte]
# This is used to ensure paths in generated ZIP files always use
# forward slashes as the directory separator, as required by the
# ZIP format specification.
if os.sep != "/" and os.sep in filename:
filename = filename.replace(os.sep, "/")
self.filename = filename # Normalized file name
self.date_time = date_time # year, month, day, hour, min, sec
if date_time[0] < 1980:
raise ValueError('ZIP does not support timestamps before 1980')
# Standard values:
self.compress_type = ZIP_STORED # Type of compression for the file
self.comment = "" # Comment for each file
self.extra = "" # ZIP extra data
if sys.platform == 'win32':
self.create_system = 0 # System which created ZIP archive
else:
# Assume everything else is unix-y
self.create_system = 3 # System which created ZIP archive
self.create_version = 20 # Version which created ZIP archive
self.extract_version = 20 # Version needed to extract archive
self.reserved = 0 # Must be zero
self.flag_bits = 0 # ZIP flag bits
self.volume = 0 # Volume number of file header
self.internal_attr = 0 # Internal attributes
self.external_attr = 0 # External file attributes
# Other attributes are set by class ZipFile:
# header_offset Byte offset to the file header
# CRC CRC-32 of the uncompressed file
# compress_size Size of the compressed file
# file_size Size of the uncompressed file
def FileHeader(self, zip64=None):
"""Return the per-file header as a string."""
dt = self.date_time
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
if self.flag_bits & 0x08:
# Set these to zero because we write them after the file data
CRC = compress_size = file_size = 0
else:
CRC = self.CRC
compress_size = self.compress_size
file_size = self.file_size
extra = self.extra
if zip64 is None:
zip64 = file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT
if zip64:
fmt = '<HHQQ'
extra = extra + struct.pack(fmt,
1, struct.calcsize(fmt)-4, file_size, compress_size)
if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
if not zip64:
raise LargeZipFile("Filesize would require ZIP64 extensions")
# File is larger than what fits into a 4 byte integer,
# fall back to the ZIP64 extension
file_size = 0xffffffff
compress_size = 0xffffffff
self.extract_version = max(45, self.extract_version)
self.create_version = max(45, self.extract_version)
filename, flag_bits = self._encodeFilenameFlags()
header = struct.pack(structFileHeader, stringFileHeader,
self.extract_version, self.reserved, flag_bits,
self.compress_type, dostime, dosdate, CRC,
compress_size, file_size,
len(filename), len(extra))
return header + filename + extra
def _encodeFilenameFlags(self):
if isinstance(self.filename, unicode):
try:
return self.filename.encode('ascii'), self.flag_bits
except UnicodeEncodeError:
return self.filename.encode('utf-8'), self.flag_bits | 0x800
else:
return self.filename, self.flag_bits
def _decodeFilename(self):
if self.flag_bits & 0x800:
return self.filename.decode('utf-8')
else:
return self.filename
def _decodeExtra(self):
# Try to decode the extra field.
extra = self.extra
unpack = struct.unpack
while extra:
tp, ln = unpack('<HH', extra[:4])
if tp == 1:
if ln >= 24:
counts = unpack('<QQQ', extra[4:28])
elif ln == 16:
counts = unpack('<QQ', extra[4:20])
elif ln == 8:
counts = unpack('<Q', extra[4:12])
elif ln == 0:
counts = ()
else:
raise RuntimeError, "Corrupt extra field %s"%(ln,)
idx = 0
# ZIP64 extension (large files and/or large archives)
if self.file_size in (0xffffffffffffffffL, 0xffffffffL):
self.file_size = counts[idx]
idx += 1
if self.compress_size == 0xFFFFFFFFL:
self.compress_size = counts[idx]
idx += 1
if self.header_offset == 0xffffffffL:
old = self.header_offset
self.header_offset = counts[idx]
idx+=1
extra = extra[ln+4:]
class _ZipDecrypter:
"""Class to handle decryption of files stored within a ZIP archive.
ZIP supports a password-based form of encryption. Even though known
plaintext attacks have been found against it, it is still useful
to be able to get data out of such a file.
Usage:
zd = _ZipDecrypter(mypwd)
plain_char = zd(cypher_char)
plain_text = map(zd, cypher_text)
"""
def _GenerateCRCTable():
"""Generate a CRC-32 table.
ZIP encryption uses the CRC32 one-byte primitive for scrambling some
internal keys. We noticed that a direct implementation is faster than
relying on binascii.crc32().
"""
poly = 0xedb88320
table = [0] * 256
for i in range(256):
crc = i
for j in range(8):
if crc & 1:
crc = ((crc >> 1) & 0x7FFFFFFF) ^ poly
else:
crc = ((crc >> 1) & 0x7FFFFFFF)
table[i] = crc
return table
crctable = _GenerateCRCTable()
def _crc32(self, ch, crc):
"""Compute the CRC32 primitive on one byte."""
return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ord(ch)) & 0xff]
def __init__(self, pwd):
self.key0 = 305419896
self.key1 = 591751049
self.key2 = 878082192
for p in pwd:
self._UpdateKeys(p)
def _UpdateKeys(self, c):
self.key0 = self._crc32(c, self.key0)
self.key1 = (self.key1 + (self.key0 & 255)) & 4294967295
self.key1 = (self.key1 * 134775813 + 1) & 4294967295
self.key2 = self._crc32(chr((self.key1 >> 24) & 255), self.key2)
def __call__(self, c):
"""Decrypt a single character."""
c = ord(c)
k = self.key2 | 2
c = c ^ (((k * (k^1)) >> 8) & 255)
c = chr(c)
self._UpdateKeys(c)
return c
compressor_names = {
0: 'store',
1: 'shrink',
2: 'reduce',
3: 'reduce',
4: 'reduce',
5: 'reduce',
6: 'implode',
7: 'tokenize',
8: 'deflate',
9: 'deflate64',
10: 'implode',
12: 'bzip2',
14: 'lzma',
18: 'terse',
19: 'lz77',
97: 'wavpack',
98: 'ppmd',
}
class ZipExtFile(io.BufferedIOBase):
"""File-like object for reading an archive member.
Is returned by ZipFile.open().
"""
# Max size supported by decompressor.
MAX_N = 1 << 31 - 1
# Read from compressed files in 4k blocks.
MIN_READ_SIZE = 4096
# Search for universal newlines or line chunks.
PATTERN = re.compile(r'^(?P<chunk>[^\r\n]+)|(?P<newline>\n|\r\n?)')
def __init__(self, fileobj, mode, zipinfo, decrypter=None,
close_fileobj=False):
self._fileobj = fileobj
self._decrypter = decrypter
self._close_fileobj = close_fileobj
self._compress_type = zipinfo.compress_type
self._compress_size = zipinfo.compress_size
self._compress_left = zipinfo.compress_size
if self._compress_type == ZIP_DEFLATED:
self._decompressor = zlib.decompressobj(-15)
elif self._compress_type != ZIP_STORED:
descr = compressor_names.get(self._compress_type)
if descr:
raise NotImplementedError("compression type %d (%s)" % (self._compress_type, descr))
else:
raise NotImplementedError("compression type %d" % (self._compress_type,))
self._unconsumed = ''
self._readbuffer = ''
self._offset = 0
self._universal = 'U' in mode
self.newlines = None
# Adjust read size for encrypted files since the first 12 bytes
# are for the encryption/password information.
if self._decrypter is not None:
self._compress_left -= 12
self.mode = mode
self.name = zipinfo.filename
if hasattr(zipinfo, 'CRC'):
self._expected_crc = zipinfo.CRC
self._running_crc = crc32(b'') & 0xffffffff
else:
self._expected_crc = None
def readline(self, limit=-1):
"""Read and return a line from the stream.
If limit is specified, at most limit bytes will be read.
"""
if not self._universal and limit < 0:
# Shortcut common case - newline found in buffer.
i = self._readbuffer.find('\n', self._offset) + 1
if i > 0:
line = self._readbuffer[self._offset: i]
self._offset = i
return line
if not self._universal:
return io.BufferedIOBase.readline(self, limit)
line = ''
while limit < 0 or len(line) < limit:
readahead = self.peek(2)
if readahead == '':
return line
#
# Search for universal newlines or line chunks.
#
# The pattern returns either a line chunk or a newline, but not
# both. Combined with peek(2), we are assured that the sequence
# '\r\n' is always retrieved completely and never split into
# separate newlines - '\r', '\n' due to coincidental readaheads.
#
match = self.PATTERN.search(readahead)
newline = match.group('newline')
if newline is not None:
if self.newlines is None:
self.newlines = []
if newline not in self.newlines:
self.newlines.append(newline)
self._offset += len(newline)
return line + '\n'
chunk = match.group('chunk')
if limit >= 0:
chunk = chunk[: limit - len(line)]
self._offset += len(chunk)
line += chunk
return line
def peek(self, n=1):
"""Returns buffered bytes without advancing the position."""
if n > len(self._readbuffer) - self._offset:
chunk = self.read(n)
self._offset -= len(chunk)
# Return up to 512 bytes to reduce allocation overhead for tight loops.
return self._readbuffer[self._offset: self._offset + 512]
def readable(self):
return True
def read(self, n=-1):
"""Read and return up to n bytes.
If the argument is omitted, None, or negative, data is read and returned until EOF is reached..
"""
buf = ''
if n is None:
n = -1
while True:
if n < 0:
data = self.read1(n)
elif n > len(buf):
data = self.read1(n - len(buf))
else:
return buf
if len(data) == 0:
return buf
buf += data
def _update_crc(self, newdata, eof):
# Update the CRC using the given data.
if self._expected_crc is None:
# No need to compute the CRC if we don't have a reference value
return
self._running_crc = crc32(newdata, self._running_crc) & 0xffffffff
# Check the CRC if we're at the end of the file
if eof and self._running_crc != self._expected_crc:
raise BadZipfile("Bad CRC-32 for file %r" % self.name)
def read1(self, n):
"""Read up to n bytes with at most one read() system call."""
# Simplify algorithm (branching) by transforming negative n to large n.
if n < 0 or n is None:
n = self.MAX_N
# Bytes available in read buffer.
len_readbuffer = len(self._readbuffer) - self._offset
# Read from file.
if self._compress_left > 0 and n > len_readbuffer + len(self._unconsumed):
nbytes = n - len_readbuffer - len(self._unconsumed)
nbytes = max(nbytes, self.MIN_READ_SIZE)
nbytes = min(nbytes, self._compress_left)
data = self._fileobj.read(nbytes)
self._compress_left -= len(data)
if data and self._decrypter is not None:
data = ''.join(map(self._decrypter, data))
if self._compress_type == ZIP_STORED:
self._update_crc(data, eof=(self._compress_left==0))
self._readbuffer = self._readbuffer[self._offset:] + data
self._offset = 0
else:
# Prepare deflated bytes for decompression.
self._unconsumed += data
# Handle unconsumed data.
if (len(self._unconsumed) > 0 and n > len_readbuffer and
self._compress_type == ZIP_DEFLATED):
data = self._decompressor.decompress(
self._unconsumed,
max(n - len_readbuffer, self.MIN_READ_SIZE)
)
self._unconsumed = self._decompressor.unconsumed_tail
eof = len(self._unconsumed) == 0 and self._compress_left == 0
if eof:
data += self._decompressor.flush()
self._update_crc(data, eof=eof)
self._readbuffer = self._readbuffer[self._offset:] + data
self._offset = 0
# Read from buffer.
data = self._readbuffer[self._offset: self._offset + n]
self._offset += len(data)
return data
def close(self):
try :
if self._close_fileobj:
self._fileobj.close()
finally:
super(ZipExtFile, self).close()
class ZipFile(object):
""" Class with methods to open, read, write, close, list zip files.
z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=False)
file: Either the path to the file, or a file-like object.
If it is a path, the file will be opened and closed by ZipFile.
mode: The mode can be either read "r", write "w" or append "a".
compression: ZIP_STORED (no compression) or ZIP_DEFLATED (requires zlib).
allowZip64: if True ZipFile will create files with ZIP64 extensions when
needed, otherwise it will raise an exception when this would
be necessary.
"""
fp = None # Set here since __del__ checks it
def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False):
"""Open the ZIP file with mode read "r", write "w" or append "a"."""
if mode not in ("r", "w", "a"):
raise RuntimeError('ZipFile() requires mode "r", "w", or "a"')
if compression == ZIP_STORED:
pass
elif compression == ZIP_DEFLATED:
if not zlib:
raise RuntimeError,\
"Compression requires the (missing) zlib module"
else:
raise RuntimeError, "That compression method is not supported"
self._allowZip64 = allowZip64
self._didModify = False
self.debug = 0 # Level of printing: 0 through 3
self.NameToInfo = {} # Find file info given name
self.filelist = [] # List of ZipInfo instances for archive
self.compression = compression # Method of compression
self.mode = key = mode.replace('b', '')[0]
self.pwd = None
self._comment = ''
# Check if we were passed a file-like object
if isinstance(file, basestring):
self._filePassed = 0
self.filename = file
modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'}
try:
self.fp = open(file, modeDict[mode])
except IOError:
if mode == 'a':
mode = key = 'w'
self.fp = open(file, modeDict[mode])
else:
raise
else:
self._filePassed = 1
self.fp = file
self.filename = getattr(file, 'name', None)
try:
if key == 'r':
self._RealGetContents()
elif key == 'w':
# set the modified flag so central directory gets written
# even if no files are added to the archive
self._didModify = True
elif key == 'a':
try:
# See if file is a zip file
self._RealGetContents()
# seek to start of directory and overwrite
self.fp.seek(self.start_dir, 0)
except BadZipfile:
# file is not a zip file, just append
self.fp.seek(0, 2)
# set the modified flag so central directory gets written
# even if no files are added to the archive
self._didModify = True
else:
raise RuntimeError('Mode must be "r", "w" or "a"')
except:
fp = self.fp
self.fp = None
if not self._filePassed:
fp.close()
raise
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def _RealGetContents(self):
"""Read in the table of contents for the ZIP file."""
fp = self.fp
try:
endrec = _EndRecData(fp)
except IOError:
raise BadZipfile("File is not a zip file")
if not endrec:
raise BadZipfile, "File is not a zip file"
if self.debug > 1:
print endrec
size_cd = endrec[_ECD_SIZE] # bytes in central directory
offset_cd = endrec[_ECD_OFFSET] # offset of central directory
self._comment = endrec[_ECD_COMMENT] # archive comment
# "concat" is zero, unless zip was concatenated to another file
concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
if endrec[_ECD_SIGNATURE] == stringEndArchive64:
# If Zip64 extension structures are present, account for them
concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)
if self.debug > 2:
inferred = concat + offset_cd
print "given, inferred, offset", offset_cd, inferred, concat
# self.start_dir: Position of start of central directory
self.start_dir = offset_cd + concat
fp.seek(self.start_dir, 0)
data = fp.read(size_cd)
fp = cStringIO.StringIO(data)
total = 0
while total < size_cd:
centdir = fp.read(sizeCentralDir)
if len(centdir) != sizeCentralDir:
raise BadZipfile("Truncated central directory")
centdir = struct.unpack(structCentralDir, centdir)
if centdir[_CD_SIGNATURE] != stringCentralDir:
raise BadZipfile("Bad magic number for central directory")
if self.debug > 2:
print centdir
filename = fp.read(centdir[_CD_FILENAME_LENGTH])
# Create ZipInfo instance to store file information
x = ZipInfo(filename)
x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
(x.create_version, x.create_system, x.extract_version, x.reserved,
x.flag_bits, x.compress_type, t, d,
x.CRC, x.compress_size, x.file_size) = centdir[1:12]
x.volume, x.internal_attr, x.external_attr = centdir[15:18]
# Convert date/time code to (year, month, day, hour, min, sec)
x._raw_time = t
x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )
x._decodeExtra()
x.header_offset = x.header_offset + concat
x.filename = x._decodeFilename()
self.filelist.append(x)
self.NameToInfo[x.filename] = x
# update total bytes read from central directory
total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
+ centdir[_CD_EXTRA_FIELD_LENGTH]
+ centdir[_CD_COMMENT_LENGTH])
if self.debug > 2:
print "total", total
def namelist(self):
"""Return a list of file names in the archive."""
l = []
for data in self.filelist:
l.append(data.filename)
return l
def infolist(self):
"""Return a list of class ZipInfo instances for files in the
archive."""
return self.filelist
def printdir(self):
"""Print a table of contents for the zip file."""
print "%-46s %19s %12s" % ("File Name", "Modified ", "Size")
for zinfo in self.filelist:
date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size)
def testzip(self):
"""Read all the files and check the CRC."""
chunk_size = 2 ** 20
for zinfo in self.filelist:
try:
# Read by chunks, to avoid an OverflowError or a
# MemoryError with very large embedded files.
with self.open(zinfo.filename, "r") as f:
while f.read(chunk_size): # Check CRC-32
pass
except BadZipfile:
return zinfo.filename
def getinfo(self, name):
"""Return the instance of ZipInfo given 'name'."""
info = self.NameToInfo.get(name)
if info is None:
raise KeyError(
'There is no item named %r in the archive' % name)
return info
def setpassword(self, pwd):
"""Set default password for encrypted files."""
self.pwd = pwd
@property
def comment(self):
"""The comment text associated with the ZIP file."""
return self._comment
@comment.setter
def comment(self, comment):
# check for valid comment length
if len(comment) >= ZIP_MAX_COMMENT:
if self.debug:
print('Archive comment is too long; truncating to %d bytes'
% ZIP_MAX_COMMENT)
comment = comment[:ZIP_MAX_COMMENT]
self._comment = comment
self._didModify = True
def read(self, name, pwd=None):
"""Return file bytes (as a string) for name."""
return self.open(name, "r", pwd).read()
def open(self, name, mode="r", pwd=None):
"""Return file-like object for 'name'."""
if mode not in ("r", "U", "rU"):
raise RuntimeError, 'open() requires mode "r", "U", or "rU"'
if not self.fp:
raise RuntimeError, \
"Attempt to read ZIP archive that was already closed"
# Only open a new file for instances where we were not
# given a file object in the constructor
if self._filePassed:
zef_file = self.fp
should_close = False
else:
zef_file = open(self.filename, 'rb')
should_close = True
try:
# Make sure we have an info object
if isinstance(name, ZipInfo):
# 'name' is already an info object
zinfo = name
else:
# Get info object for name
zinfo = self.getinfo(name)
zef_file.seek(zinfo.header_offset, 0)
# Skip the file header:
fheader = zef_file.read(sizeFileHeader)
if len(fheader) != sizeFileHeader:
raise BadZipfile("Truncated file header")
fheader = struct.unpack(structFileHeader, fheader)
if fheader[_FH_SIGNATURE] != stringFileHeader:
raise BadZipfile("Bad magic number for file header")
fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
if fheader[_FH_EXTRA_FIELD_LENGTH]:
zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])
if fname != zinfo.orig_filename:
raise BadZipfile, \
'File name in directory "%s" and header "%s" differ.' % (
zinfo.orig_filename, fname)
# check for encrypted flag & handle password
is_encrypted = zinfo.flag_bits & 0x1
zd = None
if is_encrypted:
if not pwd:
pwd = self.pwd
if not pwd:
raise RuntimeError, "File %s is encrypted, " \
"password required for extraction" % name
zd = _ZipDecrypter(pwd)
# The first 12 bytes in the cypher stream is an encryption header
# used to strengthen the algorithm. The first 11 bytes are
# completely random, while the 12th contains the MSB of the CRC,
# or the MSB of the file time depending on the header type
# and is used to check the correctness of the password.
bytes = zef_file.read(12)
h = map(zd, bytes[0:12])
if zinfo.flag_bits & 0x8:
# compare against the file type from extended local headers
check_byte = (zinfo._raw_time >> 8) & 0xff
else:
# compare against the CRC otherwise
check_byte = (zinfo.CRC >> 24) & 0xff
if ord(h[11]) != check_byte:
raise RuntimeError("Bad password for file", name)
return ZipExtFile(zef_file, mode, zinfo, zd,
close_fileobj=should_close)
except:
if should_close:
zef_file.close()
raise
def extract(self, member, path=None, pwd=None):
"""Extract a member from the archive to the current working directory,
using its full name. Its file information is extracted as accurately
as possible. `member' may be a filename or a ZipInfo object. You can
specify a different directory using `path'.
"""
if not isinstance(member, ZipInfo):
member = self.getinfo(member)
if path is None:
path = os.getcwd()
return self._extract_member(member, path, pwd)
def extractall(self, path=None, members=None, pwd=None):
"""Extract all members from the archive to the current working
directory. `path' specifies a different directory to extract to.
`members' is optional and must be a subset of the list returned
by namelist().
"""
if members is None:
members = self.namelist()
for zipinfo in members:
self.extract(zipinfo, path, pwd)
def _extract_member(self, member, targetpath, pwd):
"""Extract the ZipInfo object 'member' to a physical
file on the path targetpath.
"""
# build the destination pathname, replacing
# forward slashes to platform specific separators.
arcname = member.filename.replace('/', os.path.sep)
if os.path.altsep:
arcname = arcname.replace(os.path.altsep, os.path.sep)
# interpret absolute pathname as relative, remove drive letter or
# UNC path, redundant separators, "." and ".." components.
arcname = os.path.splitdrive(arcname)[1]
arcname = os.path.sep.join(x for x in arcname.split(os.path.sep)
if x not in ('', os.path.curdir, os.path.pardir))
if os.path.sep == '\\':
# filter illegal characters on Windows
illegal = ':<>|"?*'
if isinstance(arcname, unicode):
table = {ord(c): ord('_') for c in illegal}
else:
table = string.maketrans(illegal, '_' * len(illegal))
arcname = arcname.translate(table)
# remove trailing dots
arcname = (x.rstrip('.') for x in arcname.split(os.path.sep))
arcname = os.path.sep.join(x for x in arcname if x)
targetpath = os.path.join(targetpath, arcname)
targetpath = os.path.normpath(targetpath)
# Create all upper directories if necessary.
upperdirs = os.path.dirname(targetpath)
if upperdirs and not os.path.exists(upperdirs):
os.makedirs(upperdirs)
if member.filename[-1] == '/':
if not os.path.isdir(targetpath):
os.mkdir(targetpath)
return targetpath
with self.open(member, pwd=pwd) as source, \
file(targetpath, "wb") as target:
shutil.copyfileobj(source, target)
return targetpath
def _writecheck(self, zinfo):
"""Check for errors before writing a file to the archive."""
if zinfo.filename in self.NameToInfo:
if self.debug: # Warning for duplicate names
print "Duplicate name:", zinfo.filename
if self.mode not in ("w", "a"):
raise RuntimeError, 'write() requires mode "w" or "a"'
if not self.fp:
raise RuntimeError, \
"Attempt to write ZIP archive that was already closed"
if zinfo.compress_type == ZIP_DEFLATED and not zlib:
raise RuntimeError, \
"Compression requires the (missing) zlib module"
if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED):
raise RuntimeError, \
"That compression method is not supported"
if zinfo.file_size > ZIP64_LIMIT:
if not self._allowZip64:
raise LargeZipFile("Filesize would require ZIP64 extensions")
if zinfo.header_offset > ZIP64_LIMIT:
if not self._allowZip64:
raise LargeZipFile("Zipfile size would require ZIP64 extensions")
def write(self, filename, arcname=None, compress_type=None):
"""Put the bytes from filename into the archive under the name
arcname."""
if not self.fp:
raise RuntimeError(
"Attempt to write to ZIP archive that was already closed")
st = os.stat(filename)
isdir = stat.S_ISDIR(st.st_mode)
mtime = time.localtime(st.st_mtime)
date_time = mtime[0:6]
# Create ZipInfo instance to store file information
if arcname is None:
arcname = filename
arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
while arcname[0] in (os.sep, os.altsep):
arcname = arcname[1:]
if isdir:
arcname += '/'
zinfo = ZipInfo(arcname, date_time)
zinfo.external_attr = (st[0] & 0xFFFF) << 16L # Unix attributes
if compress_type is None:
zinfo.compress_type = self.compression
else:
zinfo.compress_type = compress_type
zinfo.file_size = st.st_size
zinfo.flag_bits = 0x00
zinfo.header_offset = self.fp.tell() # Start of header bytes
self._writecheck(zinfo)
self._didModify = True
if isdir:
zinfo.file_size = 0
zinfo.compress_size = 0
zinfo.CRC = 0
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
self.fp.write(zinfo.FileHeader(False))
return
with open(filename, "rb") as fp:
# Must overwrite CRC and sizes with correct data later
zinfo.CRC = CRC = 0
zinfo.compress_size = compress_size = 0
# Compressed size can be larger than uncompressed size
zip64 = self._allowZip64 and \
zinfo.file_size * 1.05 > ZIP64_LIMIT
self.fp.write(zinfo.FileHeader(zip64))
if zinfo.compress_type == ZIP_DEFLATED:
cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
zlib.DEFLATED, -15)
else:
cmpr = None
file_size = 0
while 1:
buf = fp.read(1024 * 8)
if not buf:
break
file_size = file_size + len(buf)
CRC = crc32(buf, CRC) & 0xffffffff
if cmpr:
buf = cmpr.compress(buf)
compress_size = compress_size + len(buf)
self.fp.write(buf)
if cmpr:
buf = cmpr.flush()
compress_size = compress_size + len(buf)
self.fp.write(buf)
zinfo.compress_size = compress_size
else:
zinfo.compress_size = file_size
zinfo.CRC = CRC
zinfo.file_size = file_size
if not zip64 and self._allowZip64:
if file_size > ZIP64_LIMIT:
raise RuntimeError('File size has increased during compressing')
if compress_size > ZIP64_LIMIT:
raise RuntimeError('Compressed size larger than uncompressed size')
# Seek backwards and write file header (which will now include
# correct CRC and file sizes)
position = self.fp.tell() # Preserve current position in file
self.fp.seek(zinfo.header_offset, 0)
self.fp.write(zinfo.FileHeader(zip64))
self.fp.seek(position, 0)
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
def writestr(self, zinfo_or_arcname, bytes, compress_type=None):
"""Write a file into the archive. The contents is the string
'bytes'. 'zinfo_or_arcname' is either a ZipInfo instance or
the name of the file in the archive."""
if not isinstance(zinfo_or_arcname, ZipInfo):
zinfo = ZipInfo(filename=zinfo_or_arcname,
date_time=time.localtime(time.time())[:6])
zinfo.compress_type = self.compression
zinfo.external_attr = 0600 << 16
else:
zinfo = zinfo_or_arcname
if not self.fp:
raise RuntimeError(
"Attempt to write to ZIP archive that was already closed")
if compress_type is not None:
zinfo.compress_type = compress_type
zinfo.file_size = len(bytes) # Uncompressed size
zinfo.header_offset = self.fp.tell() # Start of header bytes
self._writecheck(zinfo)
self._didModify = True
zinfo.CRC = crc32(bytes) & 0xffffffff # CRC-32 checksum
if zinfo.compress_type == ZIP_DEFLATED:
co = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
zlib.DEFLATED, -15)
bytes = co.compress(bytes) + co.flush()
zinfo.compress_size = len(bytes) # Compressed size
else:
zinfo.compress_size = zinfo.file_size
zip64 = zinfo.file_size > ZIP64_LIMIT or \
zinfo.compress_size > ZIP64_LIMIT
if zip64 and not self._allowZip64:
raise LargeZipFile("Filesize would require ZIP64 extensions")
self.fp.write(zinfo.FileHeader(zip64))
self.fp.write(bytes)
if zinfo.flag_bits & 0x08:
# Write CRC and file sizes after the file data
fmt = '<LQQ' if zip64 else '<LLL'
self.fp.write(struct.pack(fmt, zinfo.CRC, zinfo.compress_size,
zinfo.file_size))
self.fp.flush()
self.filelist.append(zinfo)
self.NameToInfo[zinfo.filename] = zinfo
def __del__(self):
"""Call the "close()" method in case the user forgot."""
self.close()
def close(self):
"""Close the file, and for mode "w" and "a" write the ending
records."""
if self.fp is None:
return
try:
if self.mode in ("w", "a") and self._didModify: # write ending records
count = 0
pos1 = self.fp.tell()
for zinfo in self.filelist: # write central directory
count = count + 1
dt = zinfo.date_time
dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
extra = []
if zinfo.file_size > ZIP64_LIMIT \
or zinfo.compress_size > ZIP64_LIMIT:
extra.append(zinfo.file_size)
extra.append(zinfo.compress_size)
file_size = 0xffffffff
compress_size = 0xffffffff
else:
file_size = zinfo.file_size
compress_size = zinfo.compress_size
if zinfo.header_offset > ZIP64_LIMIT:
extra.append(zinfo.header_offset)
header_offset = 0xffffffffL
else:
header_offset = zinfo.header_offset
extra_data = zinfo.extra
if extra:
# Append a ZIP64 field to the extra's
extra_data = struct.pack(
'<HH' + 'Q'*len(extra),
1, 8*len(extra), *extra) + extra_data
extract_version = max(45, zinfo.extract_version)
create_version = max(45, zinfo.create_version)
else:
extract_version = zinfo.extract_version
create_version = zinfo.create_version
try:
filename, flag_bits = zinfo._encodeFilenameFlags()
centdir = struct.pack(structCentralDir,
stringCentralDir, create_version,
zinfo.create_system, extract_version, zinfo.reserved,
flag_bits, zinfo.compress_type, dostime, dosdate,
zinfo.CRC, compress_size, file_size,
len(filename), len(extra_data), len(zinfo.comment),
0, zinfo.internal_attr, zinfo.external_attr,
header_offset)
except DeprecationWarning:
print >>sys.stderr, (structCentralDir,
stringCentralDir, create_version,
zinfo.create_system, extract_version, zinfo.reserved,
zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
zinfo.CRC, compress_size, file_size,
len(zinfo.filename), len(extra_data), len(zinfo.comment),
0, zinfo.internal_attr, zinfo.external_attr,
header_offset)
raise
self.fp.write(centdir)
self.fp.write(filename)
self.fp.write(extra_data)
self.fp.write(zinfo.comment)
pos2 = self.fp.tell()
# Write end-of-zip-archive record
centDirCount = count
centDirSize = pos2 - pos1
centDirOffset = pos1
if (centDirCount >= ZIP_FILECOUNT_LIMIT or
centDirOffset > ZIP64_LIMIT or
centDirSize > ZIP64_LIMIT):
# Need to write the ZIP64 end-of-archive records
zip64endrec = struct.pack(
structEndArchive64, stringEndArchive64,
44, 45, 45, 0, 0, centDirCount, centDirCount,
centDirSize, centDirOffset)
self.fp.write(zip64endrec)
zip64locrec = struct.pack(
structEndArchive64Locator,
stringEndArchive64Locator, 0, pos2, 1)
self.fp.write(zip64locrec)
centDirCount = min(centDirCount, 0xFFFF)
centDirSize = min(centDirSize, 0xFFFFFFFF)
centDirOffset = min(centDirOffset, 0xFFFFFFFF)
endrec = struct.pack(structEndArchive, stringEndArchive,
0, 0, centDirCount, centDirCount,
centDirSize, centDirOffset, len(self._comment))
self.fp.write(endrec)
self.fp.write(self._comment)
self.fp.flush()
finally:
fp = self.fp
self.fp = None
if not self._filePassed:
fp.close()
class PyZipFile(ZipFile):
"""Class to create ZIP archives with Python library files and packages."""
def writepy(self, pathname, basename = ""):
"""Add all files from "pathname" to the ZIP archive.
If pathname is a package directory, search the directory and
all package subdirectories recursively for all *.py and enter
the modules into the archive. If pathname is a plain
directory, listdir *.py and enter all modules. Else, pathname
must be a Python *.py file and the module will be put into the
archive. Added modules are always module.pyo or module.pyc.
This method will compile the module.py into module.pyc if
necessary.
"""
dir, name = os.path.split(pathname)
if os.path.isdir(pathname):
initname = os.path.join(pathname, "__init__.py")
if os.path.isfile(initname):
# This is a package directory, add it
if basename:
basename = "%s/%s" % (basename, name)
else:
basename = name
if self.debug:
print "Adding package in", pathname, "as", basename
fname, arcname = self._get_codename(initname[0:-3], basename)
if self.debug:
print "Adding", arcname
self.write(fname, arcname)
dirlist = os.listdir(pathname)
dirlist.remove("__init__.py")
# Add all *.py files and package subdirectories
for filename in dirlist:
path = os.path.join(pathname, filename)
root, ext = os.path.splitext(filename)
if os.path.isdir(path):
if os.path.isfile(os.path.join(path, "__init__.py")):
# This is a package directory, add it
self.writepy(path, basename) # Recursive call
elif ext == ".py":
fname, arcname = self._get_codename(path[0:-3],
basename)
if self.debug:
print "Adding", arcname
self.write(fname, arcname)
else:
# This is NOT a package directory, add its files at top level
if self.debug:
print "Adding files from directory", pathname
for filename in os.listdir(pathname):
path = os.path.join(pathname, filename)
root, ext = os.path.splitext(filename)
if ext == ".py":
fname, arcname = self._get_codename(path[0:-3],
basename)
if self.debug:
print "Adding", arcname
self.write(fname, arcname)
else:
if pathname[-3:] != ".py":
raise RuntimeError, \
'Files added with writepy() must end with ".py"'
fname, arcname = self._get_codename(pathname[0:-3], basename)
if self.debug:
print "Adding file", arcname
self.write(fname, arcname)
def _get_codename(self, pathname, basename):
"""Return (filename, archivename) for the path.
Given a module name path, return the correct file path and
archive name, compiling if necessary. For example, given
/python/lib/string, return (/python/lib/string.pyc, string).
"""
file_py = pathname + ".py"
file_pyc = pathname + ".pyc"
file_pyo = pathname + ".pyo"
if os.path.isfile(file_pyo) and \
os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime:
fname = file_pyo # Use .pyo file
elif not os.path.isfile(file_pyc) or \
os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime:
import py_compile
if self.debug:
print "Compiling", file_py
try:
py_compile.compile(file_py, file_pyc, None, True)
except py_compile.PyCompileError,err:
print err.msg
fname = file_pyc
else:
fname = file_pyc
archivename = os.path.split(fname)[1]
if basename:
archivename = "%s/%s" % (basename, archivename)
return (fname, archivename)
def main(args = None):
import textwrap
USAGE=textwrap.dedent("""\
Usage:
zipfile.py -l zipfile.zip # Show listing of a zipfile
zipfile.py -t zipfile.zip # Test if a zipfile is valid
zipfile.py -e zipfile.zip target # Extract zipfile into target dir
zipfile.py -c zipfile.zip src ... # Create zipfile from sources
""")
if args is None:
args = sys.argv[1:]
if not args or args[0] not in ('-l', '-c', '-e', '-t'):
print USAGE
sys.exit(1)
if args[0] == '-l':
if len(args) != 2:
print USAGE
sys.exit(1)
with ZipFile(args[1], 'r') as zf:
zf.printdir()
elif args[0] == '-t':
if len(args) != 2:
print USAGE
sys.exit(1)
with ZipFile(args[1], 'r') as zf:
badfile = zf.testzip()
if badfile:
print("The following enclosed file is corrupted: {!r}".format(badfile))
print "Done testing"
elif args[0] == '-e':
if len(args) != 3:
print USAGE
sys.exit(1)
with ZipFile(args[1], 'r') as zf:
out = args[2]
for path in zf.namelist():
if path.startswith('./'):
tgt = os.path.join(out, path[2:])
else:
tgt = os.path.join(out, path)
tgtdir = os.path.dirname(tgt)
if not os.path.exists(tgtdir):
os.makedirs(tgtdir)
with open(tgt, 'wb') as fp:
fp.write(zf.read(path))
elif args[0] == '-c':
if len(args) < 3:
print USAGE
sys.exit(1)
def addToZip(zf, path, zippath):
if os.path.isfile(path):
zf.write(path, zippath, ZIP_DEFLATED)
elif os.path.isdir(path):
for nm in os.listdir(path):
addToZip(zf,
os.path.join(path, nm), os.path.join(zippath, nm))
# else: ignore
with ZipFile(args[1], 'w', allowZip64=True) as zf:
for src in args[2:]:
addToZip(zf, src, os.path.basename(src))
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
eb5c2010387158948bc1e2996332dbd8a800d330 | 17bdb906c2c0886d6451b55ac84633d416d5c50a | /chapter_one/test_list.py | 28308ca52a00ce387a5426c39769e05cde52ba57 | [
"MIT"
]
| permissive | vyahello/unittest-bootcamp | 10053994dc834720b76df90a37d4756a6f1437c7 | af24c5c00032ab7265a0c00da5955a26d25dff33 | refs/heads/master | 2021-07-17T03:42:30.058662 | 2020-05-09T22:21:17 | 2020-05-09T22:21:17 | 139,250,120 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,026 | py | from typing import List
from unittest import TestCase
class TestListMethods(TestCase):
""" This test case is verifying basic list data type methods. """
def test_append(self) -> None:
""" Test checks if given elements are adding to array """
flist: List[...] = []
for i in range(1, 4):
flist.append(i)
self.assertEqual(flist, [1, 2, 3])
def test_extend(self) -> None:
""" Test checks if given elements extends an array """
flist: List[int] = [1, 2, 3]
flist.extend(range(4, 6))
self.assertEqual(flist[-2:], [4, 5])
def test_insert(self) -> None:
""" Test checks if given element is inserted into array """
flist: List[int] = [1, 2, 3]
flist.insert(3, 4)
self.assertEqual(flist, [1, 2, 3, 4])
def test_pop(self) -> None:
""" Test checks if given element is deleted from an array """
flist: List[int] = [1, 2, 3]
flist.pop(1)
self.assertEqual(flist, [1, 3])
| [
"[email protected]"
]
| |
86047464007e688dadd3b3c27012b467b686a566 | bb150497a05203a718fb3630941231be9e3b6a32 | /framework/api/optimizer/reader.py | ebbceb27d15008ded7a2c8cd080b7547fb67cd48 | []
| no_license | PaddlePaddle/PaddleTest | 4fb3dec677f0f13f7f1003fd30df748bf0b5940d | bd3790ce72a2a26611b5eda3901651b5a809348f | refs/heads/develop | 2023-09-06T04:23:39.181903 | 2023-09-04T11:17:50 | 2023-09-04T11:17:50 | 383,138,186 | 42 | 312 | null | 2023-09-13T11:13:35 | 2021-07-05T12:44:59 | Python | UTF-8 | Python | false | false | 222 | py | #!/bin/env python
# -*- coding: utf-8 -*-
# encoding=utf-8 vi:ts=4:sw=4:expandtab:ft=python
"""
reader
"""
import numpy as np
reader = np.random.random(size=[1, 1, 10])
reader_img = np.random.random(size=[1, 3, 16, 16])
| [
"[email protected]"
]
| |
5679e7a79c5e5694fc959140e9c696630b307830 | 2a6dbece45c391e6dc3d28f04c7c02b18d17b24b | /myapi/views.py | e7798b0b48cf6abbd44a3179bd4bbeac4e5ba3e6 | []
| no_license | KassadReda/Rest_API_Blender | 3b1e43b2a488541d8d8a9aa9f95a39c4e70c34ae | ee9e5216462902a5cfee98a5502b4580b3af12e6 | refs/heads/main | 2023-04-17T06:35:49.204734 | 2021-05-03T22:45:52 | 2021-05-03T22:45:52 | 364,080,036 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 659 | py | """
class BlenderViewSet
this class define how to display a model.
by Reda
"""
# coding=utf-8
from django.shortcuts import render
from rest_framework import viewsets
from .serializers import BlenderModelSerializer
from .models import BlenderModel
# Create your views here.
class BlenderViewSet(viewsets.ModelViewSet) :
queryset = BlenderModel.objects.all().order_by('name')
serializer_class = BlenderModelSerializer
#serialise the uploaded file
def file(self, request,pk=None) :
blenderModel= self.get_object()
file = blenderModel.file
serializer = BlenderModelSerializer(file, data=request.data) | [
"="
]
| = |
d7e35795109593422c728043090178b3c899e3ec | de24f83a5e3768a2638ebcf13cbe717e75740168 | /moodledata/vpl_data/65/usersdata/159/31031/submittedfiles/investimento.py | 5b61ed3d81f3cbef16ed56e9d8cb401d0f95499a | []
| no_license | rafaelperazzo/programacao-web | 95643423a35c44613b0f64bed05bd34780fe2436 | 170dd5440afb9ee68a973f3de13a99aa4c735d79 | refs/heads/master | 2021-01-12T14:06:25.773146 | 2017-12-22T16:05:45 | 2017-12-22T16:05:45 | 69,566,344 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 385 | py | # -*- coding: utf-8 -*-
from __future__ import division
a=float(input('Valor do investimento'))
x=float(input(''))
b=a+(x*a)
print('%.2f' %b)
c=b+(x*b)
print('%.2f' %c)
d=c+(x*c)
print('%.2f' %d)
e=d+(x*d)
print('%.2f' %d)
f=e+(x*e)
print('%.2f' %f)
g=f+(x*f)
print('%.2f' %g)
h=g+(x*g)
print('%.2f' %h)
i=h+(x*h)
print('%.2f' %i)
j=i+(x*i)
print('%.2f' %j)
l=j+(x*j)
print('%.2f' %l) | [
"[email protected]"
]
| |
4062480923890be48ce91948af01567a73be9bed | 6573a45c4389688666821621c26a5a95a0765e4d | /archived_envs/20190625_100746/bin/google-oauthlib-tool | ed107688f59c3ccd9c6f360932ed99b926c0aff3 | []
| no_license | ilhmndn/Warehouse-Management-System-With-Frappe | 66a41be2286dbdb556ab51a4788fc42987d6ed2e | bd9864c5a04a6e2f2f625a8755fba3df4b6409be | refs/heads/master | 2022-10-23T11:13:57.810948 | 2019-07-02T05:18:19 | 2019-07-02T05:18:19 | 194,467,571 | 2 | 2 | null | 2022-10-15T16:16:10 | 2019-06-30T02:40:05 | Python | UTF-8 | Python | false | false | 264 | #!/home/ilhmndn/frappe-training/env/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from google_auth_oauthlib.tool.__main__ import main
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(main())
| [
"[email protected]"
]
| ||
c82d3a37fc944204f5db277b2c98979ab8efef44 | 76d4430567b68151df1855f45ea4408f9bebe025 | /test/functional/test_framework/coverage.py | 7f4c1c66546c66896b4314f57e91dcf935f48336 | [
"MIT"
]
| permissive | MicroBitcoinOrg/MicroBitcoin | f761b2ff04bdcb650d7c0ddbef431ef95cd69541 | db7911968445606bf8899903322d5d818d393d88 | refs/heads/master | 2022-12-27T10:04:21.040945 | 2022-12-18T05:05:17 | 2022-12-18T05:05:17 | 132,959,214 | 21 | 33 | MIT | 2020-06-12T04:38:45 | 2018-05-10T22:07:51 | C++ | UTF-8 | Python | false | false | 3,386 | py | #!/usr/bin/env python3
# Copyright (c) 2015-2018 The MicroBitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Utilities for doing coverage analysis on the RPC interface.
Provides a way to track which RPC commands are exercised during
testing.
"""
import os
REFERENCE_FILENAME = 'rpc_interface.txt'
class AuthServiceProxyWrapper():
"""
An object that wraps AuthServiceProxy to record specific RPC calls.
"""
def __init__(self, auth_service_proxy_instance, coverage_logfile=None):
"""
Kwargs:
auth_service_proxy_instance (AuthServiceProxy): the instance
being wrapped.
coverage_logfile (str): if specified, write each service_name
out to a file when called.
"""
self.auth_service_proxy_instance = auth_service_proxy_instance
self.coverage_logfile = coverage_logfile
def __getattr__(self, name):
return_val = getattr(self.auth_service_proxy_instance, name)
if not isinstance(return_val, type(self.auth_service_proxy_instance)):
# If proxy getattr returned an unwrapped value, do the same here.
return return_val
return AuthServiceProxyWrapper(return_val, self.coverage_logfile)
def __call__(self, *args, **kwargs):
"""
Delegates to AuthServiceProxy, then writes the particular RPC method
called to a file.
"""
return_val = self.auth_service_proxy_instance.__call__(*args, **kwargs)
self._log_call()
return return_val
def _log_call(self):
rpc_method = self.auth_service_proxy_instance._service_name
if self.coverage_logfile:
with open(self.coverage_logfile, 'a+', encoding='utf8') as f:
f.write("%s\n" % rpc_method)
def __truediv__(self, relative_uri):
return AuthServiceProxyWrapper(self.auth_service_proxy_instance / relative_uri,
self.coverage_logfile)
def get_request(self, *args, **kwargs):
self._log_call()
return self.auth_service_proxy_instance.get_request(*args, **kwargs)
def get_filename(dirname, n_node):
"""
Get a filename unique to the test process ID and node.
This file will contain a list of RPC commands covered.
"""
pid = str(os.getpid())
return os.path.join(
dirname, "coverage.pid%s.node%s.txt" % (pid, str(n_node)))
def write_all_rpc_commands(dirname, node):
"""
Write out a list of all RPC functions available in `micro-cli` for
coverage comparison. This will only happen once per coverage
directory.
Args:
dirname (str): temporary test dir
node (AuthServiceProxy): client
Returns:
bool. if the RPC interface file was written.
"""
filename = os.path.join(dirname, REFERENCE_FILENAME)
if os.path.isfile(filename):
return False
help_output = node.help().split('\n')
commands = set()
for line in help_output:
line = line.strip()
# Ignore blanks and headers
if line and not line.startswith('='):
commands.add("%s\n" % line.split()[0])
with open(filename, 'w', encoding='utf8') as f:
f.writelines(list(commands))
return True
| [
"[email protected]"
]
| |
19c37356466ad944b8cb042d417054ce008b1f64 | 17bdf40c2bbdf3dd09bf0fa683d471f4e07159fd | /asymmetric_jwt_auth/apps.py | be5200a58c04ac73f83aa2863dfef64592b567c1 | [
"ISC"
]
| permissive | chiranthsiddappa/asymmetric_jwt_auth | c8c9f0a11b36994b72c87f2d834189df94ef6fee | a95d28ba61e38395da483243a6f536bf25a41e74 | refs/heads/master | 2020-12-25T17:56:18.972703 | 2016-05-24T05:16:06 | 2016-05-24T05:16:06 | 59,540,392 | 0 | 0 | null | 2016-05-24T04:30:18 | 2016-05-24T04:30:18 | null | UTF-8 | Python | false | false | 151 | py | from django.apps import AppConfig
class JWTAuthConfig(AppConfig):
name = 'asymmetric_jwt_auth'
verbose_name = "Asymmetric Key Authentication"
| [
"[email protected]"
]
| |
71986ad624234b73af60e4a768b4a74d431e3313 | 3fadc3b9353d1745dd3af9fc7fe7b2c4064a51e0 | /manage.py | be7718735b01234d68671099ff2cf955c2fe208b | []
| no_license | Ashish2831/Class-Based-CRUD-Project | 16a36ad9c4d615d0e6e517696c12312134953319 | c6445cfe298197e4050cf5f31ac332ff383b1237 | refs/heads/main | 2023-04-21T03:33:19.282851 | 2021-05-08T11:10:00 | 2021-05-08T11:10:00 | 365,490,100 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 680 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
"""Run administrative tasks."""
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'Class_Based_CRUD_Project.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"[email protected]"
]
| |
3018d83ac2e45b567d543161d4efa6c95141ef00 | f45cc0049cd6c3a2b25de0e9bbc80c25c113a356 | /LeetCode/石子游戏/5611. 石子游戏 VI.py | ea53be09b91192af8790730394fd8bcd26bf5197 | []
| no_license | yiming1012/MyLeetCode | 4a387d024969bfd1cdccd4f581051a6e4104891a | e43ee86c5a8cdb808da09b4b6138e10275abadb5 | refs/heads/master | 2023-06-17T06:43:13.854862 | 2021-07-15T08:54:07 | 2021-07-15T08:54:07 | 261,663,876 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,385 | py | """
5611. 石子游戏 VI
Alice 和 Bob 轮流玩一个游戏,Alice 先手。
一堆石子里总共有 n 个石子,轮到某个玩家时,他可以 移出 一个石子并得到这个石子的价值。Alice 和 Bob 对石子价值有 不一样的的评判标准 。
给你两个长度为 n 的整数数组 aliceValues 和 bobValues 。aliceValues[i] 和 bobValues[i] 分别表示 Alice 和 Bob 认为第 i 个石子的价值。
所有石子都被取完后,得分较高的人为胜者。如果两个玩家得分相同,那么为平局。两位玩家都会采用 最优策略 进行游戏。
请你推断游戏的结果,用如下的方式表示:
如果 Alice 赢,返回 1 。
如果 Bob 赢,返回 -1 。
如果游戏平局,返回 0 。
示例 1:
输入:aliceValues = [1,3], bobValues = [2,1]
输出:1
解释:
如果 Alice 拿石子 1 (下标从 0开始),那么 Alice 可以得到 3 分。
Bob 只能选择石子 0 ,得到 2 分。
Alice 获胜。
示例 2:
输入:aliceValues = [1,2], bobValues = [3,1]
输出:0
解释:
Alice 拿石子 0 , Bob 拿石子 1 ,他们得分都为 1 分。
打平。
示例 3:
输入:aliceValues = [2,4,3], bobValues = [1,6,7]
输出:-1
解释:
不管 Alice 怎么操作,Bob 都可以得到比 Alice 更高的得分。
比方说,Alice 拿石子 1 ,Bob 拿石子 2 , Alice 拿石子 0 ,Alice 会得到 6 分而 Bob 得分为 7 分。
Bob 会获胜。
提示:
n == aliceValues.length == bobValues.length
1 <= n <= 105
1 <= aliceValues[i], bobValues[i] <= 100
来源:力扣(LeetCode)
链接:https://leetcode-cn.com/problems/stone-game-vi
著作权归领扣网络所有。商业转载请联系官方授权,非商业转载请注明出处。
"""
from typing import List
"""
贪心做法:
证明:
假设只有两个石头,对于 a, b 的价值分别是 a1, a2, b1, b2
第一种方案是A取第一个,B取第二个,A与B的价值差是 c1 = a1 - b2
第二种方案是A取第二个,B取第一个,A与B的价值差是 c2 = a2 - b1
那么这两种方案对于A来说哪一种更优,就取决于两个方案的价值差的比较
记 c = c1 - c2 = (a1 - b2) - (a2 - b1) = (a1 + b1) - (a2 + b2)
如果c > 0 那么方案一更优,如果c == 0,那么两种方案价值一样,如果c < 0那么方案二更优
那么比较两个方案的优劣 == 比较 a1 + b1 与 a2 + b2 的优劣 ,
归纳一下就是比较每个下标 i 的 a[i] + b[i] 的优劣
所以贪心的策略:将两组石头的价值合并,每次取价值最大的那一组。
写法:先将两个数组的价值合并,并用下标去标记
对价值排序,A取偶数下标,B取奇数下标,最后比较A,B的价值总和
"""
class Solution:
def stoneGameVI(self, a: List[int], b: List[int]) -> int:
arr = list(zip(a, b))
arr.sort(key=lambda x: x[0] + x[1], reverse=True)
n = len(a)
res_a, res_b = 0, 0
for i in range(n):
if i & 1 == 0:
res_a += arr[i][0]
else:
res_b += arr[i][1]
if res_a > res_b:
return 1
elif res_a < res_b:
return -1
else:
return 0
if __name__ == '__main__':
aliceValues = [1, 3]
bobValues = [2, 1]
print(Solution().stoneGameVI(aliceValues, bobValues))
| [
"[email protected]"
]
| |
58c456b9e168ba17eb5cc5d6e3bc8715df702e0d | f4dd8aa4e5476ffde24e27273dd47913c7f9177a | /Dlv2_safe2/tests/parser/edbidb.2.test.py | 7ca82d647d28036317512550cf746da486a374b1 | [
"Apache-2.0"
]
| permissive | dave90/Dlv_safe2 | e56071ec1b07c45defda571cb721852e2391abfb | f127f413e3f35d599554e64aaa918bc1629985bc | refs/heads/master | 2020-05-30T10:44:13.473537 | 2015-07-12T12:35:22 | 2015-07-12T12:35:22 | 38,256,201 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 197 | py | input = """
g(3,4).
g(4,1).
h(X,Y):- h(Y,X).
g(X,Y):- h(X,Z), g(Z,Y).
h(3,4).
g(5,2).
"""
output = """
g(3,4).
g(4,1).
h(X,Y):- h(Y,X).
g(X,Y):- h(X,Z), g(Z,Y).
h(3,4).
g(5,2).
"""
| [
"davide@davide-All-Series"
]
| davide@davide-All-Series |
39488c26270cabe7fb0720f02e7f86e06baa8868 | db5264994305e8c926f89cb456f33bd3a4d64f76 | /Sklep zielarski/orders/migrations/0001_initial.py | c21bd1168a45aeac59c66f7e35c2afffd875dd47 | []
| no_license | marcinpelszyk/Django | 7842e20d5e8b213c4cd42c421c1db9ab7d5f01d5 | aff2b9bd20e978a22a4a98994bf8424892d3c82f | refs/heads/main | 2023-05-01T19:20:37.267010 | 2021-05-18T17:51:53 | 2021-05-18T17:51:53 | 356,532,628 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,201 | py | # Generated by Django 3.1.7 on 2021-05-08 19:15
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('store', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Order',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('full_name', models.CharField(max_length=50)),
('address1', models.CharField(max_length=250)),
('address2', models.CharField(max_length=250)),
('city', models.CharField(max_length=100)),
('phone', models.CharField(max_length=100)),
('post_code', models.CharField(max_length=20)),
('created', models.DateTimeField(auto_now_add=True)),
('updated', models.DateTimeField(auto_now=True)),
('total_paid', models.DecimalField(decimal_places=2, max_digits=5)),
('order_key', models.CharField(max_length=200)),
('billing_status', models.BooleanField(default=False)),
('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='order_user', to=settings.AUTH_USER_MODEL)),
],
options={
'ordering': ('-created',),
},
),
migrations.CreateModel(
name='OrderItem',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('price', models.DecimalField(decimal_places=2, max_digits=5)),
('quantity', models.PositiveIntegerField(default=1)),
('order', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='items', to='orders.order')),
('product', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='order_items', to='store.product')),
],
),
]
| [
"[email protected]"
]
| |
7a2d804bfeae7d288dc2c166ea105a91da40ca97 | 3cd19164c17d9793ea506369454b8bacd5cebfa9 | /Backend/clubmg_bak_20190726/clubserver/urls.py | 48019cb3399c4c52f203ca02e80992ee2532ec11 | []
| no_license | Myxg/BadmintonClubSystem | 337a17728122ab929d37e7f2732850beb49d8be0 | 1c51236098ab3770cadd925212f9d3978ed83c2a | refs/heads/master | 2022-12-27T10:13:55.129630 | 2020-09-16T10:21:36 | 2020-09-16T10:21:36 | 295,998,099 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,500 | py | #coding: utf-8
from django.conf.urls import include, url
from rest_framework_jwt.views import obtain_jwt_token
from . import views
urlpatterns = [
url(r'^token-auth$', obtain_jwt_token),
url(r'^useradd$', views.UserAdd.as_view()),
url(r'^user$', views.UserView.as_view()),
url(r'^user/password$', views.UpdatePassword.as_view()),
url(r'^user/email$', views.UpdateEmail.as_view()),
url(r'^user/photo$', views.UpdatePhoto.as_view()),
url(r'^user/(?P<user_id>[0-9]+)$', views.EditUserView.as_view()),
url(r'^users$', views.UsersView.as_view()),
url(r'^group/(?P<pk_id>[0-9]+)$', views.GroupView.as_view()),
url(r'^groups$', views.GroupsView.as_view()),
url(r'^operations$', views.OperationsView.as_view()),
url(r'^permissions$', views.PermissionsView.as_view()),
url(r'^athlete/(?P<pk_id>[0-9]+)$', views.AthleteView.as_view()),
url(r'^athletes$', views.AthletesView.as_view()),
url(r'^athlete/company/(?P<pk_id>[0-9]+)$', views.AthleteCompanyView.as_view()),
url(r'^athlete/companys$', views.AthleteCompanysView.as_view()),
url(r'^athlete/sportevent/(?P<pk_id>[0-9]+)$', views.SportEventExpView.as_view()),
url(r'^athlete/group/(?P<pk_id>[0-9]+)$', views.AthleteGroupView.as_view()),
url(r'^athlete/groups$', views.AthleteGroupsView.as_view()),
url(r'^athlete/fitness/items$', views.FitnessItemsView.as_view()),
url(r'^athlete/fitness/datas$', views.FitnessDatasView.as_view()),
url(r'^athlete/fitness/data/(?P<pk_id>[0-9]+)$', views.FitnessDataView.as_view()),
url(r'^athlete/worldrankinglist$', views.WorldRankingListView.as_view()),
url(r'^athlete/worldranking/(?P<pk_id>[0-9]+)$', views.WorldRankingView.as_view()),
url(r'^athlete/olympicrankinglist$', views.OlympicRankingListView.as_view()),
url(r'^athlete/olympicranking/(?P<pk_id>[0-9]+)$', views.OlympicRankingView.as_view()),
url(r'^athlete/overview/(?P<pk_id>[0-9]+)$', views.AthleteOverViewView.as_view()),
url(r'^athlete/linkdocs/(?P<pk_id>[0-9]+)$', views.AthleteDocLinkView.as_view()),
url(r'^athlete/matchs/(?P<pk_id>[0-9]+)$', views.AthleteMatchVideosSearchView.as_view()),
url(r'^video/(?P<pk_id>[0-9]+)$', views.MatchVideoView.as_view()),
url(r'^videos$', views.MatchVideosSearchView.as_view()),
url(r'^matchinfo/(?P<pk_id>[0-9]+)$', views.MatchInfoView.as_view()),
url(r'^matchinfos$', views.MatchInfosView.as_view()),
url(r'^matchlist$', views.MatchListView.as_view()),
url(r'^matchlevel2list$', views.MatchLevel2NameView.as_view()),
url(r'^markdata/matchinfos$', views.MarkMatchInfosView.as_view()),
url(r'^markdata/show/(?P<name>(hits|scores|serverecord|playgroundrecord))/(?P<match_id>[0-9]{4}_[0-9]{4}_[0-9]{4})$', views.MarkDataShow.as_view()),
url(r'^markdata/sync/(?P<name>(hits|scores|serverecord|playgroundrecord))/(?P<match_id>[0-9]{4}_[0-9]{4}_[0-9]{4})$', views.MarkDataSync.as_view()),
url(r'^markdata/sync/(?P<name>(hits|scores))/(?P<match_id>[0-9]{4}_[0-9]{4}_[0-9]{4})/retry$', views.MarkDataSyncRetry.as_view()),
url(r'^docs/(?P<module_id>[a-zA-Z0-9_]+)(/)?$', views.DocsView.as_view()),
url(r'^docs/link/(?P<module_id>[a-zA-Z0-9_]+)(/)?$', views.DocLinkView.as_view()),
url(r'^history/(?P<module_id>[a-zA-Z0-9_]+)(/)?$', views.DocsView.as_view()),
url(r'^companylist$', views.CompanysListView.as_view()),
# test url
url(r'^sn/(?P<type_id>[a-z]+)$', views.NewSN.as_view()),
url(r'^test$', views.Test.as_view()),
]
| [
"[email protected]"
]
| |
882dd051b7a1fff21dee017e84de337271d6bcb6 | 39329ae5773c9b4c1f9c91eec393507f5e8ae1c0 | /server/.history/server_20200307213734.py | 40e4aa62a922652973d3ff4b8b8636ddb900856f | []
| no_license | dobreandrei1/legal | 083267aae7faa10775e5a634679869fce0ac3136 | dd05fad8df599f9fc34f56628ebd8861f7a004b4 | refs/heads/master | 2021-09-08T20:16:29.926214 | 2020-03-08T09:24:04 | 2020-03-08T09:24:04 | 245,785,262 | 0 | 0 | null | 2021-09-03T00:42:33 | 2020-03-08T09:22:37 | Python | UTF-8 | Python | false | false | 1,712 | py | from pathlib import Path
from flask import Flask, render_template, request, send_file, send_from_directory, safe_join, abort, current_app
# from werkzeug import secure_filename
import pandas as pd
import os
import time
import json
from flask_cors import CORS
from haikunator import Haikunator
import unidecode
import PyPDF2
import unidecode
haikunator = Haikunator()
app = Flask(__name__)
CORS(app)
applicationVersion = 0
@app.route('/upload')
def upload_file():
return render_template('upload.html')
@app.route('/api/titles', methods = ['GET', 'POST'])
def get_titles():
if request.method == 'POST':
f = request.files['file']
filename = request.form['filename']
# TODO: maybe check if file alreay exists and not save multipletime
# - get list of all files
# - if filename variable is a substr of any file name in folder: compare their contents
# - if match don`t save file again but use that one
name = filename + '.pdf'
if Path(name).exists():
name = filename + '.pdf'
f.save(name)
pdfFileObject = open('clauze.pdf', 'rb')
pdfReader = PyPDF2.PdfFileReader(pdfFileObject)
pages = pdfReader.numPages
clauzeDoc = ''
for page in pages:
clauzeDoc += pdfReader.getPage(page).extractText()
pdfFileObject1 = open(name, 'rb')
pdfReader1 = PyPDF2.PdfFileReader(pdfFileObject1)
pages1 = pdfReader1.numPages
contractDoc = ''
for page in pages1:
contractDoc += pdfReader1.getPage(page).extractText()
return 1
if __name__ == '__main__':
app.run(debug = False, host='0.0.0.0')
| [
"[email protected]"
]
| |
2ff3d6352d5241a08ded28a3f45e2e30c903eee7 | 1841c29ffb26901bc7830b2ce4ea712197f1b740 | /models/GraphNN/DTNN.py | e2ad09c182e6617d8bbdf55b57b5fb2b13b136e6 | [
"MIT"
]
| permissive | chubbymaggie/Deep_Learning_On_Code_With_A_Graph_Vocabulary | 756bdd54b17d351d31200cc0ceacf8f639e0c678 | 29ee2fdffc5bc05582a91025697e256980e75ef2 | refs/heads/master | 2020-03-28T12:33:33.820671 | 2018-09-10T22:54:14 | 2018-09-10T22:54:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,446 | py | # Copyright 2018 Amazon.com, Inc. or its affiliates. All Rights Reserved.
from collections import OrderedDict
from mxnet import gluon
from models.FITB.FITBModel import FITBModel
from models.GraphNN.MPNN import MPNN
class DTNN(MPNN):
'''
Deep Tensor Neural Network from https://www.nature.com/articles/ncomms13890
'''
def __init__(self, **kwargs):
super().__init__(**kwargs)
self.hidden_size = kwargs['hidden_size']
# Initializing model components
with self.name_scope():
self.hidden_message_dense = gluon.nn.Dense(self.hidden_size, in_units=self.hidden_size)
self.hidden_and_edge_dense = gluon.nn.Dense(self.hidden_size, in_units=self.hidden_size)
self.edge_type_weightings = OrderedDict()
for t in self.data_encoder.all_edge_types:
edge_type_weighting = self.params.get('edge_type_weighting_{}'.format(t), grad_req='write',
shape=(1, self.hidden_size))
self.__setattr__('edge_type_weighting_{}'.format(t), edge_type_weighting)
self.edge_type_weightings[t] = edge_type_weighting
if FITBModel in self.__class__.mro():
self.readout_mlp = gluon.nn.HybridSequential()
with self.readout_mlp.name_scope():
self.readout_mlp.add(gluon.nn.Dense(self.hidden_size, activation='tanh', in_units=self.hidden_size))
self.readout_mlp.add(gluon.nn.Dense(1, in_units=self.hidden_size))
def compute_messages(self, F, hidden_states, edges, t):
hidden_states = self.hidden_message_dense(hidden_states)
summed_msgs = []
for key in self.edge_type_weightings.keys():
adj_mat, edge_type_weighting = edges[key], self.edge_type_weightings[key]
# Compute the messages passed for this edge type
passed_msgs = F.tanh(
self.hidden_and_edge_dense(hidden_states * edge_type_weighting.data())) # n_vertices X hidden_size
# Sum messages from all neighbors
summed_msgs.append(F.dot(adj_mat, passed_msgs))
summed_msgs = F.sum(F.stack(*summed_msgs), axis=0)
return summed_msgs
def update_hidden_states(self, F, hidden_states, messages, t):
return hidden_states + messages
def readout(self, F, hidden_states):
return self.readout_mlp(hidden_states)
| [
"[email protected]"
]
| |
54fde6cf27909a8351ad62b64d290dbcb4045d4a | 648e5ea6722db2f29806e24f11cf169257dfc1c7 | /doorsadmin/migrations/0076_auto__add_field_doorway_priority__add_field_snippetsset_priority__add_.py | 20830d5176fda335cd25b5743aa423cf2137cfb5 | []
| no_license | cash2one/doorscenter | 30d4f65e3fb57c417df3f09d7feab721d8425faa | d2771bf04aa187dda6d468883a5a167237589369 | refs/heads/master | 2021-05-27T15:38:56.219907 | 2012-06-20T05:38:15 | 2012-06-20T05:38:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 33,100 | py | # encoding: utf-8
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Doorway.priority'
db.add_column('doorsadmin_doorway', 'priority', self.gf('django.db.models.fields.CharField')(default='std', max_length=20), keep_default=False)
# Adding field 'SnippetsSet.priority'
db.add_column('doorsadmin_snippetsset', 'priority', self.gf('django.db.models.fields.CharField')(default='std', max_length=20), keep_default=False)
# Adding field 'SpamTask.priority'
db.add_column('doorsadmin_spamtask', 'priority', self.gf('django.db.models.fields.CharField')(default='std', max_length=20), keep_default=False)
# Adding field 'XrumerBaseR.priority'
db.add_column('doorsadmin_xrumerbaser', 'priority', self.gf('django.db.models.fields.CharField')(default='std', max_length=20), keep_default=False)
def backwards(self, orm):
# Deleting field 'Doorway.priority'
db.delete_column('doorsadmin_doorway', 'priority')
# Deleting field 'SnippetsSet.priority'
db.delete_column('doorsadmin_snippetsset', 'priority')
# Deleting field 'SpamTask.priority'
db.delete_column('doorsadmin_spamtask', 'priority')
# Deleting field 'XrumerBaseR.priority'
db.delete_column('doorsadmin_xrumerbaser', 'priority')
models = {
'doorsadmin.agent': {
'Meta': {'object_name': 'Agent'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'currentTask': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateLastPing': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interval': ('django.db.models.fields.IntegerField', [], {'default': '3', 'null': 'True'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'doorsadmin.domain': {
'Meta': {'object_name': 'Domain'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateExpires': ('django.db.models.fields.DateField', [], {'default': 'datetime.date(2012, 6, 7)', 'null': 'True', 'blank': 'True'}),
'dateRegistered': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Host']", 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'ipAddress': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.IPAddress']", 'null': 'True', 'blank': 'True'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'linkedDomains': ('django.db.models.fields.related.ManyToManyField', [], {'blank': 'True', 'related_name': "'linkedDomains_rel_+'", 'null': 'True', 'to': "orm['doorsadmin.Domain']"}),
'maxDoorsCount': ('django.db.models.fields.IntegerField', [], {'default': '25'}),
'maxLinkedDomains': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '200'}),
'nameServer1': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'nameServer2': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'net': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Net']", 'null': 'True', 'blank': 'True'}),
'netLevel': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'niche': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Niche']", 'null': 'True', 'blank': 'True'}),
'registrator': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'})
},
'doorsadmin.doorgenprofile': {
'Meta': {'object_name': 'DoorgenProfile'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'settings': ('django.db.models.fields.TextField', [], {'default': "''"}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'})
},
'doorsadmin.doorway': {
'Meta': {'object_name': 'Doorway'},
'agent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Agent']", 'null': 'True', 'blank': 'True'}),
'analyticsId': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'blank': 'True'}),
'cyclikId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'domain': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Domain']", 'null': 'True', 'blank': 'True'}),
'domainFolder': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'doorgenProfile': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.DoorgenProfile']", 'null': 'True'}),
'doorwaySchedule': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.DoorwaySchedule']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywordsList': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'keywordsSet': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.KeywordsSet']", 'null': 'True', 'blank': 'True'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'netLinksList': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'niche': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Niche']", 'null': 'True'}),
'pagesCount': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'piwikId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'priority': ('django.db.models.fields.CharField', [], {'default': "'std'", 'max_length': '20'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'runTime': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'spamLinksCount': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'stateManaged': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Template']", 'null': 'True', 'blank': 'True'})
},
'doorsadmin.doorwayschedule': {
'Meta': {'object_name': 'DoorwaySchedule'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateEnd': ('django.db.models.fields.DateField', [], {'null': 'True', 'blank': 'True'}),
'dateStart': ('django.db.models.fields.DateField', [], {'default': 'datetime.date.today', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'doorgenProfile': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.DoorgenProfile']", 'null': 'True'}),
'doorsPerDay': ('django.db.models.fields.IntegerField', [], {'default': '1', 'null': 'True'}),
'doorsToday': ('django.db.models.fields.IntegerField', [], {'default': '0', 'null': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywordsSet': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.KeywordsSet']", 'null': 'True', 'blank': 'True'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'lastRun': ('django.db.models.fields.DateTimeField', [], {'null': 'True'}),
'maxPagesCount': ('django.db.models.fields.IntegerField', [], {'default': '900', 'null': 'True'}),
'maxSpamLinksPercent': ('django.db.models.fields.FloatField', [], {'default': '5'}),
'minPagesCount': ('django.db.models.fields.IntegerField', [], {'default': '500', 'null': 'True'}),
'minSpamLinksPercent': ('django.db.models.fields.FloatField', [], {'default': '4'}),
'net': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Net']", 'null': 'True'}),
'niche': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Niche']", 'null': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'}),
'template': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Template']", 'null': 'True', 'blank': 'True'})
},
'doorsadmin.event': {
'Meta': {'object_name': 'Event'},
'date': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'object': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'text': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1000', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'info'", 'max_length': '50', 'blank': 'True'})
},
'doorsadmin.host': {
'Meta': {'object_name': 'Host'},
'company': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'controlPanelType': ('django.db.models.fields.CharField', [], {'default': "'none'", 'max_length': '50', 'blank': 'True'}),
'controlPanelUrl': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'costPerMonth': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'diskSpace': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'ftpLogin': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'ftpPassword': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'ftpPort': ('django.db.models.fields.IntegerField', [], {'default': '21', 'blank': 'True'}),
'hostName': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'rootDocumentTemplate': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'}),
'traffic': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'shared'", 'max_length': '50', 'blank': 'True'})
},
'doorsadmin.ipaddress': {
'Meta': {'object_name': 'IPAddress'},
'address': ('django.db.models.fields.IPAddressField', [], {'unique': 'True', 'max_length': '15'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'host': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Host']", 'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'})
},
'doorsadmin.keywordsset': {
'Meta': {'object_name': 'KeywordsSet'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'encoding': ('django.db.models.fields.CharField', [], {'default': "'cp1251'", 'max_length': '50'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'keywordsCount': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'localFolder': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
'niche': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Niche']", 'null': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'})
},
'doorsadmin.net': {
'Meta': {'object_name': 'Net'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'analyticsId': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'blank': 'True'}),
'cyclikId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'piwikId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'settings': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'})
},
'doorsadmin.niche': {
'Meta': {'object_name': 'Niche'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'analyticsId': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '50', 'blank': 'True'}),
'cyclikId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '50'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'piwikId': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'}),
'stopwordsList': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'tdsSchemes': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'})
},
'doorsadmin.snippetsset': {
'Meta': {'object_name': 'SnippetsSet'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'agent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Agent']", 'null': 'True', 'blank': 'True'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateLastParsed': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'interval': ('django.db.models.fields.IntegerField', [], {'default': '100', 'null': 'True'}),
'keywordsCount': ('django.db.models.fields.IntegerField', [], {'default': '500', 'null': 'True'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'localFile': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
'niche': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Niche']", 'null': 'True'}),
'phrasesCount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'priority': ('django.db.models.fields.CharField', [], {'default': "'std'", 'max_length': '20'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'runTime': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'stateManaged': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'})
},
'doorsadmin.spamlink': {
'Meta': {'object_name': 'SpamLink'},
'anchor': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1000'}),
'doorway': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Doorway']"}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'spamTask': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.SpamTask']", 'null': 'True', 'blank': 'True'}),
'url': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '1000'})
},
'doorsadmin.spamtask': {
'Meta': {'object_name': 'SpamTask'},
'agent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Agent']", 'null': 'True', 'blank': 'True'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'failsCount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'halfSuccessCount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'priority': ('django.db.models.fields.CharField', [], {'default': "'std'", 'max_length': '20'}),
'profilesCount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'runTime': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'snippetsSet': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.SnippetsSet']", 'null': 'True', 'blank': 'True'}),
'stateManaged': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'}),
'successCount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'xrumerBaseR': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.XrumerBaseR']", 'null': 'True'})
},
'doorsadmin.template': {
'Meta': {'object_name': 'Template'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'localFolder': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'niche': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Niche']", 'null': 'True', 'blank': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'}),
'type': ('django.db.models.fields.CharField', [], {'default': "'none'", 'max_length': '50', 'blank': 'True'})
},
'doorsadmin.xrumerbaser': {
'Meta': {'object_name': 'XrumerBaseR'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'agent': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Agent']", 'null': 'True', 'blank': 'True'}),
'baseNumber': ('django.db.models.fields.IntegerField', [], {'default': '42', 'unique': 'True'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'emailAddress': ('django.db.models.fields.CharField', [], {'default': "'[email protected]'", 'max_length': '200'}),
'emailLogin': ('django.db.models.fields.CharField', [], {'default': "'[email protected]'", 'max_length': '200'}),
'emailPassword': ('django.db.models.fields.CharField', [], {'default': "'kernel32'", 'max_length': '200'}),
'emailPopServer': ('django.db.models.fields.CharField', [], {'default': "'pop.gmail.com'", 'max_length': '200'}),
'failsCount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'halfSuccessCount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'linksCount': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'nextSpamTaskDomainsCount': ('django.db.models.fields.IntegerField', [], {'default': '4'}),
'niche': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.Niche']", 'null': 'True'}),
'nickName': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
'password': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
'priority': ('django.db.models.fields.CharField', [], {'default': "'std'", 'max_length': '20'}),
'profilesCount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'realName': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'runTime': ('django.db.models.fields.IntegerField', [], {'null': 'True'}),
'snippetsSet': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.SnippetsSet']", 'null': 'True', 'blank': 'True'}),
'spamTaskDomainLinksMax': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'spamTaskDomainLinksMin': ('django.db.models.fields.IntegerField', [], {'default': '3'}),
'spamTaskDomainsMax': ('django.db.models.fields.IntegerField', [], {'default': '5'}),
'spamTaskDomainsMin': ('django.db.models.fields.IntegerField', [], {'default': '3'}),
'stateManaged': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'}),
'successCount': ('django.db.models.fields.IntegerField', [], {'null': 'True', 'blank': 'True'}),
'xrumerBaseRaw': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['doorsadmin.XrumerBaseRaw']", 'null': 'True'})
},
'doorsadmin.xrumerbaseraw': {
'Meta': {'object_name': 'XrumerBaseRaw'},
'active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'baseNumber': ('django.db.models.fields.IntegerField', [], {'default': '42', 'unique': 'True'}),
'dateAdded': ('django.db.models.fields.DateTimeField', [], {'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'dateChanged': ('django.db.models.fields.DateTimeField', [], {'auto_now': 'True', 'auto_now_add': 'True', 'null': 'True', 'blank': 'True'}),
'description': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'language': ('django.db.models.fields.CharField', [], {'max_length': '50', 'blank': 'True'}),
'lastError': ('django.db.models.fields.CharField', [], {'default': "''", 'max_length': '200', 'blank': 'True'}),
'linksCount': ('django.db.models.fields.FloatField', [], {'null': 'True', 'blank': 'True'}),
'remarks': ('django.db.models.fields.TextField', [], {'default': "''", 'blank': 'True'}),
'stateSimple': ('django.db.models.fields.CharField', [], {'default': "'new'", 'max_length': '50'})
}
}
complete_apps = ['doorsadmin']
| [
"[email protected]"
]
| |
f3e029ef5acbe8e796a4ba75d99292456d5d7dd7 | 8832f83436809e8e918e60e5526d95add9fe8dbd | /books_app/migrations/0069_auto_20191002_1610.py | 825b2b23a78d57c127bd9697fe680eaecabd9d58 | []
| no_license | HCDigitalScholarship/booksofduchesses | e31e56eaba253b92a1362de5918b5b005cb27f3c | 3f0e27515963c92a56714c5bada3b6a68a8665df | refs/heads/master | 2022-12-09T18:41:20.019687 | 2021-10-25T14:58:18 | 2021-10-25T14:58:18 | 190,254,161 | 0 | 3 | null | 2022-12-08T05:21:54 | 2019-06-04T18:05:08 | Python | UTF-8 | Python | false | false | 849 | py | # Generated by Django 2.2.2 on 2019-10-02 16:10
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("books_app", "0068_auto_20190930_1758")]
operations = [
migrations.AddField(
model_name="text",
name="estc_link",
field=models.CharField(
blank=True, max_length=800, verbose_name="ESTC Link"
),
),
migrations.AlterField(
model_name="text",
name="ihrt_link",
field=models.CharField(blank=True, max_length=800),
),
migrations.AlterField(
model_name="text",
name="me_compendium_link",
field=models.CharField(
blank=True, max_length=200, verbose_name="ME Compendium Link"
),
),
]
| [
"[email protected]"
]
| |
da05f206093955bc97ef19a62bc0a70e69711fc6 | 5e9dacbb7a9613b7c8d8c92398bb66926a314c38 | /script.py | ecff88305875f987118660b170ce2849290d9f87 | []
| no_license | pol9111/tencent_WechatOffAcc_auto | 645b95bfd893706df4651f1e8f67ea1dc57a03de | 3aa2a9a8a78796d5b829f9bf49cc849713ed41b7 | refs/heads/master | 2020-03-24T16:24:08.783424 | 2018-07-30T04:07:14 | 2018-07-30T04:07:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,250 | py | import json
import pymongo
import re
from config import *
def response(flow):
global like_num, title, pub_time, read_num, comment_num
client = pymongo.MongoClient(MONGO_URL)
db = client[MONGO_DB]
table = db[MONGO_COLLECTION]
# 获取微信广告json文件, 里面有阅读数和点赞数
url_msg = 'mp.weixin.qq.com/mp/getappmsgext?'
if url_msg in flow.request.url:
text_msg = flow.response.text
data_py = json.loads(text_msg)
content = data_py.get('appmsgstat')
like_num = content.get('like_num')
read_num = content.get('read_num')
comment_num = data_py.get('comment_count')
# 获取文章响应文件, 并匹配标题和发布时间
url_article = 'mp.weixin.qq.com/s?'
if url_article in flow.request.url:
text_arti = flow.response.text
pub_time = re.findall(r'publish_time.*"(\d+-\d+-\d+)".*', text_arti)[0]
title = re.findall(r'msg_title\s=\s"(.*?)";', text_arti)[0]
data = {
'文章标题': title,
'发布时间': pub_time,
'阅读数': read_num,
'点赞数': like_num,
'评论数': comment_num,
}
print(data)
table.update({'文章标题': title}, {'$set': data}, True)
| [
"[email protected]"
]
| |
a2d6c12a2bd7956f2c562f8cfe0e2ac7678d9769 | 3003a8663135aa10f5a152a8642bc6ab270995b9 | /ggCloudSDK/google-cloud-sdk/lib/googlecloudsdk/sql/lib/instances.py | 9580cc32edf3f272d2994243b0b16c424ce6e6fb | [
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
]
| permissive | bopopescu/packmybot | 1b4d199b36d196e5e769a781b520019bb4d0bdbc | 92de1e72cfc51b41447366ffc81a9dcd9a5e7870 | refs/heads/master | 2022-11-25T23:46:06.946645 | 2015-10-22T08:22:04 | 2015-10-22T08:22:04 | 282,313,675 | 0 | 0 | null | 2020-07-24T20:50:10 | 2020-07-24T20:50:10 | null | UTF-8 | Python | false | false | 7,040 | py | # Copyright 2015 Google Inc. All Rights Reserved.
"""Common utility functions for sql instances."""
from googlecloudsdk.calliope import exceptions
class _BaseInstances(object):
"""Common utility functions for sql instances."""
@classmethod
def _SetBackupConfiguration(cls, sql_messages, settings, args, original):
"""Sets the backup configuration for the instance."""
# these args are only present for the patch command
no_backup = not getattr(args, 'backup', True)
if original and (
any([args.backup_start_time, args.enable_bin_log is not None,
no_backup])):
if original.settings.backupConfiguration:
backup_config = original.settings.backupConfiguration[0]
else:
backup_config = sql_messages.BackupConfiguration(
startTime='00:00',
enabled=False),
elif not any([args.backup_start_time, args.enable_bin_log is not None,
no_backup]):
return
if not original:
backup_config = sql_messages.BackupConfiguration(
startTime='00:00',
enabled=False)
if args.backup_start_time:
backup_config.startTime = args.backup_start_time
backup_config.enabled = True
if no_backup:
if args.backup_start_time or args.enable_bin_log is not None:
raise exceptions.ToolException(
('Argument --no-backup not allowed with'
' --backup-start-time or --enable-bin-log'))
backup_config.enabled = False
if args.enable_bin_log is not None:
backup_config.binaryLogEnabled = args.enable_bin_log
cls.AddBackupConfigToSettings(settings, backup_config)
@staticmethod
def _SetDatabaseFlags(sql_messages, settings, args):
if args.database_flags:
settings.databaseFlags = []
for (name, value) in args.database_flags.items():
settings.databaseFlags.append(sql_messages.DatabaseFlags(
name=name,
value=value))
elif getattr(args, 'clear_database_flags', False):
settings.databaseFlags = []
@staticmethod
def _ConstructSettingsFromArgs(sql_messages, args):
"""Constructs instance settings from the command line arguments.
Args:
sql_messages: module, The messages module that should be used.
args: argparse.Namespace, The arguments that this command was invoked
with.
Returns:
A settings object representing the instance settings.
Raises:
ToolException: An error other than http error occured while executing the
command.
"""
settings = sql_messages.Settings(
tier=args.tier,
pricingPlan=args.pricing_plan,
replicationType=args.replication,
activationPolicy=args.activation_policy)
# these args are only present for the patch command
clear_authorized_networks = getattr(args, 'clear_authorized_networks',
False)
clear_gae_apps = getattr(args, 'clear_gae_apps', False)
if args.authorized_gae_apps:
settings.authorizedGaeApplications = args.authorized_gae_apps
elif clear_gae_apps:
settings.authorizedGaeApplications = []
if any([args.assign_ip is not None, args.require_ssl is not None,
args.authorized_networks, clear_authorized_networks]):
settings.ipConfiguration = sql_messages.IpConfiguration()
if args.assign_ip is not None:
settings.ipConfiguration.enabled = args.assign_ip
if args.authorized_networks:
settings.ipConfiguration.authorizedNetworks = args.authorized_networks
if clear_authorized_networks:
# For patch requests, this field needs to be labeled explicitly cleared.
settings.ipConfiguration.authorizedNetworks = []
if args.require_ssl is not None:
settings.ipConfiguration.requireSsl = args.require_ssl
if any([args.follow_gae_app, args.gce_zone]):
settings.locationPreference = sql_messages.LocationPreference(
followGaeApplication=args.follow_gae_app,
zone=args.gce_zone)
if getattr(args, 'enable_database_replication', None) is not None:
settings.databaseReplicationEnabled = args.enable_database_replication
return settings
@classmethod
def ConstructInstanceFromArgs(cls, sql_messages, args,
original=None, instance_ref=None):
"""Construct a Cloud SQL instance from command line args.
Args:
sql_messages: module, The messages module that should be used.
args: argparse.Namespace, The CLI arg namespace.
original: sql_messages.DatabaseInstance, The original instance, if some of
it might be used to fill fields in the new one.
instance_ref: reference to DatabaseInstance object, used to fill project
and instance information.
Returns:
sql_messages.DatabaseInstance, The constructed (and possibly partial)
database instance.
Raises:
ToolException: An error other than http error occured while executing the
command.
"""
settings = cls._ConstructSettingsFromArgs(sql_messages, args)
cls._SetBackupConfiguration(sql_messages, settings, args, original)
cls._SetDatabaseFlags(sql_messages, settings, args)
# these flags are only present for the create command
region = getattr(args, 'region', None)
database_version = getattr(args, 'database_version', None)
instance_resource = sql_messages.DatabaseInstance(
region=region,
databaseVersion=database_version,
masterInstanceName=getattr(args, 'master_instance_name', None),
settings=settings)
if hasattr(args, 'master_instance_name'):
if args.master_instance_name:
replication = 'ASYNCHRONOUS'
activation_policy = 'ALWAYS'
else:
replication = 'SYNCHRONOUS'
activation_policy = 'ON_DEMAND'
if not args.replication:
instance_resource.settings.replicationType = replication
if not args.activation_policy:
instance_resource.settings.activationPolicy = activation_policy
if instance_ref:
cls.SetProjectAndInstanceFromRef(instance_resource, instance_ref)
return instance_resource
class InstancesV1Beta3(_BaseInstances):
"""Common utility functions for sql instances V1Beta3."""
@staticmethod
def SetProjectAndInstanceFromRef(instance_resource, instance_ref):
instance_resource.project = instance_ref.project
instance_resource.instance = instance_ref.instance
@staticmethod
def AddBackupConfigToSettings(settings, backup_config):
settings.backupConfiguration = [backup_config]
class InstancesV1Beta4(_BaseInstances):
"""Common utility functions for sql instances V1Beta4."""
@staticmethod
def SetProjectAndInstanceFromRef(instance_resource, instance_ref):
instance_resource.project = instance_ref.project
instance_resource.name = instance_ref.instance
@staticmethod
def AddBackupConfigToSettings(settings, backup_config):
settings.backupConfiguration = backup_config
| [
"[email protected]"
]
| |
f9da954cdcb17cee51e9d873568d288fdf2c9cdb | f6f29c2fa719c53eee73de2acd86db9e1278182e | /code_wars/calculating_with_functions.py | a3f2c7e84a244f5b3dd4d6052494c5ab40d538cb | []
| no_license | byt3-m3/python_code_practice | ca08320e1778449d30204b65f15903d5830b7975 | 40e215c4d4ab62cf7d55d2456d94550335825906 | refs/heads/master | 2023-07-24T08:29:06.624850 | 2021-09-04T02:39:32 | 2021-09-04T02:39:32 | 256,984,457 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,290 | py | '''
This time we want to write calculations using functions and get the results. Let's have a look at some examples:
seven(times(five())) # must return 35
four(plus(nine())) # must return 13
eight(minus(three())) # must return 5
six(divided_by(two())) # must return 3
Requirements:
There must be a function for each number from 0 ("zero") to 9 ("nine")
There must be a function for each of the following mathematical operations: plus, minus, times, dividedBy (divided_by in Ruby and Python)
Each calculation consist of exactly one operation and two numbers
The most outer function represents the left operand, the most inner function represents the right operand
Divison should be integer division. For example, this should return 2, not 2.666666...:
'''
def _process(data, base):
num = data[0]
oper = data[1]
if oper == "*":
return base * num
if oper == "/":
return base // num
if oper == "+":
return base + num
if oper == "-":
return base - num
def zero(data=None):
if isinstance(data, tuple):
return _process(data, 0)
return 0
def one(data=None):
if isinstance(data, tuple):
return _process(data, 1)
return 1
def two(data=None):
if isinstance(data, tuple):
return _process(data, 2)
return 2
def three(data=None):
if isinstance(data, tuple):
return _process(data, 3)
return 3
def four(data=None):
if isinstance(data, tuple):
return _process(data, 4)
return 4
def five(data=None):
if isinstance(data, tuple):
return _process(data, 5)
return 5
def six(data=None):
if isinstance(data, tuple):
return _process(data, 6)
return 6
def seven(data=None):
if isinstance(data, tuple):
return _process(data, 7)
return 7
def eight(data=None):
if isinstance(data, tuple):
return _process(data, 8)
return 8
def nine(data=None):
if isinstance(data, tuple):
return _process(data, 9)
return 9
def plus(num):
return (num, "+")
def minus(num):
return (num, "-")
def times(num):
return (num, "*")
def divided_by(num):
return (num, "/")
result_1 = one(minus(five()))
result_2 = five(times(seven()))
print(result_1)
print(result_2)
| [
"[email protected]"
]
| |
e3baf698b803e39d4869c69af482d97836496848 | 91d96fc4084a55a74f761ed7bc7d0adba533618a | /projects/pset2.0_Forkable_Difficulty_Adjusting/blockchain-visualizer/visualize.py | 352b2f7230f8cd77c28efa64538cda9744295698 | [
"MIT"
]
| permissive | Averylamp/mas.s62 | 169bb76f1289a3d4569a952075bfb8e7842e1dca | 382dc036ae014785be4c464ed8c4aef533fd52ab | refs/heads/master | 2020-03-17T16:14:56.613227 | 2018-05-17T03:56:09 | 2018-05-17T03:56:09 | 133,741,785 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,143 | py | import pickle
from datetime import datetime
from datetime import timedelta
import matplotlib.pyplot as plt
import graphviz
def mine_rate_info(endpoint_block, origin_block, block_information, time_interval):
endpoint_dt = datetime.fromtimestamp(highest_block[0]['timestamp'])
origin_dt = datetime.fromtimestamp(block_information[origin_block]['timestamp'])
block_hash = endpoint_block
num_buckets = int((endpoint_dt - origin_dt).total_seconds() / time_interval) + 5
mined_buckets = [0]*num_buckets
times_list = [origin_dt + timedelta(seconds=x*time_interval) for x in range(0, num_buckets)]
assert len(times_list) == len(mined_buckets)
while block_hash != '':
block_info = block_information[block_hash]
timestamp = block_information[block_hash]['timestamp']
dt = datetime.fromtimestamp(timestamp)
bucket_ind = int((dt - origin_dt).total_seconds() / time_interval)
mined_buckets[bucket_ind] += 1
block_hash = block_info['blockInformation']['previousHash']
return times_list, mined_buckets
def aggregate_info(mined_buckets):
num_buckets = len(mined_buckets)
aggregate_buckets = [0]*num_buckets
for i in range(num_buckets):
if i == 0:
aggregate_buckets[0] = mined_buckets[0]
else:
aggregate_buckets[i] = aggregate_buckets[i-1] + mined_buckets[i]
return aggregate_buckets
def generate_graphviz(block_information):
g = graphviz.Digraph('G', filename='block_information.gv')
g.node("origin", "")
for block_hash in block_information:
g.node(block_hash, "")
prev_hash = block_information[block_hash]['blockInformation']['previousHash']
if prev_hash == '':
prev_hash = "origin"
g.edge(prev_hash, block_hash)
g.view()
block_information = pickle.load(open("../server-python/block_information.pickle", 'rb'))
highest_block = pickle.load(open("../server-python/highest_block.pickle", 'rb'))
print("Creating graphviz...")
# generate_graphviz(block_information)
print("Done.")
# exit()
# block height 0: 6c179f21e6f62b629055d8ab40f454ed02e48b68563913473b857d3638e23b28
origin_block = "6c179f21e6f62b629055d8ab40f454ed02e48b68563913473b857d3638e23b28"
forked_block = "00001d87846888b85e4b9b757b59a936b0ff33d8128518c78efaa092572efbfd" # Put the hash of another tip here to graph it as well
endpoint_block = highest_block[0]['blockHash']
print(endpoint_block)
time_interval = 0.5 # seconds
times_list, mined_buckets = mine_rate_info(endpoint_block, origin_block, block_information, time_interval)
forked_times_list, forked_mined_buckets = mine_rate_info(forked_block, origin_block, block_information, time_interval)
aggregate_buckets = aggregate_info(mined_buckets)
forked_aggregate_buckets = aggregate_info(forked_mined_buckets)
print("Plotting data...")
# line1, = plt.plot(times_list, mined_buckets, label="blocks mined / {}s".format(time_interval))
line2, = plt.plot(times_list, aggregate_buckets, label="total blocks mined")
# line3, = plt.plot(times_list, forked_mined_buckets, label="attacker blocks mined / {}s".format(time_interval))
line4, = plt.plot(times_list, forked_aggregate_buckets, label="attacker total blocks mined")
plt.legend(handles=[line2, line4])
plt.show()
print("Done")
| [
"[email protected]"
]
| |
447215391bd91ac4d5a721c47f8d0298d4eb5b3f | c001d8cff7e634bfa19d682ccdcf5261bc7bf397 | /cotizacionweb/migrations/0005_auto_20160420_1104.py | f9051f65ba22309b3fc40fa1bad989072d8ebdc8 | []
| no_license | yusnelvy/mtvmcotizacionv02 | 4053a6883519901e3652a141ef83c297c5aa0ccd | f0d94faff9c721f25018b7db12a07786508da565 | refs/heads/master | 2021-01-21T12:58:49.014716 | 2016-05-06T20:49:59 | 2016-05-06T20:49:59 | 50,135,715 | 0 | 0 | null | 2016-05-25T12:32:34 | 2016-01-21T20:48:27 | CSS | UTF-8 | Python | false | false | 2,464 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('contenedor', '0005_contenedortipicopormueble_tipo_de_contenido'),
('cotizacionweb', '0004_auto_20160414_1529'),
]
operations = [
migrations.RenameField(
model_name='serviciomueble',
old_name='porcentaje_complejidad',
new_name='cantidad',
),
migrations.RenameField(
model_name='serviciomueble',
old_name='descripcion_monto_servicio',
new_name='descripcion_cantidad',
),
migrations.RemoveField(
model_name='cotizacionhistoricofecha',
name='fecha_actual',
),
migrations.RemoveField(
model_name='cotizacionhistoricofecha',
name='hora_actual',
),
migrations.RemoveField(
model_name='serviciomueble',
name='complejidad_servicio',
),
migrations.RemoveField(
model_name='serviciomueble',
name='incluido',
),
migrations.RemoveField(
model_name='serviciomueble',
name='monto_servicio',
),
migrations.RemoveField(
model_name='serviciomueble',
name='monto_servicio_asignado',
),
migrations.AddField(
model_name='contenedormueble',
name='tipo_de_contenido',
field=models.ForeignKey(to='contenedor.TipoDeContenido', default=1),
preserve_default=False,
),
migrations.AddField(
model_name='cotizacionhistoricofecha',
name='fecha',
field=models.DateTimeField(default='2016-04-01 00:00:00'),
preserve_default=False,
),
migrations.AddField(
model_name='cotizacionservicio',
name='cantidad_servicio',
field=models.DecimalField(max_digits=7, decimal_places=2, default=1),
preserve_default=False,
),
migrations.AddField(
model_name='fechadecotizacion',
name='obligatoria',
field=models.BooleanField(default=None),
),
migrations.AlterField(
model_name='cotizacionestado',
name='fecha_registro',
field=models.DateTimeField(auto_now_add=True),
),
]
| [
"[email protected]"
]
| |
2bc647123df644c429a647698050cb197c682e88 | 5b5a49643c75aa43d5a876608383bc825ae1e147 | /tests/lists/p121_test.py | 22041a3cf5ee7085bd6f9c855959da66c5eaec06 | []
| no_license | rscai/python99 | 281d00473c0dc977f58ba7511c5bcb6f38275771 | 3fa0cb7683ec8223259410fb6ea2967e3d0e6f61 | refs/heads/master | 2020-04-12T09:08:49.500799 | 2019-10-06T07:47:17 | 2019-10-06T07:47:17 | 162,393,238 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 571 | py | from python99.lists.p121 import insert_at, insert_at_mutable
def test_insert_at():
assert insert_at([1, 2, 3, 4, 5, 6], 2, 'a') == [1, 'a', 2, 3, 4, 5, 6]
assert insert_at([1, 2, 3, 4, 5, 6], 1, 'a') == ['a', 1, 2, 3, 4, 5, 6]
assert insert_at([1, 2, 3, 4, 5, 6], 7, 'a') == [1, 2, 3, 4, 5, 6, 'a']
def test_insert_at_mutable():
assert insert_at([1, 2, 3, 4, 5, 6], 2, 'a') == [1, 'a', 2, 3, 4, 5, 6]
assert insert_at([1, 2, 3, 4, 5, 6], 1, 'a') == ['a', 1, 2, 3, 4, 5, 6]
assert insert_at([1, 2, 3, 4, 5, 6], 7, 'a') == [1, 2, 3, 4, 5, 6, 'a']
| [
"[email protected]"
]
| |
f340f6fe2ce9cef2755406e2d7327934041ad8c1 | 6fe477c7b32f0020a5fffe6affbc7546b16ab879 | /healthpoints/src/healthpoints/apps/tracks/migrations/0003_auto__add_field_activity_shard_id__add_field_activity_note_id.py | aca19183adb724bd430c79164d590c788b213d1b | []
| no_license | rootart/healthpoints | cb79cc4b8e3ceb9401eb5894518e026673f98545 | c33f8e2d0d62e66b3e967f3e464097482abebd91 | refs/heads/master | 2021-01-01T05:52:06.661165 | 2014-10-12T05:45:11 | 2014-10-12T05:45:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,555 | py | # -*- coding: utf-8 -*-
from south.utils import datetime_utils as datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Activity.shard_id'
db.add_column(u'tracks_activity', 'shard_id',
self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True),
keep_default=False)
# Adding field 'Activity.note_id'
db.add_column(u'tracks_activity', 'note_id',
self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Activity.shard_id'
db.delete_column(u'tracks_activity', 'shard_id')
# Deleting field 'Activity.note_id'
db.delete_column(u'tracks_activity', 'note_id')
models = {
u'auth.group': {
'Meta': {'object_name': 'Group'},
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': u"orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
u'auth.permission': {
'Meta': {'ordering': "(u'content_type__app_label', u'content_type__model', u'codename')", 'unique_together': "((u'content_type', u'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['contenttypes.ContentType']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
u'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Group']"}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'symmetrical': 'False', 'related_name': "u'user_set'", 'blank': 'True', 'to': u"orm['auth.Permission']"}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
u'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
u'tracks.activity': {
'Meta': {'object_name': 'Activity'},
'average_speed': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'calories': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'distance': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '16', 'decimal_places': '6', 'blank': 'True'}),
'guID': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '255'}),
u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'location_city': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'location_country': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'moving_time': ('timedelta.fields.TimedeltaField', [], {'null': 'True', 'blank': 'True'}),
'note_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'polyline': ('django.db.models.fields.TextField', [], {'null': 'True', 'blank': 'True'}),
'provider': ('django.db.models.fields.CharField', [], {'max_length': '100', 'db_index': 'True'}),
'resource_state': ('django.db.models.fields.PositiveIntegerField', [], {'null': 'True', 'blank': 'True'}),
'route': ('django.contrib.gis.db.models.fields.LineStringField', [], {'null': 'True', 'blank': 'True'}),
'shard_id': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'start_date': ('django.db.models.fields.DateTimeField', [], {'null': 'True', 'blank': 'True'}),
'start_point': ('django.contrib.gis.db.models.fields.PointField', [], {'null': 'True', 'blank': 'True'}),
'total_elevation_gain': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '10', 'decimal_places': '2', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': u"orm['auth.User']"})
}
}
complete_apps = ['tracks'] | [
"[email protected]"
]
| |
bf811162014e14e26b71ed53ffec58e618d594a3 | 2157782cf5875767f8d1fe0bb07243da2e87600d | /send_email/email_helper.py | 5012f4ab74d9a69b947ea3e386bf2d903abaa39f | []
| no_license | mouday/SomeCodeForPython | 9bc79e40ed9ed851ac11ff6144ea080020e01fcd | ddf6bbd8a5bd78f90437ffa718ab7f17faf3c34b | refs/heads/master | 2021-05-09T22:24:47.394175 | 2018-05-11T15:34:22 | 2018-05-11T15:34:22 | 118,750,143 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,112 | py | #email_helper.py
'''
参考:https://www.liaoxuefeng.com/wiki/0014316089557264a6b348958f449949df42a6d3a2e542c000/001432005226355aadb8d4b2f3f42f6b1d6f2c5bd8d5263000
封装成简单邮件发送模块
'''
from email import encoders
from email.header import Header
from email.mime.text import MIMEText
from email.utils import parseaddr, formataddr
import smtplib
debug = True # debug开关
def debug_info(text):
if debug == True:
print(text)
class EmailClient(object):
'邮件发送端初始化类'
def __init__(self, smtp_server):
'初始化服务器地址'
self._smtp_server = smtp_server
self.addrs = [] # 邮件地址列表, 格式(addr, name)
def login(self, from_addr, password, from_name="admin"):
'登录'
self._from_addr = from_addr
self._password = password
self._from_name = from_name
try:
self.server = smtplib.SMTP(self._smtp_server, 25)
#server.set_debuglevel(1)
self.server.login(self._from_addr, self._password)
except Exception as e:
return -1 # 登录失败
debug_info("登录失败")
else:
return 0 # 登录成功
debug_info("登录成功")
def send(self, title, text, to_addr, to_name=None):
'发送邮件'
if to_name == None: to_name=to_addr
try:
# 接受方信息
msg = MIMEText(text, 'plain', 'utf-8')
msg['From'] = self._format_addr('%s<%s>' % (self._from_name,self._from_addr))
msg['To'] = self._format_addr('%s <%s>' % (to_name,to_addr))
msg['Subject'] = Header(title, 'utf-8').encode()
# 发送内容
self.server.sendmail(self._from_addr, to_addr, msg.as_string())
return 0
except Exception as e:
debug_info(e)
return -1
def add_address(self, addr, name=None):
'增加地址到地址列表'
if name==None: name = addr
self.addrs.append((addr, name))
def send_all(self, title, text):
'发送所有人'
success = 0
fail = 0
for addr, name in self.addrs:
ret = self.send(title, text, addr, name)
if ret == 0:
success += 1
else:
fail += 1
return success, fail
def __del__(self):
'析构'
self.server.quit()
def _format_addr(self, s):
'格式化地址'
name, addr = parseaddr(s)
return formataddr((Header(name, 'utf-8').encode(), addr))
if __name__ == '__main__':
email_client=EmailClient("smtp.163.com") # 邮箱服务器地址
email_client.login("username", "password", "name") # 登陆
email_client.add_address("email") # 增加收件人
email_client.add_address("email")
email_client.add_address("email")
# 发送
success, fail = email_client.send_all("邮件标题", "邮件内容,试试看能不能发送出去")
print("success:", success, "fail:", fail) # 返回发送结果
| [
"[email protected]"
]
| |
762824112bf390cf4f8ff8ee2d484e6524fbca21 | c95f245a5252ec1185e13ef5d37ff599dd451fee | /telethon/network/connection/tcpfull.py | fd9fd1cf58e9bd9932053d283a5d676b226f6cd5 | [
"MIT"
]
| permissive | perryyo/Telethon | 6f95ce09ad86a94c44fe697ba6d49df4914cb321 | 0046291254f9c96f8824ff7b42fa695fa3f71fc5 | refs/heads/master | 2020-04-07T17:08:15.994174 | 2019-02-11T07:13:44 | 2019-02-11T07:13:44 | 158,558,142 | 0 | 0 | MIT | 2018-11-21T14:12:22 | 2018-11-21T14:12:21 | null | UTF-8 | Python | false | false | 1,463 | py | import struct
from zlib import crc32
from .connection import Connection
from ...errors import InvalidChecksumError
class ConnectionTcpFull(Connection):
"""
Default Telegram mode. Sends 12 additional bytes and
needs to calculate the CRC value of the packet itself.
"""
def __init__(self, ip, port, *, loop, proxy=None):
super().__init__(ip, port, loop=loop, proxy=proxy)
self._send_counter = 0
async def connect(self, timeout=None, ssl=None):
await super().connect(timeout=timeout, ssl=ssl)
self._send_counter = 0 # Important or Telegram won't reply
def _send(self, data):
# https://core.telegram.org/mtproto#tcp-transport
# total length, sequence number, packet and checksum (CRC32)
length = len(data) + 12
data = struct.pack('<ii', length, self._send_counter) + data
crc = struct.pack('<I', crc32(data))
self._send_counter += 1
self._writer.write(data + crc)
async def _recv(self):
packet_len_seq = await self._reader.readexactly(8) # 4 and 4
packet_len, seq = struct.unpack('<ii', packet_len_seq)
body = await self._reader.readexactly(packet_len - 8)
checksum = struct.unpack('<I', body[-4:])[0]
body = body[:-4]
valid_checksum = crc32(packet_len_seq + body)
if checksum != valid_checksum:
raise InvalidChecksumError(checksum, valid_checksum)
return body
| [
"[email protected]"
]
| |
77b98aa829530524b65763daa60caaacde33ebbb | 6a0ae86bca2d2ece6c92efd5594c0e3b1777ead7 | /EDBRCommon/python/datasets/summer12_MWp_350_gg_cff.py | 990eb65ae0bde2bdcbdbdfc05e23b0bd86d0be75 | []
| no_license | wangmengmeng/ExoDiBosonResonances | c4b5d277f744e1b1986df9317ac60b46d202a29f | bf5d2e79f59ad25c7a11e7f97552e2bf6a283428 | refs/heads/master | 2016-09-06T14:54:53.245508 | 2014-06-05T15:02:37 | 2014-06-05T15:02:37 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,971 | py | import FWCore.ParameterSet.Config as cms
readFiles = cms.untracked.vstring()
source = cms.Source("PoolSource",
noEventSort = cms.untracked.bool(True),
duplicateCheckMode = cms.untracked.string("noDuplicateCheck"),
fileNames = readFiles
)
readFiles.extend([
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_10_1_BYR.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_11_1_mnN.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_12_1_p9d.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_13_1_mZc.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_14_1_I6o.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_15_1_NBO.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_16_1_L2E.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_17_1_WWV.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_18_1_O71.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_19_1_gsK.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_1_1_WXk.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_20_1_J7a.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_21_1_mmN.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_22_1_I5O.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_23_1_Viu.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_24_1_Voj.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_25_1_uHN.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_26_1_uew.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_27_1_Gaa.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_28_1_e7z.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_29_1_FfO.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_2_1_HVa.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_30_1_gOp.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_31_1_V1o.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_32_1_c1S.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_33_1_rI6.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_34_1_JGi.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_35_1_agq.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_36_1_Yfb.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_37_1_TY7.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_38_1_zd0.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_39_1_L7L.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_3_1_Scl.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_40_1_cVm.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_41_1_Wg0.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_42_1_ffs.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_43_1_heS.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_44_1_46A.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_45_1_YhP.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_46_1_OZD.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_47_1_f6v.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_48_1_A3s.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_49_1_bqW.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_4_1_dUY.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_50_1_GNz.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_51_1_bYJ.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_5_1_QW1.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_6_1_U9k.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_7_1_KVy.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_8_1_OQf.root',
'/store/cmst3/group/exovv/mwang/EDBR_PATtuple_edbr_wh_20140210_Summer12MC_WprimeWH_gg_20140213_153713/mwang/EXOWH_Wprime_M350_GENSIM_V2/EDBR_PATtuple_edbr_wh_20140210/0d71bd6eec2b8c7cc5eafcee05a85e30/EXOWH_Wprime_M350_GENSIM_V2__mwang-EXOWH_Wprime_M350_AODSIM_V2-2c74483358b1f8805e5601fc325d256c__USER_9_1_mjs.root',
] )
| [
"[email protected]"
]
| |
548341b3609c2e37fb84bdeb89713f9e20cfe4e5 | 160213f69c7e8e7fe3286a18e5844a043f3316c0 | /sirenorder/wsgi.py | 261723f33581245f1e7bf1b53b5857064b50727c | []
| no_license | wecode-bootcamp-korea/siren-order-backend | beb355fa911075a1912e6600ecadfed15b79cf0b | 7d80994190a8cb3b5b97019f6e5a4e693b442f02 | refs/heads/develop | 2022-12-10T09:38:54.748469 | 2019-07-26T07:50:03 | 2019-07-26T07:50:03 | 196,929,298 | 0 | 4 | null | 2022-12-08T05:53:04 | 2019-07-15T05:22:47 | Python | UTF-8 | Python | false | false | 397 | py | """
WSGI config for sirenorder project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sirenorder.settings')
application = get_wsgi_application()
| [
"[email protected]"
]
| |
5bf1c3fcd512c1e389e2f7280476b3433ecf2119 | c1b8ff60ed4d8c70e703f71b7c96a649a75c0cec | /ostPython1/multuple.py | c6e25d4906e5ab8d55c5aa5fce4761928a3d621c | []
| no_license | deepbsd/OST_Python | 836d4fae3d98661a60334f66af5ba3255a0cda5c | b32f83aa1b705a5ad384b73c618f04f7d2622753 | refs/heads/master | 2023-02-14T17:17:28.186060 | 2023-01-31T02:09:05 | 2023-01-31T02:09:05 | 49,534,454 | 1 | 2 | null | null | null | null | UTF-8 | Python | false | false | 685 | py | #!/usr/bin/env python3
#
#
# multiple.py
#
# Lesson 7: String Formatting
#
# by David S. Jackson
# 11/30/2014
#
# OST Python1: Beginning Python
# for Pat Barton, Instructor
#
"""
takes as data a tuple of two-element tuples, such as ((1,1), 2,2), (12,13),
(4,4), (99,98)). This and/or similar data should be hard-coded (no need for
user input). Loop over the tuple and print out the results of multiplying the
numbers together, and use string formatting to display nicely.
"""
my_tuple = ( (8, 9), (11, 13), (4, 5), (19, 23), (9, 18))
for n1, n2 in my_tuple :
print("{0:2d}{a:^5}{1:2d}{b:>4}{2:4d}".format(n1, n2, n1*n2, a="X", b="="))
| [
"[email protected]"
]
| |
1347ece238e08d92a8903165e9b040ea820981c3 | 9531e597cd3f865cc6b6f780498a18281c2413f8 | /user_notifications/views.py | 82f196088698131ef8e60ab25accfb76388764e8 | []
| no_license | dpitkevics/DevNet | 7133b80ce5d56b9c11aa4c500d530faed7cb13f4 | 98ebc3916346e6c2bda79711a3896f7c2a8e2ac8 | refs/heads/master | 2020-04-15T12:04:00.245848 | 2015-09-14T17:45:39 | 2015-09-14T17:45:39 | 41,320,800 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,663 | py | import json
import redis
from notifications import notify
from django.http import HttpResponse, JsonResponse
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.db.models import Q
from django.db.models.signals import post_save
from django.dispatch import receiver
from notifications.models import Notification
from .serializers import NotificationSerializer
@login_required
def get_notifications(request):
query = request.POST.get('query', None)
notification_serializer_set = []
if query is not None:
notifications = request.user.notifications.order_by('-timestamp').filter(Q(verb__contains=query) | Q(description__contains=query)).exclude(verb="")[:5]
else:
notifications = request.user.notifications.order_by('-timestamp').all().exclude(verb="")[:5]
for notification in notifications:
notification_serializer = NotificationSerializer(notification)
notification_serializer_set.append(notification_serializer.data)
return JsonResponse(notification_serializer_set, safe=False)
@login_required
def send_notification(request):
recipient_username = request.POST.get('recipient_username', None)
if recipient_username:
recipients = User.objects.filter(username=recipient_username)
else:
recipients = User.objects.all()
for recipient in recipients:
notify.send(
request.user,
recipient=recipient,
verb=request.POST.get('verb', ''),
description=request.POST.get('description', '')
)
return HttpResponse(json.dumps({"success": True}), content_type="application/json")
@login_required
def mark_as_read(request):
request.user.notifications.unread().mark_all_as_read()
redis_client = redis.StrictRedis(host='localhost', port=6379, db=0)
for session in request.user.session_set.all():
redis_client.publish(
'notifications.%s' % session.session_key,
json.dumps({"mark_as_read": True, "unread_count": 0})
)
return HttpResponse(json.dumps({"success": True}), content_type="application/json")
@receiver(post_save, sender=Notification)
def on_notification_post_save(sender, **kwargs):
redis_client = redis.StrictRedis(host='localhost', port=6379, db=0)
notification = kwargs['instance']
recipient = notification.recipient
for session in recipient.session_set.all():
redis_client.publish(
'notifications.%s' % session.session_key,
json.dumps(dict(
count=recipient.notifications.unread().count()
))
)
| [
"[email protected]"
]
| |
4d9685bae094c34f6844353f599ed8a19c912a5c | d305e9667f18127e4a1d4d65e5370cf60df30102 | /tests/st/ops/gpu/test_unpack_op.py | 9a0d8cfda90f7c500d8e6fae7395c0f17d50f593 | [
"Apache-2.0",
"MIT",
"Libpng",
"LicenseRef-scancode-proprietary-license",
"LGPL-2.1-only",
"AGPL-3.0-only",
"MPL-2.0-no-copyleft-exception",
"IJG",
"Zlib",
"MPL-1.1",
"BSD-3-Clause",
"BSD-3-Clause-Open-MPI",
"MPL-1.0",
"GPL-2.0-only",
"MPL-2.0",
"BSL-1.0",
"LicenseRef-scancode-unknown-license-reference",
"Unlicense",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause"
]
| permissive | imyzx2017/mindspore_pcl | d8e5bd1f80458538d07ef0a8fc447b552bd87420 | f548c9dae106879d1a83377dd06b10d96427fd2d | refs/heads/master | 2023-01-13T22:28:42.064535 | 2020-11-18T11:15:41 | 2020-11-18T11:15:41 | 313,906,414 | 6 | 1 | Apache-2.0 | 2020-11-18T11:25:08 | 2020-11-18T10:57:26 | null | UTF-8 | Python | false | false | 5,063 | py | # Copyright 2020 Huawei Technologies Co., Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
import numpy as np
import pytest
import mindspore.context as context
import mindspore.nn as nn
import mindspore.ops.operations.array_ops as P
from mindspore import Tensor
from mindspore.common.api import ms_function
from mindspore.common.initializer import initializer
from mindspore.common.parameter import Parameter
class UnpackNet(nn.Cell):
def __init__(self, nptype):
super(UnpackNet, self).__init__()
self.unpack = P.Unpack(axis=3)
self.data_np = np.array([[[[[0, 0],
[0, 1]],
[[0, 0],
[2, 3]]],
[[[0, 0],
[4, 5]],
[[0, 0],
[6, 7]]]],
[[[[0, 0],
[8, 9]],
[[0, 0],
[10, 11]]],
[[[0, 0],
[12, 13]],
[[0, 0],
[14, 15]]]]]).astype(nptype)
self.x1 = Parameter(initializer(Tensor(self.data_np), [2, 2, 2, 2, 2]), name='x1')
@ms_function
def construct(self):
return self.unpack(self.x1)
def unpack(nptype):
context.set_context(mode=context.GRAPH_MODE, device_target='GPU')
unpack_ = UnpackNet(nptype)
output = unpack_()
expect = (np.reshape(np.array([0] * 16).astype(nptype), (2, 2, 2, 2)),
np.arange(2 * 2 * 2 * 2).reshape(2, 2, 2, 2).astype(nptype))
for i, exp in enumerate(expect):
assert (output[i].asnumpy() == exp).all()
def unpack_pynative(nptype):
context.set_context(mode=context.PYNATIVE_MODE, device_target='GPU')
x1 = np.array([[[[[0, 0],
[0, 1]],
[[0, 0],
[2, 3]]],
[[[0, 0],
[4, 5]],
[[0, 0],
[6, 7]]]],
[[[[0, 0],
[8, 9]],
[[0, 0],
[10, 11]]],
[[[0, 0],
[12, 13]],
[[0, 0],
[14, 15]]]]]).astype(nptype)
x1 = Tensor(x1)
expect = (np.reshape(np.array([0] * 16).astype(nptype), (2, 2, 2, 2)),
np.arange(2 * 2 * 2 * 2).reshape(2, 2, 2, 2).astype(nptype))
output = P.Unpack(axis=3)(x1)
for i, exp in enumerate(expect):
assert (output[i].asnumpy() == exp).all()
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_unpack_graph_float32():
unpack(np.float32)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_unpack_graph_float16():
unpack(np.float16)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_unpack_graph_int32():
unpack(np.int32)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_unpack_graph_int16():
unpack(np.int16)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_unpack_graph_uint8():
unpack(np.uint8)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_unpack_graph_bool():
unpack(np.bool)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_unpack_pynative_float32():
unpack_pynative(np.float32)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_unpack_pynative_float16():
unpack_pynative(np.float16)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_unpack_pynative_int32():
unpack_pynative(np.int32)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_unpack_pynative_int16():
unpack_pynative(np.int16)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_unpack_pynative_uint8():
unpack_pynative(np.uint8)
@pytest.mark.level0
@pytest.mark.platform_x86_gpu_training
@pytest.mark.env_onecard
def test_unpack_pynative_bool():
unpack_pynative(np.bool)
| [
"[email protected]"
]
| |
123692f1d83c0d850298be8ebd18dc3df003f4e0 | fb4b70ad38d0fc810cb9ee034c8fb963c079f64b | /easy/Self_Dividing_Numbers.py | 33c51ce1cd72ee84467d7802a1ee8de8713c2bb0 | []
| no_license | ChrisLiu95/Leetcode | 0e14f0a7b7aa557bb2576589da8e73dbeeae8483 | baa3342ebe2600f365b9348455f6342e19866a44 | refs/heads/master | 2021-07-11T12:01:00.249208 | 2018-09-26T21:27:42 | 2018-09-26T21:27:42 | 117,451,180 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,069 | py | """
A self-dividing number is a number that is divisible by every digit it contains.
For example, 128 is a self-dividing number because 128 % 1 == 0, 128 % 2 == 0, and 128 % 8 == 0.
Also, a self-dividing number is not allowed to contain the digit zero.
Given a lower and upper number bound, output a list of every possible self dividing number, including the bounds if possible.
Example 1:
Input:
left = 1, right = 22
Output: [1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 15, 22]
Note:
The boundaries of each input argument are 1 <= left <= right <= 10000.
"""
class Solution(object):
def selfDividingNumbers(self, left, right):
res = []
for num in range(left, right + 1):
flag = True
temp = num
while temp != 0:
if temp % 10 == 0:
flag = False
break
elif num % (temp % 10) != 0:
flag = False
break
temp = temp / 10
if flag:
res.append(num)
return res
| [
"[email protected]"
]
| |
c5726f75c757c38f8cbd21289f63e73ea05370c2 | e23a4f57ce5474d468258e5e63b9e23fb6011188 | /140_gui/pyqt_pyside/examples/PyQt_PySide_book/004_Main components/001_Inscription/171_setScaledContents - toClass.py | 338077f7a2d487be688c7007a97764ffd712864a | []
| no_license | syurskyi/Python_Topics | 52851ecce000cb751a3b986408efe32f0b4c0835 | be331826b490b73f0a176e6abed86ef68ff2dd2b | refs/heads/master | 2023-06-08T19:29:16.214395 | 2023-05-29T17:09:11 | 2023-05-29T17:09:11 | 220,583,118 | 3 | 2 | null | 2023-02-16T03:08:10 | 2019-11-09T02:58:47 | Python | UTF-8 | Python | false | false | 522 | py | from PySide import QtCore, QtGui
import sys
class SampleWindow(QtGui.QWidget):
def __init__(self):
super(SampleWindow, self).__init__()
window.setWindowTitle("Класс QLabel")
window.resize(300, 150)
label = QtGui.QLabel()
label.setText("Текст надписи")
label.setFrameStyle(QtGui.QFrame.Box | QtGui.QFrame.Plain)
label.setPixmap(QtGui.QPixmap("foto.png"))
label.setAutoFillBackground(True)
label.setScaledContents(True)
vbox = QtGui.QVBoxLayout()
vbox.addWidget(label)
window.setLayout(vbox)
| [
"[email protected]"
]
| |
c54e99a0862974e1abc8b7eaf5a168c002dff248 | a5a4cee972e487512275c34f308251e6cc38c2fa | /dev/potential/EamPotential/dev_EamPotential.py | 1587069d5e39deda89368cb54c938837b9a44bfc | [
"MIT"
]
| permissive | eragasa/pypospack | 4f54983b33dcd2dce5b602bc243ea8ef22fee86b | 21cdecaf3b05c87acc532d992be2c04d85bfbc22 | refs/heads/master | 2021-06-16T09:24:11.633693 | 2019-12-06T16:54:02 | 2019-12-06T16:54:02 | 99,282,824 | 4 | 1 | null | null | null | null | UTF-8 | Python | false | false | 804 | py | import pypospack.potential as potential
symbols = ['Ni']
pot = potential.EamPotential(symbols=symbols)
print('pot.potential_type == {}'.format(\
pot.potential_type))
print('pot.symbols == {}'.format(\
pot.symbols))
print('pot.param_names == {}'.format(\
pot.param_names))
print('pot.is_charge == {}'.format(\
pot.is_charge))
print('pot.param == {}'.format(\
pot.param))
print(80*'-')
symbols = ['Ni','Al']
pot = potential.EamPotential(symbols=symbols)
print('pot.potential_type == {}'.format(\
pot.potential_type))
print('pot.symbols == {}'.format(\
pot.symbols))
print('pot.param_names == {}'.format(\
pot.param_names))
print('pot.is_charge == {}'.format(\
pot.is_charge))
print('pot.param == {}'.format(\
pot.param))
| [
"[email protected]"
]
| |
7a529d56ccc005bfccfb9d8c19c6f483390fffa9 | 46bef3a57cb663991387e02f3cc6c0282bd17496 | /ie/si23tinyyolov2/tflite/Tensor.py | 0cc948eaa14ee73dcd9f9a7202d91d57e2d163ea | []
| no_license | k5iogura/vinosyp | 36964f4c51b9d695d46e19d64a49156eaaac0042 | 1ef35532c6ba392761f73504ed787c074781c400 | refs/heads/master | 2020-04-08T04:27:57.905968 | 2019-09-30T07:34:18 | 2019-09-30T07:34:18 | 159,017,659 | 7 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,271 | py | # automatically generated by the FlatBuffers compiler, do not modify
# namespace: tflite
import flatbuffers
class Tensor(object):
__slots__ = ['_tab']
@classmethod
def GetRootAsTensor(cls, buf, offset):
n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
x = Tensor()
x.Init(buf, n + offset)
return x
@classmethod
def TensorBufferHasIdentifier(cls, buf, offset, size_prefixed=False):
return flatbuffers.util.BufferHasIdentifier(buf, offset, b"\x54\x46\x4C\x33", size_prefixed=size_prefixed)
# Tensor
def Init(self, buf, pos):
self._tab = flatbuffers.table.Table(buf, pos)
# Tensor
def Shape(self, j):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
a = self._tab.Vector(o)
return self._tab.Get(flatbuffers.number_types.Int32Flags, a + flatbuffers.number_types.UOffsetTFlags.py_type(j * 4))
return 0
# Tensor
def ShapeAsNumpy(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
return self._tab.GetVectorAsNumpy(flatbuffers.number_types.Int32Flags, o)
return 0
# Tensor
def ShapeLength(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
if o != 0:
return self._tab.VectorLen(o)
return 0
# Tensor
def Type(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(6))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Int8Flags, o + self._tab.Pos)
return 0
# Tensor
def Buffer(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(8))
if o != 0:
return self._tab.Get(flatbuffers.number_types.Uint32Flags, o + self._tab.Pos)
return 0
# Tensor
def Name(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(10))
if o != 0:
return self._tab.String(o + self._tab.Pos)
return None
# Tensor
def Quantization(self):
o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(12))
if o != 0:
x = self._tab.Indirect(o + self._tab.Pos)
from .QuantizationParameters import QuantizationParameters
obj = QuantizationParameters()
obj.Init(self._tab.Bytes, x)
return obj
return None
def TensorStart(builder): builder.StartObject(5)
def TensorAddShape(builder, shape): builder.PrependUOffsetTRelativeSlot(0, flatbuffers.number_types.UOffsetTFlags.py_type(shape), 0)
def TensorStartShapeVector(builder, numElems): return builder.StartVector(4, numElems, 4)
def TensorAddType(builder, type): builder.PrependInt8Slot(1, type, 0)
def TensorAddBuffer(builder, buffer): builder.PrependUint32Slot(2, buffer, 0)
def TensorAddName(builder, name): builder.PrependUOffsetTRelativeSlot(3, flatbuffers.number_types.UOffsetTFlags.py_type(name), 0)
def TensorAddQuantization(builder, quantization): builder.PrependUOffsetTRelativeSlot(4, flatbuffers.number_types.UOffsetTFlags.py_type(quantization), 0)
def TensorEnd(builder): return builder.EndObject()
| [
"[email protected]"
]
| |
2e6ecb54b480a398f319df68538b50b978a06dc3 | f34d3948b707e461151ee33296a61fb23a6d3f44 | /month01/day11/day11/day10_exercise/exercise01.py | 2661ccd6399fb82f85eed30d55de03d907cdb447 | []
| no_license | xiao-a-jian/python-study | f9c4e3ee7a2f9ae83bec6afa7c7b5434e8243ed8 | c8e8071277bcea8463bf6f2e8cd9e30ae0f1ddf3 | refs/heads/master | 2022-06-09T17:44:41.804228 | 2020-05-05T07:48:07 | 2020-05-05T07:48:07 | 256,927,969 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 620 | py | # 定义函数, 删除列表中相同元素(只保留一个)
# list01 = [6, 54, 65, 677, 6, 65, 6, 65]
# 更节省内存
# def delete_duplicates(list_target):
# for r in range(len(list_target) - 1, 0, -1):
# for c in range(r):
# if list_target[r] == list_target[c]:
# del list_target[r]
# break
#
# # 测试
# list01 = [6, 54, 65, 677, 6, 65, 6, 65]
# delete_all(list01)
# print(list01)
# 更简单
def delete_duplicates(list_target):
return set(list_target)
# 测试
list01 = [6, 54, 65, 677, 6, 65, 6, 65]
list01 = delete_duplicates(list01)
print(list01)
| [
"[email protected]"
]
| |
53c9aed9f7a93c6fe201f4664a845b48f15db2f9 | a81c07a5663d967c432a61d0b4a09de5187be87b | /chrome/installer/mac/signing/pipeline.py | 898c0d4542bb3474d654e71a1e4fd5a1355e3897 | [
"LGPL-2.0-or-later",
"MPL-2.0",
"GPL-2.0-only",
"BSD-3-Clause"
]
| permissive | junxuezheng/chromium | c401dec07f19878501801c9e9205a703e8643031 | 381ce9d478b684e0df5d149f59350e3bc634dad3 | refs/heads/master | 2023-02-28T17:07:31.342118 | 2019-09-03T01:42:42 | 2019-09-03T01:42:42 | 205,967,014 | 2 | 0 | BSD-3-Clause | 2019-09-03T01:48:23 | 2019-09-03T01:48:23 | null | UTF-8 | Python | false | false | 14,899 | py | # Copyright 2019 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
The pipeline module orchestrates the entire signing process, which includes:
1. Customizing build products for release channels.
2. Code signing the application bundle and all of its nested code.
3. Producing a packaged DMG.
4. Signing and packaging the installer tools.
"""
import os.path
from . import commands, model, modification, notarize, signing
def _customize_and_sign_chrome(paths, dist_config, dest_dir, signed_frameworks):
"""Does channel customization and signing of a Chrome distribution. The
resulting app bundle is moved into |dest_dir|.
Args:
paths: A |model.Paths| object.
dist_config: A |config.CodeSignConfig| for the |model.Distribution|.
dest_dir: The directory into which the product will be placed when
the operations are completed.
signed_frameworks: A dict that will store paths and change counts of
already-signed inner frameworks keyed by bundle ID. Paths are used
to recycle already-signed frameworks instead of re-signing them.
Change counts are used to verify equivalence of frameworks when
recycling them. Callers can pass an empty dict on the first call,
and reuse the same dict for subsequent calls. This function will
produce and consume entries in the dict. If this sharing is
undesired, pass None instead of a dict.
"""
# Copy the app to sign into the work dir.
commands.copy_files(
os.path.join(paths.input, dist_config.base_config.app_dir), paths.work)
# Customize the app bundle.
modification.customize_distribution(paths, dist_config.distribution,
dist_config)
work_dir_framework_path = os.path.join(paths.work,
dist_config.framework_dir)
if signed_frameworks is not None and dist_config.base_bundle_id in signed_frameworks:
# If the inner framework has already been modified and signed for this
# bundle ID, recycle the existing signed copy without signing a new
# copy. This ensures that bit-for-bit identical input will result in
# bit-for-bit identical signatures not affected by differences in, for
# example, the signature's timestamp. All variants of a product sharing
# the same bundle ID are assumed to have bit-for-bit identical
# frameworks.
#
# This is significant because of how binary diff updates work. Binary
# diffs are built between two successive versions on the basis of their
# inner frameworks being bit-for-bit identical without regard to any
# customizations applied only to the outer app. In order for these to
# apply to all installations regardless of the presence or specific
# values of any app-level customizations, all inner frameworks for a
# single version and base bundle ID must always remain bit-for-bit
# identical, including their signatures.
(signed_framework_path, signed_framework_change_count
) = signed_frameworks[dist_config.base_bundle_id]
actual_framework_change_count = commands.copy_dir_overwrite_and_count_changes(
os.path.join(dest_dir, signed_framework_path),
work_dir_framework_path,
dry_run=False)
if actual_framework_change_count != signed_framework_change_count:
raise ValueError(
'While customizing and signing {} ({}), actual_framework_change_count {} != signed_framework_change_count {}'
.format(dist_config.base_bundle_id,
dist_config.packaging_basename,
actual_framework_change_count,
signed_framework_change_count))
signing.sign_chrome(paths, dist_config, sign_framework=False)
else:
unsigned_framework_path = os.path.join(paths.work,
'modified_unsigned_framework')
commands.copy_dir_overwrite_and_count_changes(
work_dir_framework_path, unsigned_framework_path, dry_run=False)
signing.sign_chrome(paths, dist_config, sign_framework=True)
actual_framework_change_count = commands.copy_dir_overwrite_and_count_changes(
work_dir_framework_path, unsigned_framework_path, dry_run=True)
if signed_frameworks is not None:
dest_dir_framework_path = os.path.join(dest_dir,
dist_config.framework_dir)
signed_frameworks[dist_config.base_bundle_id] = (
dest_dir_framework_path, actual_framework_change_count)
app_path = os.path.join(paths.work, dist_config.app_dir)
commands.make_dir(dest_dir)
commands.move_file(app_path, os.path.join(dest_dir, dist_config.app_dir))
def _staple_chrome(paths, dist_config):
"""Staples all the executable components of the Chrome app bundle.
Args:
paths: A |model.Paths| object.
dist_config: A |config.CodeSignConfig| for the customized product.
"""
parts = signing.get_parts(dist_config)
# Only staple the signed, bundled executables.
part_paths = [
part.path
for part in parts.values()
# TODO(https://crbug.com/979725): Reinstate .xpc bundle stapling once
# the signing environment is on a macOS release that supports
# Xcode 10.2 or newer.
if part.path[-4:] in ('.app',)
]
# Reverse-sort the paths so that more nested paths are stapled before
# less-nested ones.
part_paths.sort(reverse=True)
for part_path in part_paths:
notarize.staple(os.path.join(paths.work, part_path))
def _package_and_sign_dmg(paths, dist_config):
"""Packages, signs, and verifies a DMG for a signed build product.
Args:
paths: A |model.Paths| object.
dist_config: A |config.CodeSignConfig| for the |dist|.
Returns:
The path to the signed DMG file.
"""
dist = dist_config.distribution
dmg_path = _package_dmg(paths, dist, dist_config)
# dmg_identifier is like dmg_name but without the .dmg suffix. If a
# brand code is in use, use the actual brand code instead of the
# name fragment, to avoid leaking the association between brand
# codes and their meanings.
dmg_identifier = dist_config.packaging_basename
if dist.branding_code:
dmg_identifier = dist_config.packaging_basename.replace(
dist.packaging_name_fragment, dist.branding_code)
product = model.CodeSignedProduct(
dmg_path, dmg_identifier, sign_with_identifier=True)
signing.sign_part(paths, dist_config, product)
signing.verify_part(paths, product)
return dmg_path
def _package_dmg(paths, dist, config):
"""Packages a Chrome application bundle into a DMG.
Args:
paths: A |model.Paths| object.
dist: The |model.Distribution| for which the product was customized.
config: The |config.CodeSignConfig| object.
Returns:
A path to the produced DMG file.
"""
packaging_dir = paths.packaging_dir(config)
if dist.channel_customize:
dsstore_file = 'chrome_{}_dmg_dsstore'.format(dist.channel)
icon_file = 'chrome_{}_dmg_icon.icns'.format(dist.channel)
else:
dsstore_file = 'chrome_dmg_dsstore'
icon_file = 'chrome_dmg_icon.icns'
dmg_path = os.path.join(paths.output,
'{}.dmg'.format(config.packaging_basename))
app_path = os.path.join(paths.work, config.app_dir)
# A locally-created empty directory is more trustworthy than /var/empty.
empty_dir = os.path.join(paths.work, 'empty')
commands.make_dir(empty_dir)
# Make the disk image. Don't include any customized name fragments in
# --volname because the .DS_Store expects the volume name to be constant.
# Don't put a name on the /Applications symbolic link because the same disk
# image is used for all languages.
# yapf: disable
commands.run_command([
os.path.join(packaging_dir, 'pkg-dmg'),
'--verbosity', '0',
'--tempdir', paths.work,
'--source', empty_dir,
'--target', dmg_path,
'--format', 'UDBZ',
'--volname', config.app_product,
'--icon', os.path.join(packaging_dir, icon_file),
'--copy', '{}:/'.format(app_path),
'--copy',
'{}/keystone_install.sh:/.keystone_install'.format(packaging_dir),
'--mkdir', '.background',
'--copy',
'{}/chrome_dmg_background.png:/.background/background.png'.format(
packaging_dir),
'--copy', '{}/{}:/.DS_Store'.format(packaging_dir, dsstore_file),
'--symlink', '/Applications:/ ',
])
# yapf: enable
return dmg_path
def _package_installer_tools(paths, config):
"""Signs and packages all the installer tools, which are not shipped to end-
users.
Args:
paths: A |model.Paths| object.
config: The |config.CodeSignConfig| object.
"""
DIFF_TOOLS = 'diff_tools'
tools_to_sign = signing.get_installer_tools(config)
other_tools = (
'dirdiffer.sh',
'dirpatcher.sh',
'dmgdiffer.sh',
'keystone_install.sh',
'pkg-dmg',
)
with commands.WorkDirectory(paths) as paths:
diff_tools_dir = os.path.join(paths.work, DIFF_TOOLS)
commands.make_dir(diff_tools_dir)
for part in tools_to_sign.values():
commands.copy_files(
os.path.join(paths.input, part.path), diff_tools_dir)
part.path = os.path.join(DIFF_TOOLS, os.path.basename(part.path))
signing.sign_part(paths, config, part)
for part in tools_to_sign.values():
signing.verify_part(paths, part)
for tool in other_tools:
commands.copy_files(
os.path.join(paths.packaging_dir(config), tool), diff_tools_dir)
zip_file = os.path.join(paths.output, DIFF_TOOLS + '.zip')
commands.run_command(['zip', '-9ry', zip_file, DIFF_TOOLS],
cwd=paths.work)
def sign_all(orig_paths, config, disable_packaging=False, do_notarization=True):
"""For each distribution in |config|, performs customization, signing, and
DMG packaging and places the resulting signed DMG in |orig_paths.output|.
The |paths.input| must contain the products to customize and sign.
Args:
orig_paths: A |model.Paths| object.
config: The |config.CodeSignConfig| object.
package_dmg: If True, the signed application bundle will be packaged
into a DMG, which will also be signed. If False, the signed app
bundle will be copied to |paths.output|.
do_notarization: If True, the signed application bundle will be sent for
notarization by Apple. The resulting notarization ticket will then
be stapled. If |package_dmg| is also True, the stapled application
will be packaged in the DMG and then the DMG itself will be
notarized and stapled.
"""
with commands.WorkDirectory(orig_paths) as notary_paths:
# First, sign all the distributions and optionally submit the
# notarization requests.
uuids_to_config = {}
signed_frameworks = {}
for dist in config.distributions:
with commands.WorkDirectory(orig_paths) as paths:
dist_config = dist.to_config(config)
do_packaging = (dist.package_as_dmg or
dist.package_as_pkg) and not disable_packaging
# If not packaging and not notarizing, then simply drop the
# signed bundle in the output directory when done signing.
if not do_packaging and not do_notarization:
dest_dir = paths.output
else:
dest_dir = notary_paths.work
dest_dir = os.path.join(dest_dir,
dist_config.packaging_basename)
_customize_and_sign_chrome(paths, dist_config, dest_dir,
signed_frameworks)
# If the build products are to be notarized, ZIP the app bundle
# and submit it for notarization.
if do_notarization:
zip_file = os.path.join(
notary_paths.work,
dist_config.packaging_basename + '.zip')
commands.run_command([
'zip', '--recurse-paths', '--symlinks', '--quiet',
zip_file, dist_config.app_dir
],
cwd=dest_dir)
uuid = notarize.submit(zip_file, dist_config)
uuids_to_config[uuid] = dist_config
# Wait for app notarization results to come back, stapling as they do.
if do_notarization:
for result in notarize.wait_for_results(uuids_to_config.keys(),
config):
dist_config = uuids_to_config[result]
dest_dir = os.path.join(notary_paths.work,
dist_config.packaging_basename)
_staple_chrome(notary_paths.replace_work(dest_dir), dist_config)
# After all apps are optionally notarized, package as required.
if not disable_packaging:
uuids_to_package_path = {}
for dist in config.distributions:
dist_config = dist.to_config(config)
if dist.package_as_dmg:
paths = orig_paths.replace_work(
os.path.join(notary_paths.work,
dist_config.packaging_basename))
dmg_path = _package_and_sign_dmg(paths, dist_config)
if do_notarization:
uuid = notarize.submit(dmg_path, dist_config)
uuids_to_package_path[uuid] = dmg_path
if dist.package_as_pkg:
# TODO(avi): Do packaging as a pkg here.
pass
# Wait for packaging notarization results to come back, stapling as
# they do.
if do_notarization:
for result in notarize.wait_for_results(
uuids_to_package_path.keys(), config):
package_path = uuids_to_package_path[result]
notarize.staple(package_path)
_package_installer_tools(orig_paths, config)
| [
"[email protected]"
]
| |
4cd9fac0659f565ca93a4ac5eb56440c5998707d | b77565a023a88480bb3330b18be929a19775f5dc | /정승호/키로거/solution.py | 570bd7078f7eb72449816e49fd2e0b55166a2674 | []
| no_license | Jeoungseungho/python-coding-study | 5af34bff429e24a93f6af4b0473d793ea2b791ee | 431e02d12d0834c71f423471701a2182f66a3776 | refs/heads/master | 2023-08-11T07:38:09.122123 | 2021-10-06T06:32:44 | 2021-10-06T06:32:44 | 283,200,892 | 20 | 12 | null | 2021-10-06T05:22:50 | 2020-07-28T12:07:21 | Python | UTF-8 | Python | false | false | 591 | py | import sys
input = sys.stdin.readline
N = int(input())
for _ in range(N):
pass_word = input().rstrip()
left_stack = []
right_stack = []
for word in pass_word:
if word == '<':
if left_stack:
right_stack.append(left_stack.pop())
elif word == '>':
if right_stack:
left_stack.append(right_stack.pop())
elif word == '-':
if left_stack:
left_stack.pop()
else: left_stack.append(word)
left_stack.extend(reversed(right_stack))
print(''.join(left_stack))
| [
"[email protected]"
]
| |
d29da2fa6b389a1e61c922b0468ca492e288956d | 3b84c4b7b16ccfd0154f8dcb75ddbbb6636373be | /google-cloud-sdk/lib/googlecloudsdk/third_party/apis/securitycenter/v1p1beta1/resources.py | 204b6a0c852d1fd7a975618ac6a38fa929b91cb3 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
]
| permissive | twistedpair/google-cloud-sdk | 37f04872cf1ab9c9ce5ec692d2201a93679827e3 | 1f9b424c40a87b46656fc9f5e2e9c81895c7e614 | refs/heads/master | 2023-08-18T18:42:59.622485 | 2023-08-15T00:00:00 | 2023-08-15T12:14:05 | 116,506,777 | 58 | 24 | null | 2022-02-14T22:01:53 | 2018-01-06T18:40:35 | Python | UTF-8 | Python | false | false | 2,453 | py | # -*- coding: utf-8 -*- #
# Copyright 2015 Google LLC. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Resource definitions for cloud platform apis."""
import enum
BASE_URL = 'https://securitycenter.googleapis.com/v1p1beta1/'
DOCS_URL = 'https://console.cloud.google.com/apis/api/securitycenter.googleapis.com/overview'
class Collections(enum.Enum):
"""Collections for all supported apis."""
ORGANIZATIONS = (
'organizations',
'organizations/{organizationsId}',
{},
['organizationsId'],
True
)
ORGANIZATIONS_ASSETS = (
'organizations.assets',
'organizations/{organizationsId}/assets/{assetsId}',
{},
['organizationsId', 'assetsId'],
True
)
ORGANIZATIONS_NOTIFICATIONCONFIGS = (
'organizations.notificationConfigs',
'{+name}',
{
'':
'organizations/{organizationsId}/notificationConfigs/'
'{notificationConfigsId}',
},
['name'],
True
)
ORGANIZATIONS_OPERATIONS = (
'organizations.operations',
'{+name}',
{
'':
'organizations/{organizationsId}/operations/{operationsId}',
},
['name'],
True
)
ORGANIZATIONS_SOURCES = (
'organizations.sources',
'{+name}',
{
'':
'organizations/{organizationsId}/sources/{sourcesId}',
},
['name'],
True
)
ORGANIZATIONS_SOURCES_FINDINGS = (
'organizations.sources.findings',
'organizations/{organizationsId}/sources/{sourcesId}/findings/'
'{findingId}',
{},
['organizationsId', 'sourcesId', 'findingId'],
True
)
def __init__(self, collection_name, path, flat_paths, params,
enable_uri_parsing):
self.collection_name = collection_name
self.path = path
self.flat_paths = flat_paths
self.params = params
self.enable_uri_parsing = enable_uri_parsing
| [
"[email protected]"
]
| |
3db27f60c4eb7ce5a20739d242ecf35db354cf90 | c329057d1561b8ffde0cf26677bb932b4c044826 | /py32.py | 25f958ea6850f7c2c14aa2456d3b6012da3874a1 | []
| no_license | kimotot/pe | b3611662110ca8a07b410a8e3d90c412c9decbd3 | 8d12cc64b0f9ad5156e2b1aed0245726acb9a404 | refs/heads/master | 2021-01-19T11:53:17.683814 | 2017-04-03T05:05:09 | 2017-04-03T05:05:09 | 82,271,607 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,341 | py | # coding:UTF-8
import copy
import time
def permutations(origin):
''' 与えられたリスト要素の順列を求める関数
引数はリストなど、イテーラブルなもの
戻値は全ての順列を要素としてリストにしたもの
再帰呼び出し関数'''
if len(origin) == 0:
return [[]]
else:
ans = []
for index,header in enumerate(origin):
new_orign = copy.deepcopy(origin)
del new_orign[index]
for cuder in permutations(new_orign):
cuder.insert(0,header)
ans.append(copy.deepcopy(cuder))
return ans
def permutationsIt(origin):
''' 与えられたリスト要素の順列を求める関数
引数はリストなど、イテーラブルなもの
戻値は全ての順列を要素としてリストにしたもの
再帰呼び出し関数'''
if len(origin) == 0:
yield []
else:
for index, header in enumerate(origin):
new_orign = copy.deepcopy(origin)
del new_orign[index]
for cuder in permutationsIt(new_orign):
cuder.insert(0, header)
yield cuder
def pandegi14(alist):
'''1から9の数字列が、1X4のパンデジタルであるか判定する関数'''
x = alist[0]
y = alist[1]*1000 + alist[2]*100 + alist[3]*10 + alist[4]
z = alist[5]*1000 + alist[6]*100 + alist[7]*10 + alist[8]
if x * y == z:
return True,z
else:
return False,0
def pandegi23(alist):
'''1から9の数字列が、2X3のパンデジタルであるか判定する関数'''
x = alist[0]*10 + alist[1]
y = alist[2]*100 + alist[3]*10 + alist[4]
z = alist[5]*1000 + alist[6]*100 + alist[7]*10 + alist[8]
if x * y == z:
return True,z
else:
return False,0
if __name__ == "__main__":
start = time.time()
s = set()
for n in permutationsIt([1,2,3,4,5,6,7,8,9]):
b,z = pandegi14(n)
if b:
print(14,n)
s.add(z)
b,z = pandegi23(n)
if b:
print(23,n)
s.add(z)
print("総和={0}".format(sum(s)))
elapsed_time = time.time() - start
print("処理時間={0:.4f}".format(elapsed_time))
| [
"[email protected]"
]
| |
ddc0b9c1fe3b67c0a5d6fd069262be7facd56757 | 532c5b6bd09b85e337cf568f6535299a773e15a5 | /Livid_DS1_v2/DS1.py | 08a31086ecd0094b54e892baf9419f88a5949836 | [
"MIT"
]
| permissive | thomasf/LiveRemoteScripts | ac796744c9694887f7a3c5ac3570630870c09054 | 23bb89fc62fce97fc7e46f6a94623ada1f255294 | refs/heads/master | 2021-01-18T09:27:51.838549 | 2016-03-14T05:12:08 | 2016-03-14T05:12:08 | 24,001,076 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 15,395 | py | # by amounra 0216 : http://www.aumhaa.com
# written against Live 9.6 release on 021516
from __future__ import absolute_import, print_function
import Live
import math
import sys
from re import *
from itertools import imap, chain, starmap
from ableton.v2.base import inject, listens, listens_group
from ableton.v2.control_surface import ControlSurface, ControlElement, Layer, Skin, PrioritizedResource, Component, ClipCreator, DeviceBankRegistry
from ableton.v2.control_surface.elements import ComboElement, ButtonMatrixElement, DoublePressElement, MultiElement, DisplayDataSource, SysexElement
from ableton.v2.control_surface.components import M4LInterfaceComponent, ClipSlotComponent, SceneComponent, SessionComponent, TransportComponent, BackgroundComponent, ViewControlComponent, SessionRingComponent, SessionRecordingComponent, SessionNavigationComponent, MixerComponent, PlayableComponent
from ableton.v2.control_surface.components.mixer import simple_track_assigner
from ableton.v2.control_surface.control import control_color
from ableton.v2.control_surface.mode import AddLayerMode, ModesComponent, DelayMode
from ableton.v2.control_surface.elements.physical_display import PhysicalDisplayElement
from ableton.v2.control_surface.components.session_recording import *
from ableton.v2.control_surface.control import PlayableControl, ButtonControl, control_matrix
from aumhaa.v2.base import initialize_debug
from aumhaa.v2.control_surface import SendLividSysexMode, MomentaryBehaviour, ExcludingMomentaryBehaviour, DelayedExcludingMomentaryBehaviour, ShiftedBehaviour, LatchingShiftedBehaviour, FlashingBehaviour
from aumhaa.v2.control_surface.mod_devices import *
from aumhaa.v2.control_surface.mod import *
from aumhaa.v2.control_surface.elements import MonoEncoderElement, MonoBridgeElement, generate_strip_string
from aumhaa.v2.control_surface.elements.mono_button import *
from aumhaa.v2.control_surface.components import MonoDeviceComponent, DeviceNavigator, TranslationComponent, MonoMixerComponent
from aumhaa.v2.control_surface.components.device import DeviceComponent
from aumhaa.v2.control_surface.components.mono_instrument import *
from aumhaa.v2.livid import LividControlSurface, LividSettings, LividRGB
from aumhaa.v2.control_surface.components.fixed_length_recorder import FixedLengthSessionRecordingComponent
from aumhaa.v2.control_surface.components.device import DeviceComponent
from .Map import *
ENCODER_SPEED = [0, 0, 1, 0, 2, 0, 3, 0, 4, 0, 5, 0, 6, 0, 7, 0, 8, 0, 9, 0, 10, 0, 11, 0, 12, 0, 13, 0, 14, 0, 15, 0, 16, 0, 17, 0, 18, 0, 19, 0, 20, 0, 21, 0, 22, 0, 23, 0, 24, 0, 127, 1, 26, 0, 127, 1, 127, 1]
MIDI_NOTE_TYPE = 0
MIDI_CC_TYPE = 1
MIDI_PB_TYPE = 2
MIDI_MSG_TYPES = (MIDI_NOTE_TYPE, MIDI_CC_TYPE, MIDI_PB_TYPE)
MIDI_NOTE_ON_STATUS = 144
MIDI_NOTE_OFF_STATUS = 128
MIDI_CC_STATUS = 176
MIDI_PB_STATUS = 224
def is_device(device):
return (not device is None and isinstance(device, Live.Device.Device) and hasattr(device, 'name'))
def make_pad_translations(chan):
return tuple((x%4, int(x/4), x+16, chan) for x in range(16))
def return_empty():
return []
debug = initialize_debug()
class DS1SessionComponent(SessionComponent):
def set_scene_launch_buttons(self, buttons):
assert(not buttons or buttons.width() == self._session_ring.num_scenes and buttons.height() == 1)
if buttons:
for button, (x, _) in buttons.iterbuttons():
scene = self.scene(x)
debug('setting scene launch for button:', button, 'scene:', scene)
scene.set_launch_button(button)
else:
for x in xrange(self._session_ring.num_scenes):
scene = self.scene(x)
scene.set_launch_button(None)
class DS1SessionNavigationComponent(SessionNavigationComponent):
def set_track_select_dial(self, dial):
self._on_track_select_dial_value.subject = dial
@listens('value')
def _on_track_select_dial_value(self, value):
self._can_bank_left() and self._bank_left() if value == 127 else self._can_bank_right() and self._bank_right()
class DS1TransportComponent(TransportComponent):
def _update_stop_button_color(self):
self._stop_button.color = 'Transport.StopOn' if self._play_toggle.is_toggled else 'Transport.StopOff'
class DS1(LividControlSurface):
_sysex_id = 16
_model_name = 'DS1'
def __init__(self, c_instance):
super(DS1, self).__init__(c_instance)
self._skin = Skin(DS1Colors)
with self.component_guard():
self._define_sysex()
self._setup_controls()
self._setup_background()
self._setup_m4l_interface()
self._setup_session_control()
self._setup_mixer_control()
self._setup_transport_control()
self._setup_device_control()
self._setup_session_recording_component()
self._setup_main_modes()
def _initialize_script(self):
super(DS1, self)._initialize_script()
self._main_modes.set_enabled(True)
self._main_modes.selected_mode = 'Main'
def _initialize_hardware(self):
super(DS1, self)._initialize_hardware()
self.local_control_off.enter_mode()
self.encoder_absolute_mode.enter_mode()
self.encoder_speed_sysex.enter_mode()
def _define_sysex(self):
self.encoder_speed_sysex = SendLividSysexMode(livid_settings = self._livid_settings, call = 'set_encoder_mapping', message = ENCODER_SPEED)
self.encoder_absolute_mode = SendLividSysexMode(livid_settings = self._livid_settings, call = 'set_encoder_encosion_mode', message = [2])
self.local_control_off = SendLividSysexMode(livid_settings = self._livid_settings, call = 'set_local_control', message = [0])
def _setup_controls(self):
is_momentary = True
optimized = True
resource = PrioritizedResource
self._fader = [MonoEncoderElement(mapping_feedback_delay = -1, msg_type = MIDI_CC_TYPE, channel = CHANNEL, identifier = DS1_FADERS[index], name = 'Fader_' + str(index), num = index, script = self, optimized_send_midi = optimized, resource_type = resource) for index in range(8)]
self._dial = [[MonoEncoderElement(mapping_feedback_delay = -1, msg_type = MIDI_CC_TYPE, channel = CHANNEL, identifier = DS1_DIALS[x][y], name = 'Dial_' + str(x) + '_' + str(y), num = x + (y*5), script = self, optimized_send_midi = optimized, resource_type = resource) for x in range(8)] for y in range(5)]
self._side_dial = [MonoEncoderElement(mapping_feedback_delay = -1, msg_type = MIDI_CC_TYPE, channel = CHANNEL, identifier = DS1_SIDE_DIALS[x], name = 'Side_Dial_' + str(x), num = x, script = self, optimized_send_midi = optimized, resource_type = resource) for x in range(4)]
self._encoder = [MonoEncoderElement(mapping_feedback_delay = -1, msg_type = MIDI_CC_TYPE, channel = CHANNEL, identifier = DS1_ENCODERS[x], name = 'Encoder_' + str(x), num = x, script = self, optimized_send_midi = optimized, resource_type = resource) for x in range(4)]
self._encoder_button = [MonoButtonElement(is_momentary = is_momentary, msg_type = MIDI_NOTE_TYPE, channel = CHANNEL, identifier = DS1_ENCODER_BUTTONS[index], name = 'EncoderButton_' + str(index), script = self, skin = self._skin, optimized_send_midi = optimized, resource_type = resource) for index in range(4)]
self._master_fader = MonoEncoderElement(msg_type = MIDI_CC_TYPE, channel = CHANNEL, identifier = DS1_MASTER, name = 'MasterFader', num = 0, script = self, optimized_send_midi = optimized, resource_type = resource)
self._button = [MonoButtonElement(is_momentary = is_momentary, msg_type = MIDI_NOTE_TYPE, channel = CHANNEL, identifier = DS1_BUTTONS[index], name = 'Button_' + str(index), script = self, skin = self._skin, optimized_send_midi = optimized, resource_type = resource) for index in range(16)]
self._grid = [[MonoButtonElement(is_momentary = is_momentary, msg_type = MIDI_NOTE_TYPE, channel = CHANNEL, identifier = DS1_GRID[x][y], name = 'Button_' + str(x) + '_' + str(y), script = self, skin = self._skin, optimized_send_midi = optimized, resource_type = resource) for x in range(3)] for y in range(3)]
self._dummy = [MonoEncoderElement(msg_type = MIDI_CC_TYPE, channel = CHANNEL, identifier = 120+x, name = 'Dummy_Dial_' + str(x), num = x, script = self, optimized_send_midi = optimized, resource_type = resource) for x in range(5)]
self._fader_matrix = ButtonMatrixElement(name = 'FaderMatrix', rows = [self._fader])
self._top_buttons = ButtonMatrixElement(name = 'TopButtonMatrix', rows = [self._button[:8]])
self._bottom_buttons = ButtonMatrixElement(name = 'BottomButtonMatrix', rows = [self._button[8:]])
self._dial_matrix = ButtonMatrixElement(name = 'DialMatrix', rows = self._dial)
self._side_dial_matrix = ButtonMatrixElement(name = 'SideDialMatrix', rows = [self._side_dial])
self._encoder_matrix = ButtonMatrixElement(name = 'EncoderMatrix', rows = [self._encoder])
self._encoder_button_matrix = ButtonMatrixElement(name = 'EncoderButtonMatrix', rows = [self._encoder_button])
self._grid_matrix = ButtonMatrixElement(name = 'GridMatrix', rows = self._grid)
self._selected_parameter_controls = ButtonMatrixElement(name = 'SelectedParameterControls', rows = [self._dummy + self._encoder[:1] + self._encoder[2:]])
def _setup_background(self):
self._background = BackgroundComponent(name = 'Background')
self._background.layer = Layer(priority = 0, fader_matrix = self._fader_matrix,
top_buttons = self._top_buttons,
bottom_buttons = self._bottom_buttons,
dial_matrix = self._dial_matrix,
side_dial_matrix = self._side_dial_matrix,
encoder_button_matrix = self._encoder_button_matrix,
grid_matrix = self._grid_matrix)
self._background.set_enabled(True)
def _setup_autoarm(self):
self._auto_arm = AutoArmComponent(name='Auto_Arm')
self._auto_arm.can_auto_arm_track = self._can_auto_arm_track
def _tracks_to_use(self):
return self.song.visible_tracks + self.song.return_tracks
def _setup_session_control(self):
self._session_ring = SessionRingComponent(num_tracks = 8, num_scenes = 1, tracks_to_use = self._tracks_to_use)
self._session_ring.set_enabled(True)
self._session_navigation = DS1SessionNavigationComponent(name = 'SessionNavigation', session_ring = self._session_ring)
self._session_navigation._vertical_banking.scroll_up_button.color = 'Session.NavigationButtonOn'
self._session_navigation._vertical_banking.scroll_down_button.color = 'Session.NavigationButtonOn'
self._session_navigation._horizontal_banking.scroll_up_button.color = 'Session.NavigationButtonOn'
self._session_navigation._horizontal_banking.scroll_down_button.color = 'Session.NavigationButtonOn'
self._session_navigation.layer = Layer(priority = 4, track_select_dial = ComboElement(control = self._encoder[1], modifier = [self._encoder_button[1]] ), up_button = self._grid[0][1], down_button = self._grid[0][2])
self._session_navigation.set_enabled(False)
self._session = DS1SessionComponent(session_ring = self._session_ring, auto_name = True)
hasattr(self._session, '_enable_skinning') and self._session._enable_skinning()
self._session.layer = Layer(priority = 4, scene_launch_buttons = self._grid_matrix.submatrix[1:2, 1:2])
self._session.clips_layer = AddLayerMode(self._session, Layer(priority = 4, clip_launch_buttons = self._top_buttons, stop_track_clip_buttons = self._bottom_buttons))
self._session.set_enabled(False)
def _setup_mixer_control(self):
self._mixer = MonoMixerComponent(name = 'Mixer', num_returns = 2, tracks_provider = self._session_ring, track_assigner = simple_track_assigner, invert_mute_feedback = True, auto_name = True, enable_skinning = True)
self._mixer.master_strip().set_volume_control(self._master_fader)
self._mixer.set_prehear_volume_control(self._side_dial[3])
self._mixer.layer = Layer(volume_controls = self._fader_matrix, track_select_dial = self._encoder[1])
self._strip = [self._mixer.channel_strip(index) for index in range(8)]
for index in range(8):
self._strip[index].layer = Layer(priority = 4, parameter_controls = self._dial_matrix.submatrix[index:index+1, :])
self._mixer.selected_strip().layer = Layer(priority = 4, parameter_controls = self._selected_parameter_controls)
self._mixer.master_strip().layer = Layer(priority = 4, parameter_controls = self._side_dial_matrix.submatrix[:3, :])
self._mixer.main_layer = AddLayerMode(self._mixer, Layer(priority = 4, solo_buttons = self._bottom_buttons, mute_buttons = self._top_buttons))
self._mixer.select_layer = AddLayerMode(self._mixer, Layer(priority = 4, arm_buttons = self._bottom_buttons, track_select_buttons = self._top_buttons))
self.song.view.selected_track = self._mixer.channel_strip(0)._track
self._mixer.set_enabled(False)
def _setup_transport_control(self):
self._transport = DS1TransportComponent()
self._transport.name = 'Transport'
self._transport._record_toggle.view_transform = lambda value: 'Transport.RecordOn' if value else 'Transport.RecordOff'
self._transport.layer = Layer(priority = 4, stop_button = self._grid[1][0], play_button = self._grid[0][0], record_button = self._grid[2][0])
self._transport.set_enabled(True)
def _setup_device_control(self):
self._device = DeviceComponent(name = 'Device_Component', device_provider = self._device_provider, device_bank_registry = DeviceBankRegistry())
self._device_navigator = DeviceNavigator(self._device_provider, self._mixer, self)
self._device_navigator.name = 'Device_Navigator'
def _setup_session_recording_component(self):
self._clip_creator = ClipCreator()
self._clip_creator.name = 'ClipCreator'
self._recorder = SessionRecordingComponent(self._clip_creator, ViewControlComponent())
self._recorder.set_enabled(True)
self._recorder.layer = Layer(priority = 4, automation_button = self._grid[1][2], record_button = self._grid[2][1],)
def _setup_m4l_interface(self):
self._m4l_interface = M4LInterfaceComponent(controls=self.controls, component_guard=self.component_guard, priority = 10)
self._m4l_interface.name = "M4LInterface"
self.get_control_names = self._m4l_interface.get_control_names
self.get_control = self._m4l_interface.get_control
self.grab_control = self._m4l_interface.grab_control
self.release_control = self._m4l_interface.release_control
def _setup_translations(self):
controls = []
for control in self.controls:
controls.append(control)
self._translations = TranslationComponent(controls, 10)
self._translations.name = 'TranslationComponent'
self._translations.set_enabled(False)
def _setup_main_modes(self):
self._main_modes = ModesComponent(name = 'MainModes')
self._main_modes.add_mode('Main', [self._mixer, self._mixer.main_layer, self._session, self._session_navigation], cycle_mode_button_color = 'ModeButtons.Main')
self._main_modes.add_mode('Select', [self._mixer, self._mixer.select_layer, self._session, self._session_navigation], cycle_mode_button_color = 'ModeButtons.Select')
self._main_modes.add_mode('Clips', [self._mixer, self._session, self._session.clips_layer, self._session_navigation], cycle_mode_button_color = 'ModeButtons.Clips')
self._main_modes.layer = Layer(priority = 4, cycle_mode_button = self._grid[2][2])
self._main_modes.selected_mode = 'Main'
self._main_modes.set_enabled(False)
def _can_auto_arm_track(self, track):
routing = track.current_input_routing
return routing == 'Ext: All Ins' or routing == 'All Ins' or routing.startswith('DS1 Input')
#self._main_modes.selected_mode in ['Sends', 'Device'] and
# a | [
"[email protected]"
]
| |
89855498cc5ffedc6599b095d035b074719742e2 | 0bed1250a4805866f871d037c1bce3e5c8757101 | /MATH PROB/factorsum.py | 49ad0da6391d712e6d693b28e7c0123975692580 | []
| no_license | Srinjana/CC_practice | 13018f5fd09f8a058e7b634a8626668a0058929a | 01793556c1c73e6c4196a0444e8840b5a0e2ab24 | refs/heads/main | 2023-08-02T05:42:49.016104 | 2021-09-20T15:39:24 | 2021-09-20T15:39:24 | 358,312,714 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 578 | py | # for a given number from a list of numbers find the factors and add the factors . If the sum of factors is present in the original list, sort the factors in acsending order
# and print them. If sum not in the original list, print -1.
# Author @Srinjana
def findFactorSum(n):
factor = [1]
for i in range (2 ,n+1):
if i%n ==0:
factor.append(i)
return sum(factor)
inplist = list(map(int, input().strip().split(",")))
flag = 0
for i in inplist:
if findFactorSum(i) in inplist:
flag = 1
print(i)
if flag==0:
print(-1)
| [
"[email protected]"
]
| |
38aef9c63c4f0620b06efcb7670a3ffe1b1044a4 | a8b0266fabd86ff4c1bc86d99a7b91856634f0ba | /cherrypy/test/test_conn.py | 98775a6259afed0d7c52cf36508f5015f2e42776 | []
| no_license | c3pb/wallhackctl | 5a704bc66a035898ed7d490ad6596257fffdc1e8 | 86e9ce09b32149566e50d7d1a880e6a7a86e4616 | refs/heads/master | 2016-09-06T14:57:31.967997 | 2011-02-16T18:54:36 | 2011-02-16T18:54:36 | 1,375,028 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 25,607 | py | """Tests for TCP connection handling, including proper and timely close."""
from cherrypy.test import test
test.prefer_parent_path()
from httplib import HTTPConnection, HTTPSConnection, NotConnected, BadStatusLine
import urllib
import socket
import sys
import time
timeout = 1
import cherrypy
from cherrypy.test import webtest
from cherrypy import _cperror
pov = 'pPeErRsSiIsStTeEnNcCeE oOfF vViIsSiIoOnN'
def setup_server():
def raise500():
raise cherrypy.HTTPError(500)
class Root:
def index(self):
return pov
index.exposed = True
page1 = index
page2 = index
page3 = index
def hello(self):
return "Hello, world!"
hello.exposed = True
def timeout(self, t):
return str(cherrypy.server.httpserver.timeout)
timeout.exposed = True
def stream(self, set_cl=False):
if set_cl:
cherrypy.response.headers['Content-Length'] = 10
def content():
for x in range(10):
yield str(x)
return content()
stream.exposed = True
stream._cp_config = {'response.stream': True}
def error(self, code=500):
raise cherrypy.HTTPError(code)
error.exposed = True
def upload(self):
if not cherrypy.request.method == 'POST':
raise AssertionError("'POST' != request.method %r" %
cherrypy.request.method)
return "thanks for '%s'" % cherrypy.request.body.read()
upload.exposed = True
def custom(self, response_code):
cherrypy.response.status = response_code
return "Code = %s" % response_code
custom.exposed = True
def err_before_read(self):
return "ok"
err_before_read.exposed = True
err_before_read._cp_config = {'hooks.on_start_resource': raise500}
def one_megabyte_of_a(self):
return ["a" * 1024] * 1024
one_megabyte_of_a.exposed = True
cherrypy.tree.mount(Root())
cherrypy.config.update({
'server.max_request_body_size': 1001,
'server.socket_timeout': timeout,
})
from cherrypy.test import helper
class ConnectionCloseTests(helper.CPWebCase):
def test_HTTP11(self):
if cherrypy.server.protocol_version != "HTTP/1.1":
return self.skip()
self.PROTOCOL = "HTTP/1.1"
self.persistent = True
# Make the first request and assert there's no "Connection: close".
self.getPage("/")
self.assertStatus('200 OK')
self.assertBody(pov)
self.assertNoHeader("Connection")
# Make another request on the same connection.
self.getPage("/page1")
self.assertStatus('200 OK')
self.assertBody(pov)
self.assertNoHeader("Connection")
# Test client-side close.
self.getPage("/page2", headers=[("Connection", "close")])
self.assertStatus('200 OK')
self.assertBody(pov)
self.assertHeader("Connection", "close")
# Make another request on the same connection, which should error.
self.assertRaises(NotConnected, self.getPage, "/")
def test_Streaming_no_len(self):
self._streaming(set_cl=False)
def test_Streaming_with_len(self):
self._streaming(set_cl=True)
def _streaming(self, set_cl):
if cherrypy.server.protocol_version == "HTTP/1.1":
self.PROTOCOL = "HTTP/1.1"
self.persistent = True
# Make the first request and assert there's no "Connection: close".
self.getPage("/")
self.assertStatus('200 OK')
self.assertBody(pov)
self.assertNoHeader("Connection")
# Make another, streamed request on the same connection.
if set_cl:
# When a Content-Length is provided, the content should stream
# without closing the connection.
self.getPage("/stream?set_cl=Yes")
self.assertHeader("Content-Length")
self.assertNoHeader("Connection", "close")
self.assertNoHeader("Transfer-Encoding")
self.assertStatus('200 OK')
self.assertBody('0123456789')
else:
# When no Content-Length response header is provided,
# streamed output will either close the connection, or use
# chunked encoding, to determine transfer-length.
self.getPage("/stream")
self.assertNoHeader("Content-Length")
self.assertStatus('200 OK')
self.assertBody('0123456789')
chunked_response = False
for k, v in self.headers:
if k.lower() == "transfer-encoding":
if str(v) == "chunked":
chunked_response = True
if chunked_response:
self.assertNoHeader("Connection", "close")
else:
self.assertHeader("Connection", "close")
# Make another request on the same connection, which should error.
self.assertRaises(NotConnected, self.getPage, "/")
# Try HEAD. See http://www.cherrypy.org/ticket/864.
self.getPage("/stream", method='HEAD')
self.assertStatus('200 OK')
self.assertBody('')
self.assertNoHeader("Transfer-Encoding")
else:
self.PROTOCOL = "HTTP/1.0"
self.persistent = True
# Make the first request and assert Keep-Alive.
self.getPage("/", headers=[("Connection", "Keep-Alive")])
self.assertStatus('200 OK')
self.assertBody(pov)
self.assertHeader("Connection", "Keep-Alive")
# Make another, streamed request on the same connection.
if set_cl:
# When a Content-Length is provided, the content should
# stream without closing the connection.
self.getPage("/stream?set_cl=Yes",
headers=[("Connection", "Keep-Alive")])
self.assertHeader("Content-Length")
self.assertHeader("Connection", "Keep-Alive")
self.assertNoHeader("Transfer-Encoding")
self.assertStatus('200 OK')
self.assertBody('0123456789')
else:
# When a Content-Length is not provided,
# the server should close the connection.
self.getPage("/stream", headers=[("Connection", "Keep-Alive")])
self.assertStatus('200 OK')
self.assertBody('0123456789')
self.assertNoHeader("Content-Length")
self.assertNoHeader("Connection", "Keep-Alive")
self.assertNoHeader("Transfer-Encoding")
# Make another request on the same connection, which should error.
self.assertRaises(NotConnected, self.getPage, "/")
def test_HTTP10_KeepAlive(self):
self.PROTOCOL = "HTTP/1.0"
if self.scheme == "https":
self.HTTP_CONN = HTTPSConnection
else:
self.HTTP_CONN = HTTPConnection
# Test a normal HTTP/1.0 request.
self.getPage("/page2")
self.assertStatus('200 OK')
self.assertBody(pov)
# Apache, for example, may emit a Connection header even for HTTP/1.0
## self.assertNoHeader("Connection")
# Test a keep-alive HTTP/1.0 request.
self.persistent = True
self.getPage("/page3", headers=[("Connection", "Keep-Alive")])
self.assertStatus('200 OK')
self.assertBody(pov)
self.assertHeader("Connection", "Keep-Alive")
# Remove the keep-alive header again.
self.getPage("/page3")
self.assertStatus('200 OK')
self.assertBody(pov)
# Apache, for example, may emit a Connection header even for HTTP/1.0
## self.assertNoHeader("Connection")
class PipelineTests(helper.CPWebCase):
def test_HTTP11_Timeout(self):
# If we timeout without sending any data,
# the server will close the conn with a 408.
if cherrypy.server.protocol_version != "HTTP/1.1":
return self.skip()
self.PROTOCOL = "HTTP/1.1"
# Connect but send nothing.
self.persistent = True
conn = self.HTTP_CONN
conn.auto_open = False
conn.connect()
# Wait for our socket timeout
time.sleep(timeout * 2)
# The request should have returned 408 already.
response = conn.response_class(conn.sock, method="GET")
response.begin()
self.assertEqual(response.status, 408)
conn.close()
# Connect but send half the headers only.
self.persistent = True
conn = self.HTTP_CONN
conn.auto_open = False
conn.connect()
conn.send('GET /hello HTTP/1.1')
conn.send(("Host: %s" % self.HOST).encode('ascii'))
# Wait for our socket timeout
time.sleep(timeout * 2)
# The conn should have already sent 408.
response = conn.response_class(conn.sock, method="GET")
response.begin()
self.assertEqual(response.status, 408)
conn.close()
def test_HTTP11_Timeout_after_request(self):
# If we timeout after at least one request has succeeded,
# the server will close the conn without 408.
if cherrypy.server.protocol_version != "HTTP/1.1":
return self.skip()
self.PROTOCOL = "HTTP/1.1"
# Make an initial request
self.persistent = True
conn = self.HTTP_CONN
conn.putrequest("GET", "/timeout?t=%s" % timeout, skip_host=True)
conn.putheader("Host", self.HOST)
conn.endheaders()
response = conn.response_class(conn.sock, method="GET")
response.begin()
self.assertEqual(response.status, 200)
self.body = response.read()
self.assertBody(str(timeout))
# Make a second request on the same socket
conn._output('GET /hello HTTP/1.1')
conn._output("Host: %s" % self.HOST)
conn._send_output()
response = conn.response_class(conn.sock, method="GET")
response.begin()
self.assertEqual(response.status, 200)
self.body = response.read()
self.assertBody("Hello, world!")
# Wait for our socket timeout
time.sleep(timeout * 2)
# Make another request on the same socket, which should error
conn._output('GET /hello HTTP/1.1')
conn._output("Host: %s" % self.HOST)
conn._send_output()
response = conn.response_class(conn.sock, method="GET")
try:
response.begin()
except:
if not isinstance(sys.exc_info()[1],
(socket.error, BadStatusLine)):
self.fail("Writing to timed out socket didn't fail"
" as it should have: %s" % sys.exc_info()[1])
else:
if response.status != 408:
self.fail("Writing to timed out socket didn't fail"
" as it should have: %s" %
response.read())
conn.close()
# Make another request on a new socket, which should work
self.persistent = True
conn = self.HTTP_CONN
conn.putrequest("GET", "/", skip_host=True)
conn.putheader("Host", self.HOST)
conn.endheaders()
response = conn.response_class(conn.sock, method="GET")
response.begin()
self.assertEqual(response.status, 200)
self.body = response.read()
self.assertBody(pov)
# Make another request on the same socket,
# but timeout on the headers
conn.send('GET /hello HTTP/1.1')
# Wait for our socket timeout
time.sleep(timeout * 2)
response = conn.response_class(conn.sock, method="GET")
try:
response.begin()
except:
if not isinstance(sys.exc_info()[1],
(socket.error, BadStatusLine)):
self.fail("Writing to timed out socket didn't fail"
" as it should have: %s" % sys.exc_info()[1])
else:
self.fail("Writing to timed out socket didn't fail"
" as it should have: %s" %
response.read())
conn.close()
# Retry the request on a new connection, which should work
self.persistent = True
conn = self.HTTP_CONN
conn.putrequest("GET", "/", skip_host=True)
conn.putheader("Host", self.HOST)
conn.endheaders()
response = conn.response_class(conn.sock, method="GET")
response.begin()
self.assertEqual(response.status, 200)
self.body = response.read()
self.assertBody(pov)
conn.close()
def test_HTTP11_pipelining(self):
if cherrypy.server.protocol_version != "HTTP/1.1":
return self.skip()
self.PROTOCOL = "HTTP/1.1"
# Test pipelining. httplib doesn't support this directly.
self.persistent = True
conn = self.HTTP_CONN
# Put request 1
conn.putrequest("GET", "/hello", skip_host=True)
conn.putheader("Host", self.HOST)
conn.endheaders()
for trial in range(5):
# Put next request
conn._output('GET /hello HTTP/1.1')
conn._output("Host: %s" % self.HOST)
conn._send_output()
# Retrieve previous response
response = conn.response_class(conn.sock, method="GET")
response.begin()
body = response.read()
self.assertEqual(response.status, 200)
self.assertEqual(body, "Hello, world!")
# Retrieve final response
response = conn.response_class(conn.sock, method="GET")
response.begin()
body = response.read()
self.assertEqual(response.status, 200)
self.assertEqual(body, "Hello, world!")
conn.close()
def test_100_Continue(self):
if cherrypy.server.protocol_version != "HTTP/1.1":
return self.skip()
self.PROTOCOL = "HTTP/1.1"
self.persistent = True
conn = self.HTTP_CONN
# Try a page without an Expect request header first.
# Note that httplib's response.begin automatically ignores
# 100 Continue responses, so we must manually check for it.
conn.putrequest("POST", "/upload", skip_host=True)
conn.putheader("Host", self.HOST)
conn.putheader("Content-Type", "text/plain")
conn.putheader("Content-Length", "4")
conn.endheaders()
conn.send("d'oh")
response = conn.response_class(conn.sock, method="POST")
version, status, reason = response._read_status()
self.assertNotEqual(status, 100)
conn.close()
# Now try a page with an Expect header...
conn.connect()
conn.putrequest("POST", "/upload", skip_host=True)
conn.putheader("Host", self.HOST)
conn.putheader("Content-Type", "text/plain")
conn.putheader("Content-Length", "17")
conn.putheader("Expect", "100-continue")
conn.endheaders()
response = conn.response_class(conn.sock, method="POST")
# ...assert and then skip the 100 response
version, status, reason = response._read_status()
self.assertEqual(status, 100)
while True:
line = response.fp.readline().strip()
if line:
self.fail("100 Continue should not output any headers. Got %r" % line)
else:
break
# ...send the body
conn.send("I am a small file")
# ...get the final response
response.begin()
self.status, self.headers, self.body = webtest.shb(response)
self.assertStatus(200)
self.assertBody("thanks for 'I am a small file'")
conn.close()
class ConnectionTests(helper.CPWebCase):
def test_readall_or_close(self):
if cherrypy.server.protocol_version != "HTTP/1.1":
return self.skip()
self.PROTOCOL = "HTTP/1.1"
if self.scheme == "https":
self.HTTP_CONN = HTTPSConnection
else:
self.HTTP_CONN = HTTPConnection
# Test a max of 0 (the default) and then reset to what it was above.
old_max = cherrypy.server.max_request_body_size
for new_max in (0, old_max):
cherrypy.server.max_request_body_size = new_max
self.persistent = True
conn = self.HTTP_CONN
# Get a POST page with an error
conn.putrequest("POST", "/err_before_read", skip_host=True)
conn.putheader("Host", self.HOST)
conn.putheader("Content-Type", "text/plain")
conn.putheader("Content-Length", "1000")
conn.putheader("Expect", "100-continue")
conn.endheaders()
response = conn.response_class(conn.sock, method="POST")
# ...assert and then skip the 100 response
version, status, reason = response._read_status()
self.assertEqual(status, 100)
while True:
skip = response.fp.readline().strip()
if not skip:
break
# ...send the body
conn.send("x" * 1000)
# ...get the final response
response.begin()
self.status, self.headers, self.body = webtest.shb(response)
self.assertStatus(500)
# Now try a working page with an Expect header...
conn._output('POST /upload HTTP/1.1')
conn._output("Host: %s" % self.HOST)
conn._output("Content-Type: text/plain")
conn._output("Content-Length: 17")
conn._output("Expect: 100-continue")
conn._send_output()
response = conn.response_class(conn.sock, method="POST")
# ...assert and then skip the 100 response
version, status, reason = response._read_status()
self.assertEqual(status, 100)
while True:
skip = response.fp.readline().strip()
if not skip:
break
# ...send the body
conn.send("I am a small file")
# ...get the final response
response.begin()
self.status, self.headers, self.body = webtest.shb(response)
self.assertStatus(200)
self.assertBody("thanks for 'I am a small file'")
conn.close()
def test_No_Message_Body(self):
if cherrypy.server.protocol_version != "HTTP/1.1":
return self.skip()
self.PROTOCOL = "HTTP/1.1"
# Set our HTTP_CONN to an instance so it persists between requests.
self.persistent = True
# Make the first request and assert there's no "Connection: close".
self.getPage("/")
self.assertStatus('200 OK')
self.assertBody(pov)
self.assertNoHeader("Connection")
# Make a 204 request on the same connection.
self.getPage("/custom/204")
self.assertStatus(204)
self.assertNoHeader("Content-Length")
self.assertBody("")
self.assertNoHeader("Connection")
# Make a 304 request on the same connection.
self.getPage("/custom/304")
self.assertStatus(304)
self.assertNoHeader("Content-Length")
self.assertBody("")
self.assertNoHeader("Connection")
def test_Chunked_Encoding(self):
if cherrypy.server.protocol_version != "HTTP/1.1":
return self.skip()
if (hasattr(self, 'harness') and
"modpython" in self.harness.__class__.__name__.lower()):
# mod_python forbids chunked encoding
return self.skip()
self.PROTOCOL = "HTTP/1.1"
# Set our HTTP_CONN to an instance so it persists between requests.
self.persistent = True
conn = self.HTTP_CONN
# Try a normal chunked request (with extensions)
body = ("8;key=value\r\nxx\r\nxxxx\r\n5\r\nyyyyy\r\n0\r\n"
"Content-Type: application/json\r\n"
"\r\n")
conn.putrequest("POST", "/upload", skip_host=True)
conn.putheader("Host", self.HOST)
conn.putheader("Transfer-Encoding", "chunked")
conn.putheader("Trailer", "Content-Type")
# Note that this is somewhat malformed:
# we shouldn't be sending Content-Length.
# RFC 2616 says the server should ignore it.
conn.putheader("Content-Length", "3")
conn.endheaders()
conn.send(body)
response = conn.getresponse()
self.status, self.headers, self.body = webtest.shb(response)
self.assertStatus('200 OK')
self.assertBody("thanks for 'xx\r\nxxxxyyyyy'")
# Try a chunked request that exceeds server.max_request_body_size.
# Note that the delimiters and trailer are included.
body = "3e3\r\n" + ("x" * 995) + "\r\n0\r\n\r\n"
conn.putrequest("POST", "/upload", skip_host=True)
conn.putheader("Host", self.HOST)
conn.putheader("Transfer-Encoding", "chunked")
conn.putheader("Content-Type", "text/plain")
# Chunked requests don't need a content-length
## conn.putheader("Content-Length", len(body))
conn.endheaders()
conn.send(body)
response = conn.getresponse()
self.status, self.headers, self.body = webtest.shb(response)
self.assertStatus(413)
conn.close()
def test_Content_Length(self):
# Try a non-chunked request where Content-Length exceeds
# server.max_request_body_size. Assert error before body send.
self.persistent = True
conn = self.HTTP_CONN
conn.putrequest("POST", "/upload", skip_host=True)
conn.putheader("Host", self.HOST)
conn.putheader("Content-Type", "text/plain")
conn.putheader("Content-Length", "9999")
conn.endheaders()
response = conn.getresponse()
self.status, self.headers, self.body = webtest.shb(response)
self.assertStatus(413)
self.assertBody("")
conn.close()
def test_598(self):
remote_data_conn = urllib.urlopen('%s://%s:%s/one_megabyte_of_a/' %
(self.scheme, self.HOST, self.PORT,))
buf = remote_data_conn.read(512)
time.sleep(timeout * 0.6)
remaining = (1024 * 1024) - 512
while remaining:
data = remote_data_conn.read(remaining)
if not data:
break
else:
buf += data
remaining -= len(data)
self.assertEqual(len(buf), 1024 * 1024)
self.assertEqual(buf, "a" * 1024 * 1024)
self.assertEqual(remaining, 0)
remote_data_conn.close()
class BadRequestTests(helper.CPWebCase):
def test_No_CRLF(self):
self.persistent = True
conn = self.HTTP_CONN
conn.send('GET /hello HTTP/1.1\n\n')
response = conn.response_class(conn.sock, method="GET")
response.begin()
self.body = response.read()
self.assertBody("HTTP requires CRLF terminators")
conn.close()
conn.connect()
conn.send('GET /hello HTTP/1.1\r\n\n')
response = conn.response_class(conn.sock, method="GET")
response.begin()
self.body = response.read()
self.assertBody("HTTP requires CRLF terminators")
conn.close()
if __name__ == "__main__":
helper.testmain()
| [
"[email protected]"
]
| |
4c2e0128f87a1e1cd437f60867570b90acb4259e | 714a22e87e5ae6a2b670a10437409100015f171b | /meshzoo/__init__.py | 2e6201faacd3e0de9e0015493737a24f245fd3a2 | [
"MIT"
]
| permissive | krober10nd/meshzoo | ce3aa71a8a87a0749df78c6939e7d893a05f91d1 | 5e8b04d81ee5c23887e3d0244273b3d90b2eba9a | refs/heads/master | 2021-02-17T00:04:36.319498 | 2020-02-24T15:52:48 | 2020-02-24T15:52:48 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 811 | py | from meshzoo.__about__ import __author__, __author_email__, __version__, __website__
from .cube import cube
from .helpers import create_edges, plot2d, show2d
from .hexagon import hexagon
from .moebius import moebius
from .rectangle import rectangle
from .simple_arrow import simple_arrow
from .simple_shell import simple_shell
from .sphere import icosa_sphere, octa_sphere, tetra_sphere, uv_sphere
from .triangle import triangle
from .tube import tube
__all__ = [
"__version__",
"__author__",
"__author_email__",
"__website__",
#
"cube",
"hexagon",
"moebius",
"rectangle",
"simple_arrow",
"simple_shell",
"uv_sphere",
"icosa_sphere",
"octa_sphere",
"tetra_sphere",
"triangle",
"tube",
#
"show2d",
"plot2d",
"create_edges",
]
| [
"[email protected]"
]
| |
a292d226c79e5613f782f0ea465e9a03c06b0e6d | de725b742e69f38318c04cd44ac970e7135857a5 | /assets/forms.py | 0173d7e2fd8182e88243ee75191332c9c8f1868c | []
| no_license | haochenxiao666/itelftool | e5c0811b48e01d0eeff13d15d33b89960091960a | 8558dce6d97e7443c95513aa1389910c3902043f | refs/heads/master | 2020-04-14T22:55:46.732111 | 2018-10-18T09:00:44 | 2018-10-18T09:00:44 | 164,183,750 | 1 | 0 | null | 2019-01-05T05:05:32 | 2019-01-05T05:05:31 | null | UTF-8 | Python | false | false | 4,458 | py | #! /usr/bin/env python
# -*- coding: utf-8 -*-
from django import forms
from django.forms.widgets import *
from .models import Asset, IDC, HostGroup, Cabinet
'''
class AssetForm(forms.ModelForm):
class Meta:
model = Asset
exclude = ("id",)
widgets = {
'hostname': TextInput(attrs={'class': 'form-control', 'style': 'width:530px;', 'placeholder': u'必填项'}),
'ip': TextInput(attrs={'class': 'form-control', 'style': 'width:530px;', 'placeholder': u'必填项'}),
'other_ip': TextInput(attrs={'class': 'form-control', 'style': 'width:530px;'}),
'group': Select(attrs={'class': 'form-control', 'style': 'width:530px;'}),
'asset_no': TextInput(attrs={'class': 'form-control', 'style': 'width:530px;'}),
'asset_type': Select(attrs={'class': 'form-control', 'style': 'width:530px;'}),
'status': Select(attrs={'class': 'form-control', 'style': 'width:530px;'}),
'os': TextInput(attrs={'class': 'form-control', 'style': 'width:530px;'}),
'vendor': TextInput(attrs={'class': 'form-control', 'style': 'width:530px;'}),
'up_time': TextInput(attrs={'class': 'form-control', 'style': 'width:530px;'}),
'cpu_model': TextInput(attrs={'class': 'form-control', 'style': 'width:530px;'}),
'cpu_num': TextInput(attrs={'class': 'form-control', 'style': 'width:530px;'}),
'memory': TextInput(attrs={'class': 'form-control', 'style': 'width:530px;'}),
'disk': TextInput(attrs={'class': 'form-control', 'style': 'width:530px;'}),
'sn': TextInput(attrs={'class': 'form-control', 'style': 'width:530px;'}),
'idc': Select(attrs={'class': 'form-control', 'style': 'width:530px;'}),
'position': TextInput(attrs={'class': 'form-control', 'style': 'width:530px;', 'placeholder': u'物理机写位置,虚机写宿主'}),
'memo': Textarea(attrs={'rows': 4, 'cols': 15, 'class': 'form-control', 'style': 'width:530px;'}),
}
'''
class IdcForm(forms.ModelForm):
# def clean(self):
# cleaned_data = super(IdcForm, self).clean()
# value = cleaned_data.get('ids')
# try:
# Idc.objects.get(name=value)
# self._errors['ids'] = self.error_class(["%s的信息已经存在" % value])
# except Idc.DoesNotExist:
# pass
# return cleaned_data
class Meta:
model = IDC
exclude = ("id",)
widgets = {
'ids': TextInput(attrs={'class': 'form-control','style': 'width:450px;'}),
'name': TextInput(attrs={'class': 'form-control','style': 'width:450px;'}),
'address': TextInput(attrs={'class': 'form-control','style': 'width:450px;'}),
'tel': TextInput(attrs={'class': 'form-control','style': 'width:450px;'}),
'contact': TextInput(attrs={'class': 'form-control','style': 'width:450px;'}),
'contact_phone': TextInput(attrs={'class': 'form-control','style': 'width:450px;'}),
'ip_range': TextInput(attrs={'class': 'form-control','style': 'width:450px;'}),
'jigui': TextInput(attrs={'class': 'form-control','style': 'width:450px;'}),
'bandwidth': TextInput(attrs={'class': 'form-control','style': 'width:450px;'}),
}
class GroupForm(forms.ModelForm):
def clean(self):
cleaned_data = super(GroupForm, self).clean()
value = cleaned_data.get('name')
try:
Cabinet.objects.get(name=value)
self._errors['name'] = self.error_class(["%s的信息已经存在" % value])
except Cabinet.DoesNotExist:
pass
return cleaned_data
class Meta:
model = HostGroup
exclude = ("id", )
widgets = {
'name': TextInput(attrs={'class': 'form-control', 'style': 'width:450px;'}),
'desc': Textarea(attrs={'rows': 4, 'cols': 15, 'class': 'form-control', 'style': 'width:450px;'}),
}
class CabinetForm(forms.ModelForm):
class Meta:
model = Cabinet
exclude = ("id", )
widgets = {
'name': TextInput(attrs={'class': 'form-control', 'style': 'width:450px;'}),
'idc': Select(attrs={'class': 'form-control', 'style': 'width:450px;'}),
'desc': Textarea(attrs={'rows': 4, 'cols': 15, 'class': 'form-control', 'style': 'width:450px;'}),
}
| [
"[email protected]"
]
| |
22851ce7e83e2aef32c5620caf346fae7a63488a | e2f507e0b434120e7f5d4f717540e5df2b1816da | /097-yield-2.py | e7bd0b03ff61e85bbac2470ad044513187273938 | []
| no_license | ash/amazing_python3 | 70984bd32ae325380382b1fe692c4b359ef23395 | 64c98940f8a8da18a8bf56f65cc8c8e09bd00e0c | refs/heads/master | 2021-06-23T14:59:37.005280 | 2021-01-21T06:56:33 | 2021-01-21T06:56:33 | 182,626,874 | 76 | 25 | null | null | null | null | UTF-8 | Python | false | false | 330 | py | # Using yield
def f():
for i in range(100):
yield i # not "return"
# Why do you need this assignment?
g1 = f() # generator object
g2 = f() # another generator
print(next(g1)) # value from generator 1
print(next(g2)) # from generator 2
print(next(g1)) # again 1
print(next(g2)) # ...
print(next(g1))
print(next(g2))
| [
"[email protected]"
]
| |
b4f738393d2222e9668e9e7f689cb0733806ef01 | 87dc1f3fc40565138c1e7dc67f1ca7cb84b63464 | /03_Hard/10_Knapsack_Problem/Knapsack_Problem.py | 4151b0f9027ccdcdfa7f6ccba3270994d39e40ac | []
| no_license | CodeInDna/Algo_with_Python | 8424f79fd3051dbc5861ba171ac2b33c76eec8b9 | a238e9e51effe76c530a4e0da7df871e45ec268a | refs/heads/master | 2021-07-25T08:33:42.475255 | 2021-07-20T16:53:48 | 2021-07-20T16:53:48 | 229,921,183 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,045 | py | # ---------------------------------- PROBLEM 10 (HARD)--------------------------------------#
# Knapsack Problem
# You are given an array of arrays. Each subarray in this array holds two integer values and
# represents an item; the first integer is the item's value, and the second integer is the item's
# weight. You are also given an integer representing the maximum capacity of a knapsack that you have.
# Your goal is to fit items in your knapsack, all the while maximizing their combined value. Note that
# the sum of the weights of the items that you pick cannot exceed the knapsack's capacity. Write a
# function that returns the maximized combined value of the items that you should pick, as well as an
# array of the indices of each item picked. Assume that there will only be one combination of items
# that maximizes the total value in the knapsack.
# Sample input: [[1, 2], [4, 3], [5, 6], [6, 7]], 10
# Sample output: [10, [1, 3]]
# ----------------METHOD 01---------------------#
# COMPLEXITY = TIME: O(Nc), SPACE: O(Nc), where N i the number of items and c is the capacity
def knapsackProblem(lst_Items, target_cap):
knapsackValues = [[0 for _ in range(target_cap + 1)] for _ in range(len(lst_Items) + 1)]
for i in range(1, len(lst_Items) + 1):
currentWeight = lst_Items[i - 1][1]
currentValue = lst_Items[i - 1][0]
for cap in range(target_cap + 1):
if currentWeight > cap:
knapsackValues[i][cap] = knapsackValues[i - 1][cap]
else:
knapsackValues[i][cap] = max(knapsackValues[i - 1][cap], knapsackValues[i - 1][cap - currentWeight] + currentValue)
return [knapsackValues[-1][-1], getKnapsackItems(knapsackValues, lst_Items)]
def getKnapsackItems(knapsackValues, items):
result = []
i = len(knapsackValues) - 1
c = len(knapsackValues[0]) - 1
while i > 0:
if knapsackValues[i][c] == knapsackValues[i - 1][c]:
i -= 1
else:
result.append(i - 1)
c -= items[i - 1][1]
i -= 1
if c == 0:
break
return list(reversed(result))
# ----------------METHOD 01---------------------#
| [
"[email protected]"
]
| |
48329fba254e4b07d3988292bb905c7739573dfe | 56f5b2ea36a2258b8ca21e2a3af9a5c7a9df3c6e | /CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/DY1JetsToLL_M-50_TuneZ2Star_8TeV-madgraph/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0_1377544841/HTT_24Jul_newTES_manzoni_Up_Jobs/Job_217/run_cfg.py | 589bc3616bc9bbb96cd7a0726131bdbacc21691c | []
| no_license | rmanzoni/HTT | 18e6b583f04c0a6ca10142d9da3dd4c850cddabc | a03b227073b2d4d8a2abe95367c014694588bf98 | refs/heads/master | 2016-09-06T05:55:52.602604 | 2014-02-20T16:35:34 | 2014-02-20T16:35:34 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,500 | py | import FWCore.ParameterSet.Config as cms
import os,sys
sys.path.append('/afs/cern.ch/user/m/manzoni/summer13/CMGTools/CMSSW_5_3_9/src/CMGTools/H2TauTau/prod/25aug_corrMC/up/mc/DY1JetsToLL_M-50_TuneZ2Star_8TeV-madgraph/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0_1377544841/HTT_24Jul_newTES_manzoni_Up_Jobs')
from base_cfg import *
process.source = cms.Source("PoolSource",
noEventSort = cms.untracked.bool(True),
inputCommands = cms.untracked.vstring('keep *',
'drop cmgStructuredPFJets_cmgStructuredPFJetSel__PAT'),
duplicateCheckMode = cms.untracked.string('noDuplicateCheck'),
fileNames = cms.untracked.vstring('/store/cmst3/user/cmgtools/CMG/DY1JetsToLL_M-50_TuneZ2Star_8TeV-madgraph/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_1973.root',
'/store/cmst3/user/cmgtools/CMG/DY1JetsToLL_M-50_TuneZ2Star_8TeV-madgraph/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_1974.root',
'/store/cmst3/user/cmgtools/CMG/DY1JetsToLL_M-50_TuneZ2Star_8TeV-madgraph/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_1975.root',
'/store/cmst3/user/cmgtools/CMG/DY1JetsToLL_M-50_TuneZ2Star_8TeV-madgraph/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_1976.root',
'/store/cmst3/user/cmgtools/CMG/DY1JetsToLL_M-50_TuneZ2Star_8TeV-madgraph/Summer12_DR53X-PU_S10_START53_V7A-v1/AODSIM/V5_B/PAT_CMG_V5_16_0/cmgTuple_1977.root')
)
| [
"[email protected]"
]
| |
65b1a3b59c48e5efeb1e7f5cdd75370358d4b584 | aebc347ff9a8ad739111f13aa8d4cf9d48a1e4bd | /data/170818/170818_125942_normal_trap_sweep/0027_normal_trap_sweep_E5071C.py | 348f43e7772521a0bba15513497252a31eeb63b7 | []
| no_license | geyang/170422_EonHe_M018V6 | f01a60d3b8a911ba815a0fcc0bf1b6e2aa8f5f17 | ce189e22f99942e46fce84a0dca714888e44bc69 | refs/heads/master | 2021-06-22T21:01:23.257239 | 2017-08-25T09:48:17 | 2017-08-25T09:48:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,742 | py | from data_cache import dataCacheProxy
from time import sleep, time, strftime
from setup_instruments import fridge, seekat, yoko1, nwa, filament
from resonance_fitting import fit_res_gerwin
import numpy as np
import matplotlib.pyplot as plt
from tqdm import tqdm
import os
from shutil import copyfile
import quicktimetrace_4D as fastsweep
this_script = r"0027_normal_trap_sweep_E5071C.py"
expt = 'normal_trap_sweep'
Vtg_bias = 0.00
do_reload_waveforms = True
do_check_f0 = True
t0 = time()
# The start and stop points of the fast sweep
mu_guess = -0.265
def f_mu(mu):
return -1.755 - 7*mu
x = 0.150
Vtrap_parks = 0.180
Vtrap_stops = 0.170
Vtrap_backs = 0.360
Vrg_starts = -1/1.15 * (Vtrap_parks - (f_mu(mu_guess)-0.120)) - x
Vrg_stops = -1/1.15 * (Vtrap_stops -(f_mu(mu_guess)-0.120)) - x
Vrg_backs = -1/1.15 * (Vtrap_backs -(f_mu(mu_guess)-0.120)) - x
N1 = 50
N2 = 25
N3 = 275
N4 = 50
# if np.any(Vtrap_stops == Vtrap_parks):
# raise ValueError("Stop value for Vtrap cannot be equal to Start value for Vtrap!")
if do_reload_waveforms:
# Load waveform into memory of the BNCAWG
for bnc, ch_voltage_params in zip([fastsweep.bnc1, fastsweep.bnc2],
[(0.00, Vrg_starts, Vrg_stops, Vrg_backs), (Vtrap_parks, Vtrap_parks, Vtrap_stops, Vtrap_backs)]):
bnc.set_output(False)
fastsweep.setup_waveforms(bnc, ch_voltage_params, (N1, N2, N3, N4), sweep_time=100E-3)
bnc.set_output(True)
# This is for the get_voltages function:
bnc1_sweeps_up = Vrg_starts > 0
bnc2_sweeps_up = Vtrap_stops < Vtrap_parks
print bnc1_sweeps_up, bnc2_sweeps_up
if __name__ == "__main__":
today = strftime("%y%m%d")
now = strftime("%H%M%S")
expt_path = os.path.join(r'C:\Users\slab\Desktop\Gerwin\data', today, "%s_%s_%s" % (today, now, expt))
print "Saving data in %s" % expt_path
if not os.path.isdir(expt_path):
os.makedirs(expt_path)
sleep(1)
try:
nwa.read_data()
except:
pass
copyfile(os.path.join(r"C:\Users\slab\Desktop\Gerwin\experiment", this_script),
os.path.join(expt_path, this_script))
dataCache = dataCacheProxy(file_path=os.path.join(expt_path, os.path.split(expt_path)[1] + ".h5"))
prefix = "electron_loading"
fridgeParams = {'wait_for_temp': 0.080,
'min_temp_wait_time': 60}
filamentParams = {"amplitude": 4.2,
"offset": -0.5,
"frequency": 113e3,
"duration": 40e-3}
pulseParams = {"delay": .00,
"pulses": 200}
# for yoko in [yoko1]:
# yoko.set_mode('VOLT')
# yoko.set_voltage_limit(10)
# yoko.set_output(True)
def set_voltages(res, trap, res_guard, trap_guard, pinch=None, verbose=True):
if res is not None:
seekat.set_voltage(1, res, verbose=verbose)
if trap is not None:
if bnc2_sweeps_up:
fastsweep.change_sweep_bounds(fastsweep.bnc2, trap+0.10, trap)
else:
fastsweep.change_sweep_bounds(fastsweep.bnc2, trap, trap-0.10)
if res_guard is not None:
if bnc1_sweeps_up:
fastsweep.change_sweep_bounds(fastsweep.bnc1, res_guard+0.10, res_guard)
else:
fastsweep.change_sweep_bounds(fastsweep.bnc1, res_guard, res_guard-0.10)
if trap_guard is not None:
seekat.set_voltage(4, trap_guard, verbose=verbose)
if pinch is not None:
seekat.set_voltage(5, pinch, verbose=verbose)
dataCache.post("voltage_log", np.array([time(),
seekat.get_voltage(1),
fastsweep.get_idle_value(fastsweep.bnc2, sweep_up=bnc2_sweeps_up),
fastsweep.get_idle_value(fastsweep.bnc1, sweep_up=bnc1_sweeps_up),
seekat.get_voltage(4),
seekat.get_voltage(5)]))
def get_voltages(active_electrodes=[np.nan]*5):
ret = active_electrodes
for k in np.where(np.isnan(active_electrodes))[0]:
if k == 1:
# Trap electrode
ret[1] = fastsweep.get_idle_value(fastsweep.bnc2, sweep_up=bnc2_sweeps_up)
elif k == 2:
# Resonator guard electrode
ret[2] = fastsweep.get_idle_value(fastsweep.bnc1, sweep_up=bnc1_sweeps_up)
else:
ret[k] = seekat.get_voltage(k+1)
return ret
filament.setup_driver(**filamentParams)
filament.set_timeout(10000)
print filament.get_id()
def unload():
print "********************"
print "UNLOADING ELECTRONS!"
print "********************"
for k in range(5):
print "\tStep %d"%(k+1)
for volts in [-1, -2, -3, -4, -3, -2, -1]:
set_voltages(volts, volts, volts, volts, verbose=False)
sleep(0.5)
def unload_trap(start=-3.0, stop=-5.0):
print "********************"
print "UNLOADING TRAP ONLY!"
print "********************"
res_init, trap_init, res_guard_init, trap_guard_init, pinch = get_voltages()
vs = list(np.arange(start, stop, -1)) +\
list(np.arange(stop, start, +1))
for k in range(5):
print "\tStep %d"%(k+1)
for volts in vs:
set_voltages(res_init, volts, res_guard_init, trap_guard_init, verbose=False)
sleep(0.5)
set_voltages(res_init, trap_init, res_guard_init, trap_guard_init)
def take_trace_and_save(averages, active_electrodes=[np.nan]*5):
temperature = fridge.get_mc_temperature()
dataCache.post('temperature', temperature)
Vres, Vtrap, Vrg, Vtg, Vpinch = get_voltages(active_electrodes)
dataCache.post('Vres', Vres)
dataCache.post('Vtrap', Vtrap)
dataCache.post('Vrg', Vrg)
dataCache.post('Vtg', Vtg)
dataCache.post('Vpinch', Vpinch)
if averages > 1:
fpts, mags, phases = nwa.take_one_averaged_trace()
else:
fpts, mags, phases = nwa.take_one()
dataCache.post('fpts', fpts)
dataCache.post('mags', mags)
dataCache.post('phases', phases)
dataCache.post('time', time() - t0)
return temperature, fpts, mags
def unload_with_filament():
# First loading to get rid of most electrons!
if load_electrons:
set_voltages(-3.0, -3.0, 0.0, 0.0)
sleep(2.0)
temperature = fridge.get_mc_temperature()
print "Waiting for consistent electron loading temperature of < 550 mK...."
while temperature > 0.550:
temperature = fridge.get_mc_temperature()
sleep(2)
print '.',
filament.fire_filament(100, 0.01)
print "Fired filament!"
sleep(10.0)
def load_resonator_not_trap():
print "\n"
print "********************"
print "LOADING ELECTRONS..."
print "********************"
set_voltages(2.0, -3.0, 0.0, 0.0)
sleep(2.0)
temperature = fridge.get_mc_temperature()
print "Waiting for consistent electron loading temperature of < 550 mK...."
while temperature > 0.550:
temperature = fridge.get_mc_temperature()
sleep(2)
print '.',
filament.fire_filament(57, 0.01)
print "Fired filament!"
sleep(15.0)
def conditional_load(target_deltaf=7.0E6, target_Q=9000):
"""
Fires the filament until a minimum resonance frequency difference has been satisfied
and a Q > 9000 has been satisfied.
:param target_deltaf: Positive frequency difference in Hz
:return:
"""
abs_deltaf = 1e9
Q = 0
# Set both the Q and deltaf threshold to something low if you want it to continue after the first load
while not (Q > target_Q and abs_deltaf > target_deltaf):
unload_with_filament()
load_resonator_not_trap()
set_voltages(0.6, -2.0, None, None)
sleep(2.0)
if calibration_averages > 1:
fpts, mags, phases = nwa.take_one_averaged_trace()
else:
fpts, mags, phases = nwa.take_one()
f0, Q = fit_res_gerwin(fpts, mags, span=3E6)
if np.abs(f0-6.40511e9) - target_deltaf > 1E6:
abs_deltaf = 0
print "Fit result after loading: delta f = %.2f MHz (too high) and Q = %.0f" % (np.abs(f0-6.40511e9)/1E6, Q)
else:
abs_deltaf = np.abs(f0-6.40511e9)
print "Fit result after loading: delta f = %.2f MHz and Q = %.0f" % (abs_deltaf/1E6, Q)
not_settled = True
stable_temp = 0.550
# print "Waiting for temperature to stabilize to %.0f mK..." % (stable_temp * 1E3)
while not_settled:
temperature = fridge.get_mc_temperature()
if temperature <= stable_temp:
not_settled = False
return f0, Q
nwa.set_measure('S21')
calibration_power = -40
calibration_averages = 25
calibration_sweep_points = 401
calibration_ifbw = 10E3
nwa.set_trigger_source('BUS')
nwa.set_format('SLOG')
nwa_calibration_config = {'start' : 6.385E9,
'stop': 6.407E9,
'sweep_points': calibration_sweep_points,
'power': calibration_power,
'averages': calibration_averages,
'ifbw': calibration_ifbw}
nwa.configure(**nwa_calibration_config)
nwa.set_trigger_continuous(True)
fastsweep.setup_calibration_trace(calibration_averages, calibration_sweep_points)
nwa.set_electrical_delay(68E-9)
nwa.set_phase_offset(180.0)
dataCache.set_dict('nwa_calibration_config', nwa_calibration_config)
#dataCache.set_dict('nwa_sweep_config', nwa_sweep_config)
nwa.auto_scale()
# Define the sweep here
v1 = np.arange(0.600, 0.800, 0.050).tolist() + [0.800]
v2 = np.arange(-2.0, 0.0, 0.25).tolist() + np.arange(0.000, 1.0, 0.005).tolist()
Vress = v1 + list(0.80 * np.ones(len(v2)))
Vtraps = np.array(list(-2.0 * np.ones(len(v1))) + v2)
Vresguards = np.zeros(len(v1) + len(v2))
fig = plt.figure(figsize=(8., 12.))
plt.subplot(311)
plt.plot(Vress, 'o', ms=3, color="#23aaff", markeredgecolor="none", label="Resonator")
plt.plot(Vtraps, 'o', ms=3, color="#f4b642", markeredgecolor="none", label='Trap')
plt.plot(Vresguards, 'o', ms=3, color="lawngreen", markeredgecolor="none", label='Res guard')
plt.ylabel("Voltage")
plt.xlim(0, len(Vress))
plt.legend(loc=0, prop={'size' : 8})
if calibration_averages > 1:
fpts, mags, phases = nwa.take_one_averaged_trace()
else:
fpts, mags, phases = nwa.take_one()
plt.subplot(312)
current_vres, current_vtrap, current_vrg, current_vtg, pinch = get_voltages()
plt.text(np.min(fpts) + 0.10*(np.max(fpts)-np.min(fpts)),
np.min(mags) + 0.85*(np.max(mags) - np.min(mags)),
"res, trap, rg, tg = (%.2fV, %.2fV, %.2fV, %.2fV)" % (current_vres, current_vtrap, current_vrg, current_vtg))
plt.plot(fpts, mags)
plt.xlabel('Frequency (Hz)')
plt.ylabel('Magnitude (dB)')
plt.xlim(np.min(fpts), np.max(fpts))
plt.subplot(313)
plt.plot(fpts, phases)
plt.xlabel('Frequency (Hz)')
plt.ylabel('Phase (deg)')
plt.xlim(np.min(fpts), np.max(fpts))
fig.savefig(os.path.join(expt_path, "pre_electron_loading.png"), dpi=200)
# plt.show()
nwa.set_format('MLOG')
nwa.auto_scale()
nwa.set_trigger_source('INT')
nwa.set_trigger_source('BUS')
nwa.set_format('SLOG')
nwa.set_average_state(True)
f0, Q = fit_res_gerwin(fpts, mags, span=2E6)
target_deltaf = 7.00E6
change_readout_freq = True
target_Q = 9200
print "delta f = %.2f MHz and Q = %.0f" % (np.abs(f0 - 6.40511E9) / 1E6, Q)
if do_check_f0 and (not((target_deltaf-0.15E6) < np.abs(f0-6.40511E9) < (target_deltaf+0.05E6)) or Q < target_Q):
unload()
load_electrons = True
change_readout_freq = True
if load_electrons:
# Unload and then load once
f0, Q = conditional_load(target_deltaf=target_deltaf, target_Q=target_Q)
Q_pre_meas = 0
while Q_pre_meas < target_Q:
# Try to adjust the electron density on the resonator:
tries = 0
dataCache.post("f0_pre_meas", f0)
dataCache.post("Q_pre_meas", Q)
abs_deltaf = np.abs(f0 - 6.40511e9)
while (abs_deltaf > target_deltaf) and (tries < 15):
tries += 1
if (abs_deltaf - target_deltaf) < 0.30E6 and tries < 5:
unload_voltage = -0.15
#The first unload shows a really strong decrease.
else:
unload_voltage = -0.25
for i, poo in enumerate([unload_voltage, 0.6]):
set_voltages(poo, None, None, None)
sleep(2.0)
if poo == 0.6:
if calibration_averages > 1:
fpts, mags, phases = nwa.take_one_averaged_trace()
else:
fpts, mags, phases = nwa.take_one()
f0, Q = fit_res_gerwin(fpts, mags, span=3E6)
dataCache.post("f0_pre_meas", f0)
dataCache.post("Q_pre_meas", Q)
abs_deltaf = np.abs(f0 - 6.40511e9)
print "\t%d. delta f = %.2f MHz and Q = %.0f" % (i, abs_deltaf / 1E6, Q)
# Force another reload if f0 doesn't fall in between the following values
Q_pre_meas = Q if (target_deltaf-0.20E6) < np.abs(f0-6.40511E9) < (target_deltaf+0.00E6) else target_Q-1000
# If after adjusting the density the Q falls below 9000, start over
if Q < target_Q:
print "Retrying load, Q < %.0f after adjusting electron density..." % (target_Q)
f0, Q = conditional_load(target_deltaf=target_deltaf, target_Q=target_Q)
# sleep(300)
fridge_temp = fridge.get_mc_temperature()
while fridge_temp > 0.550:
sleep(10.0)
fridge_temp = fridge.get_mc_temperature()
if calibration_averages > 1:
fpts, mags, phases = nwa.take_one_averaged_trace()
else:
fpts, mags, phases = nwa.take_one()
else:
change_readout_freq = True
print "Target deltaf and target Q already satisfied. Starting sweep right away!"
nwa.configure(**nwa_calibration_config)
set_voltages(Vress[0], Vtraps[0], Vresguards[0], Vtg_bias, pinch=-1.00)
f0, Q = fit_res_gerwin(fpts, mags, span=2E6)
if change_readout_freq or np.abs(current_readout_freq - f0) > 150E3:
nwa.set_center_frequency(f0+0.25E6)
print "Drive frequency set to new value: Delta f = %.3f MHz"%((f0-6.40511E9)/1E6)
else:
nwa.set_center_frequency(f0+0.25E6)
print "Drive frequency set to new value: Delta f = %.3f MHz" % ((f0 - 6.40511E9) / 1E6)
p1, p2, p3, constant_Vtrapguard, constant_Vpinch = get_voltages()
# Actual sweep
nwa.set_span(1.5E6)
for k, voltages in tqdm(enumerate(zip(Vress, Vtraps, Vresguards))):
Vres, Vtrap, Vresguard = voltages[0], voltages[1], voltages[2]
print Vres, Vtrap, Vresguard
set_voltages(Vres, Vtrap, Vresguard, None)
active_electrodes = [np.nan]*5
active_electrodes[0] = Vres if Vress[k] == Vress[k-1] else np.nan
active_electrodes[1] = Vtrap if Vtraps[k] == Vtraps[k-1] else np.nan
active_electrodes[2] = Vresguard if Vresguards[k] == Vresguards[k - 1] else np.nan
active_electrodes[3] = constant_Vtrapguard
active_electrodes[4] = constant_Vpinch
T, F, M = take_trace_and_save(calibration_averages, active_electrodes=active_electrodes)
if k == (np.argmin(np.diff(Vress))+1):
print "Adjusting NWA center frequency..."
f0, Q = fit_res_gerwin(F, M, span=2E6)
nwa.set_center_frequency(f0+0.25E6)
nwa.set_format('MLOG')
nwa.auto_scale()
nwa.set_trigger_source('INT')
| [
"[email protected]"
]
| |
9ba139bcaa98b5c19be7ed4307c47d44abe13cff | 2db1a0038d26ccb6adc572b536cb5cd401fd7498 | /tryTen/Lib/site-packages/setuptools/py31compat.py | 0f1753a87be81de04522e4b1d674aee34dfb2e8c | []
| no_license | syurk/labpin | e795c557e7d7bcd4ff449cb9a3de32959a8c4968 | 04070dd5ce6c0a32c9ed03765f4f2e39039db411 | refs/heads/master | 2022-12-12T02:23:54.975797 | 2018-11-29T16:03:26 | 2018-11-29T16:03:26 | 159,692,630 | 0 | 1 | null | 2022-11-19T12:15:55 | 2018-11-29T16:04:20 | Python | UTF-8 | Python | false | false | 1,646 | py | import sys
import unittest
__all__ = ['get_config_vars', 'get_path']
try:
# Python 2.7 or >=3.2
from sysconfig import get_config_vars, get_path
except ImportError:
from distutils.sysconfig import get_config_vars, get_python_lib
def get_path(name):
if name not in ('platlib', 'purelib'):
raise ValueError("Name must be purelib or platlib")
return get_python_lib(name == 'platlib')
try:
# Python >=3.2
from tempfile import TemporaryDirectory
except ImportError:
import shutil
import tempfile
class TemporaryDirectory(object):
"""
Very simple temporary directory context manager.
Will try to delete afterward, but will also ignore OS and similar
errors on deletion.
"""
def __init__(self):
self.name = None # Handle mkdtemp raising an exception
self.name = tempfile.mkdtemp()
def __enter__(self):
return self.name
def __exit__(self, exctype, excvalue, exctrace):
try:
shutil.rmtree(self.name, True)
except OSError: # removal errors are not the only possible
pass
self.name = None
unittest_main = unittest.main
_PY31 = (3, 1) <= sys.version_info[:2] < (3, 2)
if _PY31:
# on Python 3.1, translate testRunner==None to TextTestRunner
# for compatibility with Python 2.6, 2.7, and 3.2+
def unittest_main(*args, **kwargs):
if 'testRunner' in kwargs and kwargs['testRunner'] is None:
kwargs['testRunner'] = unittest.TextTestRunner
return unittest.main(*args, **kwargs)
| [
"[email protected]"
]
| |
2e445e4f56c622f6f5d41a6de407c6c9d92f5b20 | 83b8b30ebb633eecd29ca0a7a20cc43a293c9333 | /tests/basics/subclass_native2_list.py | 9ad0b77ef6dd1c7659097492eec0ebb77099b017 | [
"MIT",
"GPL-1.0-or-later"
]
| permissive | adafruit/circuitpython | 430ec895149d1eb814b505db39b4977a35ee88a7 | 506dca71b0cbb7af749bb51f86b01021db5483b3 | refs/heads/main | 2023-08-21T16:30:46.781068 | 2023-08-20T00:39:44 | 2023-08-20T00:39:44 | 66,166,069 | 3,806 | 1,560 | MIT | 2023-09-14T19:23:51 | 2016-08-20T20:10:40 | C | UTF-8 | Python | false | false | 587 | py | class Base1:
def __init__(self, *args):
print("Base1.__init__", args)
class Clist1(Base1, list):
pass
a = Clist1()
print(len(a))
# Not compliant - list assignment should happen in list.__init__, which is not called
# because there's Base1.__init__, but we assign in list.__new__
#a = Clist1([1, 2, 3])
#print(len(a))
print("---")
class Clist2(list, Base1):
pass
# Not compliant - should call list.__init__, but we don't have it
#a = Clist2()
#print(len(a))
# Not compliant - should call list.__init__, but we don't have it
#a = Clist2([1, 2, 3])
#print(len(a))
| [
"[email protected]"
]
| |
357a3f80b42be32bab6b22b6cf20867bf994258b | f26521284741a1f730e2d52de7426807247e08b6 | /Python/From comment to code/main.py | 2bdaedd8e5de364f363e71f1890a5162d37eb455 | [
"MIT"
]
| permissive | drtierney/hyperskill-problems | 0e6fe8ca418d1af700a5a1b1b2eed1f1f07b8e9e | b74da993f0ac7bcff1cbd5d89a3a1b06b05f33e0 | refs/heads/main | 2021-10-25T07:02:50.838216 | 2021-10-16T19:08:49 | 2021-10-16T19:08:49 | 253,045,232 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 41 | py | # prints "ok" without quotes
print("ok")
| [
"[email protected]"
]
| |
1943cb6e60e864e2e178eb3f9d8f20d70a05a0e5 | f576f0ea3725d54bd2551883901b25b863fe6688 | /sdk/securitydevops/azure-mgmt-securitydevops/generated_samples/azure_dev_ops_connector_get.py | c2e7527e4a6368c1b2ba061346255a98ba052933 | [
"LicenseRef-scancode-generic-cla",
"MIT",
"LGPL-2.1-or-later"
]
| permissive | Azure/azure-sdk-for-python | 02e3838e53a33d8ba27e9bcc22bd84e790e4ca7c | c2ca191e736bb06bfbbbc9493e8325763ba990bb | refs/heads/main | 2023-09-06T09:30:13.135012 | 2023-09-06T01:08:06 | 2023-09-06T01:08:06 | 4,127,088 | 4,046 | 2,755 | MIT | 2023-09-14T21:48:49 | 2012-04-24T16:46:12 | Python | UTF-8 | Python | false | false | 1,627 | py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from azure.identity import DefaultAzureCredential
from azure.mgmt.securitydevops import MicrosoftSecurityDevOps
"""
# PREREQUISITES
pip install azure-identity
pip install azure-mgmt-securitydevops
# USAGE
python azure_dev_ops_connector_get.py
Before run the sample, please set the values of the client ID, tenant ID and client secret
of the AAD application as environment variables: AZURE_CLIENT_ID, AZURE_TENANT_ID,
AZURE_CLIENT_SECRET. For more info about how to get the value, please see:
https://docs.microsoft.com/azure/active-directory/develop/howto-create-service-principal-portal
"""
def main():
client = MicrosoftSecurityDevOps(
credential=DefaultAzureCredential(),
subscription_id="00000000-0000-0000-0000-000000000000",
)
response = client.azure_dev_ops_connector.get(
resource_group_name="westusrg",
azure_dev_ops_connector_name="testconnector",
)
print(response)
# x-ms-original-file: specification/securitydevops/resource-manager/Microsoft.SecurityDevOps/preview/2022-09-01-preview/examples/AzureDevOpsConnectorGet.json
if __name__ == "__main__":
main()
| [
"[email protected]"
]
| |
34a1e201add585aa04483afc9282d5dd3ebcab53 | 60d5ea4f007d49768d250ef394003f554003e4d0 | /python/Linked List/148.Sort List.py | df0485a4e3990534fe5b2bb38f8196871282c2ac | []
| no_license | EvanJamesMG/Leetcode | dd7771beb119ea1250dbb3b147a09053298cd63b | fa638c7fda3802e9f4e0751a2c4c084edf09a441 | refs/heads/master | 2021-01-10T17:11:10.896393 | 2017-12-01T16:04:44 | 2017-12-01T16:04:44 | 46,968,756 | 5 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,437 | py | # coding=utf-8
'''
Sort a linked list in O(n log n) time using constant space complexity.
'''
# Definition for singly-linked list.
class ListNode(object):
def __init__(self, x):
self.val = x
self.next = None
'''
归并排序,最佳时间复杂度O(n log n) 最坏的时间复杂度O(n log n)
由于题目对时间复杂度和空间复杂度要求比较高,所以查看了各种解法,最好的解法就是归并排序,由于
链表在归并操作时并不需要像数组的归并操作那样分配一个临时数组空间,所以这样就是常数空间复杂度了,当然这里不考虑递归所产生的系统调用的栈。
这里涉及到一个链表常用的操作,即快慢指针的技巧。设置slow和fast指针,
开始它们都指向表头,fast每次走两步,slow每次走一步,fast到链表尾部时,slow正好到中间,这样就将链表截为两段。
'''
class Solution:
# @param head, a ListNode
# @return a ListNode
def merge(self, head1, head2):
if head1 == None: return head2
if head2 == None: return head1
dummy = ListNode(0) #归并时,新建一个链表头结点
p = dummy
while head1 and head2:
if head1.val <= head2.val:
p.next = head1
head1 = head1.next
p = p.next
else:
p.next = head2
head2 = head2.next
p = p.next
if head1 == None:
p.next = head2
if head2 == None:
p.next = head1
return dummy.next
def sortList(self, head):
if head == None or head.next == None:
return head
slow = head; fast = head #快慢指针技巧的运用,用来截断链表。
while fast.next and fast.next.next:
slow = slow.next
fast = fast.next.next
head1 = head
head2 = slow.next
slow.next = None #head1和head2为截为两条链表的表头
head1 = self.sortList(head1)
head2 = self.sortList(head2)
head = self.merge(head1, head2)
return head
# Your Codec object will be instantiated and called as such:
# codec = Codec()
# codec.deserialize(codec.serialize(root))
#
# if __name__ == "__main__":
#
# result = Solution().numTrees(3)
# print result
| [
"[email protected]"
]
| |
d8e06bb45fd1f90be90bb45e0c0cc52f227b3187 | 551b75f52d28c0b5c8944d808a361470e2602654 | /huaweicloud-sdk-eps/huaweicloudsdkeps/v1/model/link.py | a9a92750cec83aea4939f5cad6e9fa7a51be5167 | [
"Apache-2.0"
]
| permissive | wuchen-huawei/huaweicloud-sdk-python-v3 | 9d6597ce8ab666a9a297b3d936aeb85c55cf5877 | 3683d703f4320edb2b8516f36f16d485cff08fc2 | refs/heads/master | 2023-05-08T21:32:31.920300 | 2021-05-26T08:54:18 | 2021-05-26T08:54:18 | 370,898,764 | 0 | 0 | NOASSERTION | 2021-05-26T03:50:07 | 2021-05-26T03:50:07 | null | UTF-8 | Python | false | false | 3,044 | py | # coding: utf-8
import pprint
import re
import six
class Link:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'href': 'str',
'rel': 'str'
}
attribute_map = {
'href': 'href',
'rel': 'rel'
}
def __init__(self, href=None, rel=None):
"""Link - a model defined in huaweicloud sdk"""
self._href = None
self._rel = None
self.discriminator = None
self.href = href
self.rel = rel
@property
def href(self):
"""Gets the href of this Link.
API的URL地址。
:return: The href of this Link.
:rtype: str
"""
return self._href
@href.setter
def href(self, href):
"""Sets the href of this Link.
API的URL地址。
:param href: The href of this Link.
:type: str
"""
self._href = href
@property
def rel(self):
"""Gets the rel of this Link.
self。
:return: The rel of this Link.
:rtype: str
"""
return self._rel
@rel.setter
def rel(self, rel):
"""Sets the rel of this Link.
self。
:param rel: The rel of this Link.
:type: str
"""
self._rel = rel
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, Link):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
]
| |
0508b18ea031c12502a6dff30485a63fa71a0660 | d17a8870ff8ac77b82d0d37e20c85b23aa29ca74 | /lite/tests/unittest_py/pass/common/test_conv_scale_fuse_pass_base.py | a071233f2ff5d5725c9fc9aede18f373c5baff9c | [
"Apache-2.0"
]
| permissive | PaddlePaddle/Paddle-Lite | 4ab49144073451d38da6f085a8c56822caecd5b2 | e241420f813bd91f5164f0d9ee0bc44166c0a172 | refs/heads/develop | 2023-09-02T05:28:14.017104 | 2023-09-01T10:32:39 | 2023-09-01T10:32:39 | 104,208,128 | 2,545 | 1,041 | Apache-2.0 | 2023-09-12T06:46:10 | 2017-09-20T11:41:42 | C++ | UTF-8 | Python | false | false | 3,376 | py | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
sys.path.append('..')
sys.path.append('.')
from program_config import TensorConfig, ProgramConfig, OpConfig, CxxConfig, TargetType, PrecisionType, DataLayoutType, Place
import numpy as np
from functools import partial
from typing import Optional, List, Callable, Dict, Any, Set
from test_conv_util import UpdatePaddingAndDilation, ConvOutputSize
import unittest
import hypothesis
from hypothesis import given, settings, seed, example, assume, reproduce_failure
import hypothesis.strategies as st
def sample_program_configs(draw):
in_shape = draw(
st.lists(
st.integers(
min_value=1, max_value=64), min_size=4, max_size=4))
weight_shape = draw(
st.lists(
st.integers(
min_value=1, max_value=64), min_size=4, max_size=4))
paddings = draw(st.sampled_from([[1, 2], [4, 2]]))
dilations = draw(st.sampled_from([[1, 1]]))
groups = draw(st.sampled_from([1, 2, in_shape[1]]))
padding_algorithm = draw(st.sampled_from(["VALID", "SAME"]))
strides = draw(st.sampled_from([[1, 1], [2, 2]]))
scale = draw(st.floats(min_value=0.5, max_value=5))
scale_bias = draw(st.floats(min_value=0.0, max_value=1.0))
assume(in_shape[1] == weight_shape[1] * groups)
assume(weight_shape[0] % groups == 0)
paddings_, dilations_ = UpdatePaddingAndDilation(
in_shape, weight_shape, paddings, dilations, groups, padding_algorithm,
strides)
out_shape = [in_shape[0], weight_shape[0]]
oh, ow = ConvOutputSize(in_shape, weight_shape, dilations_, paddings_,
strides)
out_shape = out_shape + [oh, ow]
assume(oh > 0 and ow > 0)
conv_op = OpConfig(
type="conv2d",
inputs={
"Input": ["input_data"],
"Filter": ["weight_data"],
"Bias": ["conv_bias"]
},
outputs={"Output": ["conv_output_data"]},
attrs={
"data_format": 'nchw',
"dilations": dilations,
"padding_algorithm": padding_algorithm,
"groups": groups,
"paddings": paddings,
"strides": strides
})
scale_op = OpConfig(
type="scale",
inputs={"X": ["conv_output_data"]},
outputs={"Out": ["output_data"]},
attrs={"scale": scale,
"bias": scale_bias,
"bias_after_scale": True})
ops = [conv_op, scale_op]
program_config = ProgramConfig(
ops=ops,
weights={
"conv_bias": TensorConfig(shape=[weight_shape[0]]),
"weight_data": TensorConfig(shape=weight_shape)
},
inputs={"input_data": TensorConfig(shape=in_shape)},
outputs=["output_data"])
return program_config
| [
"[email protected]"
]
| |
fec483ec7ffc645dc6d83b08f1f7592805d9a5fc | f0d713996eb095bcdc701f3fab0a8110b8541cbb | /Jx4mjwEoFdfYuF9ky_10.py | a723d6a308abbe962a25372403471fc9bbe9f518 | []
| no_license | daniel-reich/turbo-robot | feda6c0523bb83ab8954b6d06302bfec5b16ebdf | a7a25c63097674c0a81675eed7e6b763785f1c41 | refs/heads/main | 2023-03-26T01:55:14.210264 | 2021-03-23T16:08:01 | 2021-03-23T16:08:01 | 350,773,815 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 572 | py | """
Write a function that takes an integer and:
* If the number is a multiple of 3, return `"Hello"`.
* If the number is a multiple of 5, return `"World"`.
* If the number is a multiple of both 3 and 5, return `"Hello World"`.
### Examples
hello_world(3) ➞ "Hello"
hello_world(5) ➞ "World"
hello_world(15) ➞ "Hello World"
### Notes
Don't forget to `return` the result.
"""
def hello_world(num):
if num%15==0:
return ("Hello World")
elif num%5==0:
return ("World")
elif num%3==0:
return ("Hello")
| [
"[email protected]"
]
| |
3ecf9b834c4eb9b27f4030875f86d478ca91f7a7 | f8dd1dfb0f81de16b9c8f681c85c6995b63ce037 | /tensorflow/contrib/estimator/__init__.py | 6b9f9575b606f1822d760e8597c55994dd8af04c | [
"Apache-2.0"
]
| permissive | DandelionCN/tensorflow | 74688926778ae06da1f406967baf6b251b3f3c4e | 1712002ad02f044f7569224bf465e0ea00e6a6c4 | refs/heads/master | 2020-03-06T19:10:37.847848 | 2018-03-27T17:11:49 | 2018-03-27T17:11:49 | 127,022,134 | 1 | 0 | Apache-2.0 | 2018-03-27T17:24:51 | 2018-03-27T17:24:51 | null | UTF-8 | Python | false | false | 2,130 | py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Experimental utilities re:tf.estimator.*."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
# pylint: disable=unused-import,line-too-long,wildcard-import
from tensorflow.contrib.estimator.python.estimator.dnn import *
from tensorflow.contrib.estimator.python.estimator.dnn_linear_combined import *
from tensorflow.contrib.estimator.python.estimator.extenders import *
from tensorflow.contrib.estimator.python.estimator.head import *
from tensorflow.contrib.estimator.python.estimator.linear import *
from tensorflow.contrib.estimator.python.estimator.logit_fns import *
from tensorflow.contrib.estimator.python.estimator.multi_head import *
from tensorflow.contrib.estimator.python.estimator.replicate_model_fn import *
from tensorflow.python.util.all_util import remove_undocumented
# pylint: enable=unused-import,line-too-long,wildcard-import
_allowed_symbols = [
'add_metrics',
'binary_classification_head',
'clip_gradients_by_norm',
'forward_features',
'multi_class_head',
'multi_head',
'multi_label_head',
'poisson_regression_head',
'regression_head',
'DNNEstimator',
'DNNLinearCombinedEstimator',
'LinearEstimator',
'call_logit_fn',
'dnn_logit_fn_builder',
'linear_logit_fn_builder',
'replicate_model_fn',
'TowerOptimizer',
]
remove_undocumented(__name__, allowed_exception_list=_allowed_symbols)
| [
"[email protected]"
]
| |
4d1e52f60ebc4c8d4a60d85a6e0d46289da1a4c4 | f176975a314b6f8f4c7b931c6057caf20988d12d | /problems/uri_2448_postman/uri_2448_postman.py | dbf9f4c68510cb044c5e4c8853107f7c203b51a4 | []
| no_license | fgmacedo/problems_ads | 4b3226307e66a37fd1848dcc25f3fa6c78567d98 | d510a9f8788f99c2559efddd54235cb3a134989a | refs/heads/main | 2023-05-20T03:29:57.814018 | 2021-06-15T01:54:56 | 2021-06-15T01:54:56 | 352,163,858 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 394 | py | #!/usr/bin/env python3
import sys
rl = sys.stdin.readline
rl() # discart n, m info
houses = {x: idx for idx, x in enumerate(rl().split())}
cum_time = 0
current_house_index = 0
for order in rl().split():
order_house_index = houses[order]
cum_time = cum_time + abs(order_house_index - current_house_index)
current_house_index = order_house_index
sys.stdout.write(f"{cum_time}\n")
| [
"[email protected]"
]
| |
3aa4f597847a981fc4c28f61c442c768e551b919 | 2d3aba0bf1d3a5e018ded78218859b31dd0930dd | /3.문자열/str_to_int.py | 253d92331801b779fa6170d23a73a965f2dfaee0 | []
| no_license | CS-for-non-CS/Data-Structure | 7018203de7d14a0be7da2308963082b93fac8e21 | efce4c13578bd3d143aa570e9317c505b6424c40 | refs/heads/master | 2022-12-17T14:04:05.521164 | 2020-09-21T02:18:24 | 2020-09-21T02:18:24 | 297,205,549 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 400 | py |
str1 = "123"
str2 = "12.3"
print(int(str1),type(int(str1))) # 123
print(float(str2),type(float(str2))) # 12.3
str3 = "1+2"
print(str3)
print(repr(str3))
print(eval(str3))
print(eval(repr(str3)))
print(eval(eval(repr(str3))))
num1 = 123
num2 = 12.3
print(str(num1),type(str(num1)))
print(repr(num1),type(repr(num1)))
print(str(num2),type(str(num2)))
print(repr(num2),type(repr(num2))) | [
"[email protected]"
]
| |
4ffe88ba899c6533dbf898c44501f57ee3a17dcc | 714b28c006b3c60aa87714f8777a37486b94e995 | /accounts/migrations/0006_auto_20210522_1401.py | 992a110f8f0095db91399a635e9b3b4465af91f9 | []
| no_license | kyrios213/django_tutorial | 3f0bdce5c0e5faa4f7e08a238ac6d77bba35c92e | 771d209c4b198df9361254deefd1c9a49c4a0746 | refs/heads/main | 2023-04-25T23:11:06.356823 | 2021-05-30T05:31:32 | 2021-05-30T05:31:32 | 368,026,339 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 561 | py | # Generated by Django 3.2.3 on 2021-05-22 06:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('accounts', '0005_auto_20210519_1035'),
]
operations = [
migrations.AddField(
model_name='order',
name='note',
field=models.CharField(max_length=255, null=True),
),
migrations.AlterField(
model_name='product',
name='description',
field=models.TextField(blank=True, null=True),
),
]
| [
"[email protected]"
]
| |
decc0276a133d3ca4d2bfdc0f34fc1ff7ee92055 | a1730de4b50c17ecd388a995a1526c2eab80cb7d | /Plugins/Aspose-Cells-Java-for-Python/setup.py | 49e93716140f94069b1d526135d2a7a8348415f5 | [
"MIT"
]
| permissive | aspose-cells/Aspose.Cells-for-Java | 2dcba41fc99b0f4b3c089f2ff1a3bcd32591eea1 | 42d501da827058d07df7399ae104bb2eb88929c3 | refs/heads/master | 2023-09-04T21:35:15.198721 | 2023-08-10T09:26:41 | 2023-08-10T09:26:41 | 2,849,714 | 133 | 89 | MIT | 2023-03-07T09:39:29 | 2011-11-25T13:16:33 | Java | UTF-8 | Python | false | false | 705 | py | __author__ = 'fahadadeel'
from setuptools import setup, find_packages
setup(
name = 'aspose-cells-java-for-python',
packages = find_packages(),
version = '1.0',
description = 'Aspose.cells Java for Python is a project that demonstrates / provides the Aspose.Cells for Java API usage examples in Python.',
author='Fahad Adeel',
author_email='[email protected]',
url='https://github.com/asposecells/Aspose_Cells_Java/tree/master/Plugins/Aspose-Cells-Java-for-Python',
classifiers=[
'Programming Language :: Python',
'Programming Language :: Python :: 2',
'License :: OSI Approved :: MIT License',
'Operating System :: OS Independent'
]
)
| [
"[email protected]"
]
| |
d615b760898802dc9155d05c5fee311838b3ece0 | 485be21ebe0a956b7f4a681968e160a463903ecc | /KnowledgedRank/BoePRFReranker.py | 59319910dfd622e0a334dfd716a1ba920c9b8fb2 | []
| no_license | xiongchenyan/cxPyLib | e49da79345006d75a4261a8bbd4cc9a7f730fad2 | 8d87f5a872458d56276a2a2b0533170ede4d5851 | refs/heads/master | 2021-01-10T20:43:20.147286 | 2016-01-14T04:02:45 | 2016-01-14T04:02:45 | 17,610,431 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,418 | py | '''
Created on Dec 7, 2015 7:24:56 PM
@author: cx
what I do:
I rerank doc in the BOE space
with simple PRF re-ranking
what's my input:
doc with hEntity
what's my output:
evaluation results
'''
import site
site.addsitedir('/bos/usr0/cx/PyCode/cxPyLib')
from cxBase.base import cxBaseC
from cxBase.Conf import cxConfC
import logging,json
import math
from KnowledgedRank.BoeReranker import *
class BoePRFRerankerC(BoeLmRankerC):
def Init(self):
BoeLmRankerC.Init(self)
self.WOrigQ = 0.5
self.NumOfExpEntity = 20
def SetConf(self, ConfIn):
BoeLmRankerC.SetConf(self, ConfIn)
self.WOrigQ = float(self.conf.GetConf('worigq', self.WOrigQ))
self.NumOfExpEntity = int(self.conf.GetConf('numofexp', self.NumOfExpEntity))
@staticmethod
def ShowConf():
BoeLmRankerC.ShowConf()
print 'worigq 0.5\nnumofexp 20'
def QExp(self,qid,query,lDoc):
hEntityScore = {} #ObjId -> prf score
for doc in lDoc:
if not doc.DocNo in self.hDocKg:
continue
hDocEntity = self.hDocKg[doc.DocNo]
for ObjId,score in hDocEntity.items():
score += doc.score #log(a) + log(b)
if not ObjId in hEntityScore:
hEntityScore[ObjId] = math.exp(score)
else:
hEntityScore[ObjId] += math.exp(score)
lEntityScore = hEntityScore.items()
lEntityScore.sort(key=lambda item:item[1],reverse = True)
lEntityScore = lEntityScore[:self.NumOfExpEntity]
Z = sum([item[1] for item in lEntityScore])
if Z == 0:
lEntityScore = []
else:
lEntityScore = [[item[0],item[1] / float(Z)] for item in lEntityScore]
logging.info(
'[%s][%s] exp entity: %s',
qid,
query,
json.dumps(lEntityScore)
)
return lEntityScore
def RankScoreForDoc(self,lQObjScore,doc):
if not doc.DocNo in self.hDocKg:
return self.Inferencer.MinWeight
hDocEntity = self.hDocKg[doc.DocNo]
score = 0
for ObjId,weight in lQObjScore:
ObjScore = self.Inferencer.inference(ObjId, hDocEntity,doc)
score += ObjScore * weight
# logging.info('[%s] [%s] - [%s] obj score: %f',qid,doc.DocNo,ObjId,ObjScore)
# logging.info('[%s] [%s] ranking score: %f',qid,doc.DocNo,score)
return score
def Rank(self, qid, query, lDoc):
lQObj = []
if qid in self.hQObj:
lQObj = self.hQObj[qid]
lExpEntityScore = self.QExp(qid, query, lDoc)
lQExpObjScore = [[ObjId,self.WOrigQ * score] for ObjId,score in lQObj]
lQExpObjScore += [
[ObjId,score * (1.0 - self.WOrigQ)]
for ObjId,score in lExpEntityScore
]
lScore = [self.RankScoreForDoc(lQExpObjScore, doc) for doc in lDoc]
lMid = zip(lDoc,lScore)
lDocNoScore = [[item[0].DocNo,item[1],item[0].score] for item in lMid]
#sort doc by two keys, if boe scores tie, use original ranking score
lDocNoScore.sort(key=lambda item: (item[1],item[2]), reverse = True)
lRankRes = [item[0] for item in lDocNoScore]
return lRankRes
if __name__=='__main__':
import sys,os
from AdhocEva.RankerEvaluator import RankerEvaluatorC
if 2 != len(sys.argv):
print 'I evaluate Boe exp model '
print 'in\nout'
BoePRFRerankerC.ShowConf()
RankerEvaluatorC.ShowConf()
sys.exit()
root = logging.getLogger()
root.setLevel(logging.DEBUG)
ch = logging.StreamHandler(sys.stdout)
# ch.setLevel(logging.DEBUG)
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
root.addHandler(ch)
conf = cxConfC(sys.argv[1])
QIn = conf.GetConf('in')
EvaOut = conf.GetConf('out')
Ranker = BoePRFRerankerC(sys.argv[1])
Evaluator = RankerEvaluatorC(sys.argv[1])
Evaluator.Evaluate(QIn, Ranker.Rank, EvaOut)
| [
"[email protected]"
]
| |
e758759b714c65ed9bcc448e5fe5615004c2826b | 336d52bb53eb24d09e8433018525fa54aa7f1592 | /Agents/Actor_Critic_Agents/DDPG.py | ad6aa0593f8c9d0c9925aaa9282afb929428cf7d | []
| no_license | crashmatt/Deep-Reinforcement-Learning-Algorithms-with-PyTorch | 8a1901344df0fc499731515cbd53670c77c9c677 | 9c487dc51a483d2130cb9bb2a4d771f9748949cb | refs/heads/master | 2020-05-16T06:20:14.048294 | 2019-04-22T16:38:02 | 2019-04-22T16:38:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,653 | py | import copy
import torch
import torch.nn.functional as functional
from nn_builder.pytorch.NN import NN
from torch import optim
from Base_Agent import Base_Agent
from Replay_Buffer import Replay_Buffer
from Utilities.OU_Noise import OU_Noise
class DDPG(Base_Agent):
"""A DDPG Agent"""
agent_name = "DDPG"
def __init__(self, config):
Base_Agent.__init__(self, config)
self.hyperparameters = config.hyperparameters
self.critic_local = self.create_NN(input_dim=self.state_size + self.action_size, output_dim=1, key_to_use="Critic")
self.critic_target = self.create_NN(input_dim=self.state_size + self.action_size, output_dim=1, key_to_use="Critic")
self.critic_target.load_state_dict(copy.deepcopy(self.critic_local.state_dict()))
self.critic_optimizer = optim.Adam(self.critic_local.parameters(),
lr=self.hyperparameters["Critic"]["learning_rate"])
self.memory = Replay_Buffer(self.hyperparameters["Critic"]["buffer_size"], self.hyperparameters["batch_size"],
self.config.seed)
self.actor_local = self.create_NN(input_dim=self.state_size, output_dim=self.action_size, key_to_use="Actor")
self.actor_target = self.create_NN(input_dim=self.state_size, output_dim=self.action_size, key_to_use="Actor")
self.actor_target.load_state_dict(copy.deepcopy(self.actor_local.state_dict()))
self.actor_optimizer = optim.Adam(self.actor_local.parameters(),
lr=self.hyperparameters["Actor"]["learning_rate"])
self.noise = OU_Noise(self.action_size, self.config.seed, self.hyperparameters["mu"],
self.hyperparameters["theta"], self.hyperparameters["sigma"])
def reset_game(self):
"""Resets the game information so we are ready to play a new episode"""
Base_Agent.reset_game(self)
self.noise.reset()
def step(self):
"""Runs a step in the game"""
while not self.done:
self.action = self.pick_action()
self.conduct_action(self.action)
if self.time_for_critic_and_actor_to_learn():
for _ in range(self.hyperparameters["learning_updates_per_learning_session"]):
states, actions, rewards, next_states, dones = self.memory.sample() # Sample experiences
self.critic_learn(states, actions, rewards, next_states, dones)
self.actor_learn(states)
self.save_experience()
self.state = self.next_state #this is to set the state for the next iteration
self.global_step_number += 1
self.episode_number += 1
def pick_action(self):
"""Picks an action using the actor network and then adds some noise to it to ensure exploration"""
state = torch.from_numpy(self.state).float().unsqueeze(0).to(self.device)
self.actor_local.eval()
with torch.no_grad():
action = self.actor_local(state).cpu().data.numpy()
self.actor_local.train()
action += self.noise.sample()
return action.squeeze(0)
def critic_learn(self, states, actions, rewards, next_states, dones):
"""Runs a learning iteration for the critic"""
loss = self.compute_loss(states, next_states, rewards, actions, dones)
self.take_optimisation_step(self.critic_optimizer, self.critic_local, loss, self.hyperparameters["Critic"]["gradient_clipping_norm"])
self.soft_update_of_target_network(self.critic_local, self.critic_target, self.hyperparameters["Critic"]["tau"])
def compute_loss(self, states, next_states, rewards, actions, dones):
"""Computes the loss for the critic"""
with torch.no_grad():
critic_targets = self.compute_critic_targets(next_states, rewards, dones)
critic_expected = self.compute_expected_critic_values(states, actions)
loss = functional.mse_loss(critic_expected, critic_targets)
return loss
def compute_critic_targets(self, next_states, rewards, dones):
"""Computes the critic target values to be used in the loss for the critic"""
critic_targets_next = self.compute_critic_values_for_next_states(next_states)
critic_targets = self.compute_critic_values_for_current_states(rewards, critic_targets_next, dones)
return critic_targets
def compute_critic_values_for_next_states(self, next_states):
"""Computes the critic values for next states to be used in the loss for the critic"""
with torch.no_grad():
actions_next = self.actor_target(next_states)
critic_targets_next = self.critic_target(torch.cat((next_states, actions_next), 1))
return critic_targets_next
def compute_critic_values_for_current_states(self, rewards, critic_targets_next, dones):
"""Computes the critic values for current states to be used in the loss for the critic"""
critic_targets_current = rewards + (self.hyperparameters["discount_rate"] * critic_targets_next * (1.0 - dones))
return critic_targets_current
def compute_expected_critic_values(self, states, actions):
"""Computes the expected critic values to be used in the loss for the critic"""
critic_expected = self.critic_local(torch.cat((states, actions), 1))
return critic_expected
def time_for_critic_and_actor_to_learn(self):
"""Returns boolean indicating whether there are enough experiences to learn from and it is time to learn for the
actor and critic"""
return self.enough_experiences_to_learn_from() and self.global_step_number % self.hyperparameters["update_every_n_steps"] == 0
def actor_learn(self, states):
"""Runs a learning iteration for the actor"""
if self.done: #we only update the learning rate at end of each episode
self.update_learning_rate(self.hyperparameters["Actor"]["learning_rate"], self.actor_optimizer)
actor_loss = self.calculate_actor_loss(states)
self.take_optimisation_step(self.actor_optimizer, self.actor_local, actor_loss,
self.hyperparameters["Actor"]["gradient_clipping_norm"])
self.soft_update_of_target_network(self.actor_local, self.actor_target, self.hyperparameters["Actor"]["tau"])
def calculate_actor_loss(self, states):
"""Calculates the loss for the actor"""
actions_pred = self.actor_local(states)
actor_loss = -self.critic_local(torch.cat((states, actions_pred), 1)).mean()
return actor_loss | [
"[email protected]"
]
| |
71ac3b38241ab179de7aa4edc58a6750b7cb02a3 | 4ddc6604f0c8160c7637d036b835faf974d48556 | /nova/policies/networks.py | a4d065f47d0a291902d07878202cf7f44eb9cdf1 | [
"Apache-2.0"
]
| permissive | tjjh89017/nova | a8513a806f24ca0d1c60495fd1f192b7d402b05d | 49b85bd2e9c77c6e0bd8141b38cd49efa5c06dc2 | refs/heads/master | 2021-01-21T10:16:18.970238 | 2017-05-18T10:35:32 | 2017-05-18T10:35:32 | 91,682,422 | 1 | 0 | null | 2017-05-18T10:50:38 | 2017-05-18T10:50:38 | null | UTF-8 | Python | false | false | 1,072 | py | # Copyright 2016 Cloudbase Solutions Srl
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_policy import policy
from nova.policies import base
BASE_POLICY_NAME = 'os_compute_api:os-networks'
POLICY_ROOT = 'os_compute_api:os-networks:%s'
networks_policies = [
policy.RuleDefault(
name=BASE_POLICY_NAME,
check_str=base.RULE_ADMIN_API),
policy.RuleDefault(
name=POLICY_ROOT % 'view',
check_str=base.RULE_ADMIN_OR_OWNER),
]
def list_rules():
return networks_policies
| [
"[email protected]"
]
| |
0980ec9b29cae8ca8eb4d166d4157dbe4b3c392b | 4ce5022078c53b3bd75493b12a38237618b52fc8 | /prodsys/migrations/0068_job_number_of_events.py | c18fcdcbc318a34d2627aee7d52bbe11aa900c43 | []
| no_license | virthead/COMPASS-ProdSys | 90180e32c3a23d9fd05b252a6f8ded234525a780 | 6dfaa3e9ca40845282d3004ac61f386db5abdbe9 | refs/heads/master | 2023-02-23T18:16:02.789709 | 2022-09-28T09:37:59 | 2022-09-28T09:37:59 | 144,685,667 | 0 | 1 | null | 2018-10-13T10:07:42 | 2018-08-14T07:38:34 | Python | UTF-8 | Python | false | false | 452 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11 on 2017-12-14 13:48
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('prodsys', '0067_task_files_source'),
]
operations = [
migrations.AddField(
model_name='job',
name='number_of_events',
field=models.IntegerField(default=0),
),
]
| [
"[email protected]"
]
| |
afe2e3497fcf2748a39df150b3000ee0cd199b92 | 48e124e97cc776feb0ad6d17b9ef1dfa24e2e474 | /sdk/python/pulumi_azure_native/documentdb/v20210301preview/get_sql_resource_sql_stored_procedure.py | ad0982c8a942b15814d0f916e6958ee808ba44f3 | [
"BSD-3-Clause",
"Apache-2.0"
]
| permissive | bpkgoud/pulumi-azure-native | 0817502630062efbc35134410c4a784b61a4736d | a3215fe1b87fba69294f248017b1591767c2b96c | refs/heads/master | 2023-08-29T22:39:49.984212 | 2021-11-15T12:43:41 | 2021-11-15T12:43:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,150 | py | # coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
__all__ = [
'GetSqlResourceSqlStoredProcedureResult',
'AwaitableGetSqlResourceSqlStoredProcedureResult',
'get_sql_resource_sql_stored_procedure',
'get_sql_resource_sql_stored_procedure_output',
]
@pulumi.output_type
class GetSqlResourceSqlStoredProcedureResult:
"""
An Azure Cosmos DB storedProcedure.
"""
def __init__(__self__, id=None, identity=None, location=None, name=None, resource=None, tags=None, type=None):
if id and not isinstance(id, str):
raise TypeError("Expected argument 'id' to be a str")
pulumi.set(__self__, "id", id)
if identity and not isinstance(identity, dict):
raise TypeError("Expected argument 'identity' to be a dict")
pulumi.set(__self__, "identity", identity)
if location and not isinstance(location, str):
raise TypeError("Expected argument 'location' to be a str")
pulumi.set(__self__, "location", location)
if name and not isinstance(name, str):
raise TypeError("Expected argument 'name' to be a str")
pulumi.set(__self__, "name", name)
if resource and not isinstance(resource, dict):
raise TypeError("Expected argument 'resource' to be a dict")
pulumi.set(__self__, "resource", resource)
if tags and not isinstance(tags, dict):
raise TypeError("Expected argument 'tags' to be a dict")
pulumi.set(__self__, "tags", tags)
if type and not isinstance(type, str):
raise TypeError("Expected argument 'type' to be a str")
pulumi.set(__self__, "type", type)
@property
@pulumi.getter
def id(self) -> str:
"""
The unique resource identifier of the ARM resource.
"""
return pulumi.get(self, "id")
@property
@pulumi.getter
def identity(self) -> Optional['outputs.ManagedServiceIdentityResponse']:
"""
Identity for the resource.
"""
return pulumi.get(self, "identity")
@property
@pulumi.getter
def location(self) -> Optional[str]:
"""
The location of the resource group to which the resource belongs.
"""
return pulumi.get(self, "location")
@property
@pulumi.getter
def name(self) -> str:
"""
The name of the ARM resource.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter
def resource(self) -> Optional['outputs.SqlStoredProcedureGetPropertiesResponseResource']:
return pulumi.get(self, "resource")
@property
@pulumi.getter
def tags(self) -> Optional[Mapping[str, str]]:
"""
Tags are a list of key-value pairs that describe the resource. These tags can be used in viewing and grouping this resource (across resource groups). A maximum of 15 tags can be provided for a resource. Each tag must have a key no greater than 128 characters and value no greater than 256 characters. For example, the default experience for a template type is set with "defaultExperience": "Cassandra". Current "defaultExperience" values also include "Table", "Graph", "DocumentDB", and "MongoDB".
"""
return pulumi.get(self, "tags")
@property
@pulumi.getter
def type(self) -> str:
"""
The type of Azure resource.
"""
return pulumi.get(self, "type")
class AwaitableGetSqlResourceSqlStoredProcedureResult(GetSqlResourceSqlStoredProcedureResult):
# pylint: disable=using-constant-test
def __await__(self):
if False:
yield self
return GetSqlResourceSqlStoredProcedureResult(
id=self.id,
identity=self.identity,
location=self.location,
name=self.name,
resource=self.resource,
tags=self.tags,
type=self.type)
def get_sql_resource_sql_stored_procedure(account_name: Optional[str] = None,
container_name: Optional[str] = None,
database_name: Optional[str] = None,
resource_group_name: Optional[str] = None,
stored_procedure_name: Optional[str] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetSqlResourceSqlStoredProcedureResult:
"""
An Azure Cosmos DB storedProcedure.
:param str account_name: Cosmos DB database account name.
:param str container_name: Cosmos DB container name.
:param str database_name: Cosmos DB database name.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str stored_procedure_name: Cosmos DB storedProcedure name.
"""
__args__ = dict()
__args__['accountName'] = account_name
__args__['containerName'] = container_name
__args__['databaseName'] = database_name
__args__['resourceGroupName'] = resource_group_name
__args__['storedProcedureName'] = stored_procedure_name
if opts is None:
opts = pulumi.InvokeOptions()
if opts.version is None:
opts.version = _utilities.get_version()
__ret__ = pulumi.runtime.invoke('azure-native:documentdb/v20210301preview:getSqlResourceSqlStoredProcedure', __args__, opts=opts, typ=GetSqlResourceSqlStoredProcedureResult).value
return AwaitableGetSqlResourceSqlStoredProcedureResult(
id=__ret__.id,
identity=__ret__.identity,
location=__ret__.location,
name=__ret__.name,
resource=__ret__.resource,
tags=__ret__.tags,
type=__ret__.type)
@_utilities.lift_output_func(get_sql_resource_sql_stored_procedure)
def get_sql_resource_sql_stored_procedure_output(account_name: Optional[pulumi.Input[str]] = None,
container_name: Optional[pulumi.Input[str]] = None,
database_name: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
stored_procedure_name: Optional[pulumi.Input[str]] = None,
opts: Optional[pulumi.InvokeOptions] = None) -> pulumi.Output[GetSqlResourceSqlStoredProcedureResult]:
"""
An Azure Cosmos DB storedProcedure.
:param str account_name: Cosmos DB database account name.
:param str container_name: Cosmos DB container name.
:param str database_name: Cosmos DB database name.
:param str resource_group_name: The name of the resource group. The name is case insensitive.
:param str stored_procedure_name: Cosmos DB storedProcedure name.
"""
...
| [
"[email protected]"
]
| |
6456afdcfb72444d01ad09e4f851c86cb9b4ddef | d3cabb25e9af022fa3ca7818668a3267c16f31ed | /queroMeiaWebapp/settings.py | 1501cd1e7a333c3b286235d56186badea80dcd3e | []
| no_license | fafaschiavo/mobileQueroMeiaWebapp | 6e8df6bdb17ad82b0d1c43a8d78f71e4fd4dccb4 | 83584cf81f7a28b36fa9a699986aaf111d4b3eb5 | refs/heads/master | 2021-01-09T20:52:37.544906 | 2016-07-11T16:31:49 | 2016-07-11T16:31:49 | 58,693,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,420 | py | """
Django settings for queroMeiaWebapp project.
Generated by 'django-admin startproject' using Django 1.10.dev20160307181939.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '_gaqx%)0dc8=hd4m5!_v5a4sn)egl1#k21_kqs0*mxz571!zyq'
#Paypal Information
# EMAIL_PAYPAL_ACCOUNT = '[email protected]'
# EMAIL_PAYPAL_ACCOUNT = '[email protected]'
EMAIL_PAYPAL_ACCOUNT = '[email protected]'
PRODUCT_ID_1 = 3
PRODUCT_ID_2 = 4
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# PAYPAL_TEST = True
MANDRILL_API_KEY = "PQsvG3uAlMUoboU2fQoGHg"
EMAIL_BACKEND = "djrill.mail.backends.djrill.DjrillBackend"
DEFAULT_FROM_EMAIL = '[email protected]'
MANDRILL_API_URL = "https://mandrillapp.com/api/1.0"
# Application definition
INSTALLED_APPS = [
'paypal.standard.ipn',
'djrill',
'cinema.apps.CinemaConfig',
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE_CLASSES = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'queroMeiaWebapp.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'queroMeiaWebapp.wsgi.application'
# Database
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'quero_meia',
'USER': 'root',
'PASSWORD': 'root',
'HOST': '127.0.0.1', # Or an IP Address that your DB is hosted on
'PORT': '3306',
}
}
# DATABASES = {
# 'default': {
# 'ENGINE': 'django.db.backends.mysql', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
# 'NAME': 'quero_meia', # Or path to database file if using sqlite3.
# 'USER': 'fafaschiavo', # Not used with sqlite3.
# 'PASSWORD': '310308Fah!', # Not used with sqlite3.
# 'HOST': 'mysql.queromeia.com', # Set to empty string for localhost. Not used with sqlite3.
# 'PORT': '', # Set to empty string for default. Not used with sqlite3.
# }
# }
# Password validation
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/dev/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/dev/howto/static-files/
STATIC_URL = '/static/'
| [
"[email protected]"
]
| |
9a497a06ee18928dfc7bc17f59d25523f920e47e | 671067c93d251635ed1360936c7ec84a59ece10c | /doublecop.py | 419ad0c817fd5955ddadc9233606416bb494dcd7 | [
"BSD-2-Clause"
]
| permissive | nd1511/ccw_tutorial_theano | 48773052ec99da95aa50300399c943834ca29435 | f92aa8edbb567c9ac09149a382858f841a4a7749 | refs/heads/master | 2020-04-03T13:10:35.753232 | 2017-02-01T21:54:14 | 2017-02-01T21:54:14 | 155,276,374 | 1 | 0 | BSD-2-Clause | 2018-10-29T20:25:01 | 2018-10-29T20:25:01 | null | UTF-8 | Python | false | false | 577 | py | from theano import Apply
from theano.gof import COp
from theano.tensor import as_tensor_variable
class DoubleCOp(COp):
__props__ = ()
def __init__(self):
COp.__init__(self, ["doublecop.c"],
"APPLY_SPECIFIC(doublecop)")
def make_node(self, x):
x = as_tensor_variable(x)
if x.ndim != 1:
raise TypeError("DoubleCOp only works with 1D")
return Apply(self, [x], [x.type()])
def infer_shape(self, input_shapes):
return input_shapes
def grad(self, inputs, g):
return [g[0] * 2]
| [
"[email protected]"
]
| |
aa342583e7f64224e167db39abc398760268e22e | 187ec84de1e03e2fe1e154dcb128b5886b4d0547 | /chapter_05/exercises/05_alien_colors_3.py | bba30d284c7891d8e409c681d5c751e6804d47bc | []
| no_license | xerifeazeitona/PCC_Basics | fcbc1b8d5bc06e82794cd9ff0061e6ff1a38a64e | 81195f17e7466c416f97acbf7046d8084829f77b | refs/heads/main | 2023-03-01T07:50:02.317941 | 2021-01-27T21:08:28 | 2021-01-27T21:08:28 | 330,748,942 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,052 | py | # 5-5. Alien Colors #3
# Turn your if-else chain from Exercise 5-4 into an if-elif-else chain.
# If the alien is green, print a message that the player earned 5
# points.
alien_color = 'green'
if alien_color == 'green':
print('You just earned 5 points!')
elif alien_color == 'yellow':
print('You just earned 10 points!')
else:
print('You just earned 15 points!')
# If the alien is yellow, print a message that the player earned 10
# points.
alien_color = 'yellow'
if alien_color == 'green':
print('You just earned 5 points!')
elif alien_color == 'yellow':
print('You just earned 10 points!')
else:
print('You just earned 15 points!')
# If the alien is red, print a message that the player earned 15 points.
alien_color = 'red'
if alien_color == 'green':
print('You just earned 5 points!')
elif alien_color == 'yellow':
print('You just earned 10 points!')
else:
print('You just earned 15 points!')
# Write three versions of this program, making sure each message is
# printed for the appropriate color alien. | [
"[email protected]"
]
| |
cf1c95226b738e88e5ece8b394896f8d6b81bf09 | d806dd4a6791382813d2136283a602207fb4b43c | /sirius/blueprints/api/remote_service/tula/passive/hospitalization/xform.py | e1eb90b2defe6e898e8d65d353d0acc7a8ea2d35 | []
| no_license | MarsStirner/sirius | 5bbf2a03dafb7248db481e13aff63ff989fabbc2 | 8839460726cca080ca8549bacd3a498e519c8f96 | refs/heads/master | 2021-03-24T12:09:14.673193 | 2017-06-06T16:28:53 | 2017-06-06T16:28:53 | 96,042,947 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 498 | py | #! coding:utf-8
"""
@author: BARS Group
@date: 13.10.2016
"""
from sirius.lib.xform import XForm
from sirius.blueprints.api.remote_service.tula.entities import TulaEntityCode
from sirius.blueprints.api.remote_service.tula.passive.hospitalization.schemas import \
HospitalizationSchema
from sirius.models.system import SystemCode
class HospitalizationTulaXForm(HospitalizationSchema, XForm):
remote_system_code = SystemCode.TULA
entity_code = TulaEntityCode.MEASURE_HOSPITALIZATION
| [
"[email protected]"
]
| |
22c4d7f96a6349a7d19d0b2069f885a37474aa47 | 010279e2ba272d09e9d2c4e903722e5faba2cf7a | /contrib/python/plotly/py2/plotly/validators/sankey/textfont/__init__.py | 7a16a4ec501428eb068d80f780f31eca40f57f29 | [
"MIT",
"Apache-2.0"
]
| permissive | catboost/catboost | 854c1a1f439a96f1ae6b48e16644be20aa04dba2 | f5042e35b945aded77b23470ead62d7eacefde92 | refs/heads/master | 2023-09-01T12:14:14.174108 | 2023-09-01T10:01:01 | 2023-09-01T10:22:12 | 97,556,265 | 8,012 | 1,425 | Apache-2.0 | 2023-09-11T03:32:32 | 2017-07-18T05:29:04 | Python | UTF-8 | Python | false | false | 1,471 | py | import _plotly_utils.basevalidators
class SizeValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(self, plotly_name="size", parent_name="sankey.textfont", **kwargs):
super(SizeValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
min=kwargs.pop("min", 1),
role=kwargs.pop("role", "style"),
**kwargs
)
import _plotly_utils.basevalidators
class FamilyValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(self, plotly_name="family", parent_name="sankey.textfont", **kwargs):
super(FamilyValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
no_blank=kwargs.pop("no_blank", True),
role=kwargs.pop("role", "style"),
strict=kwargs.pop("strict", True),
**kwargs
)
import _plotly_utils.basevalidators
class ColorValidator(_plotly_utils.basevalidators.ColorValidator):
def __init__(self, plotly_name="color", parent_name="sankey.textfont", **kwargs):
super(ColorValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "calc"),
role=kwargs.pop("role", "style"),
**kwargs
)
| [
"[email protected]"
]
| |
a83c3362a529d970c8d74dc9a41e928ad7f6aa12 | 36764bbdbe3dd6bb12cd8eb78e4b8f889bd65af0 | /mysortmat.py | b231fa573b16f020be2aaa0e3b636ee9e073a985 | []
| no_license | tristaaa/lcproblems | 18e01da857c16f69d33727fd7dcc821c09149842 | 167a196a9c36f0eaf3d94b07919f4ed138cf4728 | refs/heads/master | 2020-05-21T14:38:14.920465 | 2020-02-23T01:49:23 | 2020-02-23T01:49:23 | 186,085,490 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,274 | py | class Solution(object):
def mySortMat(self, mat):
"""
sort the input matrix, size of n*n, and the output should be in this order
[[9,8,6],
[7,5,3],
[4,2,1]]
:type mat: List[List[int]]
:rtype: List[List[int]]
"""
n = len(mat)
arr = []
for i in range(n):
arr+=mat[i]
arr.sort(reverse=True)
# print(arr)
result=[[0]*n for i in range(n)]
for i in range(n):
fn=i*(i+1)//2
if i!=n-1:
for j in range(i+1):
result[j][i-j] = arr[fn+j]
result[n-1-j][n-1-i+j] = arr[n*n-1-fn-j]
else:
for j in range(i//2+1):
result[j][i-j] = arr[fn+j]
result[n-1-j][n-1-i+j] = arr[n*n-1-fn-j]
return result
sol=Solution()
mat=[
[ 5, 1, 9, 11],
[ 2, 4, 8, 10],
[13, 3, 6, 7],
[15, 14, 12, 0]
]
mat1=[
[ 5, 1, 9],
[ 2, 4, 8],
[13, 3, 6]
]
print("Given the input matrix: [")
for i in range(len(mat)):
print(mat[i])
print("]")
print("the sorted matrix is: [")
res=sol.mySortMat(mat)
for i in range(len(res)):
print(res[i])
print("]")
print("Given the input matrix: [")
for i in range(len(mat1)):
print(mat1[i])
print("]")
print("the sorted matrix is: [")
res=sol.mySortMat(mat1)
for i in range(len(res)):
print(res[i])
print("]")
| [
"[email protected]"
]
| |
131da4ef6887fa5704722436717046f8e50c0a34 | 2f0bde4d37b7ea1aad91ab44b5b4526d0bec30ce | /examples/strike-slip-example/okada_driver.py | b09fae0728d457d440530d09f1f90b57ca4f9062 | [
"MIT"
]
| permissive | kmaterna/Elastic_stresses_py | 5c78a628136f610ec68e7ee38d8bc76515319e4f | 549a13c6c7fa3c80aac9d63548fdbf3b1ec7b082 | refs/heads/master | 2023-08-28T21:54:42.500337 | 2023-08-18T01:45:18 | 2023-08-18T01:45:18 | 141,371,162 | 42 | 11 | MIT | 2022-08-09T14:22:15 | 2018-07-18T02:37:59 | Python | UTF-8 | Python | false | false | 1,128 | py | #!/usr/bin/env python
import Elastic_stresses_py.PyCoulomb.fault_slip_object as fso
from Elastic_stresses_py.PyCoulomb import run_dc3d, configure_calc, output_manager, io_additionals
# Definitions
lon0_sys, lat0_sys = -120.5, 36;
bbox = (-121.5, -119.5, 35.2, 36.8);
lonlatfile = "Inputs/lon_lats.txt";
source_slip_dist = "Inputs/s2004PARKFI01CUST.fsp";
# Inputs
parkfield_faults = fso.file_io.io_srcmod.read_srcmod_distribution(source_slip_dist);
coulomb_fault_model = fso.fault_slip_object.fault_object_to_coulomb_fault(parkfield_faults, lon0_sys, lat0_sys);
disp_points = io_additionals.read_disp_points(lonlatfile);
# Configure, Compute, Output
params = configure_calc.configure_default_displacement_params();
inputs = configure_calc.configure_default_displacement_input(coulomb_fault_model, zerolon=lon0_sys,
zerolat=lat0_sys, bbox=bbox, domainsize=100);
outobj = run_dc3d.do_stress_computation(params, inputs, disp_points=disp_points, strain_points=[]);
output_manager.produce_outputs(params, inputs, disp_points, obs_strain_points=[], out_object=outobj);
| [
"[email protected]"
]
| |
f3e8df6eeb1ec9952a151a19f157255fcab78423 | 1ee9081e345c125eddaa88931197aed0265aafb8 | /glearn/task_scheduler/__init__.py | 0dcd17d4997e7fd770ca54277a53d1ef15fe2dca | []
| no_license | WeiShiwei/tornado_classify | 1d45bc16473842fea8d853ba5e2c57a773fed978 | 57faa997c205630c7f84a64db0c2f5ffd8fda12a | refs/heads/master | 2021-01-01T04:44:53.981312 | 2016-05-02T12:06:29 | 2016-05-02T12:06:29 | 57,887,029 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 519 | py | # -*- coding: utf-8 -*-
from tasks_classify import gldjc, gldzb
IDENTITY_APP_DICT = {
'gldjc':gldjc,
'gldzb':gldzb
}
class TaskScheduler(object):
"""docstring for TaskScheduler"""
def __init__(self, arg):
super(TaskScheduler, self).__init__()
self.arg = arg
@classmethod
def apply_async(self, identity, docs):
# import pdb;pdb.set_trace()
try:
res = IDENTITY_APP_DICT[identity].predict.apply_async( (identity, docs), queue = identity )
except KeyError, e:
print e
res = None
return res
| [
"[email protected]"
]
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.