blob_id | directory_id | path | content_id | detected_licenses | license_type | repo_name | snapshot_id | revision_id | branch_name | visit_date | revision_date | committer_date | github_id | star_events_count | fork_events_count | gha_license_id | gha_event_created_at | gha_created_at | gha_language | src_encoding | language | is_vendor | is_generated | length_bytes | extension | content | authors | author_id
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
0d5fa245a529ddeeda28d724b8adc4bcffa098d1 | efd2ec91bb1861f571a9ac6a663b72203eeaf8c1 | /env/lib/python3.6/tempfile.py | e5f4dd3d9c1adb7fde84d96552564a983ab0c6e4 | [] | no_license | bopopescu/DjangoFirst | de2cfdf6dcf9d72e211b3374865f4b38dd2465df | d2776f44d15024d6ed03d184e27269dff0c53d2a | refs/heads/master | 2021-10-28T08:48:15.508077 | 2019-04-23T06:29:28 | 2019-04-23T06:29:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 47 | py | /home/satish/anaconda/lib/python3.6/tempfile.py | [
"[email protected]"
] | |
25bd19ebec3d335bb1ab4630ad5ef6a7c9856ce5 | fde8c89b352076f95cc16e589b1baf18f7befb51 | /dulwich/pack.py | 878162b964ef50a9c5bcebdcb0a02cf5529b4243 | [] | no_license | 571451370/devstack_mitaka | b11145256deab817bcdf60a01a67bb6b2f9ddb52 | 1bdd3f2598f91c1446b85c5b6def7784a2f6ab02 | refs/heads/master | 2020-08-26T12:53:07.482514 | 2017-04-12T01:32:55 | 2017-04-12T01:32:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 66,284 | py | # pack.py -- For dealing with packed git objects.
# Copyright (C) 2007 James Westby <[email protected]>
# Copyright (C) 2008-2013 Jelmer Vernooij <[email protected]>
#
# Dulwich is dual-licensed under the Apache License, Version 2.0 and the GNU
# General Public License as public by the Free Software Foundation; version 2.0
# or (at your option) any later version. You can redistribute it and/or
# modify it under the terms of either of these two licenses.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# You should have received a copy of the licenses; if not, see
# <http://www.gnu.org/licenses/> for a copy of the GNU General Public License
# and <http://www.apache.org/licenses/LICENSE-2.0> for a copy of the Apache
# License, Version 2.0.
#
"""Classes for dealing with packed git objects.
A pack is a compact representation of a bunch of objects, stored
using deltas where possible.
They have two parts, the pack file, which stores the data, and an index
that tells you where the data is.
To find an object you look in all of the index files 'til you find a
match for the object name. You then use the pointer got from this as
a pointer in to the corresponding packfile.
"""
from collections import defaultdict
import binascii
from io import BytesIO, UnsupportedOperation
from collections import (
deque,
)
import difflib
import struct
from itertools import chain
try:
from itertools import imap, izip
except ImportError:
# Python3
imap = map
izip = zip
import os
import sys
try:
import mmap
except ImportError:
has_mmap = False
else:
has_mmap = True
# For some reason the above try, except fails to set has_mmap = False for plan9
if sys.platform == 'Plan9':
has_mmap = False
from hashlib import sha1
from os import (
SEEK_CUR,
SEEK_END,
)
from struct import unpack_from
import zlib
from dulwich.errors import (
ApplyDeltaError,
ChecksumMismatch,
)
from dulwich.file import GitFile
from dulwich.lru_cache import (
LRUSizeCache,
)
from dulwich.objects import (
ShaFile,
hex_to_sha,
sha_to_hex,
object_header,
)
OFS_DELTA = 6
REF_DELTA = 7
DELTA_TYPES = (OFS_DELTA, REF_DELTA)
DEFAULT_PACK_DELTA_WINDOW_SIZE = 10
def take_msb_bytes(read, crc32=None):
"""Read bytes marked with most significant bit.
:param read: Read function
"""
ret = []
while len(ret) == 0 or ret[-1] & 0x80:
b = read(1)
if crc32 is not None:
crc32 = binascii.crc32(b, crc32)
ret.append(ord(b[:1]))
return ret, crc32
class UnpackedObject(object):
"""Class encapsulating an object unpacked from a pack file.
These objects should only be created from within unpack_object. Most
members start out as empty and are filled in at various points by
read_zlib_chunks, unpack_object, DeltaChainIterator, etc.
End users of this object should take care that the function they're getting
this object from is guaranteed to set the members they need.
"""
__slots__ = [
'offset', # Offset in its pack.
'_sha', # Cached binary SHA.
'obj_type_num', # Type of this object.
'obj_chunks', # Decompressed and delta-resolved chunks.
'pack_type_num', # Type of this object in the pack (may be a delta).
'delta_base', # Delta base offset or SHA.
'comp_chunks', # Compressed object chunks.
'decomp_chunks', # Decompressed object chunks.
'decomp_len', # Decompressed length of this object.
'crc32', # CRC32.
]
# TODO(dborowitz): read_zlib_chunks and unpack_object could very well be
# methods of this object.
def __init__(self, pack_type_num, delta_base, decomp_len, crc32):
self.offset = None
self._sha = None
self.pack_type_num = pack_type_num
self.delta_base = delta_base
self.comp_chunks = None
self.decomp_chunks = []
self.decomp_len = decomp_len
self.crc32 = crc32
if pack_type_num in DELTA_TYPES:
self.obj_type_num = None
self.obj_chunks = None
else:
self.obj_type_num = pack_type_num
self.obj_chunks = self.decomp_chunks
self.delta_base = delta_base
def sha(self):
"""Return the binary SHA of this object."""
if self._sha is None:
self._sha = obj_sha(self.obj_type_num, self.obj_chunks)
return self._sha
def sha_file(self):
"""Return a ShaFile from this object."""
return ShaFile.from_raw_chunks(self.obj_type_num, self.obj_chunks)
# Only provided for backwards compatibility with code that expects either
# chunks or a delta tuple.
def _obj(self):
"""Return the decompressed chunks, or (delta base, delta chunks)."""
if self.pack_type_num in DELTA_TYPES:
return (self.delta_base, self.decomp_chunks)
else:
return self.decomp_chunks
def __eq__(self, other):
if not isinstance(other, UnpackedObject):
return False
for slot in self.__slots__:
if getattr(self, slot) != getattr(other, slot):
return False
return True
def __ne__(self, other):
return not (self == other)
def __repr__(self):
data = ['%s=%r' % (s, getattr(self, s)) for s in self.__slots__]
return '%s(%s)' % (self.__class__.__name__, ', '.join(data))
_ZLIB_BUFSIZE = 4096
def read_zlib_chunks(read_some, unpacked, include_comp=False,
buffer_size=_ZLIB_BUFSIZE):
"""Read zlib data from a buffer.
This function requires that the buffer have additional data following the
compressed data, which is guaranteed to be the case for git pack files.
:param read_some: Read function that returns at least one byte, but may
return less than the requested size.
:param unpacked: An UnpackedObject to write result data to. If its crc32
attr is not None, the CRC32 of the compressed bytes will be computed
using this starting CRC32.
After this function, will have the following attrs set:
* comp_chunks (if include_comp is True)
* decomp_chunks
* decomp_len
* crc32
:param include_comp: If True, include compressed data in the result.
:param buffer_size: Size of the read buffer.
:return: Leftover unused data from the decompression.
:raise zlib.error: if a decompression error occurred.
"""
if unpacked.decomp_len <= -1:
raise ValueError('non-negative zlib data stream size expected')
decomp_obj = zlib.decompressobj()
comp_chunks = []
decomp_chunks = unpacked.decomp_chunks
decomp_len = 0
crc32 = unpacked.crc32
while True:
add = read_some(buffer_size)
if not add:
raise zlib.error('EOF before end of zlib stream')
comp_chunks.append(add)
decomp = decomp_obj.decompress(add)
decomp_len += len(decomp)
decomp_chunks.append(decomp)
unused = decomp_obj.unused_data
if unused:
left = len(unused)
if crc32 is not None:
crc32 = binascii.crc32(add[:-left], crc32)
if include_comp:
comp_chunks[-1] = add[:-left]
break
elif crc32 is not None:
crc32 = binascii.crc32(add, crc32)
if crc32 is not None:
crc32 &= 0xffffffff
if decomp_len != unpacked.decomp_len:
raise zlib.error('decompressed data does not match expected size')
unpacked.crc32 = crc32
if include_comp:
unpacked.comp_chunks = comp_chunks
return unused
def iter_sha1(iter):
"""Return the hexdigest of the SHA1 over a set of names.
:param iter: Iterator over string objects
:return: 40-byte hex sha1 digest
"""
sha = sha1()
for name in iter:
sha.update(name)
return sha.hexdigest().encode('ascii')
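# Editor's sketch (not upstream dulwich code): iter_sha1 hashes the
# concatenation of the names it is given and returns the hex digest as ASCII
# bytes; sha1(b'abc') below is the well-known FIPS-180 test vector.
def _example_iter_sha1():
    assert iter_sha1([b'abc']) == b'a9993e364706816aba3e25717850c26c9cd0d89d'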
def load_pack_index(path):
"""Load an index file by path.
:param filename: Path to the index file
:return: A PackIndex loaded from the given path
"""
with GitFile(path, 'rb') as f:
return load_pack_index_file(path, f)
def _load_file_contents(f, size=None):
try:
fd = f.fileno()
except (UnsupportedOperation, AttributeError):
fd = None
# Attempt to use mmap if possible
if fd is not None:
if size is None:
size = os.fstat(fd).st_size
if has_mmap:
try:
contents = mmap.mmap(fd, size, access=mmap.ACCESS_READ)
except mmap.error:
# Perhaps a socket?
pass
else:
return contents, size
contents = f.read()
size = len(contents)
return contents, size
def load_pack_index_file(path, f):
"""Load an index file from a file-like object.
:param path: Path for the index file
:param f: File-like object
:return: A PackIndex loaded from the given file
"""
contents, size = _load_file_contents(f)
if contents[:4] == b'\377tOc':
version = struct.unpack(b'>L', contents[4:8])[0]
if version == 2:
return PackIndex2(path, file=f, contents=contents,
size=size)
else:
raise KeyError('Unknown pack index format %d' % version)
else:
return PackIndex1(path, file=f, contents=contents, size=size)
def bisect_find_sha(start, end, sha, unpack_name):
"""Find a SHA in a data blob with sorted SHAs.
:param start: Start index of range to search
:param end: End index of range to search
:param sha: Sha to find
:param unpack_name: Callback to retrieve SHA by index
:return: Index of the SHA, or None if it wasn't found
"""
assert start <= end
while start <= end:
i = (start + end) // 2
file_sha = unpack_name(i)
if file_sha < sha:
start = i + 1
elif file_sha > sha:
end = i - 1
else:
return i
return None
class PackIndex(object):
"""An index in to a packfile.
Given a sha id of an object a pack index can tell you the location in the
packfile of that object if it has it.
"""
def __eq__(self, other):
if not isinstance(other, PackIndex):
return False
for (name1, _, _), (name2, _, _) in izip(self.iterentries(),
other.iterentries()):
if name1 != name2:
return False
return True
def __ne__(self, other):
return not self.__eq__(other)
def __len__(self):
"""Return the number of entries in this pack index."""
raise NotImplementedError(self.__len__)
def __iter__(self):
"""Iterate over the SHAs in this pack."""
return imap(sha_to_hex, self._itersha())
def iterentries(self):
"""Iterate over the entries in this pack index.
:return: iterator over tuples with object name, offset in packfile and
crc32 checksum.
"""
raise NotImplementedError(self.iterentries)
def get_pack_checksum(self):
"""Return the SHA1 checksum stored for the corresponding packfile.
:return: 20-byte binary digest
"""
raise NotImplementedError(self.get_pack_checksum)
def object_index(self, sha):
"""Return the index in to the corresponding packfile for the object.
Given the name of an object it will return the offset that object
lives at within the corresponding pack file. If the pack file doesn't
have the object then None will be returned.
"""
if len(sha) == 40:
sha = hex_to_sha(sha)
return self._object_index(sha)
def _object_index(self, sha):
"""See object_index.
        :param sha: A *binary* SHA string (20 bytes long).
"""
raise NotImplementedError(self._object_index)
def objects_sha1(self):
"""Return the hex SHA1 over all the shas of all objects in this pack.
:note: This is used for the filename of the pack.
"""
return iter_sha1(self._itersha())
def _itersha(self):
"""Yield all the SHA1's of the objects in the index, sorted."""
raise NotImplementedError(self._itersha)
class MemoryPackIndex(PackIndex):
"""Pack index that is stored entirely in memory."""
def __init__(self, entries, pack_checksum=None):
"""Create a new MemoryPackIndex.
:param entries: Sequence of name, idx, crc32 (sorted)
:param pack_checksum: Optional pack checksum
"""
self._by_sha = {}
for name, idx, crc32 in entries:
self._by_sha[name] = idx
self._entries = entries
self._pack_checksum = pack_checksum
def get_pack_checksum(self):
return self._pack_checksum
def __len__(self):
return len(self._entries)
def _object_index(self, sha):
        return self._by_sha[sha]
def _itersha(self):
return iter(self._by_sha)
def iterentries(self):
return iter(self._entries)
class FilePackIndex(PackIndex):
"""Pack index that is based on a file.
    To do the lookup it opens the file and indexes the first 256 four-byte groups
    by the first byte of the SHA id. The value in the indexed four-byte group
    is the end of the group of entries that share that starting byte. Subtract one
    from the starting byte and index again to find the start of the group.
    The entries are sorted by SHA id within the group, so compute the start and
    end offsets and then bisect to check whether the value is present.
"""
def __init__(self, filename, file=None, contents=None, size=None):
"""Create a pack index object.
Provide it with the name of the index file to consider, and it will map
it whenever required.
"""
self._filename = filename
# Take the size now, so it can be checked each time we map the file to
# ensure that it hasn't changed.
if file is None:
self._file = GitFile(filename, 'rb')
else:
self._file = file
if contents is None:
self._contents, self._size = _load_file_contents(self._file, size)
else:
self._contents, self._size = (contents, size)
def __eq__(self, other):
# Quick optimization:
if (isinstance(other, FilePackIndex) and
self._fan_out_table != other._fan_out_table):
return False
return super(FilePackIndex, self).__eq__(other)
def close(self):
self._file.close()
if getattr(self._contents, "close", None) is not None:
self._contents.close()
def __len__(self):
"""Return the number of entries in this pack index."""
return self._fan_out_table[-1]
def _unpack_entry(self, i):
"""Unpack the i-th entry in the index file.
:return: Tuple with object name (SHA), offset in pack file and CRC32
checksum (if known).
"""
raise NotImplementedError(self._unpack_entry)
def _unpack_name(self, i):
"""Unpack the i-th name from the index file."""
raise NotImplementedError(self._unpack_name)
def _unpack_offset(self, i):
"""Unpack the i-th object offset from the index file."""
raise NotImplementedError(self._unpack_offset)
def _unpack_crc32_checksum(self, i):
"""Unpack the crc32 checksum for the i-th object from the index file."""
raise NotImplementedError(self._unpack_crc32_checksum)
def _itersha(self):
for i in range(len(self)):
yield self._unpack_name(i)
def iterentries(self):
"""Iterate over the entries in this pack index.
:return: iterator over tuples with object name, offset in packfile and
crc32 checksum.
"""
for i in range(len(self)):
yield self._unpack_entry(i)
def _read_fan_out_table(self, start_offset):
ret = []
for i in range(0x100):
fanout_entry = self._contents[start_offset+i*4:start_offset+(i+1)*4]
ret.append(struct.unpack('>L', fanout_entry)[0])
return ret
def check(self):
"""Check that the stored checksum matches the actual checksum."""
actual = self.calculate_checksum()
stored = self.get_stored_checksum()
if actual != stored:
raise ChecksumMismatch(stored, actual)
def calculate_checksum(self):
"""Calculate the SHA1 checksum over this pack index.
:return: This is a 20-byte binary digest
"""
return sha1(self._contents[:-20]).digest()
def get_pack_checksum(self):
"""Return the SHA1 checksum stored for the corresponding packfile.
:return: 20-byte binary digest
"""
return bytes(self._contents[-40:-20])
def get_stored_checksum(self):
"""Return the SHA1 checksum stored for this index.
:return: 20-byte binary digest
"""
return bytes(self._contents[-20:])
def _object_index(self, sha):
"""See object_index.
        :param sha: A *binary* SHA string (20 bytes long).
"""
assert len(sha) == 20
idx = ord(sha[:1])
if idx == 0:
start = 0
else:
start = self._fan_out_table[idx-1]
end = self._fan_out_table[idx]
i = bisect_find_sha(start, end, sha, self._unpack_name)
if i is None:
raise KeyError(sha)
return self._unpack_offset(i)
class PackIndex1(FilePackIndex):
"""Version 1 Pack Index file."""
def __init__(self, filename, file=None, contents=None, size=None):
super(PackIndex1, self).__init__(filename, file, contents, size)
self.version = 1
self._fan_out_table = self._read_fan_out_table(0)
def _unpack_entry(self, i):
(offset, name) = unpack_from('>L20s', self._contents,
(0x100 * 4) + (i * 24))
return (name, offset, None)
def _unpack_name(self, i):
offset = (0x100 * 4) + (i * 24) + 4
return self._contents[offset:offset+20]
def _unpack_offset(self, i):
offset = (0x100 * 4) + (i * 24)
return unpack_from('>L', self._contents, offset)[0]
def _unpack_crc32_checksum(self, i):
# Not stored in v1 index files
return None
class PackIndex2(FilePackIndex):
"""Version 2 Pack Index file."""
def __init__(self, filename, file=None, contents=None, size=None):
super(PackIndex2, self).__init__(filename, file, contents, size)
if self._contents[:4] != b'\377tOc':
raise AssertionError('Not a v2 pack index file')
(self.version, ) = unpack_from(b'>L', self._contents, 4)
if self.version != 2:
raise AssertionError('Version was %d' % self.version)
self._fan_out_table = self._read_fan_out_table(8)
self._name_table_offset = 8 + 0x100 * 4
self._crc32_table_offset = self._name_table_offset + 20 * len(self)
self._pack_offset_table_offset = (self._crc32_table_offset +
4 * len(self))
self._pack_offset_largetable_offset = (self._pack_offset_table_offset +
4 * len(self))
def _unpack_entry(self, i):
return (self._unpack_name(i), self._unpack_offset(i),
self._unpack_crc32_checksum(i))
def _unpack_name(self, i):
offset = self._name_table_offset + i * 20
return self._contents[offset:offset+20]
def _unpack_offset(self, i):
offset = self._pack_offset_table_offset + i * 4
offset = unpack_from('>L', self._contents, offset)[0]
if offset & (2**31):
offset = self._pack_offset_largetable_offset + (offset&(2**31-1)) * 8
offset = unpack_from('>Q', self._contents, offset)[0]
return offset
def _unpack_crc32_checksum(self, i):
return unpack_from('>L', self._contents,
self._crc32_table_offset + i * 4)[0]
def read_pack_header(read):
"""Read the header of a pack file.
:param read: Read function
:return: Tuple of (pack version, number of objects). If no data is available
to read, returns (None, None).
"""
header = read(12)
if not header:
return None, None
if header[:4] != b'PACK':
raise AssertionError('Invalid pack header %r' % header)
(version,) = unpack_from(b'>L', header, 4)
if version not in (2, 3):
raise AssertionError('Version was %d' % version)
(num_objects,) = unpack_from(b'>L', header, 8)
return (version, num_objects)
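# Editor's sketch (not upstream code): read_pack_header only needs a read
# callable, so an in-memory BytesIO stands in for a real pack file here.
def _example_parse_pack_header():
    header = b'PACK' + struct.pack(b'>L', 2) + struct.pack(b'>L', 3)
    assert read_pack_header(BytesIO(header).read) == (2, 3)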
def chunks_length(chunks):
if isinstance(chunks, bytes):
return len(chunks)
else:
return sum(imap(len, chunks))
def unpack_object(read_all, read_some=None, compute_crc32=False,
include_comp=False, zlib_bufsize=_ZLIB_BUFSIZE):
"""Unpack a Git object.
:param read_all: Read function that blocks until the number of requested
bytes are read.
:param read_some: Read function that returns at least one byte, but may not
return the number of bytes requested.
:param compute_crc32: If True, compute the CRC32 of the compressed data. If
False, the returned CRC32 will be None.
:param include_comp: If True, include compressed data in the result.
:param zlib_bufsize: An optional buffer size for zlib operations.
:return: A tuple of (unpacked, unused), where unused is the unused data
leftover from decompression, and unpacked in an UnpackedObject with
the following attrs set:
* obj_chunks (for non-delta types)
* pack_type_num
* delta_base (for delta types)
* comp_chunks (if include_comp is True)
* decomp_chunks
* decomp_len
* crc32 (if compute_crc32 is True)
"""
if read_some is None:
read_some = read_all
if compute_crc32:
crc32 = 0
else:
crc32 = None
bytes, crc32 = take_msb_bytes(read_all, crc32=crc32)
type_num = (bytes[0] >> 4) & 0x07
size = bytes[0] & 0x0f
for i, byte in enumerate(bytes[1:]):
size += (byte & 0x7f) << ((i * 7) + 4)
raw_base = len(bytes)
if type_num == OFS_DELTA:
bytes, crc32 = take_msb_bytes(read_all, crc32=crc32)
raw_base += len(bytes)
if bytes[-1] & 0x80:
raise AssertionError
delta_base_offset = bytes[0] & 0x7f
for byte in bytes[1:]:
delta_base_offset += 1
delta_base_offset <<= 7
delta_base_offset += (byte & 0x7f)
delta_base = delta_base_offset
elif type_num == REF_DELTA:
delta_base = read_all(20)
if compute_crc32:
crc32 = binascii.crc32(delta_base, crc32)
raw_base += 20
else:
delta_base = None
unpacked = UnpackedObject(type_num, delta_base, size, crc32)
unused = read_zlib_chunks(read_some, unpacked, buffer_size=zlib_bufsize,
include_comp=include_comp)
return unpacked, unused
def _compute_object_size(value):
"""Compute the size of a unresolved object for use with LRUSizeCache."""
(num, obj) = value
if num in DELTA_TYPES:
return chunks_length(obj[1])
return chunks_length(obj)
class PackStreamReader(object):
"""Class to read a pack stream.
The pack is read from a ReceivableProtocol using read() or recv() as
appropriate.
"""
def __init__(self, read_all, read_some=None, zlib_bufsize=_ZLIB_BUFSIZE):
self.read_all = read_all
if read_some is None:
self.read_some = read_all
else:
self.read_some = read_some
self.sha = sha1()
self._offset = 0
self._rbuf = BytesIO()
# trailer is a deque to avoid memory allocation on small reads
self._trailer = deque()
self._zlib_bufsize = zlib_bufsize
def _read(self, read, size):
"""Read up to size bytes using the given callback.
As a side effect, update the verifier's hash (excluding the last 20
bytes read).
:param read: The read callback to read from.
:param size: The maximum number of bytes to read; the particular
behavior is callback-specific.
"""
data = read(size)
# maintain a trailer of the last 20 bytes we've read
n = len(data)
self._offset += n
tn = len(self._trailer)
if n >= 20:
to_pop = tn
to_add = 20
else:
to_pop = max(n + tn - 20, 0)
to_add = n
self.sha.update(bytes(bytearray([self._trailer.popleft() for _ in range(to_pop)])))
self._trailer.extend(data[-to_add:])
# hash everything but the trailer
self.sha.update(data[:-to_add])
return data
def _buf_len(self):
buf = self._rbuf
start = buf.tell()
buf.seek(0, SEEK_END)
end = buf.tell()
buf.seek(start)
return end - start
@property
def offset(self):
return self._offset - self._buf_len()
def read(self, size):
"""Read, blocking until size bytes are read."""
buf_len = self._buf_len()
if buf_len >= size:
return self._rbuf.read(size)
buf_data = self._rbuf.read()
self._rbuf = BytesIO()
return buf_data + self._read(self.read_all, size - buf_len)
def recv(self, size):
"""Read up to size bytes, blocking until one byte is read."""
buf_len = self._buf_len()
if buf_len:
data = self._rbuf.read(size)
if size >= buf_len:
self._rbuf = BytesIO()
return data
return self._read(self.read_some, size)
def __len__(self):
return self._num_objects
def read_objects(self, compute_crc32=False):
"""Read the objects in this pack file.
:param compute_crc32: If True, compute the CRC32 of the compressed
data. If False, the returned CRC32 will be None.
:return: Iterator over UnpackedObjects with the following members set:
offset
obj_type_num
obj_chunks (for non-delta types)
delta_base (for delta types)
decomp_chunks
decomp_len
crc32 (if compute_crc32 is True)
:raise ChecksumMismatch: if the checksum of the pack contents does not
match the checksum in the pack trailer.
:raise zlib.error: if an error occurred during zlib decompression.
:raise IOError: if an error occurred writing to the output file.
"""
pack_version, self._num_objects = read_pack_header(self.read)
if pack_version is None:
return
for i in range(self._num_objects):
offset = self.offset
unpacked, unused = unpack_object(
self.read, read_some=self.recv, compute_crc32=compute_crc32,
zlib_bufsize=self._zlib_bufsize)
unpacked.offset = offset
# prepend any unused data to current read buffer
buf = BytesIO()
buf.write(unused)
buf.write(self._rbuf.read())
buf.seek(0)
self._rbuf = buf
yield unpacked
if self._buf_len() < 20:
# If the read buffer is full, then the last read() got the whole
# trailer off the wire. If not, it means there is still some of the
# trailer to read. We need to read() all 20 bytes; N come from the
# read buffer and (20 - N) come from the wire.
self.read(20)
pack_sha = bytearray(self._trailer)
if pack_sha != self.sha.digest():
raise ChecksumMismatch(sha_to_hex(pack_sha), self.sha.hexdigest())
class PackStreamCopier(PackStreamReader):
"""Class to verify a pack stream as it is being read.
The pack is read from a ReceivableProtocol using read() or recv() as
appropriate and written out to the given file-like object.
"""
def __init__(self, read_all, read_some, outfile, delta_iter=None):
"""Initialize the copier.
:param read_all: Read function that blocks until the number of requested
bytes are read.
:param read_some: Read function that returns at least one byte, but may
not return the number of bytes requested.
:param outfile: File-like object to write output through.
:param delta_iter: Optional DeltaChainIterator to record deltas as we
read them.
"""
super(PackStreamCopier, self).__init__(read_all, read_some=read_some)
self.outfile = outfile
self._delta_iter = delta_iter
def _read(self, read, size):
"""Read data from the read callback and write it to the file."""
data = super(PackStreamCopier, self)._read(read, size)
self.outfile.write(data)
return data
def verify(self):
"""Verify a pack stream and write it to the output file.
See PackStreamReader.iterobjects for a list of exceptions this may
throw.
"""
if self._delta_iter:
for unpacked in self.read_objects():
self._delta_iter.record(unpacked)
else:
for _ in self.read_objects():
pass
def obj_sha(type, chunks):
"""Compute the SHA for a numeric type and object chunks."""
sha = sha1()
sha.update(object_header(type, chunks_length(chunks)))
if isinstance(chunks, bytes):
sha.update(chunks)
else:
for chunk in chunks:
sha.update(chunk)
return sha.digest()
def compute_file_sha(f, start_ofs=0, end_ofs=0, buffer_size=1<<16):
"""Hash a portion of a file into a new SHA.
:param f: A file-like object to read from that supports seek().
:param start_ofs: The offset in the file to start reading at.
:param end_ofs: The offset in the file to end reading at, relative to the
end of the file.
:param buffer_size: A buffer size for reading.
:return: A new SHA object updated with data read from the file.
"""
sha = sha1()
f.seek(0, SEEK_END)
length = f.tell()
if (end_ofs < 0 and length + end_ofs < start_ofs) or end_ofs > length:
raise AssertionError(
"Attempt to read beyond file length. "
"start_ofs: %d, end_ofs: %d, file length: %d" % (
start_ofs, end_ofs, length))
todo = length + end_ofs - start_ofs
f.seek(start_ofs)
while todo:
data = f.read(min(todo, buffer_size))
sha.update(data)
todo -= len(data)
return sha
class PackData(object):
"""The data contained in a packfile.
Pack files can be accessed both sequentially for exploding a pack, and
directly with the help of an index to retrieve a specific object.
The objects within are either complete or a delta against another.
The header is variable length. If the MSB of each byte is set then it
indicates that the subsequent byte is still part of the header.
For the first byte the next MS bits are the type, which tells you the type
of object, and whether it is a delta. The LS byte is the lowest bits of the
size. For each subsequent byte the LS 7 bits are the next MS bits of the
size, i.e. the last byte of the header contains the MS bits of the size.
For the complete objects the data is stored as zlib deflated data.
The size in the header is the uncompressed object size, so to uncompress
you need to just keep feeding data to zlib until you get an object back,
or it errors on bad data. This is done here by just giving the complete
buffer from the start of the deflated object on. This is bad, but until I
get mmap sorted out it will have to do.
Currently there are no integrity checks done. Also no attempt is made to
try and detect the delta case, or a request for an object at the wrong
position. It will all just throw a zlib or KeyError.
"""
def __init__(self, filename, file=None, size=None):
"""Create a PackData object representing the pack in the given filename.
The file must exist and stay readable until the object is disposed of. It
must also stay the same size. It will be mapped whenever needed.
Currently there is a restriction on the size of the pack as the python
mmap implementation is flawed.
"""
self._filename = filename
self._size = size
self._header_size = 12
if file is None:
self._file = GitFile(self._filename, 'rb')
else:
self._file = file
(version, self._num_objects) = read_pack_header(self._file.read)
self._offset_cache = LRUSizeCache(1024*1024*20,
compute_size=_compute_object_size)
self.pack = None
@property
def filename(self):
return os.path.basename(self._filename)
@classmethod
def from_file(cls, file, size):
return cls(str(file), file=file, size=size)
@classmethod
def from_path(cls, path):
return cls(filename=path)
def close(self):
self._file.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def _get_size(self):
if self._size is not None:
return self._size
self._size = os.path.getsize(self._filename)
if self._size < self._header_size:
errmsg = ('%s is too small for a packfile (%d < %d)' %
(self._filename, self._size, self._header_size))
raise AssertionError(errmsg)
return self._size
def __len__(self):
"""Returns the number of objects in this pack."""
return self._num_objects
def calculate_checksum(self):
"""Calculate the checksum for this pack.
:return: 20-byte binary SHA1 digest
"""
return compute_file_sha(self._file, end_ofs=-20).digest()
def get_ref(self, sha):
"""Get the object for a ref SHA, only looking in this pack."""
# TODO: cache these results
if self.pack is None:
raise KeyError(sha)
try:
offset = self.pack.index.object_index(sha)
except KeyError:
offset = None
if offset:
type, obj = self.get_object_at(offset)
elif self.pack is not None and self.pack.resolve_ext_ref:
type, obj = self.pack.resolve_ext_ref(sha)
else:
raise KeyError(sha)
return offset, type, obj
def resolve_object(self, offset, type, obj, get_ref=None):
"""Resolve an object, possibly resolving deltas when necessary.
:return: Tuple with object type and contents.
"""
# Walk down the delta chain, building a stack of deltas to reach
# the requested object.
base_offset = offset
base_type = type
base_obj = obj
delta_stack = []
while base_type in DELTA_TYPES:
prev_offset = base_offset
if get_ref is None:
get_ref = self.get_ref
if base_type == OFS_DELTA:
(delta_offset, delta) = base_obj
# TODO: clean up asserts and replace with nicer error messages
assert (
isinstance(base_offset, int)
or isinstance(base_offset, long))
assert (
isinstance(delta_offset, int)
                    or isinstance(delta_offset, long))
base_offset = base_offset - delta_offset
base_type, base_obj = self.get_object_at(base_offset)
assert isinstance(base_type, int)
elif base_type == REF_DELTA:
(basename, delta) = base_obj
assert isinstance(basename, bytes) and len(basename) == 20
base_offset, base_type, base_obj = get_ref(basename)
assert isinstance(base_type, int)
delta_stack.append((prev_offset, base_type, delta))
# Now grab the base object (mustn't be a delta) and apply the
# deltas all the way up the stack.
chunks = base_obj
for prev_offset, delta_type, delta in reversed(delta_stack):
chunks = apply_delta(chunks, delta)
# TODO(dborowitz): This can result in poor performance if
# large base objects are separated from deltas in the pack.
# We should reorganize so that we apply deltas to all
# objects in a chain one after the other to optimize cache
# performance.
if prev_offset is not None:
self._offset_cache[prev_offset] = base_type, chunks
return base_type, chunks
def iterobjects(self, progress=None, compute_crc32=True):
self._file.seek(self._header_size)
for i in range(1, self._num_objects + 1):
offset = self._file.tell()
unpacked, unused = unpack_object(
self._file.read, compute_crc32=compute_crc32)
if progress is not None:
progress(i, self._num_objects)
yield (offset, unpacked.pack_type_num, unpacked._obj(),
unpacked.crc32)
self._file.seek(-len(unused), SEEK_CUR) # Back up over unused data.
def _iter_unpacked(self):
# TODO(dborowitz): Merge this with iterobjects, if we can change its
# return type.
self._file.seek(self._header_size)
if self._num_objects is None:
return
for _ in range(self._num_objects):
offset = self._file.tell()
unpacked, unused = unpack_object(
self._file.read, compute_crc32=False)
unpacked.offset = offset
yield unpacked
self._file.seek(-len(unused), SEEK_CUR) # Back up over unused data.
def iterentries(self, progress=None):
"""Yield entries summarizing the contents of this pack.
:param progress: Progress function, called with current and total
object count.
:return: iterator of tuples with (sha, offset, crc32)
"""
num_objects = self._num_objects
resolve_ext_ref = (
self.pack.resolve_ext_ref if self.pack is not None else None)
indexer = PackIndexer.for_pack_data(
self, resolve_ext_ref=resolve_ext_ref)
for i, result in enumerate(indexer):
if progress is not None:
progress(i, num_objects)
yield result
def sorted_entries(self, progress=None):
"""Return entries in this pack, sorted by SHA.
:param progress: Progress function, called with current and total
object count
:return: List of tuples with (sha, offset, crc32)
"""
ret = sorted(self.iterentries(progress=progress))
return ret
def create_index_v1(self, filename, progress=None):
"""Create a version 1 file for this data file.
:param filename: Index filename.
:param progress: Progress report function
:return: Checksum of index file
"""
entries = self.sorted_entries(progress=progress)
with GitFile(filename, 'wb') as f:
return write_pack_index_v1(f, entries, self.calculate_checksum())
def create_index_v2(self, filename, progress=None):
"""Create a version 2 index file for this data file.
:param filename: Index filename.
:param progress: Progress report function
:return: Checksum of index file
"""
entries = self.sorted_entries(progress=progress)
with GitFile(filename, 'wb') as f:
return write_pack_index_v2(f, entries, self.calculate_checksum())
def create_index(self, filename, progress=None,
version=2):
"""Create an index file for this data file.
:param filename: Index filename.
:param progress: Progress report function
:return: Checksum of index file
"""
if version == 1:
return self.create_index_v1(filename, progress)
elif version == 2:
return self.create_index_v2(filename, progress)
else:
raise ValueError('unknown index format %d' % version)
def get_stored_checksum(self):
"""Return the expected checksum stored in this pack."""
self._file.seek(-20, SEEK_END)
return self._file.read(20)
def check(self):
"""Check the consistency of this pack."""
actual = self.calculate_checksum()
stored = self.get_stored_checksum()
if actual != stored:
raise ChecksumMismatch(stored, actual)
def get_object_at(self, offset):
"""Given an offset in to the packfile return the object that is there.
Using the associated index the location of an object can be looked up,
and then the packfile can be asked directly for that object using this
function.
"""
try:
return self._offset_cache[offset]
except KeyError:
pass
assert offset >= self._header_size
self._file.seek(offset)
unpacked, _ = unpack_object(self._file.read)
return (unpacked.pack_type_num, unpacked._obj())
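# Editor's sketch (the path is a placeholder, not upstream code): rebuilding a
# missing .idx file for an existing .pack file using only PackData.
def _example_build_index(pack_path='objects/pack/pack-1234.pack'):
    with PackData(pack_path) as data:
        return data.create_index_v2(pack_path[:-len('.pack')] + '.idx')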
class DeltaChainIterator(object):
"""Abstract iterator over pack data based on delta chains.
Each object in the pack is guaranteed to be inflated exactly once,
regardless of how many objects reference it as a delta base. As a result,
memory usage is proportional to the length of the longest delta chain.
Subclasses can override _result to define the result type of the iterator.
By default, results are UnpackedObjects with the following members set:
* offset
* obj_type_num
* obj_chunks
* pack_type_num
* delta_base (for delta types)
* comp_chunks (if _include_comp is True)
* decomp_chunks
* decomp_len
* crc32 (if _compute_crc32 is True)
"""
_compute_crc32 = False
_include_comp = False
def __init__(self, file_obj, resolve_ext_ref=None):
self._file = file_obj
self._resolve_ext_ref = resolve_ext_ref
self._pending_ofs = defaultdict(list)
self._pending_ref = defaultdict(list)
self._full_ofs = []
self._shas = {}
self._ext_refs = []
@classmethod
def for_pack_data(cls, pack_data, resolve_ext_ref=None):
walker = cls(None, resolve_ext_ref=resolve_ext_ref)
walker.set_pack_data(pack_data)
for unpacked in pack_data._iter_unpacked():
walker.record(unpacked)
return walker
def record(self, unpacked):
type_num = unpacked.pack_type_num
offset = unpacked.offset
if type_num == OFS_DELTA:
base_offset = offset - unpacked.delta_base
self._pending_ofs[base_offset].append(offset)
elif type_num == REF_DELTA:
self._pending_ref[unpacked.delta_base].append(offset)
else:
self._full_ofs.append((offset, type_num))
def set_pack_data(self, pack_data):
self._file = pack_data._file
def _walk_all_chains(self):
for offset, type_num in self._full_ofs:
for result in self._follow_chain(offset, type_num, None):
yield result
for result in self._walk_ref_chains():
yield result
assert not self._pending_ofs
def _ensure_no_pending(self):
if self._pending_ref:
raise KeyError([sha_to_hex(s) for s in self._pending_ref])
def _walk_ref_chains(self):
if not self._resolve_ext_ref:
self._ensure_no_pending()
return
for base_sha, pending in sorted(self._pending_ref.items()):
if base_sha not in self._pending_ref:
continue
try:
type_num, chunks = self._resolve_ext_ref(base_sha)
except KeyError:
# Not an external ref, but may depend on one. Either it will get
# popped via a _follow_chain call, or we will raise an error
# below.
continue
self._ext_refs.append(base_sha)
self._pending_ref.pop(base_sha)
for new_offset in pending:
for result in self._follow_chain(new_offset, type_num, chunks):
yield result
self._ensure_no_pending()
def _result(self, unpacked):
return unpacked
def _resolve_object(self, offset, obj_type_num, base_chunks):
self._file.seek(offset)
unpacked, _ = unpack_object(
self._file.read, include_comp=self._include_comp,
compute_crc32=self._compute_crc32)
unpacked.offset = offset
if base_chunks is None:
assert unpacked.pack_type_num == obj_type_num
else:
assert unpacked.pack_type_num in DELTA_TYPES
unpacked.obj_type_num = obj_type_num
unpacked.obj_chunks = apply_delta(base_chunks,
unpacked.decomp_chunks)
return unpacked
def _follow_chain(self, offset, obj_type_num, base_chunks):
# Unlike PackData.get_object_at, there is no need to cache offsets as
# this approach by design inflates each object exactly once.
todo = [(offset, obj_type_num, base_chunks)]
for offset, obj_type_num, base_chunks in todo:
unpacked = self._resolve_object(offset, obj_type_num, base_chunks)
yield self._result(unpacked)
unblocked = chain(self._pending_ofs.pop(unpacked.offset, []),
self._pending_ref.pop(unpacked.sha(), []))
todo.extend(
(new_offset, unpacked.obj_type_num, unpacked.obj_chunks)
for new_offset in unblocked)
def __iter__(self):
return self._walk_all_chains()
def ext_refs(self):
return self._ext_refs
class PackIndexer(DeltaChainIterator):
"""Delta chain iterator that yields index entries."""
_compute_crc32 = True
def _result(self, unpacked):
return unpacked.sha(), unpacked.offset, unpacked.crc32
class PackInflater(DeltaChainIterator):
"""Delta chain iterator that yields ShaFile objects."""
def _result(self, unpacked):
return unpacked.sha_file()
class SHA1Reader(object):
"""Wrapper around a file-like object that remembers the SHA1 of its data."""
def __init__(self, f):
self.f = f
self.sha1 = sha1(b'')
def read(self, num=None):
data = self.f.read(num)
self.sha1.update(data)
return data
def check_sha(self):
stored = self.f.read(20)
if stored != self.sha1.digest():
raise ChecksumMismatch(self.sha1.hexdigest(), sha_to_hex(stored))
def close(self):
return self.f.close()
def tell(self):
return self.f.tell()
class SHA1Writer(object):
"""Wrapper around a file-like object that remembers the SHA1 of its data."""
def __init__(self, f):
self.f = f
self.length = 0
self.sha1 = sha1(b'')
def write(self, data):
self.sha1.update(data)
self.f.write(data)
self.length += len(data)
def write_sha(self):
sha = self.sha1.digest()
assert len(sha) == 20
self.f.write(sha)
self.length += len(sha)
return sha
def close(self):
sha = self.write_sha()
self.f.close()
return sha
def offset(self):
return self.length
def tell(self):
return self.f.tell()
def pack_object_header(type_num, delta_base, size):
"""Create a pack object header for the given object info.
:param type_num: Numeric type of the object.
:param delta_base: Delta base offset or ref, or None for whole objects.
:param size: Uncompressed object size.
:return: A header for a packed object.
"""
header = []
c = (type_num << 4) | (size & 15)
size >>= 4
while size:
header.append(c | 0x80)
c = size & 0x7f
size >>= 7
header.append(c)
if type_num == OFS_DELTA:
ret = [delta_base & 0x7f]
delta_base >>= 7
while delta_base:
delta_base -= 1
ret.insert(0, 0x80 | (delta_base & 0x7f))
delta_base >>= 7
header.extend(ret)
elif type_num == REF_DELTA:
assert len(delta_base) == 20
header += delta_base
return bytearray(header)
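# Editor's sketch (not upstream code): the variable-length header for a
# 100-byte blob (type_num 3) packs the type and the low four size bits into
# the first byte and sets the MSB while more size bytes follow.
def _example_blob_header():
    assert bytes(pack_object_header(3, None, 100)) == b'\xb4\x06'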
def write_pack_object(f, type, object, sha=None):
"""Write pack object to a file.
:param f: File to write to
:param type: Numeric type of the object
:param object: Object to write
:return: Tuple with offset at which the object was written, and crc32
"""
if type in DELTA_TYPES:
delta_base, object = object
else:
delta_base = None
header = bytes(pack_object_header(type, delta_base, len(object)))
comp_data = zlib.compress(object)
crc32 = 0
for data in (header, comp_data):
f.write(data)
if sha is not None:
sha.update(data)
crc32 = binascii.crc32(data, crc32)
return crc32 & 0xffffffff
def write_pack(filename, objects, deltify=None, delta_window_size=None):
"""Write a new pack data file.
:param filename: Path to the new pack file (without .pack extension)
:param objects: Iterable of (object, path) tuples to write.
Should provide __len__
:param window_size: Delta window size
:param deltify: Whether to deltify pack objects
:return: Tuple with checksum of pack file and index file
"""
with GitFile(filename + '.pack', 'wb') as f:
entries, data_sum = write_pack_objects(f, objects,
delta_window_size=delta_window_size, deltify=deltify)
entries = sorted([(k, v[0], v[1]) for (k, v) in entries.items()])
with GitFile(filename + '.idx', 'wb') as f:
return data_sum, write_pack_index_v2(f, entries, data_sum)
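# Editor's sketch (basename is a placeholder, not upstream code): writing a
# tiny pack plus index from in-memory blobs. write_pack expects (object, path)
# pairs in a container with __len__; Blob is assumed from dulwich.objects.
def _example_write_pack(basename='/tmp/demo-pack'):
    from dulwich.objects import Blob
    blobs = [(Blob.from_string(b'hello'), None), (Blob.from_string(b'world'), None)]
    return write_pack(basename, blobs)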
def write_pack_header(f, num_objects):
"""Write a pack header for the given number of objects."""
f.write(b'PACK') # Pack header
f.write(struct.pack(b'>L', 2)) # Pack version
f.write(struct.pack(b'>L', num_objects)) # Number of objects in pack
def deltify_pack_objects(objects, window_size=None):
"""Generate deltas for pack objects.
:param objects: An iterable of (object, path) tuples to deltify.
:param window_size: Window size; None for default
:return: Iterator over type_num, object id, delta_base, content
delta_base is None for full text entries
"""
if window_size is None:
window_size = DEFAULT_PACK_DELTA_WINDOW_SIZE
# Build a list of objects ordered by the magic Linus heuristic
# This helps us find good objects to diff against us
magic = []
for obj, path in objects:
magic.append((obj.type_num, path, -obj.raw_length(), obj))
magic.sort()
possible_bases = deque()
for type_num, path, neg_length, o in magic:
raw = o.as_raw_string()
winner = raw
winner_base = None
for base in possible_bases:
if base.type_num != type_num:
continue
delta = create_delta(base.as_raw_string(), raw)
if len(delta) < len(winner):
winner_base = base.sha().digest()
winner = delta
yield type_num, o.sha().digest(), winner_base, winner
possible_bases.appendleft(o)
while len(possible_bases) > window_size:
possible_bases.pop()
def write_pack_objects(f, objects, delta_window_size=None, deltify=False):
"""Write a new pack data file.
:param f: File to write to
:param objects: Iterable of (object, path) tuples to write.
Should provide __len__
:param window_size: Sliding window size for searching for deltas;
Set to None for default window size.
:param deltify: Whether to deltify objects
:return: Dict mapping id -> (offset, crc32 checksum), pack checksum
"""
if deltify:
pack_contents = deltify_pack_objects(objects, delta_window_size)
else:
pack_contents = (
(o.type_num, o.sha().digest(), None, o.as_raw_string())
for (o, path) in objects)
return write_pack_data(f, len(objects), pack_contents)
def write_pack_data(f, num_records, records):
"""Write a new pack data file.
:param f: File to write to
:param num_records: Number of records
:param records: Iterator over type_num, object_id, delta_base, raw
:return: Dict mapping id -> (offset, crc32 checksum), pack checksum
"""
# Write the pack
entries = {}
f = SHA1Writer(f)
write_pack_header(f, num_records)
for type_num, object_id, delta_base, raw in records:
offset = f.offset()
if delta_base is not None:
try:
base_offset, base_crc32 = entries[delta_base]
except KeyError:
type_num = REF_DELTA
raw = (delta_base, raw)
else:
type_num = OFS_DELTA
raw = (offset - base_offset, raw)
crc32 = write_pack_object(f, type_num, raw)
entries[object_id] = (offset, crc32)
return entries, f.write_sha()
def write_pack_index_v1(f, entries, pack_checksum):
"""Write a new pack index file.
:param f: A file-like object to write to
:param entries: List of tuples with object name (sha), offset_in_pack,
and crc32_checksum.
:param pack_checksum: Checksum of the pack file.
:return: The SHA of the written index file
"""
f = SHA1Writer(f)
fan_out_table = defaultdict(lambda: 0)
for (name, offset, entry_checksum) in entries:
fan_out_table[ord(name[:1])] += 1
# Fan-out table
for i in range(0x100):
f.write(struct.pack('>L', fan_out_table[i]))
fan_out_table[i+1] += fan_out_table[i]
for (name, offset, entry_checksum) in entries:
if not (offset <= 0xffffffff):
raise TypeError("pack format 1 only supports offsets < 2Gb")
f.write(struct.pack('>L20s', offset, name))
assert len(pack_checksum) == 20
f.write(pack_checksum)
return f.write_sha()
def _delta_encode_size(size):
ret = bytearray()
c = size & 0x7f
size >>= 7
while size:
ret.append(c | 0x80)
c = size & 0x7f
size >>= 7
ret.append(c)
return ret
# The length of delta compression copy operations in version 2 packs is limited
# to 64K. To copy more, we use several copy operations. Version 3 packs allow
# 24-bit lengths in copy operations, but we always make version 2 packs.
_MAX_COPY_LEN = 0xffff
def _encode_copy_operation(start, length):
scratch = []
op = 0x80
for i in range(4):
if start & 0xff << i*8:
scratch.append((start >> i*8) & 0xff)
op |= 1 << i
for i in range(2):
if length & 0xff << i*8:
scratch.append((length >> i*8) & 0xff)
op |= 1 << (4+i)
return bytearray([op] + scratch)
def create_delta(base_buf, target_buf):
"""Use python difflib to work out how to transform base_buf to target_buf.
:param base_buf: Base buffer
:param target_buf: Target buffer
"""
assert isinstance(base_buf, bytes)
assert isinstance(target_buf, bytes)
out_buf = bytearray()
# write delta header
out_buf += _delta_encode_size(len(base_buf))
out_buf += _delta_encode_size(len(target_buf))
# write out delta opcodes
seq = difflib.SequenceMatcher(a=base_buf, b=target_buf)
for opcode, i1, i2, j1, j2 in seq.get_opcodes():
# Git patch opcodes don't care about deletes!
#if opcode == 'replace' or opcode == 'delete':
# pass
if opcode == 'equal':
# If they are equal, unpacker will use data from base_buf
# Write out an opcode that says what range to use
copy_start = i1
copy_len = i2 - i1
while copy_len > 0:
to_copy = min(copy_len, _MAX_COPY_LEN)
out_buf += _encode_copy_operation(copy_start, to_copy)
copy_start += to_copy
copy_len -= to_copy
if opcode == 'replace' or opcode == 'insert':
# If we are replacing a range or adding one, then we just
# output it to the stream (prefixed by its size)
s = j2 - j1
o = j1
while s > 127:
out_buf.append(127)
out_buf += bytearray(target_buf[o:o+127])
s -= 127
o += 127
out_buf.append(s)
out_buf += bytearray(target_buf[o:o+s])
return bytes(out_buf)
def apply_delta(src_buf, delta):
"""Based on the similar function in git's patch-delta.c.
:param src_buf: Source buffer
:param delta: Delta instructions
"""
if not isinstance(src_buf, bytes):
src_buf = b''.join(src_buf)
if not isinstance(delta, bytes):
delta = b''.join(delta)
out = []
index = 0
delta_length = len(delta)
def get_delta_header_size(delta, index):
size = 0
i = 0
while delta:
cmd = ord(delta[index:index+1])
index += 1
size |= (cmd & ~0x80) << i
i += 7
if not cmd & 0x80:
break
return size, index
src_size, index = get_delta_header_size(delta, index)
dest_size, index = get_delta_header_size(delta, index)
assert src_size == len(src_buf), '%d vs %d' % (src_size, len(src_buf))
while index < delta_length:
cmd = ord(delta[index:index+1])
index += 1
if cmd & 0x80:
cp_off = 0
for i in range(4):
if cmd & (1 << i):
x = ord(delta[index:index+1])
index += 1
cp_off |= x << (i * 8)
cp_size = 0
# Version 3 packs can contain copy sizes larger than 64K.
for i in range(3):
if cmd & (1 << (4+i)):
x = ord(delta[index:index+1])
index += 1
cp_size |= x << (i * 8)
if cp_size == 0:
cp_size = 0x10000
if (cp_off + cp_size < cp_size or
cp_off + cp_size > src_size or
cp_size > dest_size):
break
out.append(src_buf[cp_off:cp_off+cp_size])
elif cmd != 0:
out.append(delta[index:index+cmd])
index += cmd
else:
raise ApplyDeltaError('Invalid opcode 0')
if index != delta_length:
raise ApplyDeltaError('delta not empty: %r' % delta[index:])
if dest_size != chunks_length(out):
raise ApplyDeltaError('dest size incorrect')
return out
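# Editor's sketch (not upstream code): create_delta and apply_delta are
# inverses for plain byte strings, which makes a quick self-check easy.
def _example_delta_roundtrip():
    base = b'the quick brown fox'
    target = b'the quick brown fox jumps over the lazy dog'
    delta = create_delta(base, target)
    assert b''.join(apply_delta(base, delta)) == target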
def write_pack_index_v2(f, entries, pack_checksum):
"""Write a new pack index file.
:param f: File-like object to write to
:param entries: List of tuples with object name (sha), offset_in_pack, and
crc32_checksum.
:param pack_checksum: Checksum of the pack file.
:return: The SHA of the index file written
"""
f = SHA1Writer(f)
f.write(b'\377tOc') # Magic!
f.write(struct.pack('>L', 2))
fan_out_table = defaultdict(lambda: 0)
for (name, offset, entry_checksum) in entries:
fan_out_table[ord(name[:1])] += 1
# Fan-out table
largetable = []
for i in range(0x100):
f.write(struct.pack(b'>L', fan_out_table[i]))
fan_out_table[i+1] += fan_out_table[i]
for (name, offset, entry_checksum) in entries:
f.write(name)
for (name, offset, entry_checksum) in entries:
f.write(struct.pack(b'>L', entry_checksum))
for (name, offset, entry_checksum) in entries:
if offset < 2**31:
f.write(struct.pack(b'>L', offset))
else:
f.write(struct.pack(b'>L', 2**31 + len(largetable)))
largetable.append(offset)
for offset in largetable:
f.write(struct.pack(b'>Q', offset))
assert len(pack_checksum) == 20
f.write(pack_checksum)
return f.write_sha()
write_pack_index = write_pack_index_v2
class Pack(object):
"""A Git pack object."""
def __init__(self, basename, resolve_ext_ref=None):
self._basename = basename
self._data = None
self._idx = None
self._idx_path = self._basename + '.idx'
self._data_path = self._basename + '.pack'
self._data_load = lambda: PackData(self._data_path)
self._idx_load = lambda: load_pack_index(self._idx_path)
self.resolve_ext_ref = resolve_ext_ref
@classmethod
def from_lazy_objects(self, data_fn, idx_fn):
"""Create a new pack object from callables to load pack data and
index objects."""
ret = Pack('')
ret._data_load = data_fn
ret._idx_load = idx_fn
return ret
@classmethod
def from_objects(self, data, idx):
"""Create a new pack object from pack data and index objects."""
ret = Pack('')
ret._data_load = lambda: data
ret._idx_load = lambda: idx
return ret
def name(self):
"""The SHA over the SHAs of the objects in this pack."""
return self.index.objects_sha1()
@property
def data(self):
"""The pack data object being used."""
if self._data is None:
self._data = self._data_load()
self._data.pack = self
self.check_length_and_checksum()
return self._data
@property
def index(self):
"""The index being used.
:note: This may be an in-memory index
"""
if self._idx is None:
self._idx = self._idx_load()
return self._idx
def close(self):
if self._data is not None:
self._data.close()
if self._idx is not None:
self._idx.close()
def __enter__(self):
return self
def __exit__(self, exc_type, exc_val, exc_tb):
self.close()
def __eq__(self, other):
return isinstance(self, type(other)) and self.index == other.index
def __len__(self):
"""Number of entries in this pack."""
return len(self.index)
def __repr__(self):
return '%s(%r)' % (self.__class__.__name__, self._basename)
def __iter__(self):
"""Iterate over all the sha1s of the objects in this pack."""
return iter(self.index)
def check_length_and_checksum(self):
"""Sanity check the length and checksum of the pack index and data."""
assert len(self.index) == len(self.data)
idx_stored_checksum = self.index.get_pack_checksum()
data_stored_checksum = self.data.get_stored_checksum()
if idx_stored_checksum != data_stored_checksum:
raise ChecksumMismatch(sha_to_hex(idx_stored_checksum),
sha_to_hex(data_stored_checksum))
def check(self):
"""Check the integrity of this pack.
:raise ChecksumMismatch: if a checksum for the index or data is wrong
"""
self.index.check()
self.data.check()
for obj in self.iterobjects():
obj.check()
# TODO: object connectivity checks
def get_stored_checksum(self):
return self.data.get_stored_checksum()
def __contains__(self, sha1):
"""Check whether this pack contains a particular SHA1."""
try:
self.index.object_index(sha1)
return True
except KeyError:
return False
def get_raw(self, sha1):
offset = self.index.object_index(sha1)
obj_type, obj = self.data.get_object_at(offset)
type_num, chunks = self.data.resolve_object(offset, obj_type, obj)
return type_num, b''.join(chunks)
def __getitem__(self, sha1):
"""Retrieve the specified SHA1."""
type, uncomp = self.get_raw(sha1)
return ShaFile.from_raw_string(type, uncomp, sha=sha1)
def iterobjects(self):
"""Iterate over the objects in this pack."""
return iter(PackInflater.for_pack_data(
self.data, resolve_ext_ref=self.resolve_ext_ref))
def pack_tuples(self):
"""Provide an iterable for use with write_pack_objects.
:return: Object that can iterate over (object, path) tuples
and provides __len__
"""
class PackTupleIterable(object):
def __init__(self, pack):
self.pack = pack
def __len__(self):
return len(self.pack)
def __iter__(self):
return ((o, None) for o in self.pack.iterobjects())
return PackTupleIterable(self)
def keep(self, msg=None):
"""Add a .keep file for the pack, preventing git from garbage collecting it.
:param msg: A message written inside the .keep file; can be used later to
determine whether or not a .keep file is obsolete.
:return: The path of the .keep file, as a string.
"""
keepfile_name = '%s.keep' % self._basename
with GitFile(keepfile_name, 'wb') as keepfile:
if msg:
keepfile.write(msg)
keepfile.write(b'\n')
return keepfile_name
try:
from dulwich._pack import apply_delta, bisect_find_sha
except ImportError:
pass
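# Editor's sketch (basename is a placeholder, not upstream code): opening an
# on-disk pack/idx pair and walking its objects with the classes defined above.
# obj.id and obj.type_name are assumed ShaFile attributes from dulwich.objects.
def _example_read_pack(basename='objects/pack/pack-1234'):
    with Pack(basename) as pack:
        return [(obj.id, obj.type_name) for obj in pack.iterobjects()]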
| [
"[email protected]"
] | |
4808cbaedeec5b5afd0caf7172bca3b9c3bb2900 | 557ca4eae50206ecb8b19639cab249cb2d376f30 | /Chapter04/spiral.py | b642ee9c1d01400018b8cff8264cad308b034929 | [] | no_license | philipdongfei/Think-python-2nd | 781846f455155245e7e82900ea002f1cf490c43f | 56e2355b8d5b34ffcee61b38fbfd200fd6d4ffaf | refs/heads/master | 2021-01-09T19:57:49.658680 | 2020-03-13T06:32:11 | 2020-03-13T06:32:11 | 242,441,512 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 340 | py | import turtle
def draw_spiral(t, n, length=3, a=0.1, b=0.0002):
    """Draw a spiral with turtle t using n straight segments of the given length."""
    theta = 0.0
    for i in range(n):
        t.fd(length)
        dtheta = 1 / (a + b * theta)  # the turn angle shrinks as theta grows, so the spiral opens out
        t.lt(dtheta)
        theta += dtheta
def main():
bob = turtle.Turtle()
draw_spiral(bob, n=1000)
turtle.mainloop()
if __name__ == "__main__":
main()
| [
"[email protected]"
] | |
f94dc3e35df3d080642dc8f8fd2a3ffb9b4675a5 | 0d2c2ffe431b159a87bcd78c97147422dce8d778 | /GUI学习/01PyQt5快速开发与实战/ch05高级界面控件/11timer2.py | c00045f390bd96d04ec0f63ccf8a09b77033800c | [] | no_license | YuanXianguo/Python-Project-ITC | 9e297fc1e1e8ec2b136e6e8b1db0afaaba81c16c | afd14cbe501147ec66b4aa0c1c7907b3ae41d148 | refs/heads/master | 2020-04-16T13:54:33.727825 | 2019-12-20T02:16:52 | 2019-12-20T02:16:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 508 | py | import sys
from PyQt5.QtWidgets import QApplication, QLabel
from PyQt5.QtCore import Qt, QTimer
def test():
print(1)
if __name__ == '__main__':
app = QApplication(sys.argv)
label = QLabel('<font color=red size=128><b>'
                   'Hello PyQt, this window will disappear in 3 seconds!</b></font>')
    # Frameless window
label.setWindowFlags(Qt.SplashScreen | Qt.FramelessWindowHint)
label.show()
    # Fire the timer callback once, 3 seconds later
QTimer.singleShot(3000, test)
sys.exit(app.exec_())
| [
"[email protected]"
] | |
a11c216ccd83de27c2498fc31e7adcb24de5c462 | 69f83bcff8a2bd9c8ef082a2141a39a5322c4b2a | /pyenv/env/lib/python2.7/site-packages/transport/tester.py | a0c92e0c385d219b834498f737ba0f7ed0dcd5a7 | [] | no_license | md848-cornell/NRF-ROKIX-sensor-mesh | ab12f6572a992ed5c468eb08b8c4586b52b411b2 | b244207af0fb0fce6e2722c384d3c6c25d5ac025 | refs/heads/master | 2020-05-21T10:56:15.013174 | 2019-05-16T16:12:11 | 2019-05-16T16:12:11 | 186,021,295 | 0 | 1 | null | 2020-03-07T21:39:41 | 2019-05-10T16:35:25 | C | UTF-8 | Python | false | false | 679 | py | """
Copyright (c) 2017 Nordic Semiconductor ASA
CoAP transport class for tests.
"""
from transport.base import TransportBase
from ipaddress import ip_address
class TesterTransport(TransportBase):
def __init__(self, port=None):
TransportBase.__init__(self, port)
self.tester_opened = False
self.tester_data = None
self.tester_remote = None
self.output_count = 0
def open(self):
self.tester_opened = True
def close(self):
self.tester_opened = False
def send(self, data, dest):
self.tester_data = data
self.tester_remote = dest
self.output_count += 1
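# Rough usage sketch from a unit test's point of view (assumed, not from the original file):
#
#     transport = TesterTransport()
#     transport.open()
#     transport.send(b'coap-payload', (ip_address(u'::1'), 5683))
#     assert transport.tester_opened and transport.output_count == 1
#     assert transport.tester_data == b'coap-payload'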
| [
"Mike DiDomenico"
] | Mike DiDomenico |
6f08a86ea414a778c093cdd193e66adf1fa27fb9 | 6219e6536774e8eeb4cadc4a84f6f2bea376c1b0 | /scraper/storage_spiders/kuchevn.py | a9cc915cd7f15c2358aed743c2373312c26e7f93 | [
"MIT"
] | permissive | nguyenminhthai/choinho | 109d354b410b92784a9737f020894d073bea1534 | d2a216fe7a5064d73cdee3e928a7beef7f511fd1 | refs/heads/master | 2023-05-07T16:51:46.667755 | 2019-10-22T07:53:41 | 2019-10-22T07:53:41 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,034 | py | # Auto generated by generator.py. Delete this line if you make modification.
from scrapy.spiders import Rule
from scrapy.linkextractors import LinkExtractor
XPATH = {
'name' : "//div[@class='page_title']/h1",
'price' : "//meta[@property='og:price:amount']/@content",
'category' : "",
'description' : "//div[@class='tab-container']/div[@class='pd_description_content tab-content clearfix ui-tabs-panel ui-widget-content ui-corner-bottom']",
'images' : "//meta[@property='og:image'][1]/@content",
'canonical' : "//link[@rel='canonical']/@href",
'base_url' : "",
'brand' : "",
'in_stock' : "",
'guarantee' : "",
'promotion' : ""
}
name = 'kuche.vn'
allowed_domains = ['kuche.vn']
start_urls = ['http://kuche.vn/']
tracking_url = ''
sitemap_urls = ['']
sitemap_rules = [('', 'parse_item')]
sitemap_follow = ['']
rules = [
#Rule(LinkExtractor(), 'parse_item'),
#Rule(LinkExtractor(), 'parse'),
Rule(LinkExtractor(allow=['/[a-zA-Z0-9-]+($|\?page=\d+$)']), 'parse_item_and_links'),
]
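# The single active Rule above follows links whose path is made of letters, digits and dashes,
# optionally ending in a ?page=N query string, and hands every matched page to
# parse_item_and_links (a callback assumed to be supplied by the surrounding generator framework).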
| [
"[email protected]"
] | |
c0fc94a656f1cee1a7c8ee20e88f8085721c9112 | c67f449dc7187f154df7093a95ddcc14a3f0a18f | /youngseokcoin/test/functional/net.py | a4a15da130025b87c9678b54942f92c910989ea7 | [
"MIT"
] | permissive | youngseokaaa-presentation/A_system_to_ensure_the_integrity_of_Internet_of_things_by_using_Blockchain | cee9ba19e9d029759fc2fe4a43235c56fd9abe43 | b2a47bc63386b5a115fc3ce62997034ebd8d4a1e | refs/heads/master | 2023-02-17T07:58:43.043470 | 2021-01-11T05:40:28 | 2021-01-11T05:40:28 | 295,317,246 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,225 | py | #!/usr/bin/env python3
# Copyright (c) 2017 The Youngseokcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test RPC calls related to net.
Tests correspond to code in rpc/net.cpp.
"""
import time
from test_framework.test_framework import YoungseokcoinTestFramework
from test_framework.util import (
assert_equal,
assert_raises_rpc_error,
connect_nodes_bi,
p2p_port,
)
class NetTest(YoungseokcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 2
def run_test(self):
self._test_connection_count()
self._test_getnettotals()
self._test_getnetworkinginfo()
self._test_getaddednodeinfo()
self._test_getpeerinfo()
def _test_connection_count(self):
# connect_nodes_bi connects each node to the other
assert_equal(self.nodes[0].getconnectioncount(), 2)
def _test_getnettotals(self):
# check that getnettotals totalbytesrecv and totalbytessent
# are consistent with getpeerinfo
peer_info = self.nodes[0].getpeerinfo()
assert_equal(len(peer_info), 2)
net_totals = self.nodes[0].getnettotals()
assert_equal(sum([peer['bytesrecv'] for peer in peer_info]),
net_totals['totalbytesrecv'])
assert_equal(sum([peer['bytessent'] for peer in peer_info]),
net_totals['totalbytessent'])
# test getnettotals and getpeerinfo by doing a ping
# the bytes sent/received should change
# note ping and pong are 32 bytes each
self.nodes[0].ping()
time.sleep(0.1)
peer_info_after_ping = self.nodes[0].getpeerinfo()
net_totals_after_ping = self.nodes[0].getnettotals()
for before, after in zip(peer_info, peer_info_after_ping):
assert_equal(before['bytesrecv_per_msg']['pong'] + 32, after['bytesrecv_per_msg']['pong'])
assert_equal(before['bytessent_per_msg']['ping'] + 32, after['bytessent_per_msg']['ping'])
assert_equal(net_totals['totalbytesrecv'] + 32*2, net_totals_after_ping['totalbytesrecv'])
assert_equal(net_totals['totalbytessent'] + 32*2, net_totals_after_ping['totalbytessent'])
def _test_getnetworkinginfo(self):
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2)
self.nodes[0].setnetworkactive(False)
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], False)
timeout = 3
while self.nodes[0].getnetworkinfo()['connections'] != 0:
# Wait a bit for all sockets to close
assert timeout > 0, 'not all connections closed in time'
timeout -= 0.1
time.sleep(0.1)
self.nodes[0].setnetworkactive(True)
connect_nodes_bi(self.nodes, 0, 1)
assert_equal(self.nodes[0].getnetworkinfo()['networkactive'], True)
assert_equal(self.nodes[0].getnetworkinfo()['connections'], 2)
def _test_getaddednodeinfo(self):
assert_equal(self.nodes[0].getaddednodeinfo(), [])
# add a node (node2) to node0
ip_port = "127.0.0.1:{}".format(p2p_port(2))
self.nodes[0].addnode(ip_port, 'add')
# check that the node has indeed been added
added_nodes = self.nodes[0].getaddednodeinfo(ip_port)
assert_equal(len(added_nodes), 1)
assert_equal(added_nodes[0]['addednode'], ip_port)
        # check that a non-existent node returns an error
assert_raises_rpc_error(-24, "Node has not been added",
self.nodes[0].getaddednodeinfo, '1.1.1.1')
def _test_getpeerinfo(self):
peer_info = [x.getpeerinfo() for x in self.nodes]
# check both sides of bidirectional connection between nodes
# the address bound to on one side will be the source address for the other node
assert_equal(peer_info[0][0]['addrbind'], peer_info[1][0]['addr'])
assert_equal(peer_info[1][0]['addrbind'], peer_info[0][0]['addr'])
if __name__ == '__main__':
NetTest().main()
| [
"[email protected]"
] | |
84c6051cd1c083c73006b2058485e017d4f6a001 | 4d259f441632f5c45b94e8d816fc31a4f022af3c | /eventlet/prod_worker.py | 51d9d239ff441f414a29933caf1e28379ec9f8d3 | [] | no_license | xiaoruiguo/lab | c37224fd4eb604aa2b39fe18ba64e93b7159a1eb | ec99f51b498244c414b025d7dae91fdad2f8ef46 | refs/heads/master | 2020-05-25T01:37:42.070770 | 2016-05-16T23:24:26 | 2016-05-16T23:24:26 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 429 | py | from eventlet.queue import Queue
import eventlet
q = Queue()
def worker():
while True:
q.get()
a = 0
for i in xrange(1000000):
a = a + 1
print 'get'
def producer():
while True:
a = 0
for i in xrange(1000000):
a = a + 1
q.put('lol')
print 'put'
eventlet.spawn(worker)
eventlet.spawn(producer)
eventlet.sleep(30)
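# Note (assumed behaviour): both green threads are CPU-bound inside their counting loops and only
# yield to the hub at q.get() / q.put(); with the default unbounded Queue, put() never blocks, so
# scheduling here hinges on the consumer waiting in q.get().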
| [
"[email protected]"
] | |
29ff7530a12d24ef2ff8f6e0744b6cf91faba8cf | a5b09349bb10685621788f815acfcef23e93b540 | /tests/test_set_item.py | 6e7fa7390740d64e413f0be77568016de3a82fe9 | [] | no_license | yishuihanhan/slimurl | 05d95edf3b83a118bc22a4fda4f0e8ca9d4662f7 | d6ee69b0c825dcc4129bb265bd97e61218b73ccc | refs/heads/master | 2020-04-02T08:34:55.228207 | 2017-01-10T10:09:50 | 2017-01-10T10:09:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 639 | py | #!/usr/bin/env python
# encoding: utf-8
from slimurl import URL
def check_set(url, args, result):
key, value = args
url[key] = value
assert url == result
def test_set():
cases = [
["http://example.net/", ('foo', 'bar'), "http://example.net/?foo=bar"],
["http://example.net/", ('foo', (0, 1)), "http://example.net/?foo=0&foo=1"],
["http://example.net/", ('foo', ("0", "1")), "http://example.net/?foo=0&foo=1"],
["http://example.net/", ('foo', (0, "1")), "http://example.net/?foo=0&foo=1"],
]
for url, args, result in cases:
yield check_set, URL(url), args, URL(result)
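# Note: this relies on yield-based test generation, which nose and pytest < 4 supported; on
# current pytest the usual equivalent is parametrization, e.g. (sketch):
#
#     @pytest.mark.parametrize("url, args, result", cases)
#     def test_set_case(url, args, result):
#         check_set(URL(url), args, URL(result))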
| [
"[email protected]"
] | |
75dbe56cf58aa518de51464a64dfaa8d7f95feea | 9e929843f73b099456bab9df1b08971288e3b839 | /tests/integration_tests/vectors_tests/test_lower_than_or_equals.py | bedb21d4a788496d3c2bbb9138f82d33ab29733b | [
"MIT"
] | permissive | lycantropos/cppstd | fd20a37c46bd730d15b6e5c34e795f39907fad75 | 2a44dad540a8cc36e7fac8805cf2f5402be34aee | refs/heads/master | 2023-01-11T01:13:25.821513 | 2020-11-12T23:19:40 | 2020-11-12T23:19:40 | 302,339,499 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 511 | py | from hypothesis import given
from tests.utils import (BoundPortedVectorsPair,
equivalence)
from . import strategies
@given(strategies.vectors_pairs, strategies.vectors_pairs)
def test_basic(first_pair: BoundPortedVectorsPair,
second_pair: BoundPortedVectorsPair) -> None:
first_bound, first_ported = first_pair
second_bound, second_ported = second_pair
assert equivalence(first_bound <= second_bound,
first_ported <= second_ported)
| [
"[email protected]"
] | |
1c1917ab1339c7cbb623080afbb9a8125b03933c | 7c25e479b21b1e3e69a6be140f6511a892901182 | /python_practice/middle_linked_list.py | 5379e1fe8bdae8d8f5d08bb398c0fd1504ec458c | [] | no_license | ahrav/Python-Django | 6be3e3b5a39a6eabcf2b97b071232f8b85de64d3 | 8a2a3f706aab557b872f27e780bd7e4ebd274f72 | refs/heads/master | 2022-09-09T01:29:31.391833 | 2019-05-23T03:34:15 | 2019-05-23T03:34:15 | 181,137,783 | 0 | 0 | null | 2022-08-23T00:22:08 | 2019-04-13T07:40:44 | Python | UTF-8 | Python | false | false | 670 | py | class Node:
def __init__(self, data):
self.data = data
self.next = None
class LinkedList:
def __init__(self):
self.head = None
def push(self, new_data):
new_node = Node(new_data)
new_node.next = self.head
self.head = new_node
    def printMiddle(self):
        """Return the data of the middle node (fast/slow pointer technique)."""
        if self.head is None:
            return None
        slow_ptr = self.head
        fast_ptr = self.head
        while fast_ptr is not None and fast_ptr.next is not None:
            fast_ptr = fast_ptr.next.next
            slow_ptr = slow_ptr.next
        return slow_ptr.data | [
"[email protected]"
] | |
4b30d61e07bfa3a4839fcb6fe9d0d2e52479a80d | 401f783a202949adbf144b5780bcd87a6daf2299 | /code/python/Day-69/SnakeGame.py | c61b7e9c28275ea314027b26a33f30786ac67215 | [] | no_license | TalatWaheed/100-days-code | 1934c8113e6e7be86ca86ea66c518d2f2cedf82a | b8fd92d4ddb6adc4089d38ac7ccd2184f9c47919 | refs/heads/master | 2021-07-04T14:28:45.363798 | 2019-03-05T13:49:55 | 2019-03-05T13:49:55 | 140,101,486 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 2,380 | py | # SNAKES GAME
# Use ARROW KEYS to play, SPACE BAR for pausing/resuming and Esc Key for exiting
import curses
from curses import KEY_RIGHT, KEY_LEFT, KEY_UP, KEY_DOWN
from random import randint
curses.initscr()
win = curses.newwin(20, 60, 0, 0)
win.keypad(1)
curses.noecho()
curses.curs_set(0)
win.border(0)
win.nodelay(1)
key = KEY_RIGHT
score = 0
snake = [[4,10], [4,9], [4,8]]
food = [10,20]
win.addch(food[0], food[1], '*')
while key != 27:
win.border(0)
win.addstr(0, 2, 'Score : ' + str(score) + ' ')
win.addstr(0, 27, ' SNAKE ')
    win.timeout(150 - (len(snake)//5 + len(snake)//10) % 120)    # speed up as the snake grows; integer division keeps this an int on Python 3
prevKey = key
event = win.getch()
key = key if event == -1 else event
if key == ord(' '):
key = -1
while key != ord(' '):
key = win.getch()
key = prevKey
continue
if key not in [KEY_LEFT, KEY_RIGHT, KEY_UP, KEY_DOWN, 27]:
key = prevKey
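    # Compute the new head position: in Python, (cond and 1) evaluates to 1 when cond is true and
    # to False (0) otherwise, so each sum below adds -1, 0 or +1 to the row/column depending on
    # which arrow key is currently active.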
snake.insert(0, [snake[0][0] + (key == KEY_DOWN and 1) + (key == KEY_UP and -1), snake[0][1] + (key == KEY_LEFT and -1) + (key == KEY_RIGHT and 1)])
if snake[0][0] == 0: snake[0][0] = 18
if snake[0][1] == 0: snake[0][1] = 58
if snake[0][0] == 19: snake[0][0] = 1
if snake[0][1] == 59: snake[0][1] = 1
# Exit if snake crosses the boundaries (Uncomment to enable)
#if snake[0][0] == 0 or snake[0][0] == 19 or snake[0][1] == 0 or snake[0][1] == 59: break
if snake[0] in snake[1:]: break
if snake[0] == food:
food = []
score += 1
while food == []:
food = [randint(1, 18), randint(1, 58)]
if food in snake: food = []
win.addch(food[0], food[1], '*')
else:
last = snake.pop()
win.addch(last[0], last[1], ' ')
win.addch(snake[0][0], snake[0][1], '#')
curses.endwin()
print("\nScore - " + str(score))
print("http://bitemelater.in\n")
| [
"[email protected]"
] | |
a41d7737fdb64767088b4153d8994a0422a6044c | ca2dbcfeac6ab571a19bd7d91b7234fd461d09e3 | /contact/settings.py | f6b23ebea5443fb592009997c1e7ce9e73093d67 | [] | no_license | RahulSinghDhek/test-contact | 51ebcc85e32a3d4fc86cb978824337b444f077be | ff14bb369e4caae6cd4db95388f7c87bf65c3227 | refs/heads/master | 2020-04-20T02:18:27.516767 | 2019-01-31T17:44:41 | 2019-01-31T17:44:41 | 168,568,336 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,655 | py | """
Django settings for contact project.
Generated by 'django-admin startproject' using Django 2.1.4.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '+mg_ysn-@n6l*ltqbi59wn(b(9pt32ugy_l!ztko^ux0nl80@k'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['127.0.0.1', 'contactlistdhek.herokuapp.com']  # host names only; scheme and path are not valid here
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'phonebook',
'rest_framework.authtoken'
]
REST_FRAMEWORK = {
'DEFAULT_AUTHENTICATION_CLASSES': [
'rest_framework.authentication.TokenAuthentication',
],
'DEFAULT_PAGINATION_CLASS': 'rest_framework.pagination.LimitOffsetPagination',
'PAGE_SIZE': 10
}
MIDDLEWARE = [
'whitenoise.middleware.WhiteNoiseMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'contact.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'contact.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.postgresql_psycopg2', # Add 'postgresql_psycopg2', 'mysql', 'sqlite3' or 'oracle'.
'NAME': 'test', # Or path to database file if using sqlite3.
'USER': 'postgres', # Not used with sqlite3.
'PASSWORD': '1234', # Not used with sqlite3.
'HOST': 'localhost', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '5432', # Set to empty string for default. Not used with sqlite3.
}
# 'default': {
# 'ENGINE': 'django.db.backends.sqlite3',
# 'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
# }
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
# https://docs.djangoproject.com/en/1.11/howto/static-files/
PROJECT_ROOT = os.path.dirname(os.path.abspath(__file__))  # directory containing this settings module
STATIC_ROOT = os.path.join(PROJECT_ROOT, 'staticfiles')
STATIC_URL = '/static/'
# Extra lookup directories for collectstatic to find static files
STATICFILES_DIRS = (
os.path.join(PROJECT_ROOT, 'static'),
)
# Add configuration for static files storage using whitenoise
STATICFILES_STORAGE = 'whitenoise.django.GzipManifestStaticFilesStorage'
import dj_database_url
prod_db = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(prod_db)
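# Note: dj_database_url.config() reads the DATABASE_URL environment variable when it is set and
# returns a Django-style DATABASES entry; the update() above lets that override the hard-coded
# local Postgres credentials (typical for Heroku-style deployments).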
| [
"[email protected]"
] | |
df0300b9ae066ae31618798f45525e2480426413 | 7a1b08c64b29522d4bbb913475275c1bc8ad61a4 | /patient_agenda/events/doc_events/conver_value.py | afcdc7008de591882958f1eb59c25de32cecb141 | [
"MIT"
] | permissive | erichilarysmithsr/time-track | 8f84d4cc92cebaedce550b3741982d204e734a6c | dc0a7b63c937d561309f9b1c84af65fb581a8e18 | refs/heads/master | 2023-03-27T08:07:46.717221 | 2021-03-30T16:45:50 | 2021-03-30T16:45:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,695 | py | #!/usr/bin/python3
# -*- coding:utf-8 -*-
import os
import json
import subprocess
try:
with open('./patient_agenda/events/doc_events/patient_rdv.json') as file:
data=json.load(file)
except FileNotFoundError as fileout:
print("File 1 patient_rdv.json not created", fileout)
for value in data:
print(value)
data_list1 = []
for value in data:
data_list1.append(value[1])
data_day = data_list1[0]
data_month = data_list1[1]
data_year = data_list1[2]
try:
if data_day < 10:
extraday = '0' +''+ str(data_day)
elif data_day >= 10:
extraday = str(data_day)
else:
pass
except ValueError as valout:
print("Value of day is a problem", valout)
try:
if data_month < 10:
extramounth = '0' +''+ str(data_month)
elif data_month >= 10:
extramounth = str(data_month)
else:
pass
except ValueError as valout:
print("Value of mounth is a problem", valout)
# initword = "Appointment set for :"
# initword +' '+
final_data = extraday +'/'+ extramounth +'/'+ str(data_year) +' :'
print(final_data)
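# A more compact way to build the same string (sketch, standard str.format):
#     final_data = "{:02d}/{:02d}/{} :".format(data_day, data_month, data_year)
# which makes the zero-padding explicit instead of the if/else blocks above.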
try:
if os.path.getsize('./patient_agenda/events/doc_events/fix_agenda/patient_value.json'):
print("+ File 'value' exist !")
with open('./patient_agenda/events/doc_events/fix_agenda/patient_value.json','w') as partytime:
json.dump(final_data, partytime)
except FileNotFoundError as msg:
print("File doesn't exist, but it has been created !")
with open('./patient_agenda/events/doc_events/fix_agenda/patient_value.json','w') as partyleft:
json.dump(final_data, partyleft)
subprocess.call('./patient_agenda/events/doc_events/fix_agenda/extend_agenda.py')
| [
"[email protected]"
] | |
0048953dec39f492a91c8bdde7a9ddaca57537a1 | 5d4753b7e463827c9540e982108de22f62435c3f | /python/tink/daead/_deterministic_aead_wrapper_test.py | b59d11dca3c7331a23581b856195197dfeb49b72 | [
"Apache-2.0"
] | permissive | thaidn/tink | 8c9b65e3f3914eb54d70847c9f56853afd051dd3 | 2a75c1c3e4ef6aa1b6e29700bf5946b725276c95 | refs/heads/master | 2021-07-25T02:02:59.839232 | 2021-02-10T17:21:31 | 2021-02-10T17:22:01 | 337,815,957 | 2 | 0 | Apache-2.0 | 2021-02-10T18:28:20 | 2021-02-10T18:28:20 | null | UTF-8 | Python | false | false | 6,074 | py | # Copyright 2019 Google LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tink.python.tink.aead_wrapper."""
from __future__ import absolute_import
from __future__ import division
# Placeholder for import for type annotations
from __future__ import print_function
from absl.testing import absltest
from absl.testing import parameterized
import tink
from tink import daead
from tink.testing import keyset_builder
DAEAD_TEMPLATE = daead.deterministic_aead_key_templates.AES256_SIV
RAW_DAEAD_TEMPLATE = keyset_builder.raw_template(DAEAD_TEMPLATE)
def setUpModule():
daead.register()
class AeadWrapperTest(parameterized.TestCase):
@parameterized.parameters([DAEAD_TEMPLATE, RAW_DAEAD_TEMPLATE])
def test_encrypt_decrypt(self, template):
keyset_handle = tink.new_keyset_handle(template)
primitive = keyset_handle.primitive(daead.DeterministicAead)
ciphertext = primitive.encrypt_deterministically(
b'plaintext', b'associated_data')
self.assertEqual(
primitive.decrypt_deterministically(ciphertext, b'associated_data'),
b'plaintext')
@parameterized.parameters([DAEAD_TEMPLATE, RAW_DAEAD_TEMPLATE])
def test_decrypt_unknown_ciphertext_fails(self, template):
unknown_handle = tink.new_keyset_handle(template)
unknown_primitive = unknown_handle.primitive(daead.DeterministicAead)
unknown_ciphertext = unknown_primitive.encrypt_deterministically(
b'plaintext', b'associated_data')
keyset_handle = tink.new_keyset_handle(template)
primitive = keyset_handle.primitive(daead.DeterministicAead)
with self.assertRaises(tink.TinkError):
primitive.decrypt_deterministically(unknown_ciphertext,
b'associated_data')
@parameterized.parameters([DAEAD_TEMPLATE, RAW_DAEAD_TEMPLATE])
def test_decrypt_wrong_associated_data_fails(self, template):
keyset_handle = tink.new_keyset_handle(template)
primitive = keyset_handle.primitive(daead.DeterministicAead)
ciphertext = primitive.encrypt_deterministically(b'plaintext',
b'associated_data')
with self.assertRaises(tink.TinkError):
primitive.decrypt_deterministically(ciphertext, b'wrong_associated_data')
@parameterized.parameters([(DAEAD_TEMPLATE, DAEAD_TEMPLATE),
(RAW_DAEAD_TEMPLATE, DAEAD_TEMPLATE),
(DAEAD_TEMPLATE, RAW_DAEAD_TEMPLATE),
(RAW_DAEAD_TEMPLATE, RAW_DAEAD_TEMPLATE)])
def test_encrypt_decrypt_with_key_rotation(self, template1, template2):
builder = keyset_builder.new_keyset_builder()
older_key_id = builder.add_new_key(template1)
builder.set_primary_key(older_key_id)
p1 = builder.keyset_handle().primitive(daead.DeterministicAead)
newer_key_id = builder.add_new_key(template2)
p2 = builder.keyset_handle().primitive(daead.DeterministicAead)
builder.set_primary_key(newer_key_id)
p3 = builder.keyset_handle().primitive(daead.DeterministicAead)
builder.disable_key(older_key_id)
p4 = builder.keyset_handle().primitive(daead.DeterministicAead)
self.assertNotEqual(older_key_id, newer_key_id)
# p1 encrypts with the older key. So p1, p2 and p3 can decrypt it,
# but not p4.
ciphertext1 = p1.encrypt_deterministically(b'plaintext', b'ad')
self.assertEqual(p1.decrypt_deterministically(ciphertext1, b'ad'),
b'plaintext')
self.assertEqual(p2.decrypt_deterministically(ciphertext1, b'ad'),
b'plaintext')
self.assertEqual(p3.decrypt_deterministically(ciphertext1, b'ad'),
b'plaintext')
with self.assertRaises(tink.TinkError):
_ = p4.decrypt_deterministically(ciphertext1, b'ad')
# p2 encrypts with the older key. So p1, p2 and p3 can decrypt it,
# but not p4.
ciphertext2 = p2.encrypt_deterministically(b'plaintext', b'ad')
self.assertEqual(p1.decrypt_deterministically(ciphertext2, b'ad'),
b'plaintext')
self.assertEqual(p2.decrypt_deterministically(ciphertext2, b'ad'),
b'plaintext')
self.assertEqual(p3.decrypt_deterministically(ciphertext2, b'ad'),
b'plaintext')
with self.assertRaises(tink.TinkError):
_ = p4.decrypt_deterministically(ciphertext2, b'ad')
# p3 encrypts with the newer key. So p2, p3 and p4 can decrypt it,
# but not p1.
ciphertext3 = p3.encrypt_deterministically(b'plaintext', b'ad')
with self.assertRaises(tink.TinkError):
_ = p1.decrypt_deterministically(ciphertext3, b'ad')
self.assertEqual(p2.decrypt_deterministically(ciphertext3, b'ad'),
b'plaintext')
self.assertEqual(p3.decrypt_deterministically(ciphertext3, b'ad'),
b'plaintext')
self.assertEqual(p4.decrypt_deterministically(ciphertext3, b'ad'),
b'plaintext')
# p4 encrypts with the newer key. So p2, p3 and p4 can decrypt it,
# but not p1.
ciphertext4 = p4.encrypt_deterministically(b'plaintext', b'ad')
with self.assertRaises(tink.TinkError):
_ = p1.decrypt_deterministically(ciphertext4, b'ad')
self.assertEqual(p2.decrypt_deterministically(ciphertext4, b'ad'),
b'plaintext')
self.assertEqual(p3.decrypt_deterministically(ciphertext4, b'ad'),
b'plaintext')
self.assertEqual(p4.decrypt_deterministically(ciphertext4, b'ad'),
b'plaintext')
if __name__ == '__main__':
absltest.main()
| [
"[email protected]"
] | |
f487b32d187d8c46617f40bfa556df73ae0f4374 | f9d564f1aa83eca45872dab7fbaa26dd48210d08 | /huaweicloud-sdk-iotanalytics/huaweicloudsdkiotanalytics/v1/model/list_batch_jobs_response.py | c820f1d45d59dffa07e947b9b3b4e80b79e3084e | [
"Apache-2.0"
] | permissive | huaweicloud/huaweicloud-sdk-python-v3 | cde6d849ce5b1de05ac5ebfd6153f27803837d84 | f69344c1dadb79067746ddf9bfde4bddc18d5ecf | refs/heads/master | 2023-09-01T19:29:43.013318 | 2023-08-31T08:28:59 | 2023-08-31T08:28:59 | 262,207,814 | 103 | 44 | NOASSERTION | 2023-06-22T14:50:48 | 2020-05-08T02:28:43 | Python | UTF-8 | Python | false | false | 3,929 | py | # coding: utf-8
import six
from huaweicloudsdkcore.sdk_response import SdkResponse
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class ListBatchJobsResponse(SdkResponse):
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'count': 'int',
'jobs': 'list[Job]'
}
attribute_map = {
'count': 'count',
'jobs': 'jobs'
}
def __init__(self, count=None, jobs=None):
"""ListBatchJobsResponse
The model defined in huaweicloud sdk
        :param count: Total number of batch jobs.
:type count: int
:param jobs:
:type jobs: list[:class:`huaweicloudsdkiotanalytics.v1.Job`]
"""
super(ListBatchJobsResponse, self).__init__()
self._count = None
self._jobs = None
self.discriminator = None
if count is not None:
self.count = count
if jobs is not None:
self.jobs = jobs
@property
def count(self):
"""Gets the count of this ListBatchJobsResponse.
        Total number of batch jobs.
:return: The count of this ListBatchJobsResponse.
:rtype: int
"""
return self._count
@count.setter
def count(self, count):
"""Sets the count of this ListBatchJobsResponse.
        Total number of batch jobs.
:param count: The count of this ListBatchJobsResponse.
:type count: int
"""
self._count = count
@property
def jobs(self):
"""Gets the jobs of this ListBatchJobsResponse.
:return: The jobs of this ListBatchJobsResponse.
:rtype: list[:class:`huaweicloudsdkiotanalytics.v1.Job`]
"""
return self._jobs
@jobs.setter
def jobs(self, jobs):
"""Sets the jobs of this ListBatchJobsResponse.
:param jobs: The jobs of this ListBatchJobsResponse.
:type jobs: list[:class:`huaweicloudsdkiotanalytics.v1.Job`]
"""
self._jobs = jobs
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
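    # Note: to_dict() above walks openapi_types, recursively serializing nested models and masking
    # any attribute listed in sensitive_list with "****"; to_str() then dumps that dict via
    # simplejson, which is also what __repr__ returns.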
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, ListBatchJobsResponse):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| [
"[email protected]"
] | |
30ca95084a650818ad76ed5e625a46506e6e8e39 | 60e27c8b1755c741dfd069393e8b65766a9647ae | /07_Natural_Language_Processing/C0702_bag_of_words.py | fb7af5b042f0820d93e1aaa9984960d0ba24a209 | [
"MIT"
] | permissive | xiejinwen113/tensorflow_cookbook | d0426991be2369d6480728c2af7a4dc93eccf621 | 57d7ee719385ddd249a67c3a85bd336e884a67e5 | refs/heads/master | 2022-03-24T08:30:43.089441 | 2019-12-09T09:55:39 | 2019-12-09T09:55:39 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,784 | py | # -*- encoding: utf-8 -*-
"""
@Author : zYx.Tom
@Contact : [email protected]
@site : https://github.com/zhuyuanxiang/tensorflow_cookbook
---------------------------
@Software : PyCharm
@Project : TensorFlow_Machine_Learning_Cookbook
@File : C0702_bag_of_words.py
@Version : v0.1
@Time : 2019-11-07 17:11
@License : (C)Copyright 2018-2019, zYx.Tom
@Reference : 《TensorFlow机器学习实战指南,Nick McClure》, Sec0702,P144
@Desc : 自然语言处理,使用 TensorFlow 实现“词袋”
@理解:
1. 这个模型是个错误的模型,因为数据集本身就是87%的正常短信,那么只要判断为正常短信就有87%的准确率。
而模型的准确率还不到87%,说明正确理解数据集是非常重要的。
2. 跟踪sess.run(x_col_sums,feed_dict = {x_data: t}),也会发现训练的嵌入矩阵的结果就是UNKNOWN单词和'to'单词过多的短信就是垃圾短信,
这个也是因为数据集中数据偏离造成的,根本原因还是模型与数据不匹配。
"""
# common imports
import os
import string
import sys
import matplotlib.pyplot as plt
import numpy as np  # pip install "numpy<1.17" - versions below 1.17 avoid the deprecation error
import sklearn
import tensorflow as tf
import winsound
from tensorflow.contrib import learn
from tensorflow.python.framework import ops
# Configure how numpy arrays are printed (precision, no scientific notation, no truncation)
np.set_printoptions(precision = 8, suppress = True, threshold = np.inf, linewidth = 200)
# Fix the random seeds so the random data is stable and every run gives the same results
seed = 42
np.random.seed(seed)
tf.set_random_seed(seed)
# Python ≥3.5 is required
assert sys.version_info >= (3, 5)
# Scikit-Learn ≥0.20 is required
assert sklearn.__version__ >= "0.20"
# numpy 1.16.4 is required
assert np.__version__ in ["1.16.5", "1.16.4"]
# Silence the warning: "Your CPU supports instructions that this TensorFlow binary was not compiled to use: AVX2 FMA"
os.environ['TF_CPP_MIN_LOG_LEVEL'] = '2'
# Reset the default computation graph
ops.reset_default_graph()
# Open graph session
sess = tf.Session()
# ----------------------------------------------------------------------
print("载入数据。。。")
# 下载的文件直接读出,没有下载的文件就下载后读出
data_file_name = "../Data/SMS_SPam/SMSSpamCollection"
with open(data_file_name, encoding = 'utf-8') as temp_output_file:
text_data = temp_output_file.read()
pass
pass
# Format Data
text_data = text_data.encode('ascii', errors = 'ignore')
text_data = text_data.decode().split('\n')
text_data = [x.split('\t') for x in text_data if len(x) >= 1]
texts = [x[1] for x in text_data]
target = [x[0] for x in text_data]
# Convert the labels to integers: 'spam' (junk SMS) -> 1, 'ham' (normal SMS) -> 0
target = [1 if x == 'spam' else 0 for x in target]
# Normalize the text
texts = [x.lower() for x in texts] # lower-case all letters
texts = [''.join(c for c in x if c not in string.punctuation) for x in texts] # remove punctuation
texts = [''.join(c for c in x if c not in '0123456789') for x in texts] # remove digits
texts = [' '.join(x.split()) for x in texts] # remove extra whitespace
# Histogram of the number of words per message, keeping only messages shorter than 50 words
text_lengths = [len(x.split()) for x in texts]
text_lengths = [x for x in text_lengths if x < 50]
plt.hist(text_lengths, bins = 25)
plt.title("图7-1:文本数据中的单词长度的直方图")
sentence_size = 25 # 每个句子的单词个数最多不超过25个,不足25个用0填充,超过25个的从后往前截断
min_word_freq = 3 # 单词出现的频率不低于3次,如果某个单词只在某几条短信中出现,那么就不选入字典
# TensorFlow 自带的分词器 VocabularyProcessor()
vocab_processor = learn.preprocessing.VocabularyProcessor(sentence_size, min_frequency = min_word_freq)
# Have to fit transform to get length of unique words.
vocab_processor.fit_transform(texts) # 使用文本数据进行训练并且变换为字典
embedding_size = len(vocab_processor.vocabulary_) # 取字典大小为嵌入层的大小
# 将文本数据切分为训练数据集(80%)和测试数据集(20%)
train_indices = np.random.choice(len(texts), int(round(len(texts) * 0.8)), replace = False)
test_indices = np.array(list(set(range(len(texts))) - set(train_indices)))
texts_train = [x for ix, x in enumerate(texts) if ix in train_indices]
texts_test = [x for ix, x in enumerate(texts) if ix in test_indices]
target_train = [x for ix, x in enumerate(target) if ix in train_indices]
target_test = [x for ix, x in enumerate(target) if ix in test_indices]
# Identity matrix used for one-hot encoding
identity_mat = tf.diag(tf.ones(shape = [embedding_size]))
# Variables for the logistic regression
A = tf.Variable(tf.random_normal(shape = [embedding_size, 1]))
b = tf.Variable(tf.random_normal(shape = [1, 1]))
# Initialize placeholders
x_data = tf.placeholder(shape = [sentence_size], dtype = tf.int32)
y_target = tf.placeholder(shape = [1, 1], dtype = tf.float32)
# Text-vocab embedding lookup: the identity matrix maps each word index in the sentence to its one-hot vector
x_embed = tf.nn.embedding_lookup(identity_mat, x_data)
x_col_sums = tf.reduce_sum(x_embed, 0) # summing the one-hot rows gives the sentence's bag-of-words count vector
# Model output
x_col_sums_2D = tf.expand_dims(x_col_sums, 0)
model_output = x_col_sums_2D @ A + b
# Cross-entropy loss function
loss = tf.reduce_mean(tf.nn.sigmoid_cross_entropy_with_logits(labels = y_target, logits = model_output))
# Prediction operation
prediction = tf.sigmoid(model_output)
# Declare optimizer
my_opt = tf.train.GradientDescentOptimizer(0.001)
train_step = my_opt.minimize(loss)
# Intitialize Variables
init = tf.global_variables_initializer()
sess.run(init)
# Start Logistic Regression
print('Starting training on {} sentences from the training set...'.format(len(texts_train)))
loss_vec, train_acc_all, train_acc_avg = [], [], []
for ix, t in enumerate(vocab_processor.transform(texts_train)): # transform only - the vocabulary must not be fitted again here
y_data = [[target_train[ix]]]
sess.run(train_step, feed_dict = {x_data: t, y_target: y_data})
temp_loss = sess.run(loss, feed_dict = {x_data: t, y_target: y_data})
loss_vec.append(temp_loss)
if ix % 100 == 0:
        print('Training iteration #' + str(ix + 1) + ': Loss = ' + str(temp_loss))
pass
[[temp_pred]] = sess.run(prediction, feed_dict = {x_data: t, y_target: y_data})
    # Get the prediction result
train_acc_temp = target_train[ix] == np.round(temp_pred)
train_acc_all.append(train_acc_temp)
if len(train_acc_all) >= 50:
        # Track the average of the last 50 training accuracies
train_acc_avg.append(np.mean(train_acc_all[-50:]))
pass
pass
# Evaluate accuracy on the test set
print('Starting evaluation on {} sentences from the test set...'.format(len(texts_test)))
test_acc_all = []
for ix, t in enumerate(vocab_processor.transform(texts_test)):
y_data = [[target_test[ix]]]
if ix % 50 == 0:
print("测试集迭代次数 #", ix + 1)
pass
[[temp_pred]] = sess.run(prediction, feed_dict = {x_data: t, y_target: y_data})
test_acc_temp = target_test[ix] == np.round(temp_pred)
test_acc_all.append(test_acc_temp)
pass
print("\n测试集精度: {}".format(np.mean(test_acc_all)))
# Plot training accuracy over time
plt.figure()
plt.plot(range(len(train_acc_avg)), train_acc_avg, 'b-', label = "Training accuracy")
plt.title("Average training accuracy over the last 50 training examples")
plt.xlabel('Iteration')
plt.ylabel("Training accuracy")
# -----------------------------------------------------------------
# Beep to signal that the run has finished
winsound.Beep(600, 500)
if len(plt.get_fignums()) != 0:
plt.show()
pass
| [
"[email protected]"
] | |
718e3a5fc92037cb1b160a8aa5414d824609ab9d | c3082eb2adc43b311dd3c9ff16fd3ed9df85f266 | /python/examples/functional/filter.py | 9c0234468fbdee92b7c2a3dfc2206ea06281c324 | [] | no_license | szabgab/slides | 78818c7138331b3ba9e221c81da3678a46efe9b3 | 63bba06678554db737602f2fbcd6510c36037e8a | refs/heads/main | 2023-08-31T07:13:51.536711 | 2023-08-29T13:17:59 | 2023-08-29T13:17:59 | 122,212,527 | 87 | 69 | null | 2023-05-19T06:55:11 | 2018-02-20T14:57:03 | Python | UTF-8 | Python | false | false | 122 | py | numbers = [1, 3, 27, 10, 38]
big_numbers = filter(lambda n: n > 10, numbers)
print(big_numbers)
print(list(big_numbers))
| [
"[email protected]"
] | |
d1e21770e28bf318bb9670ca416bde39191d4f7d | 6e0108c11132e63c81adbfab4309011b1f9f6dda | /tests/python/extra/clear_leaves.py | 4d2f1e3a58a3fcb2fd07655efd2646b28d0a5f5f | [
"Apache-2.0"
] | permissive | scottdonaldau/ledger-qrl | c28a614ae52c44e53947e444abf078ec27041815 | 7a3b933b84065b9db2b775d50205efcdbed2399e | refs/heads/master | 2020-04-12T07:12:25.687015 | 2018-12-19T02:55:43 | 2018-12-19T02:55:43 | 162,360,262 | 0 | 0 | Apache-2.0 | 2018-12-19T00:15:27 | 2018-12-19T00:15:27 | null | UTF-8 | Python | false | false | 269 | py | from pyledgerqrl.ledgerqrl import *
dev = LedgerQRL()
start = time.time()
for i in range(256):
data = bytearray([i]) + bytearray.fromhex("00" * 32)
answer = dev.send(INS_TEST_WRITE_LEAF, data)
assert len(answer) == 0
answer = dev.send(INS_TEST_PK_GEN_1)
| [
"[email protected]"
] | |
55559c3ca1ad5ff7d80c5cf736dab7da2c5d72a7 | dfff816642f4e1afeab268f441906a6d811d3fb4 | /polling_stations/apps/data_collection/management/commands/import_newham.py | d1463f346e39dd465ff77e53dbf91e637072ccae | [] | permissive | mtravis/UK-Polling-Stations | 2c07e03d03959492c7312e5a4bfbb71e12320432 | 26e0331dc29253dc436a0462ffaa01e974c5dc52 | refs/heads/master | 2020-05-14T18:36:31.501346 | 2019-04-17T12:54:57 | 2019-04-17T12:54:57 | 181,912,382 | 0 | 0 | BSD-3-Clause | 2019-04-17T14:48:26 | 2019-04-17T14:48:26 | null | UTF-8 | Python | false | false | 1,255 | py | from data_collection.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = "E09000025"
addresses_name = "local.2018-05-03/Version 2/LBNewham Democracy_Club__03May2018.TSV"
stations_name = "local.2018-05-03/Version 2/LBNewham Democracy_Club__03May2018.TSV"
elections = ["local.2018-05-03", "mayor.newham.2018-05-03"]
csv_delimiter = "\t"
def address_record_to_dict(self, record):
if record.addressline6 == "E16 1EF":
return None
if record.property_urn == "10090852604":
return None
if record.property_urn == "10034510101":
rec = super().address_record_to_dict(record)
rec["postcode"] = "E13 8NA"
return rec
if record.addressline6 == "E16 1XF":
return None
if record.property_urn == "10090756946":
rec = super().address_record_to_dict(record)
rec["postcode"] = "E7 9AW"
return rec
if record.property_urn == "10023994990":
rec = super().address_record_to_dict(record)
rec["postcode"] = "E7 9AW"
return rec
return super().address_record_to_dict(record)
| [
"[email protected]"
] | |
f7a133da42b483bbd6721ea185ae86310461ffcc | 1eb2d7d2a6e945a9bc487afcbc51daefd9af02e6 | /algorithm/zhang/baiduface.py | aae543c80ba05cfedc089fe690d2f4beb4954ca2 | [] | no_license | fengges/eds | 11dc0fdc7a17b611af1f61894f497ad443439bfe | 635bcf015e3ec12e96949632c546d29fc99aee31 | refs/heads/master | 2021-06-20T04:43:02.019309 | 2019-06-20T12:55:26 | 2019-06-20T12:55:26 | 133,342,023 | 0 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,255 | py | import os,time
from aip import AipFace
from PIL import Image, ImageDraw
""" 你的 APPID AK SK """
APP_ID = '10909628'
API_KEY = 'sInxLcVbCLSg6rNXVDXR4sHD'
SECRET_KEY = 'e2zgNstc7GEhhvFOfCVKDW2itVf0iID4'
filepath ="pic"
client = AipFace(APP_ID, API_KEY, SECRET_KEY)
def get_file_content(filePath):
with open(filePath, 'rb') as fp:
return fp.read()
pathDir = os.listdir(filepath)
for path in pathDir:
pic=filepath+'/'+path
pic3="pic3/"+path
image = get_file_content(pic)
""" 调用人脸检测 """
client.detect(image)
""" 如果有可选参数 """
options = {}
options["max_face_num"] = 10
options["face_fields"] = "age"
""" 带参数调用人脸检测 """
res=client.detect(image, options)
try:
result=res["result"]
except:
print(res)
img = Image.open(pic3)
img_d = ImageDraw.Draw(img)
for f in result:
face_rectangle = f["location"]
img_d.rectangle((face_rectangle['left'], face_rectangle['top'],
face_rectangle['left'] + face_rectangle['width'],
face_rectangle['top'] + face_rectangle['height']), outline="red")
img.save(pic3)
img.close()
print("sleep")
time.sleep(2)
| [
"[email protected]"
] | |
7525215d0910bbe6e1301574a8bc8953a626bd5e | 17ecc93814360e911d119a9d9a5fde1e9044c409 | /orden/models.py | 684f7edaabf2ab08452c0ae69ebe10a069f38e6f | [] | no_license | rpparada/TOCA | d2541c9464aad8a406801b4d8ef52fbe409dab3f | 253993b0bbbd49895f014bcf6bc271de5475ae7e | refs/heads/master | 2022-12-12T05:07:34.686563 | 2020-11-11T01:12:22 | 2020-11-11T01:12:22 | 236,039,156 | 0 | 0 | null | 2022-12-08T11:31:42 | 2020-01-24T16:27:23 | CSS | UTF-8 | Python | false | false | 18,395 | py | from django.db import models
from django.urls import reverse
from django.db.models.signals import pre_save, post_save
from django.conf import settings
from django.core.files.storage import FileSystemStorage
from django.core.files.base import ContentFile
from django.core.files import File
from django.utils import timezone
from django.contrib import messages
User = settings.AUTH_USER_MODEL
import math
import os
from carro.models import CarroCompra, ItemCarroCompra
from facturacion.models import FacturacionProfile
from direccion.models import Direccion
from transaccional.models import EmailTemplate
from toca.utils import unique_orden_id_generator, unique_slug_generator, render_to_pdf_file
from toca.parametros import parCarro, parOrden, mediodepago
# Create your models here.
# Purchase order (Orden Compra)
class OrdenCompraQuerySet(models.query.QuerySet):
def by_request(self, request):
fact_profile, created = FacturacionProfile.objects.new_or_get(request)
return self.filter(facturacion_profile=fact_profile)
def by_orden_id(self, orden_id):
qs = self.filter(orden_id=orden_id)
obj = None
if qs.count() == 1:
obj = qs.first()
return obj
def by_tocata(self, tocata):
return self.filter(carro__item__tocata=tocata)
class OrdenCompraManager(models.Manager):
def get_queryset(self):
return OrdenCompraQuerySet(self.model, using=self._db)
def by_request(self, request):
return self.get_queryset().by_request(request)
def new_or_get(self, fact_profile, carro_obj):
created = False
qs = self.get_queryset().filter(facturacion_profile=fact_profile, carro=carro_obj, activo=True, estado='pendiente')
if qs.count() == 1:
obj = qs.first()
else:
obj = self.model.objects.create(facturacion_profile=fact_profile, carro=carro_obj)
created = True
return obj, created
def by_orden_id(self, orden_id):
return self.get_queryset().by_orden_id(orden_id)
def by_tocata(self, tocata):
return self.get_queryset().by_tocata(tocata)
ORDENCOMPRA_ESTADO_OPCIONES = (
('pendiente','Pendiente'),
('pagado','Pagado'),
('cancelado','Cancelado'),
)
class OrdenCompra(models.Model):
orden_id = models.CharField(max_length=120, blank=True)
facturacion_profile = models.ForeignKey(FacturacionProfile, null=True, blank=True, on_delete=models.CASCADE)
direccion_envio = models.ForeignKey(Direccion, related_name='direccion_envio', null=True, blank=True, on_delete=models.CASCADE)
direccion_facturacion = models.ForeignKey(Direccion, related_name='direccion_facturacion', null=True, blank=True, on_delete=models.CASCADE)
email_adicional = models.EmailField(null=True, blank=True)
carro = models.ForeignKey(CarroCompra, on_delete=models.DO_NOTHING)
total = models.DecimalField(default=0.00, max_digits=100, decimal_places=2)
envio = models.DecimalField(default=0.00, max_digits=100, decimal_places=2)
activo = models.BooleanField(default=True)
fecha_pago = models.DateTimeField(null=True, blank=True)
estado = models.CharField(max_length=20, choices=ORDENCOMPRA_ESTADO_OPCIONES,default='pendiente')
fecha_actu = models.DateTimeField(auto_now=True)
fecha_crea = models.DateTimeField(auto_now_add=True)
objects = OrdenCompraManager()
def __str__(self):
return self.orden_id
def actualiza_total(self):
carro_total = self.carro.total
envio_total = self.envio
nuevo_total = math.fsum([carro_total, envio_total])
self.total = nuevo_total
self.save()
return nuevo_total
def email_detalles_compra(self, request):
if self.email_adicional:
recipient_list = [self.facturacion_profile.email,self.email_adicional]
else:
recipient_list = [self.facturacion_profile.email]
EmailTemplate.send(
'email_boleta_itickets',
context = { 'object': self },
subject = 'Tocatas Íntimas - ITicket Orden {num_orden}'.format(num_orden=self.orden_id),
sender = '[email protected]',
emails = recipient_list
)
def check_done(self, request):
is_done = True
for item in self.carro.item.all():
quitar_item = False
            # Check that the tocata (show) is still available
if not item.tocata.check_vigencia():
quitar_item = True
messages.error(request,'Tocata ya no esta disponible')
            # Check that enough tickets are still available for sale
if not item.tocata.check_entradas(item.cantidad):
quitar_item = True
messages.error(request,'No hay suficientes entradas disponibles')
            # Purchases must happen before the event's date and time
if not item.tocata.check_fechahora():
quitar_item = True
messages.error(request,'Compra fuera de tiempo')
            # Check whether the user already bought tickets for this event
entradas_comp = EntradasCompradas.objects.by_tocata_request(request, item.tocata)
num_entradas = 0
for entrada in entradas_comp:
num_entradas += entrada.item.cantidad
entradas_disponibles = 4 - num_entradas
if item.cantidad > entradas_disponibles:
quitar_item = True
messages.error(request,'Ya compraste entradas')
            # Remove tocatas that no longer qualify and cancel the checkout
if quitar_item:
self.carro.item.remove(item)
item.delete()
request.session['carro_tocatas'] = self.carro.item.count()
is_done = False
facturacion_profile = self.facturacion_profile
total = self.total
if facturacion_profile and total <= 0:
is_done = False
return is_done
def actualiza_compras(self):
for item in self.carro.item.all():
obj, created = EntradasCompradas.objects.get_or_create(
orden = self,
facturacion_profile = self.facturacion_profile,
item = item
)
return EntradasCompradas.objects.filter(orden=self).count()
def agrega_entradas_compra(self):
entradas_obj = EntradasCompradas.objects.by_orden(self)
if entradas_obj:
for entrada_obj in entradas_obj:
context = {
'boleta_id': 8838838,
'nombre_cliente': 'Rodrigo Parada',
'cantidad': 29939,
'fecha_compra': 'Hoy'
}
#pdf = render_to_pdf('carro/entradaspdf.html', context)
pdf = render_to_pdf_file('carro/entradaspdf.html', context, 'test.pdf')
                # Still pending: save to a file before storing it in the table
f = open('nuevotest.pdf', 'wb')
myfile = File(f)
myfile = ContentFile(pdf)
entrada_obj.file.save('nuevotest.pdf', myfile)
#entrada_obj.save()
def mark_pagado(self):
if self.estado != 'pagado':
#if self.check_done():
self.estado = 'pagado'
self.fecha_pago = timezone.now()
self.save()
self.actualiza_compras()
return self.estado
def guarda_cobro(self, transaction, token):
cobro_obj = Cobro.objects.create(
orden = self,
facturacion_profile = self.facturacion_profile,
token = token,
accountingDate = transaction['accountingDate'],
buyOrder = transaction['buyOrder'],
cardNumber = transaction['cardDetail']['cardNumber'],
cardExpirationDate = transaction['cardDetail']['cardExpirationDate'],
sharesAmount = transaction['detailOutput'][0]['sharesAmount'],
sharesNumber = transaction['detailOutput'][0]['sharesNumber'],
amount = transaction['detailOutput'][0]['amount'],
commerceCode = transaction['detailOutput'][0]['commerceCode'],
authorizationCode = transaction['detailOutput'][0]['authorizationCode'],
paymentTypeCode = transaction['detailOutput'][0]['paymentTypeCode'],
responseCode = transaction['detailOutput'][0]['responseCode'],
sessionId = transaction['sessionId'],
transactionDate = transaction['transactionDate'],
urlRedirection = transaction['urlRedirection'],
vci = transaction['VCI'],
)
return cobro_obj
def sumar_asistentes_total(self):
for item in self.carro.item.all():
item.tocata.asistentes_total += item.cantidad
item.tocata.save()
def limpia_carro(self):
self.carro.vigente = False
self.carro.save()
def pre_save_ordencompra_receiver(sender, instance, *args, **kwargs):
if not instance.orden_id:
instance.orden_id = unique_orden_id_generator(instance)
qs = OrdenCompra.objects.filter(carro=instance.carro).exclude(facturacion_profile=instance.facturacion_profile)
if qs.exists():
qs.update(activo=False)
pre_save.connect(pre_save_ordencompra_receiver, sender=OrdenCompra)
def post_save_carro_total(sender, instance, created, *args, **kwargs):
if not created:
carro_obj = instance
carro_total = carro_obj.total
carro_id = carro_obj.id
qs = OrdenCompra.objects.filter(carro__id=carro_id)
if qs.count() == 1:
orden_obj = qs.first()
orden_obj.actualiza_total()
post_save.connect(post_save_carro_total, sender=CarroCompra)
def post_save_orden(sender, instance, created, *args, **kwargs):
if created:
instance.actualiza_total()
post_save.connect(post_save_orden, sender=OrdenCompra)
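# Signal wiring summary: pre_save fills in orden_id before an OrdenCompra row is written, while the
# two post_save receivers above keep OrdenCompra.total consistent whenever the related CarroCompra
# changes or a new order is created.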
# Transbank charge (Cobro)
class CobroQuerySet(models.query.QuerySet):
def by_orden(self, orden):
return self.filter(orden=orden)
def by_token(self, token):
return self.filter(token=token)
class CobroManager(models.Manager):
def get_queryset(self):
return CobroQuerySet(self.model, using=self._db)
def by_orden(self, orden):
return self.get_queryset().by_orden(orden)
def by_token(self, token):
return self.get_queryset().by_token(token)
def new_or_get(self, orden):
created = False
qs = self.get_queryset().by_orden(orden)
if qs.count() == 1:
obj = qs.first()
else:
obj = self.model.objects.create(orden=orden)
created = True
return obj, created
class Cobro(models.Model):
orden = models.ForeignKey(OrdenCompra, on_delete=models.DO_NOTHING)
facturacion_profile = models.ForeignKey(FacturacionProfile, null=True, blank=True, on_delete=models.CASCADE)
token = models.CharField(max_length=64, blank=True, null=True)
accountingDate = models.CharField(max_length=4, blank=True, null=True)
buyOrder = models.CharField(max_length=26, blank=True, null=True)
cardNumber = models.CharField(max_length=16, blank=True, null=True)
cardExpirationDate = models.CharField(max_length=4, blank=True, null=True)
sharesAmount = models.CharField(max_length=10, blank=True, null=True)
sharesNumber = models.CharField(max_length=2, blank=True, null=True)
amount = models.CharField(max_length=10, blank=True, null=True)
commerceCode = models.CharField(max_length=12, blank=True, null=True)
authorizationCode = models.CharField(max_length=6, blank=True, null=True)
paymentTypeCode = models.CharField(max_length=3, blank=True, null=True)
responseCode = models.CharField(max_length=2, blank=True, null=True)
sessionId = models.CharField(max_length=61, blank=True, null=True)
transactionDate = models.DateTimeField(blank=True, null=True)
urlRedirection = models.CharField(max_length=256, blank=True, null=True)
vci = models.CharField(max_length=3, blank=True, null=True)
objects = CobroManager()
def __str__(self):
return str(self.orden.id)
    # Charge (cobro) control
    # Authorization response codes. Possible values:
    # 0 = Transaction approved
    # -1 = Transaction rejected - retry (possible error in the transaction data entered)
    # -2 = Transaction rejected (processing failed; this rejection relates to the card parameters and/or its associated account)
    # -3 = Transaction error (internal to Transbank)
    # -4 = Issuer rejection (rejected by the card issuer)
    # -5 = Rejected - possible fraud (transaction flagged with fraud risk)
CONTROLCOBRO_ESTADO_OPCIONES = (
('initTransaction','initTransaction'),
('getTransactionResult','getTransactionResult'),
('acknowledgeTransaction','acknowledgeTransaction'),
('rechazoTransaccion_1','rechazoTransaccion_1'),
('rechazoTransaccion_2','rechazoTransaccion_2'),
('errorTransaccion','errorTransaccion'),
('rechazoEmisor','rechazoEmisor'),
('rechazoPosibleFraude','rechazoPosibleFraude'),
('desconocido','desconocido'),
('exitoso','existoso'),
)
class ControlCobroQuerySet(models.query.QuerySet):
def by_token(self, token):
return self.filter(token=token)
class ControlCobroManager(models.Manager):
def get_queryset(self):
return ControlCobroQuerySet(self.model, using=self._db)
def by_token(self, token):
return self.get_queryset().by_token(token)
def new_or_get(self, token):
created = False
qs = self.get_queryset().by_token(token)
if qs.count() == 1:
obj = qs.first()
else:
obj = self.model.objects.create(token=token)
created = True
return obj, created
# Transbank payment control
class ControlCobro(models.Model):
token = models.CharField(max_length=64)
estado = models.CharField(max_length=30, choices=CONTROLCOBRO_ESTADO_OPCIONES, default='initTransaction')
orden = models.ForeignKey(OrdenCompra, on_delete=models.DO_NOTHING, blank=True, null=True)
fecha_actu = models.DateTimeField(auto_now=True)
fecha_crea = models.DateTimeField(auto_now_add=True)
objects = ControlCobroManager()
def __str__(self):
return self.token
def actualizar_estado(self, estado):
self.estado = estado
self.save()
def agregar_orden(self, orden):
self.orden = orden
self.save()
# Entradas Compradas
class EntradasCompradasQuerySet(models.query.QuerySet):
def activas(self):
return self.filter(rembolsado=False)
def by_request(self, request):
fact_profile, created = FacturacionProfile.objects.new_or_get(request)
return self.filter(facturacion_profile=fact_profile)
def by_orden(self, orden):
return self.filter(orden=orden)
def by_tocata(self, tocata):
return self.filter(item__tocata=tocata)
class EntradasCompradasManager(models.Manager):
def get_queryset(self):
return EntradasCompradasQuerySet(self.model, self._db)
def all(self):
return self.get_queryset().activas()
def by_request(self, request):
return self.get_queryset().by_request(request)
def by_orden(self, orden):
return self.get_queryset().by_orden(orden)
def by_tocata_request(self, request, tocata):
return self.get_queryset().by_request(request).by_tocata(tocata)
def by_tocata(self, tocata):
return self.get_queryset().by_tocata(tocata)
def upload_ticket_file_loc(instance, filename):
username = instance.facturacion_profile.usuario.email
slug = instance.item.tocata.slug
if not slug:
slug = unique_slug_generator(instance.item.tocata)
location = '{0}/tickets/{1}/'.format(username,slug)
return location + filename
class EntradasCompradas(models.Model):
orden = models.ForeignKey(OrdenCompra, on_delete=models.CASCADE)
facturacion_profile = models.ForeignKey(FacturacionProfile, on_delete=models.CASCADE)
item = models.ForeignKey(ItemCarroCompra, on_delete=models.CASCADE)
rembolsado = models.BooleanField(default=False)
slug = models.SlugField(blank=True, unique=False)
fecha_actu = models.DateTimeField(auto_now=True)
fecha_crea = models.DateTimeField(auto_now_add=True)
file = models.FileField(
upload_to=upload_ticket_file_loc,
storage=FileSystemStorage(location=settings.PROTECTED_ROOT),
null=True,
blank=True
)
objects = EntradasCompradasManager()
def __str__(self):
return str(self.item.cantidad)+' '+str(self.item.tocata.nombre)
def get_absolute_url(self):
return "/ordenes/{orden_id}/{slug}".format(orden_id=self.orden.orden_id, slug=self.slug)
def get_download_url(self):
return reverse('ordenes:downloadticket',
kwargs={'orden_id': self.orden.id,
'pk': self.pk}
)
def get_invitados(self):
return self.item.cantidad - 1
@property
def nombrearchivo(self):
if self.file:
return os.path.basename(self.file.name)
def entradascompradas_pre_save_receiver(sender, instance, *args, **kwargs):
if not instance.slug:
instance.slug = unique_slug_generator(instance.item.tocata)
pre_save.connect(entradascompradas_pre_save_receiver, sender=EntradasCompradas)
| [
"[email protected]"
] | |
1a4331aa03052d0136ac9424cf6c3d97e49dc9fc | 4a2bd14eb54a5447b9b5c67df97d9237cd506bd7 | /setup.py | 61fde968a06933af9c27eabc838e71e919e782a8 | [] | no_license | GapData/bokehutils | 85363af5d1575983fe980a7c5a269eab354d168d | deadedd7a8a2210beeb8cce226d7d566f84a6f11 | refs/heads/master | 2021-05-29T13:40:12.105135 | 2015-09-30T19:40:03 | 2015-09-30T19:40:03 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,608 | py | # Copyright (C) 2015 by Per Unneberg
from setuptools import setup, find_packages
import glob
import os  # used by the readthedocs check below; without it the try/except silently hides a NameError
import versioneer
INSTALL_REQUIRES = [
'sphinx>=1.3',
'pytest',
'pytest-cov>=1.8.1',
'bokeh>=0.10.0',
]
try:
# Hack for readthedocs
if not 'readthedocs' in os.path.dirname(os.path.realpath(__file__)):
pass
else:
print("readthedocs in path name; assuming we're building docs @readthedocs")
INSTALL_REQUIRES.append('sphinx-bootstrap-theme')
except:
pass
# Integrating pytest with setuptools: see
# https://pytest.org/latest/goodpractises.html#integrating-with-distutils-python-setup-py-test
from distutils.core import setup, Command
# you can also import from setuptools
class PyTest(Command):
user_options = []
def initialize_options(self):
pass
def finalize_options(self):
pass
def run(self):
import subprocess
import sys
errno = subprocess.call([sys.executable, 'runtests.py'])
raise SystemExit(errno)
_version = versioneer.get_version()
_cmdclass = versioneer.get_cmdclass()
setup(name="bokehutils",
version=_version,
cmdclass=_cmdclass,
author="Per Unneberg",
author_email="[email protected]",
description="Utility functions for working with bokeh plots",
license="MIT",
scripts=glob.glob('scripts/*.py'),
install_requires=INSTALL_REQUIRES,
packages=find_packages(exclude=['ez_setup', 'test*']),
package_data={
'bokehutils': [
'_templates/*',
'static/*',
],
})
| [
"[email protected]"
] | |
6a9813e8183140d4f37cce18fe72fbf348307aa9 | e41849b424e892c0ef1325ec768f4a5aa6dc11e5 | /biosteam/units/_multi_effect_evaporator.py | ae5f2bda8e39b53c5a21f2db763537ba2cc004ca | [
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"NCSA"
] | permissive | Saurajyoti/biosteam | 28a8548ec9c453124e31d73c4e3d628d44dad322 | 65d35586c9e40660f170e5a8aa4e4450ea171a23 | refs/heads/master | 2023-06-15T22:07:54.544645 | 2021-07-13T04:19:14 | 2021-07-13T04:19:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,479 | py | # -*- coding: utf-8 -*-
# BioSTEAM: The Biorefinery Simulation and Techno-Economic Analysis Modules
# Copyright (C) 2020-2021, Yoel Cortes-Pena <[email protected]>
#
# This module is under the UIUC open-source license. See
# github.com/BioSTEAMDevelopmentGroup/biosteam/blob/master/LICENSE.txt
# for license details.
"""
"""
import numpy as np
import biosteam as bst
from .. import Unit
from .mixing import Mixer
from .heat_exchange import HXutility
from ._flash import Flash, Evaporator_PQ
from .design_tools import (
compute_vacuum_system_power_and_cost,
compute_heat_transfer_area
)
from thermosteam import MultiStream, settings
import flexsolve as flx
from warnings import warn
from .design_tools import heat_transfer as ht
__all__ = ('MultiEffectEvaporator',)
log = np.log
exp = np.exp
# Table 22.32 Product process and design (pg 592)
# Name: ('Area range (m2)', 'Cost(A) (USD)', 'U (kJ/(hr*m2*K)))', 'Material')
evaporators = {'Horizontal tube':
((9.29, 743.224),
lambda A, CE: CE*2.304*A**0.53,
4906.02,
'Carbon steel'),
'Long-tube vertical':
((9.29, 743.224),
lambda A, CE: CE*3.086*A**0.55,
8176.699,
'Carbon steel'),
'Forced circulation':
((13.935, 8000),
lambda A, CE: CE/500*exp(8.2986 + 0.5329*log(A*0.0929)-0.000196*log(A*0.0929)**2),
10731.918,
'Carbon steel'),
'Falling film':
((13.935, 371.612),
lambda A, CE: CE*7.416*A**0.55,
10220.874,
'Stainless steel tubes/Carbon steel shell')}
class MultiEffectEvaporator(Unit):
"""
    Creates evaporators with pressures given by P (a list of pressures).
Adjusts first evaporator vapor fraction to satisfy an overall fraction
evaporated. All evaporators after the first have zero duty. Condenses
the vapor coming out of the last evaporator. Pumps all liquid streams
to prevent back flow in later parts. All liquid evaporated is ultimately
recondensed. Cost is based on required heat transfer area. Vacuum system
is based on air leakage. Air leakage is based on volume, as given by
residence time `tau` and flow rate to each evaporator.
Parameters
----------
ins : stream
Inlet.
outs : stream sequence
* [0] Solid-rich stream.
* [1] Condensate stream.
P : tuple[float]
Pressures describing each evaporator (Pa).
V : float
Molar fraction evaporated as specified in `V_definition`
(either overall or in the first effect).
V_definition : str, optional
* 'Overall' - `V` is the overall molar fraction evaporated.
* 'First-effect' - `V` is the molar fraction evaporated in the first effect.
Examples
--------
Concentrate sugar setting vapor fraction at the first effect:
>>> import biosteam as bst
>>> from biorefineries.cornstover import chemicals
>>> bst.settings.set_thermo(chemicals)
>>> feed = bst.Stream('feed', Water=1000, Glucose=100,
... AceticAcid=0.5, HMF=0.1, Furfural=0.1,
... units='kg/hr')
>>> E1 = bst.MultiEffectEvaporator('E1', ins=feed, outs=('solids', 'liquid'),
... V=0.1, V_definition='First-effect',
... P=(101325, 73581, 50892, 32777, 20000))
>>> E1.simulate()
>>> E1.show()
MultiEffectEvaporator: E1
ins...
[0] feed
phase: 'l', T: 298.15 K, P: 101325 Pa
flow (kmol/hr): Water 55.5
AceticAcid 0.00833
Furfural 0.00104
HMF 0.000793
Glucose 0.555
outs...
[0] solids
phase: 'l', T: 333.24 K, P: 20000 Pa
flow (kmol/hr): Water 20.6
AceticAcid 0.00189
Furfural 7.39e-05
HMF 0.000793
Glucose 0.555
[1] liquid
phase: 'l', T: 352.12 K, P: 101325 Pa
flow (kmol/hr): Water 34.9
AceticAcid 0.00643
Furfural 0.000967
>>> E1.results()
Multi-Effect Evaporator Units E1
Power Rate kW 5.72
Cost USD/hr 0.447
Low pressure steam Duty kJ/hr 5.8e+05
Flow kmol/hr 14.9
Cost USD/hr 3.55
Cooling water Duty kJ/hr -3.49e+05
Flow kmol/hr 239
Cost USD/hr 0.116
Design Area m^2 11
Volume m^3 1.64
Purchase cost Condenser USD 5.35e+03
Evaporators USD 9.59e+03
Liquid-ring pump USD 1.24e+04
Total purchase cost USD 2.74e+04
Utility cost USD/hr 4.12
Concentrate sugar setting overall vapor fraction:
>>> import biosteam as bst
>>> from biorefineries.cornstover import chemicals
>>> bst.settings.set_thermo(chemicals)
>>> feed = bst.Stream('feed', Water=1000, Glucose=100,
... AceticAcid=0.5, HMF=0.1, Furfural=0.1,
... units='kg/hr')
>>> E1 = bst.MultiEffectEvaporator('E1', ins=feed, outs=('solids', 'liquid'),
... V=0.1, V_definition='Overall',
... P=(101325, 73581, 50892, 32777, 20000))
>>> E1.simulate()
>>> E1.show()
MultiEffectEvaporator: E1
ins...
[0] feed
phase: 'l', T: 298.15 K, P: 101325 Pa
flow (kmol/hr): Water 55.5
AceticAcid 0.00833
Furfural 0.00104
HMF 0.000793
Glucose 0.555
outs...
[0] solids
phase: 'l', T: 354.94 K, P: 50892 Pa
flow (kmol/hr): Water 50
AceticAcid 0.0069
Furfural 0.000579
HMF 0.000793
Glucose 0.555
[1] liquid
phase: 'l', T: 361.2 K, P: 101325 Pa
flow (kmol/hr): Water 5.55
AceticAcid 0.00143
Furfural 0.000462
>>> E1.results()
Multi-Effect Evaporator Units E1
Power Rate kW 5.72
Cost USD/hr 0.447
Low pressure steam Duty kJ/hr 3.82e+05
Flow kmol/hr 9.85
Cost USD/hr 2.34
Cooling water Duty kJ/hr -1.15e+05
Flow kmol/hr 78.5
Cost USD/hr 0.0383
Design Area m^2 1.64
Volume m^3 0.986
Purchase cost Condenser USD 3.89e+03
Evaporators USD 2.77e+03
Liquid-ring pump USD 1.24e+04
Total purchase cost USD 1.91e+04
Utility cost USD/hr 2.83
"""
line = 'Multi-Effect Evaporator'
_units = {'Area': 'm^2',
'Volume': 'm^3'}
_F_BM_default = {'Evaporators': 2.45,
'Liquid-ring pump': 1.0,
'Condenser': 3.17}
_N_outs = 2
_N_heat_utilities = 2
#: Residence time (hr)
tau = 0.30
# Evaporator type
_Type = 'Forced circulation'
# Data for simmulation and costing
_evap_data = evaporators[_Type]
@property
def Type(self):
"""Evaporation type."""
return self._Type
@Type.setter
def Type(self, evap_type):
try:
self._evap_data = evaporators[evap_type]
except KeyError:
dummy = str(evaporators.keys())[11:-2]
raise ValueError(f"Type must be one of the following: {dummy}")
self._Type = evap_type
@property
def V_definition(self):
"""[str] Must be one of the following:
* 'Overall' - Defines attribute `V` as the overall molar fraction evaporated.
* 'First-effect' - Defines attribute `V` as the molar fraction evaporated in the first effect.
"""
return self._V_definition
@V_definition.setter
def V_definition(self, V_definition):
V_definition = V_definition.capitalize()
if V_definition in ('Overall', 'First-effect'):
self._V_definition = V_definition
else:
raise ValueError("V_definition must be either 'Overall' or 'First-effect'")
def __init__(self, ID='', ins=None, outs=(), thermo=None, *, P, V, V_definition='Overall'):
Unit.__init__(self, ID, ins, outs, thermo)
self.P = P #: tuple[float] Pressures describing each evaporator (Pa).
self.V = V #: [float] Molar fraction evaporated.
self.V_definition = V_definition
self._V_first_effect = None
self._reload_components = True
self.components = {}
def reset_cache(self):
self._reload_components = True
def load_components(self):
P = self.P
thermo = self.thermo
# Create components
self._N_evap = n = len(P) # Number of evaporators
first_evaporator = Flash(None, outs=(None, None), P=P[0], thermo=thermo)
# Put liquid first, then vapor side stream
evaporators = [first_evaporator]
for i in range(1, n):
evap = Evaporator_PQ(None, outs=(None, None, None), P=P[i], Q=0, thermo=thermo)
evaporators.append(evap)
condenser = HXutility(None, outs=[None], thermo=thermo, V=0)
condenser.parent = self
self.heat_utilities = (first_evaporator.heat_utilities[0],
condenser.heat_utilities[0])
mixer = Mixer(None, outs=[None], thermo=thermo)
components = self.components
components['evaporators'] = evaporators
components['condenser'] = condenser
components['mixer'] = mixer
# Set-up components
other_evaporators = evaporators[1:]
first_evaporator.ins[:] = [i.copy() for i in self.ins]
# Put liquid first, then vapor side stream
ins = [first_evaporator.outs[1], first_evaporator.outs[0]]
for evap in other_evaporators:
evap.ins[:] = ins
ins = [evap.outs[1], evap.outs[0]]
def _V_overall(self, V_first_effect):
first_evaporator, *other_evaporators = self.components['evaporators']
first_evaporator.V = V_overall = V_first_effect
first_evaporator._run()
for evap in other_evaporators:
evap._run()
V_overall += (1. - V_overall) * evap.V
return V_overall
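    # Worked example (illustrative): if the first effect evaporates V = 0.5 and the
    # second evaporates V = 0.2 of what remains, the loop above accumulates an
    # overall fraction of 0.5 + (1 - 0.5)*0.2 = 0.6.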
def _V_overall_objective_function(self, V_first_effect):
return self._V_overall(V_first_effect) - self.V
def _run(self):
out_wt_solids, liq = self.outs
ins = self.ins
if self.V == 0:
out_wt_solids.copy_like(ins[0])
for i in self.heat_utilities:
i.empty(); i.heat_exchanger = None
liq.empty()
self._reload_components = True
return
if self._reload_components:
self.load_components()
self._reload_components = False
if self.V_definition == 'Overall':
P = tuple(self.P)
self.P = list(P)
for i in range(self._N_evap-1):
if self._V_overall(0.) > self.V:
self.P.pop()
self.load_components()
self._reload_components = True
else:
break
self.P = P
self._V_first_effect = flx.IQ_interpolation(self._V_overall_objective_function,
0., 1., None, None, self._V_first_effect,
xtol=1e-9, ytol=1e-6,
checkiter=False)
V_overall = self.V
else:
V_overall = self._V_overall(self.V)
n = self._N_evap # Number of evaporators
components = self.components
evaporators = components['evaporators']
condenser = components['condenser']
mixer = components['mixer']
last_evaporator = evaporators[-1]
# Condensing vapor from last effector
outs_vap = last_evaporator.outs[0]
condenser.ins[:] = [outs_vap]
condenser._run()
outs_liq = [condenser.outs[0]] # list containing all output liquids
# Unpack other output streams
out_wt_solids.copy_like(last_evaporator.outs[1])
for i in range(1, n):
evap = evaporators[i]
outs_liq.append(evap.outs[2])
# Mix liquid streams
mixer.ins[:] = outs_liq
mixer._run()
liq.copy_like(mixer.outs[0])
mixed_stream = MultiStream(None, thermo=self.thermo)
mixed_stream.copy_flow(self.ins[0])
mixed_stream.vle(P=last_evaporator.P, V=V_overall)
out_wt_solids.mol = mixed_stream.imol['l']
liq.mol = mixed_stream.imol['g']
def _design(self):
if self.V == 0: return
# This functions also finds the cost
A_range, C_func, U, _ = self._evap_data
components = self.components
evaporators = components['evaporators']
Design = self.design_results
Cost = self.baseline_purchase_costs
CE = bst.CE
first_evaporator = evaporators[0]
heat_exchanger = first_evaporator.heat_exchanger
hu = heat_exchanger.heat_utilities[0]
duty = first_evaporator.H_out - first_evaporator.H_in
Q = abs(duty)
Tci = first_evaporator.ins[0].T
Tco = first_evaporator.outs[0].T
hu(duty, Tci, Tco)
Th = hu.inlet_utility_stream.T
LMTD = ht.compute_LMTD(Th, Th, Tci, Tco)
ft = 1
A = abs(compute_heat_transfer_area(LMTD, U, Q, ft))
first_evaporator.baseline_purchase_costs['Evaporator'] = C = C_func(A, CE)
self._evap_costs = evap_costs = [C]
# Find condenser requirements
condenser = components['condenser']
condenser._summary()
Cost['Condenser'] = condenser.purchase_cost
# Find area and cost of evaporators
As = [A]
A_min, A_max = A_range
evap = evaporators[-1]
for evap in evaporators[1:]:
Q = evap.design_results['Heat transfer']
if Q <= 1e-12:
As.append(0.)
evap_costs.append(0.)
else:
Tc = evap.outs[0].T
Th = evap.outs[2].T
LMTD = Th - Tc
A = compute_heat_transfer_area(LMTD, U, Q, 1.)
As.append(A)
if settings.debug and not A_min < A < A_max:
warn(f'area requirement ({A}) is out of range, {A_range}')
evap_costs.append(C_func(A, CE))
self._As = As
Design['Area'] = A = sum(As)
Design['Volume'] = total_volume = self._N_evap * self.tau * self.ins[0].F_vol
Cost['Evaporators'] = sum(evap_costs)
# Calculate power
power, cost = compute_vacuum_system_power_and_cost(
F_mass=0, F_vol=0, P_suction=evap.outs[0].P,
vessel_volume=total_volume,
vacuum_system_preference='Liquid-ring pump')
Cost['Liquid-ring pump'] = cost
self.power_utility(power)
| [
"[email protected]"
] | |
2a1a2dba821ba88d97ccfa8b1ac0ad83ecc9db61 | 5a07828016e8bafbea5dac8f83c8bfd5d0bfd603 | /py_290w290/140309_srw.py | ba5c140fc766ac183dd4cc526b37aee626cb33e2 | [] | no_license | JJHopkins/rajter_compare | db5b88d2c6c1efc0fead9b6ed40fb3cce36bedb4 | 2ba52f4f16cf2aca350a82ea58d0aa8f8866c47c | refs/heads/master | 2020-06-04T23:53:57.089329 | 2014-04-08T18:02:30 | 2014-04-08T18:02:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,176 | py | #$ {\bf Free energy between two skewed cylinders (CG-10 in water). Full retarded result, function of separation $\ell$ and angle $\theta$} \\
#$ Equation 12: $G(\ell,\theta) = - \frac{ (\pi R_1^{2})(\pi R_2^{2}) }{2 \pi~\ell^{4} \sin{\theta}} \left( {\cal A}^{(0)}(\ell) + {\cal A}^{(2)}(\ell) \cos 2\theta \right)$ \\
#$ $G(\ell,\theta) = - \frac{k_BT}{64 \pi} \frac{ \pi^2 R_1^{2} R_2^{2} }{\ell^{4} \sin{\theta}} {\sum_{n=0}^{\infty}}' \Delta_{1,\parallel} \Delta_{2,\parallel} ~p_n^{4} ~\int_0^{\infty} t dt ~\frac{e^{- 2 p_n \sqrt{t^{2} + 1}}}{(t^{2} + 1)} \tilde g(t, a_1(i \omega_n), a_2(i \omega_n), \theta),$ \\
#$ with $\tilde g(t, a_1, a_2, \theta) = 2 \left[ (1+3a_1)(1+3a_2) t^{4} + 2 (1+2a_1+2a_2+3a_1a_2) t^{2} + 2(1+a_1)(1+a_2)\right] + (1-a_1)(1-a_2)(t^{2} + 2)^2 \cos 2\theta.$ \\
#!/usr/bin/python
import numpy as np
import scipy.optimize as opt
from scipy.integrate import trapz
import matplotlib.pyplot as pl
from matplotlib import axis as ax
# use pyreport -l file.py
from pylab import show
from matplotlib.ticker import MultipleLocator
from mpl_toolkits.mplot3d import Axes3D
from pylab import pause
from matplotlib.backends.backend_pdf import PdfPages
#pp = PdfPages('plots/skew_ret_water/skew_ret_water.pdf')
eiz_x = np.loadtxt('data/eiz_x_output_eV.txt') #perpendicular, radial
eiz_z = np.loadtxt('data/eiz_z_output_eV.txt') # parallel,axial
eiz_w = np.loadtxt('data/eiz_w_output_eV.txt') # water as intervening medium
#eiz_w[0] = eiz_w[1] #NOTE: there is a jump from first val down to second val
x90_A0, y90_A0 = np.loadtxt('data/290-W-290-PERPA0.PRN',unpack=True, usecols = [0,1]) # water as intervening medium
x90_A2, y90_A2 = np.loadtxt('data/290-W-290-PERPA2.PRN',unpack=True, usecols = [0,1]) # water as intervening medium
r_1 = 1.0e-9
r_2 = 1.0e-9
c = 2.99e8 # in m/s
Temp = 297
kb = 1.3807e-23 # in J/K
coeff = 2.411e14 # in rad/s
# NOTES:
# at RT, 1 kT = 4.11e-21 J
# 1 eV = 1.602e-19 J = 0.016 zJ
# h_bar_eV = 6.5821e-16 eVs
# h_bar = 1. #1.0546e-34 #in Js
#kb = 8.6173e-5 # in eV/K
# z_n_eV = (2*pi*kT/h_bar)n
# = (0.159 eV) / (6.5821e-16 eVs)
# = n*2.411e14 rad/s
# z_n_J = (2*pi*kT/h_bar)n
# = (1.3807e-23 J/K) / (1.0546e-34 Js))*n
# = n*2.411e14 rad/s
#coeff = 0.159 # in eV w/o 1/h_bar
ns = np.arange(0.,500.)
z = ns * coeff
ls = np.linspace(1.0e-9, 1.0e-6, 200)
#thetas = np.linspace((0.01)*np.pi,(1./2)*np.pi,25)
thetas = [np.pi/8,np.pi/4,np.pi/3,np.pi/2]
dt = 1.0
ts = np.arange(1.0,10000.,dt)
def Aiz(perp, par,med):
return (2.0*(perp-med)*med)/((perp+med)*(par-med))
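# Illustrative restatement (not called below): the angular factor \tilde g from the
# header equations written as one helper. ys() and y_2s() below carry the same terms,
# split into the isotropic part and the cos(2*theta) part and combined with the
# exponential screening factor.
def g_tilde(t, a1, a2, theta):
    iso = 2.0*((1. + 3.*a1)*(1. + 3.*a2)*t**4
               + 2.*(1. + 2.*a1 + 2.*a2 + 3.*a1*a2)*t**2
               + 2.*(1. + a1)*(1. + a2))
    aniso = (1. - a1)*(1. - a2)*(t**2 + 2.)**2 * np.cos(2.*theta)
    return iso + aniso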
def ys(a,time,eizw,L, N):
term0 = ( time / (time*time+1.0) )
term1 = ( time**4 * 2.0*(1. + 3.*a)*(1.+3.*a) )
term2 = ( time**2 * 4.0*(1. + 2.0*a+2.0*a+3.0*a*a))
term3 = ( 4.0*(1. + a)*(1.0 + a) )
term4 = (-2.0 * np.sqrt(eizw)* L * coeff * N / c * np.sqrt(time*time + 1.0))
#print 'ys term0', term0
#print 'ys term1', term1
#print 'ys term2', term2
#print 'ys term3', term3
#print 'ys term4', term4
#print '----'
return (term0) * np.exp(term4)*( (term1) + (term2) + (term3))#* term5
def y_2s(a,time,eizw, L, N):
term0 = (time / (time*time+1.0) )
term1 = ((1.- a)*(1.- a)*(time * time + 2.0)*(time * time + 2.0))
term2 = (-2.0 * np.sqrt(eizw)* L * coeff * N / c * np.sqrt(time*time + 1.0))
#print 'y_2s term0', term0
#print 'y_2s term1', term1
#print 'y_2s term2', term2
#print '----'
return term0 * term1* np.exp(term2) #* term3
def As(eizz,eizw,L,N,Y):
term1 = (((eizz-eizw)/eizw)*((eizz-eizw)/eizw))
term2 = (Y * eizw *eizw * (coeff*N)**4 * L**4 / (c**4))
#term3 = Y
#print 'As term1 = ', term1
#print 'As term2 = ', term2
##print 'As term3 = ', term3
#print '----'
return term1 * term2# * term3
def A_2s(eizz,eizw, L , N ,Y):
term1 = (((eizz-eizw)/eizw)*((eizz-eizw)/eizw))
term2 = (Y * eizw *eizw * (coeff*N)**4 * L**4 / (c**4))
#term3 = Y
#print 'A_2s term1 = ', term1
#print 'A_2s term2 = ', term2
##print 'A_2s term3 = ', term3
#print '----'
return (term1 * term2)# * term3
y = np.zeros(shape=(len(ns),len(ls)))
y_2 = np.zeros(shape=(len(ns),len(ls)))
A = np.zeros(shape=(len(ns),len(ls)))
A_2 = np.zeros(shape=(len(ns),len(ls)))
EL = np.zeros(len(ls))
G_l_t_dt = np.zeros(shape=(len(ls),len(thetas)))
A2_theta = np.zeros(shape=(len(ls),len(thetas)))
aiz = []
aiz = Aiz(eiz_x,eiz_z, eiz_w) # of length = len(ns)
#aiz[74] = aiz[73]
#aiz[75] = aiz[76]
for k,length in enumerate(ls):
sum_A = np.empty(len(ls))
sum_A_2 = np.empty(len(ls))
for j,n in enumerate(ns):
# Integral:
y[j,k] = trapz(ys(aiz[j],ts,eiz_w[j],length,n),ts,dt)
y_2[j,k] = trapz(y_2s(aiz[j],ts,eiz_w[j],length,n),ts,dt)
#print 'dt Integral y = ',i,k,j, y
#print 'dt Integral y_2 = ',i,k,j, y_2
#print '----'
#print 'N terms for A0 = ' , As(eiz_z[j],eiz_w[j],length,n,y)
#print 'N terms for A2 = ', A_2s(eiz_z[j],eiz_w[j],length,n,y_2)
#print '----'
A[j,k] = As(eiz_z[j],eiz_w[j],length,n,y[j,k])
A_2[j,k] = A_2s(eiz_z[j],eiz_w[j],length,n,y_2[j,k])# * np.cos(2.0*theta)
A[0] = (1./2)*A[0]
A_2[0] = (1./2)*A_2[0]
sum_A = np.sum(A,axis=0)
#print 'sum of A0 = ', k,j,sum_A
sum_A_2 = np.sum(A_2,axis=0)
#print 'sum of A2 = ', k,j,sum_A_2
#print '----'
#print 'shape sum_A_2 = ', np.shape(sum_A_2)
#sys.exit()
for k,length in enumerate(ls):
for i, theta in enumerate(thetas):
EL[k] = 1./(length*length*length*length)
A2_theta[k,i] = sum_A_2[k]* np.cos(2.0*theta)
G_l_t_dt[k,i] = (1.602e-19 / 4.11e-21) * (1./32) * EL[k]*np.pi*r_1*r_1*r_2*r_2*(sum_A[k] + sum_A_2[k]* np.cos(2.0*theta) )/(2.0*np.sin(theta))# (1e21)*
np.savetxt('G_srw.txt',G_l_t_dt)
#pl.figure()
#pl.plot(ns,eiz_x, color = 'b', label = r'$\varepsilon_{\hat{x}}(i\zeta_{N})$')
#pl.plot(ns,eiz_z, color = 'r', label = r'$\varepsilon_{\hat{z}}(i\zeta_{N})$')
##pl.plot(ns,eiz_w, color = 'c', label = r'$\varepsilon_{vac}(i\zeta_{N})$')
#pl.plot(ns,eiz_w, color = 'c', label = r'$\varepsilon_{water}(i\zeta_{N})$')
#pl.xlabel(r'$N$', size = 20)
#pl.ylabel(r'$\varepsilon(i\zeta)$', size = 20)
#pl.legend(loc = 'best')
##pl.title(r'$\mathrm{CG-10\, DNA}$', size = 20)
##pl.axis([0,500,0.9,2.6])
##pl.savefig('plots/skew_ret_water/eiz.pdf' )
#show()
pl.figure()
pl.plot(ns,aiz, color = 'b')#, label = r'$\varepsilon_{\hat{x}}(i\zeta_{N})$')
pl.xlabel(r'$N$', size = 20)
pl.ylabel(r'$a_{1,2}(i\zeta_{N})$', size = 20)
pl.legend(loc = 'best')
#pl.title(r'$\mathrm{Anisotropy \,Metric}$', size = 20)
#pl.axis([0,500,0.9,2.6])
pl.grid()
pl.savefig('plots/skew_ret_water/140306_290w290_aiz.pdf' )
show()
pl.figure()
pl.loglog(1e9*ls,(1e21*kb*Temp/32)*sum_A ,'b-', label = r'$\mathcal{A^{(0)}}(\ell)$')
pl.loglog(1e9*ls,(1e21*kb*Temp/32)*sum_A_2,'g-', label = r'$\mathcal{A^{(2)}}(\ell)$')
pl.loglog(x90_A0, y90_A0,'k-' , label = r'GH $\mathcal{A^{(0)}}(\ell)$')
pl.loglog(x90_A2, y90_A2,'k--', label = r'GH $\mathcal{A^{(2)}}(\ell)$')
pl.xlabel(r'$\mathrm{separation}\,\ell\,\,\,\rm{[nm]}$', size = 20)
pl.ylabel(r'$\mathrm{\mathcal{A^{(0)},\,\,A^{(2)}}}\,\,\,\rm{[zJ]}$', size = 20)
#pl.title(r'$\mathrm{Hamaker \, coeff.s \,:\,skewed,\,retarded,\,water}$', size = 20)
#pl.legend(loc = 'best')
#pl.axis([1e-9,1e-6,1e-24,1e-19])
pl.minorticks_on()
pl.ticklabel_format(axis = 'both')
pl.grid(which = 'both')
pl.tick_params(which = 'both',labelright = True)
pl.savefig('plots/skew_ret_water/140309_290w290_GH_skew_ret_A0_A2.pdf')
show()
pl.figure()
pl.loglog(ls,(kb*Temp/32)*sum_A,'b-', label = r'$\mathcal{A^{(0)}}(\ell)$')
pl.loglog(ls,(kb*Temp/32)*sum_A_2,'g-', label = r'$\mathcal{A^{(2)}}(\ell)$')
#pl.loglog(ls,(kb*T/32)*A2_theta,':', label = r'$\mathcal{A^{(2)}}(\ell)cos(2\theta)$')
pl.xlabel(r'$\mathrm{separation}\,\ell\,\,\,\rm{[m]}$', size = 20)
pl.ylabel(r'$\mathrm{\mathcal{A^{(0)},\,\,A^{(2)}}}$', size = 20)
#pl.title(r'$\mathrm{Hamaker \, coeff.s \,:\,skewed,\,retarded,\,water}$', size = 20)
pl.legend(loc = 'upper right')
#pl.axis([1e-9,1e-6,1e-24,1e-19])
pl.minorticks_on()
pl.ticklabel_format(axis = 'both')
pl.grid(which = 'both')
pl.tick_params(which = 'both',labelright = True)
pl.savefig('plots/skew_ret_water/140306_290w290_skew_ret_A0_A2.pdf')
show()
ls4 = 1e9*ls[ 2]#2]
ls5 = 1e9*ls[12]#4]
ls6 = 1e9*ls[22]#6]
ls1 = 1e9*ls[32]#8]
ls2 = 1e9*ls[42]#12]
ls3 = 1e9*ls[52]#16]
fig = pl.figure()
ax = fig.add_axes([0.1,0.1,0.8,0.8])
#pl.semilogy(thetas, G_l_t_dt)
ax.semilogy(thetas, G_l_t_dt[ 2,:], label = r'$\ell$ = %1.2f nm' %ls4)
ax.semilogy(thetas, G_l_t_dt[12,:], label = r'$\ell$ = %1.2f nm' %ls5)
#ax.semilogy(thetas, G_l_t_dt[22,:], label = r'$\ell$ = %1.2f nm' %ls6)
ax.semilogy(thetas, G_l_t_dt[32,:], label = r'$\ell$ = %1.2f nm' %ls1)
#ax.semilogy(thetas, G_l_t_dt[42,:], label = r'$\ell$ = %1.2f nm' %ls2)
ax.semilogy(thetas, G_l_t_dt[52,:], label = r'$\ell$ = %1.2f nm' %ls3)
#ax.semilogy(0,0,'', label = r'$G_\theta = cos(2\theta)/2sin(\theta)$')
pl.xlabel(r'$Angle\,\,\mathrm{[radians]}$', size = 20)
pl.ylabel(r'$-G(\ell,\theta)\,\,\mathrm{[k_{B}T]}$', size = 20)
#pl.axis([0,1.7,1e-10,1.0])
#pl.title(r'$\mathrm{-G(\ell,\theta)\,vs.\,angle:\,skewed,\,retarded,\,water}$', size = 20)
pl.legend(loc = 'lower left')
#pl.savefig('plots/skew_ret_water/skew_ret_water_G_vs_theta.pdf')
#show()
pl.minorticks_on()
pl.ticklabel_format(axis = 'both')
pl.grid(which = 'both')
pl.tick_params(which = 'both',labelright = True)
pl.savefig('plots/skew_ret_water/140306_290w290_skew_ret_G_vs_theta_fixed_l.pdf')
show()
pl.figure()
#pl.loglog(ls, G_l_t_dt)#, label = labels[i])
pl.loglog(ls, G_l_t_dt[:,1], label = r'$\theta = \pi/4$')
pl.loglog(ls, G_l_t_dt[:,2], label = r'$\theta = \pi/3$')
pl.loglog(ls, G_l_t_dt[:,3], label = r'$\theta = \pi/2$')
pl.xlabel(r'$\ell\,\,\mathrm{[m]}$', size = 24)
pl.ylabel(r'$-G(\ell,\theta)\,\,\mathrm{[k_{B}T]}$', size = 20)
#pl.axis([1.0e-9, 1.0e-6,1e-16,1e3])
#pl.title(r'$\mathrm{-G(\ell,\theta)\,vs.\,separation:\,skewed,\,retarded,\,water}$', size = 20)
pl.legend(loc = 'best')
pl.minorticks_on()
pl.ticklabel_format(axis = 'both')
pl.grid(which = 'both')
pl.tick_params(which = 'both',labelright = True)
pl.savefig('plots/skew_ret_water/140306_290w290_skew_ret_G_vs_l.pdf')
show()
| [
"[email protected]"
] | |
859652d89a14584f5e955cc3ad819c804f555e3b | 5bfca95abf14f7bb0ff29b58b018fc9062d3f837 | /apps/first_draft/migrations/0001_initial.py | ac8c2856ce7f990fcbd46afa84be29c6e4323981 | [] | no_license | wdudek82/django-ogame-clone | 621afb20ea2dd3c0f2e4b93dfdd604e0628bd7b8 | 472971da826d078176a5d619b3b5cad89e3d1c5c | refs/heads/master | 2021-09-08T15:19:49.407650 | 2017-11-26T19:14:48 | 2017-11-26T19:14:48 | 124,670,158 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,675 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.11.7 on 2017-11-15 20:31
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
]
operations = [
migrations.CreateModel(
name='Building',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=255, unique=True)),
('base_cost', models.PositiveIntegerField()),
],
),
migrations.CreateModel(
name='PlayerBuilding',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('level', models.PositiveIntegerField(default=0)),
('acceleration', models.PositiveIntegerField(choices=[(1, 0), (2, 1), (3, 2), (4, 3), (5, 4), (6, 5), (7, 6), (8, 7), (9, 8), (10, 9), (11, 10), (12, 11), (13, 12), (14, 13), (15, 14), (16, 15), (17, 16), (18, 17), (19, 18), (20, 19), (21, 20), (22, 21), (23, 22), (24, 23), (25, 24), (26, 25), (27, 26), (28, 27), (29, 28), (30, 29), (31, 30), (32, 31), (33, 32), (34, 33), (35, 34), (36, 35), (37, 36), (38, 37), (39, 38), (40, 39), (41, 40), (42, 41), (43, 42), (44, 43), (45, 44), (46, 45), (47, 46), (48, 47), (49, 48), (50, 49), (51, 50), (52, 51), (53, 52), (54, 53), (55, 54), (56, 55), (57, 56), (58, 57), (59, 58), (60, 59), (61, 60), (62, 61), (63, 62), (64, 63), (65, 64), (66, 65), (67, 66), (68, 67), (69, 68), (70, 69), (71, 70), (72, 71), (73, 72), (74, 73), (75, 74), (76, 75), (77, 76), (78, 77), (79, 78), (80, 79), (81, 80), (82, 81), (83, 82), (84, 83), (85, 84), (86, 85), (87, 86), (88, 87), (89, 88), (90, 89), (91, 90), (92, 91), (93, 92), (94, 93), (95, 94), (96, 95), (97, 96), (98, 97), (99, 98), (100, 99), (101, 100)])),
('created_at', models.DateTimeField(auto_now_add=True)),
('updated_at', models.DateTimeField(auto_now=True)),
('building', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='first_draft.Building')),
('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='PlayerResouce',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('amount', models.PositiveIntegerField(default=0)),
('player', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
migrations.CreateModel(
name='Resources',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('type', models.CharField(max_length=255, unique=True)),
],
options={
'verbose_name_plural': 'Resources',
},
),
migrations.AddField(
model_name='playerresouce',
name='resource',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='first_draft.Resources'),
),
migrations.AlterUniqueTogether(
name='playerbuilding',
unique_together=set([('player', 'building')]),
),
]
| [
"[email protected]"
] | |
1954e1d277622077bd20c0d616dc58a3d5b3662f | 0bd7a6bef178bb93b2c3fb19e789c7e3b364e6d1 | /simple_frame/inference/segmentation_export.py | ea4b06435005677303a2da54ba9827504d7de7c2 | [] | no_license | Magnety/Simple_Frame_linux | 090e07491e170314718c9ba5f2da2a4393bdb1ad | 7e1ef5d11e3baa8784fd9b6bbf81b0d954dd1ca6 | refs/heads/main | 2023-06-02T09:35:36.023461 | 2021-06-17T09:23:01 | 2021-06-17T09:23:01 | 371,412,450 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 11,716 | py | # Copyright 2020 Division of Medical Image Computing, German Cancer Research Center (DKFZ), Heidelberg, Germany
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import sys
from copy import deepcopy
from typing import Union, Tuple
import numpy as np
import SimpleITK as sitk
from batchgenerators.augmentations.utils import resize_segmentation
from simple_frame.preprocessing.preprocessing import get_lowres_axis, get_do_separate_z, resample_data_or_seg
from batchgenerators.utilities.file_and_folder_operations import *
def save_segmentation_nifti_from_softmax(segmentation_softmax: Union[str, np.ndarray], out_fname: str,
properties_dict: dict, order: int = 1,
region_class_order: Tuple[Tuple[int]] = None,
seg_postprogess_fn: callable = None, seg_postprocess_args: tuple = None,
resampled_npz_fname: str = None,
non_postprocessed_fname: str = None, force_separate_z: bool = None,
interpolation_order_z: int = 0, verbose: bool = True):
"""
    This is a utility for writing segmentations to nifti and npz. It requires the data to have been preprocessed by
GenericPreprocessor because it depends on the property dictionary output (dct) to know the geometry of the original
data. segmentation_softmax does not have to have the same size in pixels as the original data, it will be
resampled to match that. This is generally useful because the spacings our networks operate on are most of the time
not the native spacings of the image data.
    If seg_postprogess_fn is not None then seg_postprogess_fn(segmentation, *seg_postprocess_args)
    will be called before nifti export
    There is a problem with python process communication that prevents us from communicating objects
larger than 2 GB between processes (basically when the length of the pickle string that will be sent is
communicated by the multiprocessing.Pipe object then the placeholder (\%i I think) does not allow for long
enough strings (lol). This could be fixed by changing i to l (for long) but that would require manually
patching system python code.) We circumvent that problem here by saving softmax_pred to a npy file that will
then be read (and finally deleted) by the Process. save_segmentation_nifti_from_softmax can take either
filename or np.ndarray for segmentation_softmax and will handle this automatically
:param segmentation_softmax:
:param out_fname:
:param properties_dict:
:param order:
:param region_class_order:
:param seg_postprogess_fn:
:param seg_postprocess_args:
:param resampled_npz_fname:
:param non_postprocessed_fname:
:param force_separate_z: if None then we dynamically decide how to resample along z, if True/False then always
/never resample along z separately. Do not touch unless you know what you are doing
:param interpolation_order_z: if separate z resampling is done then this is the order for resampling in z
:param verbose:
:return:
"""
if verbose: print("force_separate_z:", force_separate_z, "interpolation order:", order)
if isinstance(segmentation_softmax, str):
assert isfile(segmentation_softmax), "If isinstance(segmentation_softmax, str) then " \
"isfile(segmentation_softmax) must be True"
del_file = deepcopy(segmentation_softmax)
segmentation_softmax = np.load(segmentation_softmax)
os.remove(del_file)
# first resample, then put result into bbox of cropping, then save
current_shape = segmentation_softmax.shape
shape_original_after_cropping = properties_dict.get('size_after_cropping')
shape_original_before_cropping = properties_dict.get('original_size_of_raw_data')
# current_spacing = dct.get('spacing_after_resampling')
# original_spacing = dct.get('original_spacing')
if np.any([i != j for i, j in zip(np.array(current_shape[1:]), np.array(shape_original_after_cropping))]):
if force_separate_z is None:
if get_do_separate_z(properties_dict.get('original_spacing')):
do_separate_z = True
lowres_axis = get_lowres_axis(properties_dict.get('original_spacing'))
elif get_do_separate_z(properties_dict.get('spacing_after_resampling')):
do_separate_z = True
lowres_axis = get_lowres_axis(properties_dict.get('spacing_after_resampling'))
else:
do_separate_z = False
lowres_axis = None
else:
do_separate_z = force_separate_z
if do_separate_z:
lowres_axis = get_lowres_axis(properties_dict.get('original_spacing'))
else:
lowres_axis = None
if verbose: print("separate z:", do_separate_z, "lowres axis", lowres_axis)
seg_old_spacing = resample_data_or_seg(segmentation_softmax, shape_original_after_cropping, is_seg=False,
axis=lowres_axis, order=order, do_separate_z=do_separate_z, cval=0,
order_z=interpolation_order_z)
# seg_old_spacing = resize_softmax_output(segmentation_softmax, shape_original_after_cropping, order=order)
else:
if verbose: print("no resampling necessary")
seg_old_spacing = segmentation_softmax
if resampled_npz_fname is not None:
np.savez_compressed(resampled_npz_fname, softmax=seg_old_spacing.astype(np.float16))
# this is needed for ensembling if the nonlinearity is sigmoid
if region_class_order is not None:
properties_dict['regions_class_order'] = region_class_order
save_pickle(properties_dict, resampled_npz_fname[:-4] + ".pkl")
if region_class_order is None:
seg_old_spacing = seg_old_spacing.argmax(0)
else:
seg_old_spacing_final = np.zeros(seg_old_spacing.shape[1:])
for i, c in enumerate(region_class_order):
seg_old_spacing_final[seg_old_spacing[i] > 0.5] = c
seg_old_spacing = seg_old_spacing_final
bbox = properties_dict.get('crop_bbox')
if bbox is not None:
seg_old_size = np.zeros(shape_original_before_cropping)
for c in range(3):
bbox[c][1] = np.min((bbox[c][0] + seg_old_spacing.shape[c], shape_original_before_cropping[c]))
seg_old_size[bbox[0][0]:bbox[0][1],
bbox[1][0]:bbox[1][1],
bbox[2][0]:bbox[2][1]] = seg_old_spacing
else:
seg_old_size = seg_old_spacing
if seg_postprogess_fn is not None:
seg_old_size_postprocessed = seg_postprogess_fn(np.copy(seg_old_size), *seg_postprocess_args)
else:
seg_old_size_postprocessed = seg_old_size
seg_resized_itk = sitk.GetImageFromArray(seg_old_size_postprocessed.astype(np.uint8))
seg_resized_itk.SetSpacing(properties_dict['itk_spacing'])
seg_resized_itk.SetOrigin(properties_dict['itk_origin'])
seg_resized_itk.SetDirection(properties_dict['itk_direction'])
sitk.WriteImage(seg_resized_itk, out_fname)
if (non_postprocessed_fname is not None) and (seg_postprogess_fn is not None):
seg_resized_itk = sitk.GetImageFromArray(seg_old_size.astype(np.uint8))
seg_resized_itk.SetSpacing(properties_dict['itk_spacing'])
seg_resized_itk.SetOrigin(properties_dict['itk_origin'])
seg_resized_itk.SetDirection(properties_dict['itk_direction'])
sitk.WriteImage(seg_resized_itk, non_postprocessed_fname)
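# Usage sketch (illustrative; the file names are placeholders and the .pkl file is
# assumed to hold the properties dictionary written during preprocessing):
#   properties = load_pickle('case_0001.pkl')
#   save_segmentation_nifti_from_softmax('case_0001.npy', 'case_0001.nii.gz',
#                                        properties, order=1)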
def save_segmentation_nifti(segmentation, out_fname, dct, order=1, force_separate_z=None, order_z=0):
"""
faster and uses less ram than save_segmentation_nifti_from_softmax, but maybe less precise and also does not support
softmax export (which is needed for ensembling). So it's a niche function that may be useful in some cases.
:param segmentation:
:param out_fname:
:param dct:
:param order:
:param force_separate_z:
:return:
"""
# suppress output
print("force_separate_z:", force_separate_z, "interpolation order:", order)
sys.stdout = open(os.devnull, 'w')
if isinstance(segmentation, str):
assert isfile(segmentation), "If isinstance(segmentation_softmax, str) then " \
"isfile(segmentation_softmax) must be True"
del_file = deepcopy(segmentation)
segmentation = np.load(segmentation)
os.remove(del_file)
# first resample, then put result into bbox of cropping, then save
current_shape = segmentation.shape
shape_original_after_cropping = dct.get('size_after_cropping')
shape_original_before_cropping = dct.get('original_size_of_raw_data')
# current_spacing = dct.get('spacing_after_resampling')
# original_spacing = dct.get('original_spacing')
if np.any(np.array(current_shape) != np.array(shape_original_after_cropping)):
if order == 0:
seg_old_spacing = resize_segmentation(segmentation, shape_original_after_cropping, 0, 0)
else:
if force_separate_z is None:
if get_do_separate_z(dct.get('original_spacing')):
do_separate_z = True
lowres_axis = get_lowres_axis(dct.get('original_spacing'))
elif get_do_separate_z(dct.get('spacing_after_resampling')):
do_separate_z = True
lowres_axis = get_lowres_axis(dct.get('spacing_after_resampling'))
else:
do_separate_z = False
lowres_axis = None
else:
do_separate_z = force_separate_z
if do_separate_z:
lowres_axis = get_lowres_axis(dct.get('original_spacing'))
else:
lowres_axis = None
print("separate z:", do_separate_z, "lowres axis", lowres_axis)
seg_old_spacing = resample_data_or_seg(segmentation[None], shape_original_after_cropping, is_seg=True,
axis=lowres_axis, order=order, do_separate_z=do_separate_z, cval=0,
order_z=order_z)[0]
else:
seg_old_spacing = segmentation
bbox = dct.get('crop_bbox')
if bbox is not None:
seg_old_size = np.zeros(shape_original_before_cropping)
for c in range(3):
bbox[c][1] = np.min((bbox[c][0] + seg_old_spacing.shape[c], shape_original_before_cropping[c]))
seg_old_size[bbox[0][0]:bbox[0][1],
bbox[1][0]:bbox[1][1],
bbox[2][0]:bbox[2][1]] = seg_old_spacing
else:
seg_old_size = seg_old_spacing
seg_resized_itk = sitk.GetImageFromArray(seg_old_size.astype(np.uint8))
seg_resized_itk.SetSpacing(dct['itk_spacing'])
seg_resized_itk.SetOrigin(dct['itk_origin'])
seg_resized_itk.SetDirection(dct['itk_direction'])
sitk.WriteImage(seg_resized_itk, out_fname)
sys.stdout = sys.__stdout__
| [
"[email protected]"
] | |
755ce3602c7d4642c4b0aca6891d7446594eb0b1 | 48fff0f472066dc6e5b5a15d16dcc33738e7a2c2 | /train2/chatbot/broadcast.py | 027badb7eddef0c0ba8411820cb20092bd9088f5 | [] | no_license | hasadna/OpenTrainCommunity | 228a4f078829f6653e62db1294da01488be55b64 | 3c7a941b730160c40cc400ed94ed77ffa9189f0a | refs/heads/master | 2023-01-23T14:39:10.462114 | 2020-06-08T11:36:27 | 2020-06-08T11:36:27 | 19,729,986 | 23 | 16 | null | 2023-01-13T22:57:43 | 2014-05-13T07:34:15 | HTML | UTF-8 | Python | false | false | 1,036 | py | import logging
from django.conf import settings
import telegram
from django.template.loader import render_to_string
from . import models
logger = logging.getLogger(__name__)
def broadcast_new_report_to_telegram_channel(report: models.ChatReport):
message = render_to_string('chatbot/new_report_message.html', context={
'report': report,
})
_broadcast(message)
def broadcast_wrong_report_to_telegram_channel(report: models.ChatReport):
message = render_to_string('chatbot/wrong_report_message.html', context={
'report': report,
})
_broadcast(message)
def _broadcast(message: str):
channel = '@' + settings.TELEGRAM_CHANNEL
try:
bot = telegram.Bot(settings.TELEGRAM_TOKEN)
bot.send_message(
channel,
message,
parse_mode='html',
disable_web_page_preview=True)
logger.info("Broadcasting to channel %s:\n%s", channel, message)
except Exception:
logger.exception('Failed to broadcast to channel')
| [
"[email protected]"
] | |
8f51618dff92b6609f174e7d9e48161f653dd784 | fdba533d128e5fcc237abacda12de9545ddce39c | /keras/optimizer_experimental/optimizer_lib.py | d180179dde8284a872030aa0a8c1237796c3da4d | [
"Apache-2.0"
] | permissive | hhuang97/keras | 5949d754dcaed47df011fb4218d6552251e265e2 | f5fea878c271e38946c6681c1c2434e72d0ab977 | refs/heads/master | 2021-12-24T00:01:26.759181 | 2021-12-14T18:21:47 | 2021-12-14T18:22:26 | 90,206,289 | 0 | 1 | null | 2017-05-04T00:54:28 | 2017-05-04T00:54:28 | null | UTF-8 | Python | false | false | 3,775 | py | """Library of helper classes of optimizer."""
class GradientsClipOption:
"""Gradients clip option for optimizer class.
Attributes:
clipnorm: float. If set, the gradient of each weight is individually clipped
so that its norm is no higher than this value.
clipvalue: float. If set, the gradient of each weight is clipped to be no
higher than this value.
global_clipnorm: float. If set, the gradient of all weights is clipped so
that their global norm is no higher than this value.
"""
def __init__(self, clipnorm=None, clipvalue=None, global_clipnorm=None):
if clipnorm is not None and global_clipnorm is not None:
raise ValueError(f"At most one of `clipnorm` and `global_clipnorm` can "
f"be set. Received: clipnorm={clipnorm}, "
f"global_clipnorm={global_clipnorm}.")
if clipnorm and clipnorm <= 0:
raise ValueError("Clipnorm should be a positive number, but received "
f"clipnorm={clipnorm}.")
if global_clipnorm and global_clipnorm <= 0:
raise ValueError("global_clipnorm should be a positive number, but "
f"received global_clipnorm={global_clipnorm}.")
if clipvalue and clipvalue <= 0:
raise ValueError("clipvalue should be a positive number, but received "
f"clipvalue={clipvalue}.")
self.clipnorm = clipnorm
self.global_clipnorm = global_clipnorm
self.clipvalue = clipvalue
def get_config(self):
return {
"clipnorm": self.clipnorm,
"global_clipnorm": self.global_clipnorm,
"clipvalue": self.clipvalue,
}
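# Usage sketch (illustrative): at most one of `clipnorm` and `global_clipnorm` may be
# set, so the first line below is valid while the second raises a ValueError.
#   GradientsClipOption(clipvalue=0.5).get_config()
#   GradientsClipOption(clipnorm=1.0, global_clipnorm=1.0)  # ValueError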
class EMAOption:
# TODO(b/207532340): Add examples on how to use this EMAOption.
"""EMA option for optimizer class.
Attributes:
use_ema: boolean, default to False. If True, exponential moving average
(EMA) is applied. EMA consists of computing an exponential moving average
of the weights of the model (as the weight values change after each
training batch), and periodically overwriting the weights with their
moving average.
ema_momentum: float, default to 0.99. Only used if `use_ema=True`. This is
the momentum to use when computing the EMA of the model's weights:
`new_average = ema_momentum * old_average + (1 - ema_momentum) *
current_variable_value`.
ema_overwrite_frequency: int or None, default to 100. Only used if
`use_ema=True`. Every `ema_overwrite_frequency` steps of iterations, we
overwrite the model variable by its stored moving average. If None, we do
not overwrite model variables in the middle of training, and users need to
explicitly overwrite the model variable by calling
`finalize_variable_update()`.
"""
def __init__(self,
use_ema=False,
ema_momentum=0.99,
ema_overwrite_frequency=100):
self.use_ema = use_ema
if use_ema:
# Verify the arguments related to EMA.
if ema_momentum > 1 or ema_momentum < 0:
raise ValueError("`ema_momentum` must be in the range [0, 1]. "
f"Received: ema_momentum={ema_momentum}")
if ema_overwrite_frequency and (not isinstance(
ema_overwrite_frequency, int) or ema_overwrite_frequency < 1):
raise ValueError(
"`ema_overwrite_frequency` must be an integer > 1 or None. "
f"Received: ema_overwrite_frequency={ema_overwrite_frequency}")
self.ema_momentum = ema_momentum
self.ema_overwrite_frequency = ema_overwrite_frequency
def get_config(self):
return {
"use_ema": self.use_ema,
"ema_momentum": self.ema_momentum,
"ema_overwrite_frequency": self.ema_overwrite_frequency,
}
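# Usage sketch (illustrative): track an exponential moving average of the model weights
# with momentum 0.99 and overwrite the variables with their average every 100 steps.
#   EMAOption(use_ema=True, ema_momentum=0.99, ema_overwrite_frequency=100)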
| [
"[email protected]"
] | |
595eacd1e61bf19c94b33e3e829d75688a0ceefb | ecb21f6dd5d49b44826974a1c06357a8405aa89e | /code/train/csnn_two_level_inhibition.py | e9cbb3ff928be0a7613608bfd7d116895e9fcef6 | [] | no_license | gumpfly/stdp-mnist | 9924279014e66cb26fab75f1136f38fd05d8fa3e | 03c32a47e9cd6fe8f902d134c2aa1d04bff037aa | refs/heads/master | 2021-08-15T11:30:13.544379 | 2017-11-17T18:59:49 | 2017-11-17T18:59:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 55,633 | py | '''
Convolutional spiking neural network training, testing, and evaluation script. Evaluation can be done outside of this script; however, it is most straightforward to call this
script with mode=train, then mode=test on HPC systems, where in the test mode, the network evaluation is written to disk.
'''
import warnings
warnings.filterwarnings('ignore')
import matplotlib.cm as cmap
import brian_no_units
import networkx as nx
import cPickle as p
import pandas as pd
import numpy as np
import brian as b
import argparse
import random
import timeit
import time
import math
import os
from mpl_toolkits.axes_grid1 import make_axes_locatable
from scipy.spatial.distance import euclidean
from sklearn.metrics import confusion_matrix
from struct import unpack
from brian import *
from util import *
np.set_printoptions(threshold=np.nan, linewidth=200)
# only show log messages of level ERROR or higher
b.log_level_error()
# set these appropriate to your directory structure
top_level_path = os.path.join('..', '..')
MNIST_data_path = os.path.join(top_level_path, 'data')
model_name = 'csnn_two_level_inhibition'
results_path = os.path.join(top_level_path, 'results', model_name)
performance_dir = os.path.join(top_level_path, 'performance', model_name)
activity_dir = os.path.join(top_level_path, 'activity', model_name)
deltas_dir = os.path.join(top_level_path, 'deltas', model_name)
spikes_dir = os.path.join(top_level_path, 'spikes', model_name)
weights_dir = os.path.join(top_level_path, 'weights', model_name)
best_weights_dir = os.path.join(weights_dir, 'best')
end_weights_dir = os.path.join(weights_dir, 'end')
assignments_dir = os.path.join(top_level_path, 'assignments', model_name)
best_assignments_dir = os.path.join(assignments_dir, 'best')
end_assignments_dir = os.path.join(assignments_dir, 'end')
misc_dir = os.path.join(top_level_path, 'misc', model_name)
best_misc_dir = os.path.join(misc_dir, 'best')
end_misc_dir = os.path.join(misc_dir, 'end')
for d in [ performance_dir, activity_dir, weights_dir, deltas_dir, misc_dir, best_misc_dir,
assignments_dir, best_assignments_dir, MNIST_data_path, results_path,
best_weights_dir, end_weights_dir, end_misc_dir, end_assignments_dir, spikes_dir ]:
if not os.path.isdir(d):
os.makedirs(d)
def normalize_weights():
'''
Squash the input to excitatory synaptic weights to sum to a prespecified number.
'''
for conn_name in input_connections:
connection = input_connections[conn_name][:].todense()
for feature in xrange(conv_features):
feature_connection = connection[:, feature * n_e : (feature + 1) * n_e]
column_sums = np.sum(np.asarray(feature_connection), axis=0)
column_factors = weight['ee_input'] / column_sums
for n in xrange(n_e):
dense_weights = input_connections[conn_name][:, feature * n_e + n].todense()
dense_weights[convolution_locations[n]] *= column_factors[n]
input_connections[conn_name][:, feature * n_e + n] = dense_weights
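# Minimal dense-matrix sketch of the same normalization (illustrative only; the routine
# above operates on the sparse brian connection objects):
#   W = np.random.rand(conv_size ** 2, n_e)
#   W *= weight['ee_input'] / W.sum(axis=0)   # each column now sums to weight['ee_input']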
def plot_input(rates):
'''
Plot the current input example during the training procedure.
'''
fig = plt.figure(fig_num, figsize = (5, 5))
im = plt.imshow(rates.reshape((28, 28)), interpolation='nearest', vmin=0, vmax=64, cmap='binary')
plt.colorbar(im)
plt.title('Current input example')
fig.canvas.draw()
return im, fig
def update_input(rates, im, fig):
'''
Update the input image to use for input plotting.
'''
im.set_array(rates.reshape((28, 28)))
fig.canvas.draw()
return im
def update_assignments_plot(assignments, im):
im.set_array(assignments.reshape((int(np.sqrt(n_e_total)), int(np.sqrt(n_e_total)))).T)
return im
def get_2d_input_weights():
'''
Get the weights from the input to excitatory layer and reshape it to be two
dimensional and square.
'''
# specify the desired shape of the reshaped input -> excitatory weights
rearranged_weights = np.zeros((conv_features * conv_size, conv_size * n_e))
# get the input -> excitatory synaptic weights
connection = input_connections['XeAe'][:]
for n in xrange(n_e):
for feature in xrange(conv_features):
temp = connection[:, feature * n_e + (n // n_e_sqrt) * n_e_sqrt + (n % n_e_sqrt)].todense()
rearranged_weights[ feature * conv_size : (feature + 1) * conv_size, n * conv_size : (n + 1) * conv_size ] = \
temp[convolution_locations[n]].reshape((conv_size, conv_size))
if n_e == 1:
ceil_sqrt = int(math.ceil(math.sqrt(conv_features)))
square_weights = np.zeros((28 * ceil_sqrt, 28 * ceil_sqrt))
for n in xrange(conv_features):
square_weights[(n // ceil_sqrt) * 28 : ((n // ceil_sqrt) + 1) * 28,
(n % ceil_sqrt) * 28 : ((n % ceil_sqrt) + 1) * 28] = rearranged_weights[n * 28 : (n + 1) * 28, :]
return square_weights.T
else:
square_weights = np.zeros((conv_size * features_sqrt * n_e_sqrt, conv_size * features_sqrt * n_e_sqrt))
for n_1 in xrange(n_e_sqrt):
for n_2 in xrange(n_e_sqrt):
for f_1 in xrange(features_sqrt):
for f_2 in xrange(features_sqrt):
square_weights[conv_size * (n_2 * features_sqrt + f_2) : conv_size * (n_2 * features_sqrt + f_2 + 1), \
conv_size * (n_1 * features_sqrt + f_1) : conv_size * (n_1 * features_sqrt + f_1 + 1)] = \
rearranged_weights[(f_1 * features_sqrt + f_2) * conv_size : (f_1 * features_sqrt + f_2 + 1) * conv_size, \
(n_1 * n_e_sqrt + n_2) * conv_size : (n_1 * n_e_sqrt + n_2 + 1) * conv_size]
return square_weights.T
def get_input_weights(weight_matrix):
'''
Get the weights from the input to excitatory layer and reshape it to be two
dimensional and square.
'''
weights = []
# for each convolution feature
for feature in xrange(conv_features):
# for each excitatory neuron in this convolution feature
for n in xrange(n_e):
temp = weight_matrix[:, feature * n_e + (n // n_e_sqrt) * n_e_sqrt + (n % n_e_sqrt)]
weights.append(np.ravel(temp[convolution_locations[n]]))
# return the rearranged weights to display to the user
return weights
def plot_weights_and_assignments(assignments):
'''
Plot the weights from input to excitatory layer to view during training.
'''
weights = get_2d_input_weights()
fig = plt.figure(fig_num, figsize=(18, 9))
ax1 = plt.subplot(121)
image1 = ax1.imshow(weights, interpolation='nearest', vmin=0, vmax=wmax_ee, cmap=cmap.get_cmap('hot_r'))
ax1.set_title(ending.replace('_', ' '))
ax2 = plt.subplot(122)
color = plt.get_cmap('RdBu', 11)
reshaped_assignments = assignments.reshape((int(np.sqrt(n_e_total)), int(np.sqrt(n_e_total)))).T
image2 = ax2.matshow(reshaped_assignments, cmap=color, vmin=-1.5, vmax=9.5)
ax2.set_title('Neuron labels')
divider1 = make_axes_locatable(ax1)
divider2 = make_axes_locatable(ax2)
cax1 = divider1.append_axes("right", size="5%", pad=0.05)
cax2 = divider2.append_axes("right", size="5%", pad=0.05)
plt.colorbar(image1, cax=cax1)
plt.colorbar(image2, cax=cax2, ticks=np.arange(-1, 10))
if n_e != 1:
ax1.set_xticks(xrange(conv_size, conv_size * n_e_sqrt * features_sqrt + 1, conv_size), xrange(1, conv_size * n_e_sqrt * features_sqrt + 1))
ax1.set_yticks(xrange(conv_size, conv_size * n_e_sqrt * features_sqrt + 1, conv_size), xrange(1, conv_size * n_e_sqrt * features_sqrt + 1))
for pos in xrange(conv_size * features_sqrt, conv_size * features_sqrt * n_e_sqrt, conv_size * features_sqrt):
ax1.axhline(pos)
ax1.axvline(pos)
else:
ax1.set_xticks(xrange(conv_size, conv_size * (int(np.sqrt(conv_features)) + 1), conv_size), xrange(1, int(np.sqrt(conv_features)) + 1))
ax1.set_yticks(xrange(conv_size, conv_size * (int(np.sqrt(conv_features)) + 1), conv_size), xrange(1, int(np.sqrt(conv_features)) + 1))
plt.tight_layout()
fig.canvas.draw()
return fig, ax1, ax2, image1, image2
def update_weights_and_assignments(fig, ax1, ax2, im1, im2, assignments, spike_counts):
'''
Update the plot of the weights from input to excitatory layer to view during training.
'''
weights = get_2d_input_weights()
im1.set_array(weights)
reshaped_assignments = assignments.reshape((int(np.sqrt(n_e_total)), int(np.sqrt(n_e_total)))).T
im2.set_array(reshaped_assignments)
for txt in ax2.texts:
txt.set_visible(False)
spike_counts_reshaped = spike_counts.reshape([features_sqrt, features_sqrt])
for x in xrange(features_sqrt):
for y in xrange(features_sqrt):
c = spike_counts_reshaped[x, y]
if c > 0:
ax2.text(x, y, str(c), va='center', ha='center', weight='heavy', fontsize=16)
else:
ax2.text(x, y, '', va='center', ha='center')
fig.canvas.draw()
def get_current_performance(performances, current_example_num):
'''
Evaluate the performance of the network on the past 'update_interval' training
examples.
'''
global input_numbers
current_evaluation = int(current_example_num / update_interval)
if current_example_num == num_examples -1:
current_evaluation+=1
start_num = current_example_num - update_interval
end_num = current_example_num
wrong_idxs = {}
wrong_labels = {}
for scheme in performances.keys():
difference = output_numbers[scheme][start_num : end_num, 0] - input_numbers[start_num : end_num]
correct = len(np.where(difference == 0)[0])
wrong_idxs[scheme] = np.where(difference != 0)[0]
wrong_labels[scheme] = output_numbers[scheme][start_num : end_num, 0][np.where(difference != 0)[0]]
performances[scheme][current_evaluation] = correct / float(update_interval) * 100
return performances, wrong_idxs, wrong_labels
def plot_performance(fig_num, performances, num_evaluations):
'''
Set up the performance plot for the beginning of the simulation.
'''
time_steps = range(0, num_evaluations)
fig = plt.figure(fig_num, figsize = (12, 4))
fig_num += 1
for performance in performances:
plt.plot(time_steps[:np.size(np.nonzero(performances[performance]))], \
np.extract(np.nonzero(performances[performance]), performances[performance]), label=performance)
lines = plt.gca().lines
plt.ylim(ymax=100)
plt.xticks(xrange(0, num_evaluations + 10, 10), xrange(0, ((num_evaluations + 10) * update_interval), 10))
plt.legend()
plt.grid(True)
plt.title('Classification performance per update interval')
fig.canvas.draw()
return lines, fig_num, fig
def update_performance_plot(lines, performances, current_example_num, fig):
'''
Update the plot of the performance based on results thus far.
'''
performances, wrong_idxs, wrong_labels = get_current_performance(performances, current_example_num)
for line, performance in zip(lines, performances):
line.set_xdata(range((current_example_num / update_interval) + 1))
line.set_ydata(performances[performance][:(current_example_num / update_interval) + 1])
fig.canvas.draw()
return lines, performances, wrong_idxs, wrong_labels
def plot_deltas(fig_num, deltas, num_weight_updates):
'''
	Set up the weight-deltas plot for the beginning of the simulation.
'''
time_steps = range(0, num_weight_updates)
fig = plt.figure(fig_num, figsize = (12, 4))
fig_num += 1
plt.plot([], [], label='Absolute difference in weights')
lines = plt.gca().lines
plt.ylim(ymin=0, ymax=conv_size*n_e_total)
plt.xticks(xrange(0, num_weight_updates + weight_update_interval, 100), \
xrange(0, ((num_weight_updates + weight_update_interval) * weight_update_interval), 100))
plt.legend()
plt.grid(True)
plt.title('Absolute difference in weights per weight update interval')
fig.canvas.draw()
return lines[0], fig_num, fig
def update_deltas_plot(line, deltas, current_example_num, fig):
'''
	Update the plot of the weight deltas based on results thus far.
'''
delta = deltas[int(current_example_num / weight_update_interval)]
line.set_xdata(range(int(current_example_num / weight_update_interval) + 1))
ydata = list(line.get_ydata())
ydata.append(delta)
line.set_ydata(ydata)
fig.canvas.draw()
return line, deltas
def predict_label(assignments, spike_rates, accumulated_rates, spike_proportions):
'''
Given the label assignments of the excitatory layer and their spike rates over
the past 'update_interval', get the ranking of each of the categories of input.
'''
output_numbers = {}
for scheme in voting_schemes:
summed_rates = [0] * 10
num_assignments = [0] * 10
if scheme == 'all':
for i in xrange(10):
num_assignments[i] = len(np.where(assignments == i)[0])
if num_assignments[i] > 0:
summed_rates[i] = np.sum(spike_rates[assignments == i]) / num_assignments[i]
elif scheme == 'all_active':
for i in xrange(10):
num_assignments[i] = len(np.where(assignments == i)[0])
if num_assignments[i] > 0:
summed_rates[i] = np.sum(np.nonzero(spike_rates[assignments == i])) / num_assignments[i]
elif scheme == 'activity_neighborhood':
for i in xrange(10):
num_assignments[i] = len(np.where(assignments == i)[0])
if num_assignments[i] > 0:
for idx in np.where(assignments == i)[0]:
if np.sum(spike_rates[idx]) != 0:
neighbors = [idx]
neighbors.extend(get_neighbors(idx, features_sqrt))
summed_rates[i] += np.sum(spike_rates[neighbors]) / \
np.size(np.nonzero(spike_rates[neighbors].ravel()))
elif scheme == 'most_spiked_neighborhood':
neighborhood_activity = np.zeros([conv_features, n_e])
most_spiked_array = np.array(np.zeros((conv_features, n_e)), dtype=bool)
for i in xrange(10):
num_assignments[i] = len(np.where(assignments == i)[0])
if num_assignments[i] > 0:
for idx in np.where(assignments == i)[0]:
if np.sum(spike_rates[idx]) != 0:
neighbors = [idx]
neighbors.extend(get_neighbors(idx, features_sqrt))
if np.size(np.nonzero(spike_rates[neighbors])) > 0:
neighborhood_activity[idx] = np.sum(spike_rates[neighbors]) / \
np.size(np.nonzero(spike_rates[neighbors].ravel()))
for n in xrange(n_e):
# find the excitatory neuron which spiked the most in this input location
most_spiked_array[np.argmax(neighborhood_activity[:, n : n + 1]), n] = True
# for each label
for i in xrange(10):
# get the number of label assignments of this type
num_assignments[i] = len(np.where(assignments[most_spiked_array] == i)[0])
if len(spike_rates[np.where(assignments[most_spiked_array] == i)]) > 0:
# sum the spike rates of all excitatory neurons with this label, which fired the most in its patch
summed_rates[i] = np.sum(spike_rates[np.where(np.logical_and(assignments == i,
most_spiked_array))]) / float(np.sum(spike_rates[most_spiked_array]))
elif scheme == 'most_spiked_patch':
most_spiked_array = np.array(np.zeros((conv_features, n_e)), dtype=bool)
for feature in xrange(conv_features):
# count up the spikes for the neurons in this convolution patch
column_sums = np.sum(spike_rates[feature : feature + 1, :], axis=0)
# find the excitatory neuron which spiked the most
most_spiked_array[feature, np.argmax(column_sums)] = True
# for each label
for i in xrange(10):
# get the number of label assignments of this type
num_assignments[i] = len(np.where(assignments[most_spiked_array] == i)[0])
if len(spike_rates[np.where(assignments[most_spiked_array] == i)]) > 0:
# sum the spike rates of all excitatory neurons with this label, which fired the most in its patch
summed_rates[i] = np.sum(spike_rates[np.where(np.logical_and(assignments == i,
most_spiked_array))]) / float(np.sum(spike_rates[most_spiked_array]))
elif scheme == 'most_spiked_location':
most_spiked_array = np.array(np.zeros((conv_features, n_e)), dtype=bool)
for n in xrange(n_e):
# find the excitatory neuron which spiked the most in this input location
most_spiked_array[np.argmax(spike_rates[:, n : n + 1]), n] = True
# for each label
for i in xrange(10):
# get the number of label assignments of this type
num_assignments[i] = len(np.where(assignments[most_spiked_array] == i)[0])
if len(spike_rates[np.where(assignments[most_spiked_array] == i)]) > 0:
# sum the spike rates of all excitatory neurons with this label, which fired the most in its patch
summed_rates[i] = np.sum(spike_rates[np.where(np.logical_and(assignments == i,
most_spiked_array))]) / float(np.sum(spike_rates[most_spiked_array]))
elif scheme == 'confidence_weighting':
for i in xrange(10):
num_assignments[i] = np.count_nonzero(assignments == i)
if num_assignments[i] > 0:
summed_rates[i] = np.sum(spike_rates[assignments == i] * spike_proportions[(assignments == i).ravel(), i]) / num_assignments[i]
output_numbers[scheme] = np.argsort(summed_rates)[::-1]
return output_numbers
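# Minimal sketch (hypothetical helper, not called anywhere in this script) of the 'all'
# voting scheme implemented above: average the firing rates of the neurons assigned to
# each digit and rank digits from highest to lowest average response.
def _example_all_scheme_vote(assignments, spike_rates):
    summed_rates = np.zeros(10)
    for digit in xrange(10):
        assigned = np.where(assignments == digit)
        if assigned[0].size > 0:
            # mean response of the neurons labeled with this digit
            summed_rates[digit] = np.mean(spike_rates[assigned])
    # highest average rate first, matching predict_label's output ordering
    return np.argsort(summed_rates)[::-1]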
def assign_labels(result_monitor, input_numbers, accumulated_rates, accumulated_inputs):
'''
Based on the results from the previous 'update_interval', assign labels to the
excitatory neurons.
'''
for j in xrange(10):
num_assignments = len(np.where(input_numbers == j)[0])
if num_assignments > 0:
accumulated_inputs[j] += num_assignments
accumulated_rates[:, j] = accumulated_rates[:, j] * 0.9 + \
np.ravel(np.sum(result_monitor[input_numbers == j], axis=0) / num_assignments)
assignments = np.argmax(accumulated_rates, axis=1).reshape((conv_features, n_e))
spike_proportions = np.divide(accumulated_rates, np.sum(accumulated_rates, axis=0))
return assignments, accumulated_rates, spike_proportions
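# Minimal sketch (hypothetical helper, never called) of the update rule in assign_labels:
# each class column of 'accumulated_rates' is an exponentially smoothed average response,
# and every neuron is then labeled with the class it responds to most strongly.
def _example_assignment_update(accumulated_rates, class_response, digit, decay=0.9):
    accumulated_rates[:, digit] = accumulated_rates[:, digit] * decay + class_response
    return np.argmax(accumulated_rates, axis=1)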
def build_network():
global fig_num, assignments
neuron_groups['e'] = b.NeuronGroup(n_e_total, neuron_eqs_e, threshold=v_thresh_e, refractory=refrac_e, reset=scr_e, compile=True, freeze=True)
neuron_groups['i'] = b.NeuronGroup(n_e_total, neuron_eqs_i, threshold=v_thresh_i, refractory=refrac_i, reset=v_reset_i, compile=True, freeze=True)
for name in population_names:
print '...Creating neuron group:', name
# get a subgroup of size 'n_e' from all exc
neuron_groups[name + 'e'] = neuron_groups['e'].subgroup(conv_features * n_e)
# get a subgroup of size 'n_i' from the inhibitory layer
neuron_groups[name + 'i'] = neuron_groups['i'].subgroup(conv_features * n_e)
# start the membrane potentials of these groups 40mV below their resting potentials
neuron_groups[name + 'e'].v = v_rest_e - 40. * b.mV
neuron_groups[name + 'i'].v = v_rest_i - 40. * b.mV
print '...Creating recurrent connections'
for name in population_names:
# if we're in test mode / using some stored weights
if test_mode:
# load up adaptive threshold parameters
if save_best_model:
neuron_groups['e'].theta = np.load(os.path.join(best_weights_dir, '_'.join(['theta_A', ending +'_best.npy'])))
else:
neuron_groups['e'].theta = np.load(os.path.join(end_weights_dir, '_'.join(['theta_A', ending +'_end.npy'])))
else:
# otherwise, set the adaptive additive threshold parameter at 20mV
neuron_groups['e'].theta = np.ones((n_e_total)) * 20.0 * b.mV
for conn_type in recurrent_conn_names:
if conn_type == 'ei':
# create connection name (composed of population and connection types)
conn_name = name + conn_type[0] + name + conn_type[1]
# create a connection from the first group in conn_name with the second group
connections[conn_name] = b.Connection(neuron_groups[conn_name[0:2]], neuron_groups[conn_name[2:4]], structure='sparse', state='g' + conn_type[0])
# instantiate the created connection
for feature in xrange(conv_features):
for n in xrange(n_e):
connections[conn_name][feature * n_e + n, feature * n_e + n] = 10.4
elif conn_type == 'ie' and not (test_no_inhibition and test_mode):
# create connection name (composed of population and connection types)
conn_name = name + conn_type[0] + name + conn_type[1]
# get weight matrix depending on training or test phase
if test_mode:
if save_best_model and not test_max_inhibition:
weight_matrix = np.load(os.path.join(best_weights_dir, '_'.join([conn_name, ending + '_best.npy'])))
elif test_max_inhibition:
weight_matrix = max_inhib * np.ones((n_e_total, n_e_total))
else:
weight_matrix = np.load(os.path.join(end_weights_dir, '_'.join([conn_name, ending + '_end.npy'])))
# create a connection from the first group in conn_name with the second group
connections[conn_name] = b.Connection(neuron_groups[conn_name[0:2]], neuron_groups[conn_name[2:4]], structure='sparse', state='g' + conn_type[0])
# define the actual synaptic connections and strengths
for feature in xrange(conv_features):
for other_feature in xrange(conv_features):
if feature != other_feature:
if n_e == 1:
x, y = feature // np.sqrt(n_e_total), feature % np.sqrt(n_e_total)
x_, y_ = other_feature // np.sqrt(n_e_total), other_feature % np.sqrt(n_e_total)
else:
x, y = feature // np.sqrt(conv_features), feature % np.sqrt(conv_features)
x_, y_ = other_feature // np.sqrt(conv_features), other_feature % np.sqrt(conv_features)
for n in xrange(n_e):
if test_mode:
connections[conn_name][feature * n_e + n, other_feature * n_e + n] = \
weight_matrix[feature * n_e + n, other_feature * n_e + n]
else:
if inhib_scheme == 'increasing':
connections[conn_name][feature * n_e + n, other_feature * n_e + n] = \
min(max_inhib, start_inhib * \
np.sqrt(euclidean([x, y], [x_, y_])))
elif inhib_scheme == 'eth':
connections[conn_name][feature * n_e + n, \
other_feature * n_e + n] = max_inhib
elif inhib_scheme == 'mhat':
connections[conn_name][feature * n_e + n, \
other_feature * n_e + n] = \
min(max_inhib, start_inhib * \
mhat(np.sqrt(euclidean([x, y], [x_, y_])), \
sigma=1.0, scale=1.0, shift=0.0))
print '...Creating monitors for:', name
# spike rate monitors for excitatory and inhibitory neuron populations
rate_monitors[name + 'e'] = b.PopulationRateMonitor(neuron_groups[name + 'e'], bin=(single_example_time + resting_time) / b.second)
rate_monitors[name + 'i'] = b.PopulationRateMonitor(neuron_groups[name + 'i'], bin=(single_example_time + resting_time) / b.second)
spike_counters[name + 'e'] = b.SpikeCounter(neuron_groups[name + 'e'])
# record neuron population spikes if specified
if record_spikes and do_plot:
spike_monitors[name + 'e'] = b.SpikeMonitor(neuron_groups[name + 'e'])
spike_monitors[name + 'i'] = b.SpikeMonitor(neuron_groups[name + 'i'])
if record_spikes and do_plot:
b.figure(fig_num, figsize=(8, 6))
fig_num += 1
b.ion()
b.subplot(211)
b.raster_plot(spike_monitors['Ae'], refresh=1000 * b.ms, showlast=1000 * b.ms, title='Excitatory spikes per neuron')
b.subplot(212)
b.raster_plot(spike_monitors['Ai'], refresh=1000 * b.ms, showlast=1000 * b.ms, title='Inhibitory spikes per neuron')
b.tight_layout()
# creating Poisson spike train from input image (784 vector, 28x28 image)
for name in input_population_names:
input_groups[name + 'e'] = b.PoissonGroup(n_input, 0)
rate_monitors[name + 'e'] = b.PopulationRateMonitor(input_groups[name + 'e'], bin=(single_example_time + resting_time) / b.second)
# creating connections from input Poisson spike train to excitatory neuron population(s)
for name in input_connection_names:
print '\n...Creating connections between', name[0], 'and', name[1]
# for each of the input connection types (in this case, excitatory -> excitatory)
for conn_type in input_conn_names:
# saved connection name
conn_name = name[0] + conn_type[0] + name[1] + conn_type[1]
# get weight matrix depending on training or test phase
if test_mode:
if save_best_model:
weight_matrix = np.load(os.path.join(best_weights_dir, '_'.join([conn_name, ending + '_best.npy'])))
else:
weight_matrix = np.load(os.path.join(end_weights_dir, '_'.join([conn_name, ending + '_end.npy'])))
# create connections from the windows of the input group to the neuron population
input_connections[conn_name] = b.Connection(input_groups['Xe'], neuron_groups[name[1] + conn_type[1]], \
structure='sparse', state='g' + conn_type[0], delay=True, max_delay=delay[conn_type][1])
if test_mode:
for feature in xrange(conv_features):
for n in xrange(n_e):
for idx in xrange(conv_size ** 2):
input_connections[conn_name][convolution_locations[n][idx], feature * n_e + n] = \
weight_matrix[convolution_locations[n][idx], feature * n_e + n]
else:
for feature in xrange(conv_features):
for n in xrange(n_e):
for idx in xrange(conv_size ** 2):
input_connections[conn_name][convolution_locations[n][idx], feature * n_e + n] = (b.random() + 0.01) * 0.3
if test_mode:
if do_plot:
plot_weights_and_assignments(assignments)
fig_num += 1
# if excitatory -> excitatory STDP is specified, add it here (input to excitatory populations)
if not test_mode:
print '...Creating STDP for connection', name
# STDP connection name
conn_name = name[0] + conn_type[0] + name[1] + conn_type[1]
# create the STDP object
stdp_methods[conn_name] = b.STDP(input_connections[conn_name], eqs=eqs_stdp_ee, \
pre=eqs_stdp_pre_ee, post=eqs_stdp_post_ee, wmin=0., wmax=wmax_ee)
print '\n'
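# Naming note: connection keys are composed as <source><layer type><target><layer type>,
# so with input population 'X' and excitatory/inhibitory population 'A' the feedforward
# synapses are 'XeAe' and the lateral inhibitory-to-excitatory synapses are 'AiAe',
# which is how they are indexed throughout the training and test loops below.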
def run_train():
global fig_num, input_intensity, previous_spike_count, rates, assignments, clusters, cluster_assignments, \
simple_clusters, simple_cluster_assignments, index_matrix, accumulated_rates, \
accumulated_inputs, spike_proportions
if do_plot:
input_image_monitor, input_image = plot_input(rates)
fig_num += 1
weights_assignments_figure, weights_axes, assignments_axes, weights_image, \
assignments_image = plot_weights_and_assignments(assignments)
fig_num += 1
# set up performance recording and plotting
num_evaluations = int(num_examples / update_interval) + 1
performances = { voting_scheme : np.zeros(num_evaluations) for voting_scheme in voting_schemes }
num_weight_updates = int(num_examples / weight_update_interval)
all_deltas = np.zeros((num_weight_updates, n_e_total))
deltas = np.zeros(num_weight_updates)
if do_plot:
performance_monitor, fig_num, fig_performance = plot_performance(fig_num, performances, num_evaluations)
line, fig_num, deltas_figure = plot_deltas(fig_num, deltas, num_weight_updates)
if plot_all_deltas:
lines, fig_num, all_deltas_figure = plot_all_deltas(fig_num, all_deltas, num_weight_updates)
else:
performances, wrong_idxs, wrong_labels = get_current_performance(performances, 0)
# initialize network
j = 0
num_retries = 0
b.run(0)
if save_best_model:
best_performance = 0.0
# start recording time
start_time = timeit.default_timer()
last_weights = input_connections['XeAe'][:].todense()
current_inhib = start_inhib
while j < num_examples:
# get the firing rates of the next input example
rates = (data['x'][j % data_size, :, :] / 8.0) * input_intensity * \
((noise_const * np.random.randn(n_input_sqrt, n_input_sqrt)) + 1.0)
# sets the input firing rates
input_groups['Xe'].rate = rates.reshape(n_input)
# plot the input at this step
if do_plot:
input_image_monitor = update_input(rates, input_image_monitor, input_image)
# run the network for a single example time
b.run(single_example_time)
# add Gaussian noise to weights after each iteration
if weights_noise:
input_connections['XeAe'].W.alldata[:] *= 1 + (np.random.randn(n_input * conv_features) * weights_noise_constant)
# get new neuron label assignments every 'update_interval'
if j % update_interval == 0 and j > 0:
assignments, accumulated_rates, spike_proportions = assign_labels(result_monitor, input_numbers[j - update_interval : j], accumulated_rates, accumulated_inputs)
# get count of spikes over the past iteration
current_spike_count = np.copy(spike_counters['Ae'].count[:]).reshape((conv_features, n_e)) - previous_spike_count
previous_spike_count = np.copy(spike_counters['Ae'].count[:]).reshape((conv_features, n_e))
# make sure synapse weights don't grow too large
normalize_weights()
if j % weight_update_interval == 0 and save_weights:
save_connections(weights_dir, connections, input_connections, ending, j)
save_theta(weights_dir, population_names, neuron_groups, ending, j)
np.save(os.path.join(assignments_dir, '_'.join(['assignments', ending, str(j)])), assignments)
np.save(os.path.join(misc_dir, '_'.join(['accumulated_rates', ending, str(j)])), accumulated_rates)
np.save(os.path.join(misc_dir, '_'.join(['spike_proportions', ending, str(j)])), spike_proportions)
if j % weight_update_interval == 0:
deltas[j / weight_update_interval] = np.sum(np.abs((input_connections['XeAe'][:].todense() - last_weights)))
if plot_all_deltas:
all_deltas[j / weight_update_interval, :] = np.ravel(input_connections['XeAe'][:].todense() - last_weights)
last_weights = input_connections['XeAe'][:].todense()
# pickling performance recording and iteration number
p.dump((j, deltas), open(os.path.join(deltas_dir, ending + '.p'), 'wb'))
# update weights every 'weight_update_interval'
if j % weight_update_interval == 0 and do_plot:
update_weights_and_assignments(weights_assignments_figure, weights_axes, assignments_axes, \
weights_image, assignments_image, assignments, current_spike_count)
# if the neurons in the network didn't spike more than four times
if np.sum(current_spike_count) < 5 and num_retries < 3:
# increase the intensity of input
input_intensity += 2
num_retries += 1
# set all network firing rates to zero
for name in input_population_names:
input_groups[name + 'e'].rate = 0
# let the network relax back to equilibrium
if not reset_state_vars:
b.run(resting_time)
else:
for neuron_group in neuron_groups:
neuron_groups[neuron_group].v = v_reset_e
neuron_groups[neuron_group].ge = 0
neuron_groups[neuron_group].gi = 0
# otherwise, record results and continue simulation
else:
num_retries = 0
if j == increase_iter:
for feature in xrange(conv_features):
for other_feature in xrange(conv_features):
if feature != other_feature:
for n in xrange(n_e):
connections['AiAe'][feature * n_e + n, \
other_feature * n_e + n] = max_inhib
# record the current number of spikes
result_monitor[j % update_interval, :] = current_spike_count
# get true label of last input example
input_numbers[j] = data['y'][j % data_size][0]
activity = result_monitor[j % update_interval, :] / np.sum(result_monitor[j % update_interval, :])
if do_plot and save_spikes:
fig = plt.figure(9, figsize = (8, 8))
plt.imshow(rates.reshape((28, 28)), interpolation='nearest', vmin=0, vmax=64, cmap='binary')
plt.title(str(data['y'][j % data_size][0]) + ' : ' + ', '.join( \
[str(int(output_numbers[scheme][j, 0])) for scheme in voting_schemes]))
fig = plt.figure(10, figsize = (7, 7))
plt.xticks(xrange(features_sqrt))
plt.yticks(xrange(features_sqrt))
plt.title('Activity heatmap (total spikes = ' + str(np.sum(result_monitor[j % update_interval, :])) + ')')
plt.imshow(activity.reshape((features_sqrt, features_sqrt)).T, interpolation='nearest', cmap='binary')
plt.grid(True)
fig.canvas.draw()
if save_spikes:
np.save(os.path.join(spikes_dir, '_'.join([ending, 'spike_counts', str(j)])), current_spike_count)
np.save(os.path.join(spikes_dir, '_'.join([ending, 'rates', str(j)])), rates)
# get network filter weights
filters = input_connections['XeAe'][:].todense()
# get the output classifications of the network
for scheme, outputs in predict_label(assignments, result_monitor[j % update_interval, :], \
accumulated_rates, spike_proportions).items():
if scheme != 'distance':
output_numbers[scheme][j, :] = outputs
elif scheme == 'distance':
current_input = (rates * (weight['ee_input'] / np.sum(rates))).ravel()
output_numbers[scheme][j, 0] = assignments[np.argmin([ euclidean(current_input, \
filters[:, i]) for i in xrange(conv_features) ])]
# print progress
if j % print_progress_interval == 0 and j > 0:
print 'runs done:', j, 'of', int(num_examples), '(time taken for past', print_progress_interval, 'runs:', str(timeit.default_timer() - start_time) + ')'
start_time = timeit.default_timer()
if j % weight_update_interval == 0 and do_plot:
update_deltas_plot(line, deltas, j, deltas_figure)
if plot_all_deltas:
update_all_deltas_plot(lines, all_deltas, j, all_deltas_figure)
# plot performance if appropriate
if (j % update_interval == 0 or j == num_examples - 1) and j > 0:
if do_plot:
# updating the performance plot
perf_plot, performances, wrong_idxs, wrong_labels = update_performance_plot(performance_monitor, performances, j, fig_performance)
else:
performances, wrong_idxs, wrong_labels = get_current_performance(performances, j)
# pickling performance recording and iteration number
p.dump((j, performances), open(os.path.join(performance_dir, ending + '.p'), 'wb'))
# Save the best model's weights and theta parameters (if so specified)
if save_best_model:
for performance in performances:
if performances[performance][int(j / float(update_interval))] > best_performance:
print '\n', 'Best model thus far! Voting scheme:', performance, '\n'
best_performance = performances[performance][int(j / float(update_interval))]
save_connections(best_weights_dir, connections, input_connections, ending, 'best')
save_theta(best_weights_dir, population_names, neuron_groups, ending, 'best')
np.save(os.path.join(best_assignments_dir, '_'.join(['assignments', ending, 'best'])), assignments)
np.save(os.path.join(best_misc_dir, '_'.join(['accumulated_rates', ending, 'best'])), accumulated_rates)
np.save(os.path.join(best_misc_dir, '_'.join(['spike_proportions', ending, 'best'])), spike_proportions)
# Print out performance progress intermittently
for performance in performances:
print '\nClassification performance (' + performance + ')', performances[performance][1:int(j / float(update_interval)) + 1], \
'\nAverage performance:', sum(performances[performance][1:int(j / float(update_interval)) + 1]) / \
float(len(performances[performance][1:int(j / float(update_interval)) + 1])), \
'\nBest performance:', max(performances[performance][1:int(j / float(update_interval)) + 1]), '\n'
# set input firing rates back to zero
for name in input_population_names:
input_groups[name + 'e'].rate = 0
# run the network for 'resting_time' to relax back to rest potentials
if not reset_state_vars:
b.run(resting_time)
else:
for neuron_group in neuron_groups:
neuron_groups[neuron_group].v = v_reset_e
neuron_groups[neuron_group].ge = 0
neuron_groups[neuron_group].gi = 0
# bookkeeping
input_intensity = start_input_intensity
j += 1
# ensure weights don't grow without bound
normalize_weights()
print '\n'
def run_test():
global fig_num, input_intensity, previous_spike_count, rates, assignments, clusters, cluster_assignments, \
simple_clusters, simple_cluster_assignments, index_matrix, accumulated_rates, \
accumulated_inputs, spike_proportions
# set up performance recording and plotting
num_evaluations = int(num_examples / update_interval) + 1
performances = { voting_scheme : np.zeros(num_evaluations) for voting_scheme in voting_schemes }
num_weight_updates = int(num_examples / weight_update_interval)
all_deltas = np.zeros((num_weight_updates, (conv_size ** 2) * n_e_total))
deltas = np.zeros(num_weight_updates)
# initialize network
j = 0
num_retries = 0
b.run(0)
# get network filter weights
filters = input_connections['XeAe'][:].todense()
# start recording time
start_time = timeit.default_timer()
while j < num_examples:
# get the firing rates of the next input example
rates = (data['x'][j % data_size, :, :] / 8.0) * input_intensity
# sets the input firing rates
input_groups['Xe'].rate = rates.reshape(n_input)
# run the network for a single example time
b.run(single_example_time)
# get count of spikes over the past iteration
current_spike_count = np.copy(spike_counters['Ae'].count[:]).reshape((conv_features, n_e)) - previous_spike_count
previous_spike_count = np.copy(spike_counters['Ae'].count[:]).reshape((conv_features, n_e))
# if the neurons in the network didn't spike more than four times
if np.sum(current_spike_count) < 5 and num_retries < 3:
# increase the intensity of input
input_intensity += 2
num_retries += 1
# set all network firing rates to zero
for name in input_population_names:
input_groups[name + 'e'].rate = 0
# let the network relax back to equilibrium
if not reset_state_vars:
b.run(resting_time)
else:
for neuron_group in neuron_groups:
neuron_groups[neuron_group].v = v_reset_e
neuron_groups[neuron_group].ge = 0
neuron_groups[neuron_group].gi = 0
# otherwise, record results and continue simulation
else:
num_retries = 0
# record the current number of spikes
result_monitor[j % update_interval, :] = current_spike_count
# get true label of the past input example
input_numbers[j] = data['y'][j % data_size][0]
# get the output classifications of the network
for scheme, outputs in predict_label(assignments, result_monitor[j % update_interval, :], accumulated_rates, spike_proportions).items():
if scheme != 'distance':
output_numbers[scheme][j, :] = outputs
elif scheme == 'distance':
current_input = (rates * (weight['ee_input'] / np.sum(rates))).ravel()
output_numbers[scheme][j, 0] = assignments[np.argmin([ euclidean(current_input, \
filters[:, i]) for i in xrange(conv_features) ])]
# print progress
if j % print_progress_interval == 0 and j > 0:
print 'runs done:', j, 'of', int(num_examples), '(time taken for past', print_progress_interval, 'runs:', str(timeit.default_timer() - start_time) + ')'
start_time = timeit.default_timer()
# set input firing rates back to zero
for name in input_population_names:
input_groups[name + 'e'].rate = 0
# run the network for 'resting_time' to relax back to rest potentials
if not reset_state_vars:
b.run(resting_time)
else:
for neuron_group in neuron_groups:
neuron_groups[neuron_group].v = v_reset_e
neuron_groups[neuron_group].ge = 0
neuron_groups[neuron_group].gi = 0
# bookkeeping
input_intensity = start_input_intensity
j += 1
print '\n'
def save_results():
'''
Save results of simulation (train or test)
'''
print '...Saving results'
if not test_mode:
save_connections(end_weights_dir, connections, input_connections, ending, 'end')
save_theta(end_weights_dir, population_names, neuron_groups, ending, 'end')
np.save(os.path.join(end_assignments_dir, '_'.join(['assignments', ending, 'end'])), assignments)
np.save(os.path.join(end_misc_dir, '_'.join(['accumulated_rates', ending, 'end'])), accumulated_rates)
np.save(os.path.join(end_misc_dir, '_'.join(['spike_proportions', ending, 'end'])), spike_proportions)
else:
np.save(os.path.join(activity_dir, '_'.join(['results', str(num_examples), ending])), result_monitor)
np.save(os.path.join(activity_dir, '_'.join(['input_numbers', str(num_examples), ending])), input_numbers)
print '\n'
def evaluate_results():
'''
Evaluate the network using the various voting schemes in test mode
'''
global update_interval
test_results = {}
for scheme in voting_schemes:
test_results[scheme] = np.zeros((10, num_examples))
print '\n...Calculating accuracy per voting scheme'
# get network filter weights
filters = input_connections['XeAe'][:].todense()
# for idx in xrange(end_time_testing - end_time_training):
for idx in xrange(num_examples):
label_rankings = predict_label(assignments, result_monitor[idx, :], accumulated_rates, spike_proportions)
for scheme in voting_schemes:
if scheme != 'distance':
test_results[scheme][:, idx] = label_rankings[scheme]
elif scheme == 'distance':
rates = (data['x'][idx % data_size, :, :] / 8.0) * input_intensity
current_input = (rates * (weight['ee_input'] / np.sum(rates))).ravel()
results = np.zeros(10)
results[0] = assignments[np.argmin([ euclidean(current_input, \
filters[:, i]) for i in xrange(conv_features) ])]
test_results[scheme][:, idx] = results
print test_results
differences = { scheme : test_results[scheme][0, :] - input_numbers for scheme in voting_schemes }
correct = { scheme : len(np.where(differences[scheme] == 0)[0]) for scheme in voting_schemes }
incorrect = { scheme : len(np.where(differences[scheme] != 0)[0]) for scheme in voting_schemes }
accuracies = { scheme : correct[scheme] / float(num_examples) * 100 for scheme in voting_schemes }
conf_matrices = np.array([confusion_matrix(test_results[scheme][0, :], \
input_numbers) for scheme in voting_schemes])
np.save(os.path.join(results_path, '_'.join(['confusion_matrix', ending]) + '.npy'), conf_matrices)
print '\nConfusion matrix:\n\n', conf_matrices
for scheme in voting_schemes:
print '\n-', scheme, 'accuracy:', accuracies[scheme]
results = pd.DataFrame([ [ ending ] + accuracies.values() ], columns=[ 'Model' ] + accuracies.keys())
filename = '_'.join([str(conv_features), 'results.csv'])
if not filename in os.listdir(results_path):
results.to_csv(os.path.join(results_path, filename), index=False)
else:
all_results = pd.read_csv(os.path.join(results_path, filename))
all_results = pd.concat([all_results, results], ignore_index=True)
all_results.to_csv(os.path.join(results_path, filename), index=False)
print '\n'
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--mode', default='train', help='Network operating mode: \
"train" mode learns the synaptic weights of the network, and \
"test" mode holds the weights fixed / evaluates accuracy on test data.')
parser.add_argument('--conv_size', type=int, default=28, help='Side length of the square convolution \
window used by the input -> excitatory layer of the network.')
parser.add_argument('--conv_stride', type=int, default=0, help='Horizontal, vertical stride \
of the convolution window used by input layer of the network.')
parser.add_argument('--conv_features', type=int, default=100, help='Number of excitatory \
convolutional features / filters / patches used in the network.')
parser.add_argument('--do_plot', type=str, default='False', help='Whether or not to display plots during network \
training / testing. Defaults to False, as this makes the \
network operation speedier, and possible to run on HPC resources.')
parser.add_argument('--num_train', type=int, default=10000, help='The number of \
examples on which to train the network.')
parser.add_argument('--num_test', type=int, default=10000, help='The number of \
examples on which to test the network.')
parser.add_argument('--random_seed', type=int, default=0, help='The random seed \
(any integer) from which to generate random numbers.')
parser.add_argument('--save_weights', type=str, default='False', help='Whether or not to \
save the weights of the model every `weight_update_interval`.')
parser.add_argument('--weight_update_interval', type=int, default=10, help='How often \
to update the plot of network filter weights.')
parser.add_argument('--save_best_model', type=str, default='True', help='Whether \
to save the current best version of the model.')
parser.add_argument('--update_interval', type=int, default=250, help='How often \
to update neuron labels and classify new inputs.')
parser.add_argument('--plot_all_deltas', type=str, default='False', help='Whether or not to \
plot weight changes for all neurons from input to excitatory layer.')
parser.add_argument('--train_remove_inhibition', type=str, default='False', help='Whether or not to \
remove lateral inhibition during the training phase.')
parser.add_argument('--test_no_inhibition', type=str, default='False', help='Whether or not to \
remove lateral inhibition during the test phase.')
parser.add_argument('--test_max_inhibition', type=str, default='False', help='Whether or not to \
use ETH-style inhibition during the test phase.')
parser.add_argument('--start_inhib', type=float, default=0.1, help='The beginning value \
of inhibition for the increasing scheme.')
parser.add_argument('--max_inhib', type=float, default=17.4, help='The maximum synapse \
weight for inhibitory to excitatory connections.')
parser.add_argument('--reset_state_vars', type=str, default='False', help='Whether to \
reset neuron / synapse state variables or run a "reset" period.')
parser.add_argument('--inhib_update_interval', type=int, default=250, \
help='How often to increase the inhibition strength.')
parser.add_argument('--inhib_schedule', type=str, default='linear', help='How to \
update the strength of inhibition as the training progresses.')
parser.add_argument('--save_spikes', type=str, default='False', help='Whether or not to \
save 2D graphs of spikes to later use to make an activity time-lapse.')
parser.add_argument('--normalize_inputs', type=str, default='False', help='Whether or not \
to ensure all inputs contain the same amount of "ink".')
parser.add_argument('--proportion_low', type=float, default=0.5, help='What proportion of \
the training over which to grow the inhibition from "start_inhib" to "max_inhib".')
parser.add_argument('--noise_const', type=float, default=0.0, help='The scale of the \
noise added to input examples.')
parser.add_argument('--inhib_scheme', type=str, default='increasing', help='How inhibition from \
inhibitory to excitatory neurons is handled.')
parser.add_argument('--weights_noise', type=str, default='False', help='Whether to use multiplicative \
Gaussian noise on synapse weights on each iteration.')
parser.add_argument('--weights_noise_constant', type=float, default=1e-2, help='The spread of the \
Gaussian noise used on synapse weights.')
parser.add_argument('--start_input_intensity', type=float, default=2.0, help='The intensity at which the \
input is (default) presented to the network.')
parser.add_argument('--test_adaptive_threshold', type=str, default='False', help='Whether or not to allow \
neuron thresholds to adapt during the test phase.')
parser.add_argument('--train_time', type=float, default=0.35, help='How long training \
inputs are presented to the network.')
parser.add_argument('--train_rest', type=float, default=0.15, help='How long the network is allowed \
to settle back to equilibrium between training examples.')
parser.add_argument('--test_time', type=float, default=0.35, help='How long test \
inputs are presented to the network.')
parser.add_argument('--test_rest', type=float, default=0.15, help='How long the network is allowed \
to settle back to equilibrium between test examples.')
parser.add_argument('--dt', type=float, default=0.25, help='Integration time step in milliseconds.')
# parse arguments and place them in local scope
args = parser.parse_args()
args = vars(args)
locals().update(args)
print '\nOptional argument values:'
for key, value in args.items():
print '-', key, ':', value
print '\n'
for var in [ 'do_plot', 'plot_all_deltas', 'reset_state_vars', 'test_max_inhibition', \
'normalize_inputs', 'save_weights', 'save_best_model', 'test_no_inhibition', \
'save_spikes', 'weights_noise', 'test_adaptive_threshold' ]:
if locals()[var] == 'True':
locals()[var] = True
elif locals()[var] == 'False':
locals()[var] = False
else:
raise Exception('Expecting True or False-valued command line argument "' + var + '".')
# test or training mode
test_mode = mode == 'test'
if test_mode:
num_examples = num_test
else:
num_examples = num_train
if test_mode:
data_size = 10000
else:
data_size = 60000
# At which iteration do we increase the inhibition to the ETH level?
increase_iter = int(num_train * proportion_low)
# set brian global preferences
b.set_global_preferences(defaultclock = b.Clock(dt=dt*b.ms), useweave = True, gcc_options = ['-ffast-math -march=native'], usecodegen = True,
usecodegenweave = True, usecodegenstateupdate = True, usecodegenthreshold = False, usenewpropagate = True, usecstdp = True, openmp = False,
magic_useframes = False, useweave_linear_diffeq = True)
# for reproducibility's sake
np.random.seed(random_seed)
start = timeit.default_timer()
data = get_labeled_data(os.path.join(MNIST_data_path, 'testing' if test_mode else 'training'),
not test_mode, False, xrange(10), 1000, normalize_inputs)
print 'Time needed to load data:', timeit.default_timer() - start
# set parameters for simulation based on train / test mode
record_spikes = True
# number of inputs to the network
n_input = 784
n_input_sqrt = int(math.sqrt(n_input))
# number of neurons parameters
if conv_size == 28 and conv_stride == 0:
n_e = 1
else:
n_e = ((n_input_sqrt - conv_size) / conv_stride + 1) ** 2
n_e_total = n_e * conv_features
n_e_sqrt = int(math.sqrt(n_e))
n_i = n_e
features_sqrt = int(math.ceil(math.sqrt(conv_features)))
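# Worked example of the patch arithmetic above (numbers are hypothetical): with a 28x28
# input, conv_size = 14 and conv_stride = 7 give n_e = ((28 - 14) / 7 + 1) ** 2 = 9
# excitatory neurons per feature map, while the default conv_size = 28 covers the whole
# image and yields n_e = 1.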
# time (in seconds) per data example presentation and rest period in between
if not test_mode:
single_example_time = train_time * b.second
resting_time = train_rest * b.second
else:
single_example_time = test_time * b.second
resting_time = test_rest * b.second
# set the update interval
if test_mode:
update_interval = num_examples
# weight updates and progress printing intervals
print_progress_interval = 10
# rest potential parameters, reset potential parameters, threshold potential parameters, and refractory periods
v_rest_e, v_rest_i = -65. * b.mV, -60. * b.mV
v_reset_e, v_reset_i = -65. * b.mV, -45. * b.mV
v_thresh_e, v_thresh_i = -52. * b.mV, -40. * b.mV
refrac_e, refrac_i = 5. * b.ms, 2. * b.ms
# dictionaries for weights and delays
weight, delay = {}, {}
# populations, connections, saved connections, etc.
input_population_names = [ 'X' ]
population_names = [ 'A' ]
input_connection_names = [ 'XA' ]
save_conns = [ 'XeAe', 'AeAe' ]
# weird and bad names for variables, I think
input_conn_names = [ 'ee_input' ]
recurrent_conn_names = [ 'ei', 'ie', 'ee' ]
# setting weight, delay, and intensity parameters
weight['ee_input'] = (conv_size ** 2) * 0.099489796
delay['ee_input'] = (0 * b.ms, 10 * b.ms)
delay['ei_input'] = (0 * b.ms, 5 * b.ms)
input_intensity = start_input_intensity
current_inhibition = 1.0
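# Note: 0.099489796 * 28 ** 2 is approximately 78.0, so with the default full-image patch the
# total feedforward weight per excitatory neuron is normalized toward roughly 78.0 (the target
# used in Diehl & Cook-style implementations); smaller patches scale the target with conv_size ** 2.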
# time constants, learning rates, max weights, weight dependence, etc.
tc_pre_ee, tc_post_ee = 20 * b.ms, 20 * b.ms
nu_ee_pre, nu_ee_post = 0.0001, 0.01
nu_AeAe_pre, nu_AeAe_post = 0.1, 0.5
wmax_ee = 1.0
exp_ee_post = exp_ee_pre = 0.2
w_mu_pre, w_mu_post = 0.2, 0.2
# setting up differential equations (depending on train / test mode)
if test_mode and not test_adaptive_threshold:
scr_e = 'v = v_reset_e; timer = 0*ms'
else:
tc_theta = 1e7 * b.ms
theta_plus_e = 0.05 * b.mV
scr_e = 'v = v_reset_e; theta += theta_plus_e; timer = 0*ms'
offset = 20.0 * b.mV
v_thresh_e = '(v>(theta - offset + ' + str(v_thresh_e) + ')) * (timer>refrac_e)'
# equations for neurons
neuron_eqs_e = '''
dv/dt = ((v_rest_e - v) + (I_synE + I_synI) / nS) / (100 * ms) : volt
I_synE = ge * nS * -v : amp
I_synI = gi * nS * (-100.*mV-v) : amp
dge/dt = -ge/(1.0*ms) : 1
dgi/dt = -gi/(2.0*ms) : 1
'''
if test_mode:
neuron_eqs_e += '\n theta :volt'
else:
neuron_eqs_e += '\n dtheta/dt = -theta / (tc_theta) : volt'
neuron_eqs_e += '\n dtimer/dt = 100.0 : ms'
neuron_eqs_i = '''
dv/dt = ((v_rest_i - v) + (I_synE + I_synI) / nS) / (10*ms) : volt
I_synE = ge * nS * -v : amp
I_synI = gi * nS * (-85.*mV-v) : amp
dge/dt = -ge/(1.0*ms) : 1
dgi/dt = -gi/(2.0*ms) : 1
'''
# STDP synaptic traces
eqs_stdp_ee = '''
dpre/dt = -pre / tc_pre_ee : 1.0
dpost/dt = -post / tc_post_ee : 1.0
'''
eqs_stdp_AeAe = '''
dpre/dt = -pre / tc_pre_ee : 1.0
dpost/dt = -post / tc_post_ee : 1.0
'''
# STDP rule (post-pre, no weight dependence)
eqs_stdp_pre_ee = 'pre = 1.; w -= nu_ee_pre * post'
eqs_stdp_post_ee = 'w += nu_ee_post * pre; post = 1.'
eqs_stdp_pre_AeAe = 'pre += 1.; w -= nu_AeAe_pre * post'
eqs_stdp_post_AeAe = 'w += nu_AeAe_post * pre; post += 1.'
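# Minimal numeric sketch (hypothetical helper, never called) of the post-pre rule encoded in
# the strings above: a presynaptic spike depresses the weight in proportion to the postsynaptic
# trace, a postsynaptic spike potentiates it in proportion to the presynaptic trace, and the
# weight stays clipped to [0, wmax_ee] as enforced by the b.STDP constructor in build_network.
def _example_stdp_step(w, pre_trace, post_trace, pre_spike, post_spike):
    if pre_spike:
        w = max(0., w - nu_ee_pre * post_trace)
        pre_trace = 1.
    if post_spike:
        w = min(wmax_ee, w + nu_ee_post * pre_trace)
        post_trace = 1.
    return w, pre_trace, post_trace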
print '\n'
# set ending of filename saves
ending = '_'.join([ str(conv_size), str(conv_stride), str(conv_features), str(n_e), \
str(num_train), str(random_seed), str(proportion_low), \
str(start_inhib), str(max_inhib) ])
b.ion()
fig_num = 1
# creating dictionaries for various objects
neuron_groups, input_groups, connections, input_connections, stdp_methods, \
rate_monitors, spike_monitors, spike_counters, output_numbers = {}, {}, {}, {}, {}, {}, {}, {}, {}
# creating convolution locations inside the input image
convolution_locations = {}
for n in xrange(n_e):
convolution_locations[n] = [ ((n % n_e_sqrt) * conv_stride + (n // n_e_sqrt) * n_input_sqrt * \
conv_stride) + (x * n_input_sqrt) + y for y in xrange(conv_size) for x in xrange(conv_size) ]
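# Example (hypothetical numbers): with conv_size = 14, conv_stride = 7 and a 28-pixel-wide
# input, neuron n = 1 sits one stride to the right of neuron 0, so convolution_locations[1]
# starts at flattened pixel index 7 and lists the 196 input indices of its 14x14 receptive field.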
# instantiating neuron "vote" monitor
result_monitor = np.zeros((update_interval, conv_features, n_e))
# bookkeeping variables
previous_spike_count = np.zeros((conv_features, n_e))
input_numbers = np.zeros(num_examples)
rates = np.zeros((n_input_sqrt, n_input_sqrt))
if test_mode:
assignments = np.load(os.path.join(best_assignments_dir, '_'.join(['assignments', ending, 'best.npy'])))
accumulated_rates = np.load(os.path.join(best_misc_dir, '_'.join(['accumulated_rates', ending, 'best.npy'])))
spike_proportions = np.load(os.path.join(best_misc_dir, '_'.join(['spike_proportions', ending, 'best.npy'])))
else:
assignments = -1 * np.ones((conv_features, n_e))
# build the spiking neural network
build_network()
if test_mode:
voting_schemes = ['all', 'all_active', 'most_spiked_patch', 'most_spiked_location', 'confidence_weighting', \
'activity_neighborhood', 'most_spiked_neighborhood', 'distance']
else:
voting_schemes = ['all', 'all_active', 'most_spiked_patch', 'most_spiked_location', \
'confidence_weighting', 'activity_neighborhood', 'most_spiked_neighborhood']
for scheme in voting_schemes:
output_numbers[scheme] = np.zeros((num_examples, 10))
if not test_mode:
accumulated_rates = np.zeros((conv_features * n_e, 10))
accumulated_inputs = np.zeros(10)
spike_proportions = np.zeros((conv_features * n_e, 10))
# run the simulation of the network
if test_mode:
run_test()
else:
run_train()
# save and plot results
save_results()
# evaluate results
if test_mode:
evaluate_results()
| [
"[email protected]"
] | |
f2744c340d84c765a7f38427e107dcf0e0339605 | 6ba72ce01fe8c08a10a7607536858cfd40b2dc16 | /kirppuauth/migrations/0001_initial.py | a501f184d3eaff89282c7871370678d0bb60b7eb | [
"MIT"
] | permissive | jlaunonen/kirppu | dcafc5537d325b2605daf98cdde4115a759dd2ce | fb694a0d1f827f4f4aae870589eb4e57ddf9bc97 | refs/heads/master | 2023-07-20T03:13:10.814349 | 2023-07-14T16:46:55 | 2023-07-14T16:46:55 | 18,244,187 | 0 | 6 | MIT | 2023-01-10T20:48:08 | 2014-03-29T15:36:30 | Python | UTF-8 | Python | false | false | 2,830 | py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import django.utils.timezone
import django.core.validators
class Migration(migrations.Migration):
dependencies = [
('auth', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='User',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('password', models.CharField(max_length=128, verbose_name='password')),
('last_login', models.DateTimeField(default=django.utils.timezone.now, verbose_name='last login')),
('is_superuser', models.BooleanField(default=False, help_text='Designates that this user has all permissions without explicitly assigning them.', verbose_name='superuser status')),
('username', models.CharField(help_text='Required. 30 characters or fewer. Letters, digits and @/./+/-/_ only.', unique=True, max_length=30, verbose_name='username', validators=[django.core.validators.RegexValidator('^[\\w.@+-]+$', 'Enter a valid username.', 'invalid')])),
('first_name', models.CharField(max_length=30, verbose_name='first name', blank=True)),
('last_name', models.CharField(max_length=30, verbose_name='last name', blank=True)),
('email', models.EmailField(max_length=75, verbose_name='email address', blank=True)),
('is_staff', models.BooleanField(default=False, help_text='Designates whether the user can log into this admin site.', verbose_name='staff status')),
('is_active', models.BooleanField(default=True, help_text='Designates whether this user should be treated as active. Unselect this instead of deleting accounts.', verbose_name='active')),
('date_joined', models.DateTimeField(default=django.utils.timezone.now, verbose_name='date joined')),
('phone', models.CharField(max_length=64)),
('last_checked', models.DateTimeField(auto_now_add=True)),
('groups', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Group', blank=True, help_text='The groups this user belongs to. A user will get all permissions granted to each of his/her group.', verbose_name='groups')),
('user_permissions', models.ManyToManyField(related_query_name='user', related_name='user_set', to='auth.Permission', blank=True, help_text='Specific permissions for this user.', verbose_name='user permissions')),
],
options={
'abstract': False,
'verbose_name': 'user',
'verbose_name_plural': 'users',
},
bases=(models.Model,),
),
]
| [
"[email protected]"
] | |
f56f9bcaa5d9294fc2f4329958f2a02d6b674c23 | 4503a3bfd940dce760b5f70e90e6fe2fe0cc4881 | week02/lectures/FirstCode.py | 9d2c8a972ddc17efc1eafc379c1a682a399067d9 | [] | no_license | RicardoLima17/lecture | dba7de5c61507f51d51e3abc5c7c4c22ecda504f | b41f1201ab938fe0cab85566998390166c7fa7d8 | refs/heads/main | 2023-04-18T11:12:39.769760 | 2021-04-21T18:36:09 | 2021-04-21T18:36:09 | 334,456,464 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 103 | py | # Program that greets the user
# Author: Ricardo Rodrigues
print('Hello world! \nHow are you today?')
| [
"[email protected]"
] | |
39ea59b534f5e9b9fe9597d3cb0a435cf20a1224 | 29fb2eb3b9bb21b529e814da53518fab2958693a | /thesis/images/ionizationStructure.py | bab29ae268d132cad0aaf36517a5e29f94941e0a | [] | no_license | Vital-Fernandez/thesis_pipeline | acca734b1a2ce11b0bee5bd41fab534022ea295e | 1253e2ed94e0f502a16cae6b88f84b633d0f16c2 | refs/heads/master | 2022-05-31T10:15:47.241645 | 2021-05-18T17:43:44 | 2021-05-18T17:43:44 | 90,319,650 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,922 | py | from dazer_methods import Dazer
from numpy import nanmean, nanstd, mean, nan as np_nan
from uncertainties import ufloat, unumpy, umath
import pandas as pd
# Generate dazer object
dz = Dazer()
# Declare figure format
size_dict = {'figure.figsize': (14, 6), 'axes.labelsize': 20, 'legend.fontsize': 20, 'font.family': 'Times New Roman',
'mathtext.default': 'regular', 'xtick.labelsize': 20, 'ytick.labelsize': 20}
dz.FigConf(plotSize=size_dict)
# Declare data location
folder_data = '/home/vital/Dropbox/Astrophysics/Seminars/Cloudy School 2017/teporingos/Grid_data_vital/'
file_name_list_S = ['TGrid_Mass100000.0_age5.48_zStar-2.1_zGas0.008.ele_S',
'TGrid_Mass200000.0_age5.48_zStar-2.1_zGas0.008.ele_S']
file_name_list_O = ['TGrid_Mass100000.0_age5.48_zStar-2.1_zGas0.008.ele_O',
'TGrid_Mass200000.0_age5.48_zStar-2.1_zGas0.008.ele_O']
z_list = ['100000', '200000']
ions_list_S = ['S+', 'S+2', 'S+3']
ions_labels_S = [r'$S^{+}$', r'$S^{2+}$', r'$S^{3+}$']
ions_list_O = ['O+', 'O+2']
ions_labels_O = [r'$O^{+}$', r'$O^{2+}$']
labels_coords_S = [[(1.60e18, 0.98), (2.35e18, 0.98)],
[(1.0e18, 0.72), (1.77e18, 0.72)],
[(0.75e18, 0.000005), (2.0e18, 0.015)]]
labels_coords_O = [[(1.55e18, 0.5), (2.3e18, 0.5)],
[(1.03e18, 0.6), (1.8e18, 0.6)]]
# Generate the color map
dz.gen_colorList(0, 5)
# ions_colors_S = [dz.get_color(0), dz.get_color(1), dz.get_color(2)]
# ions_colors_O = [dz.get_color(3), dz.get_color(4)]
ions_colors_S = ['tab:orange', 'tab:red', 'tab:brown']
ions_colors_O = ['tab:blue', 'tab:green']
line_type = ['--', '-']
for i in range(len(file_name_list_S)):
file_name = file_name_list_S[i]
elemIon_df = pd.read_csv(folder_data + file_name, sep='\t')
for j in range(len(ions_list_S)):
ion = ions_list_S[j]
radious = elemIon_df['#depth'].values
ion_frac = elemIon_df[ion].values
label = r'{0:1.1e} $M_\odot$'.format(float(z_list[i]))
dz.data_plot(radious / 1e19, ion_frac, color=ions_colors_S[j], linestyle=line_type[i],
label=r'Cluster mass {}'.format(label), linewidth=2)
dz.plot_text(labels_coords_S[j][i][0] / 1e19, labels_coords_S[j][i][1], text=ions_labels_S[j],
color=ions_colors_S[j], fontsize=20, axis_plot=None)
file_name = file_name_list_O[i]
elemIon_df = pd.read_csv(folder_data + file_name, sep='\t')
for j in range(len(ions_list_O)):
ion = ions_list_O[j]
radious = elemIon_df['#depth'].values
ion_frac = elemIon_df[ion].values
label = r'{0:1.1e} $M_\odot$'.format(float(z_list[i]))
dz.data_plot(radious / 1e19, ion_frac, color=ions_colors_O[j], linestyle=line_type[i],
label=r'Cluster mass {}'.format(label))
dz.plot_text(labels_coords_O[j][i][0] / 1e19, labels_coords_O[j][i][1], text=ions_labels_O[j],
color=ions_colors_O[j], fontsize=20, axis_plot=None)
dz.FigWording(r'$R_{19}$ $(10^{19}cm)$', r'$X(A^{+i})$', '', ncols_leg=1)
leg = dz.Axis.get_legend()
leg.legendHandles[0].set_color('black')
leg.legendHandles[1].set_color('black')
# dz.display_fig()
dz.display_fig()
# dz.savefig('/home/vital/Dropbox/Astrophysics/Papers/Yp_AlternativeMethods/images/SulfurIonization_fraction_vs_cloudThickness')
# dz.savefig('/home/vital/Dropbox/Astrophysics/Seminars/Stasinska conference/SulfurIonization_fraction_vs_cloudThickness')
# #Load catalogue dataframe
# catalogue_dict = dz.import_catalogue()
# catalogue_df = dz.load_excel_DF('/home/vital/Dropbox/Astrophysics/Data/WHT_observations/WHT_Galaxies_properties.xlsx')
#
# #Define plot frame and colors
# size_dict = {'axes.labelsize':24, 'legend.fontsize':24, 'font.family':'Times New Roman', 'mathtext.default':'regular', 'xtick.labelsize':22, 'ytick.labelsize':22}
# dz.FigConf(plotSize = size_dict)
#
# dz.quick_indexing(catalogue_df)
# idcs = (pd.notnull(catalogue_df.OI_HI_emis2nd)) & (pd.notnull(catalogue_df.NI_HI_emis2nd)) & (pd.notnull(catalogue_df.HeII_HII_from_O_emis2nd)) & (catalogue_df.quick_index.notnull()) & (~catalogue_df.index.isin(['SHOC593']))
#
# #Prepare data
# O_values = catalogue_df.loc[idcs].OI_HI_emis2nd.values
# N_values = catalogue_df.loc[idcs].NI_HI_emis2nd.values
# HeII_HI = catalogue_df.loc[idcs].HeII_HII_from_O_emis2nd.values
# objects = catalogue_df.loc[idcs].quick_index.values
#
# N_O_ratio = N_values/O_values
#
# dz.data_plot(unumpy.nominal_values(HeII_HI), unumpy.nominal_values(N_O_ratio), label = '', markerstyle='o', x_error=unumpy.std_devs(HeII_HI), y_error=unumpy.std_devs(N_O_ratio))
# dz.plot_text(unumpy.nominal_values(HeII_HI), unumpy.nominal_values(N_O_ratio), text=objects, x_pad=1.005, y_pad=1.01)
#
# dz.FigWording(r'y', r'$N/O$', '')
# # dz.display_fig()
# dz.savefig('/home/vital/Dropbox/Astrophysics/Papers/Yp_AlternativeMethods/images/NO_to_y')
# from dazer_methods import Dazer
# from numpy import nanmean, nanstd, mean, nan as np_nan
# from uncertainties import ufloat, unumpy, umath
# import pandas as pd
#
# # Generate dazer object
# dz = Dazer()
#
# # Declare figure format
# size_dict = {'figure.figsize': (10, 10), 'axes.labelsize': 24, 'legend.fontsize': 14, 'font.family': 'Times New Roman',
# 'mathtext.default': 'regular', 'xtick.labelsize': 22, 'ytick.labelsize': 22}
# dz.FigConf(plotSize=size_dict)
#
# # Declare data location
# folder_data = '/home/vital/Dropbox/Astrophysics/Seminars/Cloudy School 2017/teporingos/Grid_data_vital/'
# file_name_list_S = [
# 'TGrid_Mass100000.0_age5.48_zStar-2.1_zGas0.008.ele_S'] # , 'TGrid_Mass100000.0_age5.48_zStar-2.1_zGas0.008.ele_S']
# file_name_list_O = [
# 'TGrid_Mass100000.0_age5.48_zStar-2.1_zGas0.008.ele_O'] # , 'TGrid_Mass100000.0_age5.48_zStar-2.1_zGas0.008.ele_O']
# z_list = ['200000', '100000']
# ions_list_S = ['S+', 'S+2', 'S+3']
# ions_labels_S = [r'$S^{+}$', r'$S^{2+}$', r'$S^{3+}$']
# ions_list_O = ['O+', 'O+2']
# ions_labels_O = [r'$O^{+}$', r'$O^{2+}$']
# labels_coords_S = [[(1.65e18, 1.0),
# (2.4e18, 1.0)],
# [(1.0e18, 0.75),
# (1.77e18, 0.75)],
# [(1.2e18, 0.015),
# (2.0e18, 0.015)]]
# labels_coords_O = [[(1.55e18, 0.5),
# (2.3e18, 0.5)],
# [(1.03e18, 0.6),
# (1.8e18, 0.6)]]
# ions_colors_S = ['tab:orange', 'tab:red', 'tab:brown']
# ions_colors_O = ['tab:blue', 'tab:green']
#
# line_type = ['-', '--']
#
# for i in range(len(file_name_list_S)):
#
# file_name = file_name_list_S[i]
#
# elemIon_df = pd.read_csv(folder_data + file_name, sep='\t')
#
# for j in range(len(ions_list_S)):
# ion = ions_list_S[j]
# radious = elemIon_df['#depth'].values
# ion_frac = elemIon_df[ion].values
# label = r'{0:1.1e} $M_\odot$'.format(float(z_list[i]))
# dz.data_plot(radious / 1e19, ion_frac, color=ions_colors_S[j], linestyle=line_type[i],
# label=r'Cluster mass {}'.format(label), linewidth=3)
# dz.plot_text(labels_coords_S[j][i][0] / 1e19, labels_coords_S[j][i][1], text=ions_labels_S[j],
# color=ions_colors_S[j], fontsize=17, axis_plot=None)
#
# file_name = file_name_list_O[i]
#
# elemIon_df = pd.read_csv(folder_data + file_name, sep='\t')
#
# for j in range(len(ions_list_O)):
# ion = ions_list_O[j]
# radious = elemIon_df['#depth'].values
# ion_frac = elemIon_df[ion].values
# label = r'{0:1.1e} $M_\odot$'.format(float(z_list[i]))
# dz.data_plot(radious / 1e19, ion_frac, color=ions_colors_O[j], linestyle=line_type[i],
# label=r'Cluster mass {}'.format(label))
# dz.plot_text(labels_coords_O[j][i][0] / 1e19, labels_coords_O[j][i][1], text=ions_labels_O[j],
# color=ions_colors_O[j], fontsize=17, axis_plot=None)
#
# dz.FigWording(r'$R_{19}$ $(10^{19}cm)$', r'$X(A^{+i})$', '', ncols_leg=1)
#
# leg = dz.Axis.get_legend()
# leg.legendHandles[0].set_color('black')
# # leg.legendHandles[1].set_color('black')
#
# # dz.display_fig()
# # dz.savefig('/home/vital/Dropbox/Astrophysics/Papers/Yp_AlternativeMethods/images/SulfurIonization_fraction_vs_cloudThickness')
# dz.savefig('/home/vital/Dropbox/Astrophysics/Seminars/Stasinska conference/SulfurIonization_fraction_vs_cloudThickness')
#
# # #Load catalogue dataframe
# # catalogue_dict = dz.import_catalogue()
# # catalogue_df = dz.load_excel_DF('/home/vital/Dropbox/Astrophysics/Data/WHT_observations/WHT_Galaxies_properties.xlsx')
# #
# # #Define plot frame and colors
# # size_dict = {'axes.labelsize':24, 'legend.fontsize':24, 'font.family':'Times New Roman', 'mathtext.default':'regular', 'xtick.labelsize':22, 'ytick.labelsize':22}
# # dz.FigConf(plotSize = size_dict)
# #
# # dz.quick_indexing(catalogue_df)
# # idcs = (pd.notnull(catalogue_df.OI_HI_emis2nd)) & (pd.notnull(catalogue_df.NI_HI_emis2nd)) & (pd.notnull(catalogue_df.HeII_HII_from_O_emis2nd)) & (catalogue_df.quick_index.notnull()) & (~catalogue_df.index.isin(['SHOC593']))
# #
# # #Prepare data
# # O_values = catalogue_df.loc[idcs].OI_HI_emis2nd.values
# # N_values = catalogue_df.loc[idcs].NI_HI_emis2nd.values
# # HeII_HI = catalogue_df.loc[idcs].HeII_HII_from_O_emis2nd.values
# # objects = catalogue_df.loc[idcs].quick_index.values
# #
# # N_O_ratio = N_values/O_values
# #
# # dz.data_plot(unumpy.nominal_values(HeII_HI), unumpy.nominal_values(N_O_ratio), label = '', markerstyle='o', x_error=unumpy.std_devs(HeII_HI), y_error=unumpy.std_devs(N_O_ratio))
# # dz.plot_text(unumpy.nominal_values(HeII_HI), unumpy.nominal_values(N_O_ratio), text=objects, x_pad=1.005, y_pad=1.01)
# #
# # dz.FigWording(r'y', r'$N/O$', '')
# # # dz.display_fig()
# # dz.savefig('/home/vital/Dropbox/Astrophysics/Papers/Yp_AlternativeMethods/images/NO_to_y') | [
"[email protected]"
] | |
1a72065a811121f0dd9d16e8dd072b751fba6a6a | 917a99fdf14097dd8001b5c98cc48c8716f8f969 | /webElement/ass_service/syslogElement.py | 8bffc624415e17423f022ce3b8b9a646794ed0be | [] | no_license | isomper/testIsomptySecret | 722eba4cbefe9495a3292d8d10e8ad9c4a34c8a7 | 968bbee05af730cfb7717f1531286f11a7f99cf3 | refs/heads/master | 2020-03-19T07:29:28.487913 | 2018-07-13T06:25:50 | 2018-07-13T06:25:50 | 136,118,583 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,408 | py | #coding=utf-8
u'''
#File name:
#Version of software under test: V2.8.1
#Author: 李择优
#Creation date: 2018/1/24
#Module description: SYSLOG
#Revision history
#Modified by:
#Modification date:
#Modification details:
'''
import sys
reload(sys)
sys.setdefaultencoding('utf-8')
import time
sys.path.append("/testIsompSecret/common/")
from _icommon import getElement,selectElement,frameElement,commonFun
from _cnEncode import cnEncode
from _log import log
sys.path.append("/testIsompSecret/webElement/ass_service/")
from ntpElement import NtpService
class Syslog:
#运行状态开关
SWITCH = "btn_qh"
#IP
HOST = "host"
#端口
PORT = "port"
#协议
PROTOCOL = "protocol"
#标识
IDENT = "ident"
#机制
FACILITY = "facility"
#测试按钮
TEST_BUTTON = "test_syslog"
#保存按钮
SAVE_BUTTON = "save_syslog"
def __init__(self,driver):
self.driver = driver
self.log = log()
self.getElem = getElement(driver)
self.select = selectElement(driver)
self.frameElem = frameElement(driver)
self.cmf = commonFun(driver)
self.ntp = NtpService(driver)
self.cnEnde = cnEncode()
u'''Fill in a text field.
parameters:
var_text : content to type into the field
value : id used to locate the element
'''
def set_common_func(self,var_text,value):
try:
revar_text = self.cnEnde.is_float(var_text)
var_elem =self.getElem.find_element_with_wait_EC("id",value)
var_elem.clear()
var_elem.send_keys(revar_text)
except Exception as e:
print ("set user common text error: ") + str(revar_text) + str(e)
u'''Enter the IP address.
parameters:
setIp : IP address
'''
def set_ip(self,setIp):
return self.set_common_func(setIp,self.HOST)
u'''Enter the port.
parameters:
setPort : port number
'''
def set_port(self,setPort):
return self.set_common_func(setPort,self.PORT)
u'''Select the protocol.
Parameters:
value: the value attribute of the select option; "udp" for UDP, "tcp" for TCP, "nix_syslog" for nix_syslog
'''
def set_protocol(self, value):
valu = self.cnEnde.is_float(value)
self.frameElem.from_frame_to_otherFrame("rigthFrame")
selem = self.getElem.find_element_with_wait_EC("id",self.PROTOCOL)
self.select.select_element_by_value(selem, valu)
u'''Enter the ident string.
parameters:
setIdent : ident string
'''
def set_ident(self,setIdent):
return self.set_common_func(setIdent,self.IDENT)
u'''Select the facility.
Parameters:
value: the value attribute of the select option; 32 stands for facility
'''
def set_facility(self, value):
valu = self.cnEnde.is_float(value)
self.frameElem.from_frame_to_otherFrame("rigthFrame")
selem = self.getElem.find_element_with_wait_EC("id",self.FACILITY)
self.select.select_element_by_value(selem, valu)
u'''Click the test button.'''
def test_button(self):
self.frameElem.from_frame_to_otherFrame("rigthFrame")
self.getElem.find_element_wait_and_click_EC("id",self.TEST_BUTTON)
u'''Click the save button.'''
def save_button(self):
self.frameElem.from_frame_to_otherFrame("rigthFrame")
self.getElem.find_element_wait_and_click_EC("id",self.SAVE_BUTTON)
u'''Toggle the running-status switch.'''
def change_switch_status(self):
self.frameElem.from_frame_to_otherFrame("rigthFrame")
try:
button_elem = self.getElem.find_element_with_wait_EC("id",self.SWITCH)
class_attr = button_elem.get_attribute("class")
off_status = "switch_off"
on_status = "switch_on"
if class_attr == on_status:
self.ntp.click_left_moudle(1)
self.frameElem.from_frame_to_otherFrame("rigthFrame")
button_elem = self.getElem.find_element_with_wait_EC("id",self.SWITCH)
time.sleep(1)
button_elem.click()
button_elem.click()
else:
button_elem.click()
except Exception as e:
print ("Change button status error: ") + str(e)
| [
"[email protected]"
] | |
c35de16dd47821fb8bd0c74d9ed293dc5ee70956 | 34ef83114e02b173bd2d55eb53ad399e738a8e3c | /django/code_test/teka/teka/settings.py | ca05f4f1f4426561e47f91918dae0a82be1e722d | [] | no_license | vavilon/Python3 | e976a18eb301e4953696d1e3f4730ed890da015a | 8c79729747ce51d60ad685e6a2e58292954ed7eb | refs/heads/master | 2023-01-09T13:44:37.408601 | 2018-01-25T22:41:14 | 2018-01-25T22:41:14 | 100,892,055 | 0 | 1 | null | 2022-12-26T20:29:27 | 2017-08-20T22:23:06 | Python | UTF-8 | Python | false | false | 3,232 | py | """
Django settings for teka project.
Generated by 'django-admin startproject' using Django 1.11.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '7&(=23s0&zbaks8=)r=a=5xb^mz61l1&m2&=to8_j*2h^c0ld9'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'apps',
'bootstrap3',
'bootstrap4',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'teka.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'teka.wsgi.application'
# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
    os.path.join(BASE_DIR, "static"),
)
| [
"[email protected]"
] | |
44a0a7737d19c1e6e47ddf5525d0d632188aabd2 | 10f397d1fe8c68dc5af033e0b88cb99be56bc4f2 | /Statistics/models.py | 28a3d7a4f4138e1e4c55db79c7ee134721781ded | [] | no_license | wsqy/AccountRandmomAmount | db3905bd425c074935c256aab62f437fe7cb0ffc | b69bc1269a666c48fe954ac423a25d111e01176b | refs/heads/master | 2023-06-29T12:49:35.947729 | 2020-06-30T03:27:05 | 2020-06-30T03:27:05 | 271,683,993 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,592 | py | from django.db import models
# from django.conf import settings
from django.utils import timezone
from Account.models import Buyer, Seller, Company
class DayBuyer(models.Model):
"""
    Buyer daily transaction total table
"""
date = models.DateField(verbose_name='任务日期', default=timezone.now)
buyer = models.ForeignKey(Buyer, on_delete=models.PROTECT, verbose_name='买方')
amount_total = models.IntegerField(verbose_name='日总交易金额(万元)', default=0)
class Meta:
verbose_name = '买方日交易额总量'
verbose_name_plural = verbose_name
def __str__(self):
return '{}于{}总交易额{}万元'.format(self.buyer, self.date, self.amount_total)
class DaySeller(models.Model):
"""
    Seller daily transaction total table
"""
date = models.DateField(verbose_name='任务日期', default=timezone.now)
seller = models.ForeignKey(Seller, on_delete=models.PROTECT, verbose_name='卖方')
amount_total = models.IntegerField(verbose_name='日总交易金额(万元)', default=0)
class Meta:
verbose_name = '卖方日交易额总量'
verbose_name_plural = verbose_name
def __str__(self):
return '{}于{}总交易额{}万元'.format(self.seller, self.date, self.amount_total)
class DayCompany(models.Model):
"""
    Group subsidiary daily transaction total table
"""
date = models.DateField(verbose_name='任务日期', default=timezone.now)
company = models.ForeignKey(Company, on_delete=models.PROTECT, verbose_name='集团子公司')
amount_total = models.IntegerField(verbose_name='日总交易金额(万元)', default=0)
class Meta:
verbose_name = '集团子公司日交易额总量'
verbose_name_plural = verbose_name
def __str__(self):
return '{}于{}总交易额{}万元'.format(self.company, self.date, self.amount_total)
class MouthBuyer(models.Model):
"""
    Buyer monthly transaction total table
"""
date = models.CharField(max_length=8, verbose_name='月份')
buyer = models.ForeignKey(Buyer, on_delete=models.PROTECT, verbose_name='买方')
amount_total = models.IntegerField(verbose_name='月总交易金额(万元)', default=0)
class Meta:
verbose_name = '买方月交易额总量'
verbose_name_plural = verbose_name
def __str__(self):
return '{}于{}总交易额{}万元'.format(self.buyer, self.date, self.amount_total)
class MouthSeller(models.Model):
"""
    Seller monthly transaction total table
"""
date = models.CharField(max_length=8, verbose_name='月份')
seller = models.ForeignKey(Seller, on_delete=models.PROTECT, verbose_name='卖方')
amount_total = models.IntegerField(verbose_name='月总交易金额(万元)', default=0)
class Meta:
verbose_name = '卖方月交易额总量'
verbose_name_plural = verbose_name
def __str__(self):
return '{}于{}总交易额{}万元'.format(self.seller, self.date, self.amount_total)
class MouthCompany(models.Model):
"""
    Group subsidiary monthly transaction total table
"""
date = models.CharField(max_length=8, verbose_name='月份')
company = models.ForeignKey(Company, on_delete=models.PROTECT, verbose_name='集团子公司')
amount_total = models.IntegerField(verbose_name='月总交易金额(万元)', default=0)
class Meta:
verbose_name = '集团子公司月交易额总量'
verbose_name_plural = verbose_name
def __str__(self):
return '{}于{}总交易额{}万元'.format(self.company, self.date, self.amount_total) | [
"[email protected]"
] | |
624253db8803ba4e60460ddc4c11d392b0bac60d | 297497957c531d81ba286bc91253fbbb78b4d8be | /third_party/libwebrtc/tools/grit/grit/tool/postprocess_unittest.py | 21ca5e2f774610e4a7efa36f398ec1fb87b4cddc | [
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | marco-c/gecko-dev-comments-removed | 7a9dd34045b07e6b22f0c636c0a836b9e639f9d3 | 61942784fb157763e65608e5a29b3729b0aa66fa | refs/heads/master | 2023-08-09T18:55:25.895853 | 2023-08-01T00:40:39 | 2023-08-01T00:40:39 | 211,297,481 | 0 | 0 | NOASSERTION | 2019-09-29T01:27:49 | 2019-09-27T10:44:24 | C++ | UTF-8 | Python | false | false | 1,705 | py |
'''Unit test that checks postprocessing of files.
Tests postprocessing by having the postprocessor
modify the grd data tree, changing the message name attributes.
'''
from __future__ import print_function
import os
import re
import sys
if __name__ == '__main__':
sys.path.append(os.path.join(os.path.dirname(__file__), '../..'))
import unittest
import grit.tool.postprocess_interface
from grit.tool import rc2grd
class PostProcessingUnittest(unittest.TestCase):
def testPostProcessing(self):
rctext = '''STRINGTABLE
BEGIN
DUMMY_STRING_1 "String 1"
// Some random description
DUMMY_STRING_2 "This text was added during preprocessing"
END
'''
tool = rc2grd.Rc2Grd()
class DummyOpts(object):
verbose = False
extra_verbose = False
tool.o = DummyOpts()
tool.post_process = 'grit.tool.postprocess_unittest.DummyPostProcessor'
result = tool.Process(rctext, '.\resource.rc')
self.failUnless(
result.children[2].children[2].children[0].attrs['name'] == 'SMART_STRING_1')
self.failUnless(
result.children[2].children[2].children[1].attrs['name'] == 'SMART_STRING_2')
class DummyPostProcessor(grit.tool.postprocess_interface.PostProcessor):
'''
Post processing replaces all message name attributes containing "DUMMY" to
"SMART".
'''
def Process(self, rctext, rcpath, grdnode):
smarter = re.compile(r'(DUMMY)(.*)')
messages = grdnode.children[2].children[2]
for node in messages.children:
name_attr = node.attrs['name']
m = smarter.search(name_attr)
if m:
node.attrs['name'] = 'SMART' + m.group(2)
return grdnode
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
f9520500f015b1f2f85946de085b6dfb1d169031 | b92b0e9ba2338ab311312dcbbeefcbb7c912fc2e | /build/shogun_lib/examples/documented/python_modular/distance_sparseeuclidean_modular.py | 2e8e098e832784cb39eb330eae2d7c48c0f9148f | [] | no_license | behollis/muViewBranch | 384f8f97f67723b2a4019294854969d6fc1f53e8 | 1d80914f57e47b3ad565c4696861f7b3213675e0 | refs/heads/master | 2021-01-10T13:22:28.580069 | 2015-10-27T21:43:20 | 2015-10-27T21:43:20 | 45,059,082 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,057 | py | # In this example a sparse euclidean distance is computed for sparse toy data.
from tools.load import LoadMatrix
lm=LoadMatrix()
traindat = lm.load_numbers('../data/fm_train_real.dat')
testdat = lm.load_numbers('../data/fm_test_real.dat')
parameter_list = [[traindat,testdat],[traindat,testdat]]
def distance_sparseeuclidean_modular (fm_train_real=traindat,fm_test_real=testdat):
from shogun.Features import RealFeatures, SparseRealFeatures
from shogun.Distance import SparseEuclidianDistance
realfeat=RealFeatures(fm_train_real)
feats_train=SparseRealFeatures()
feats_train.obtain_from_simple(realfeat)
realfeat=RealFeatures(fm_test_real)
feats_test=SparseRealFeatures()
feats_test.obtain_from_simple(realfeat)
distance=SparseEuclidianDistance(feats_train, feats_train)
dm_train=distance.get_distance_matrix()
distance.init(feats_train, feats_test)
dm_test=distance.get_distance_matrix()
return distance,dm_train,dm_test
if __name__=='__main__':
print('SparseEuclidianDistance')
distance_sparseeuclidean_modular(*parameter_list[0])
| [
"prosen@305cdda6-5ce1-45b3-a98d-dfc68c8b3305"
] | prosen@305cdda6-5ce1-45b3-a98d-dfc68c8b3305 |
c02054b0e7144f761e863a5a249d40a75b1c8cc5 | 6a609bc67d6a271c1bd26885ce90b3332995143c | /exercises/exhaustive-search/combinations_ii.py | 46561751342eaead6317019aa18b093dfc811644 | [] | no_license | nahgnaw/data-structure | 1c38b3f7e4953462c5c46310b53912a6e3bced9b | 18ed31a3edf20a3e5a0b7a0b56acca5b98939693 | refs/heads/master | 2020-04-05T18:33:46.321909 | 2016-07-29T21:14:12 | 2016-07-29T21:14:12 | 44,650,911 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 668 | py | # -*- coding: utf-8 -*-
"""
Given a list of integer lists, take one item from each list and find all the combinations.
"""
class Solution(object):
def combine(self, arr):
"""
        :type arr: List[List[int]]
:rtype: List[List[int]]
"""
def dfs(res, pos):
if len(res) == len(arr):
results.append(res)
return
for i in xrange(len(arr[pos])):
dfs(res + [arr[pos][i]], pos + 1)
results = []
dfs([], 0)
return results
if __name__ == '__main__':
arr = [[1,2], [3,4], [5,6,7]]
sol = Solution()
print sol.combine(arr)
| [
"[email protected]"
] | |
c359621f88fe116601d909b5dce736eebf473a4f | 132c7b0c8ba606a249fbdfe24f9d73e7e224d260 | /sanyuapp/urls.py | 718ddf53afbfca5bc73c30fb4040c7281a875e3f | [] | no_license | sanyuOnline/sanyu-webapp | dafa3505d7f3d6eca225ca6b4dce3fa683d5e9fe | c8e3824146bb9eb4dcf971a1cdef2bc4475385f1 | refs/heads/main | 2023-08-31T12:52:06.104078 | 2021-10-27T07:03:09 | 2021-10-27T07:03:09 | 406,589,964 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,073 | py | """sanyuapp URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
from django.conf import settings
from django.conf.urls.static import static
urlpatterns = [
path('admin/', admin.site.urls),
path('users/', include('users.urls')),
path('users/', include('django.contrib.auth.urls')),
path('', include('pages.urls')),
path('', include('blog.urls')),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
| [
"[email protected]"
] | |
75b8cf6fde95fbd9a46ab0c2c5277b706714856b | ce6538b5b7da162c1c690a346e7ec9ae0a6291f3 | /wild_cat_zoo/project/cheetah.py | 92f02831fe0904412100be1467c39f16c02a2ad7 | [] | no_license | DaniTodorowa/Softuni | 391f13dd61a6d16cd48ee06e9b35b2fd931375df | f7c875fda4e13ec63152671509aaa6eca29d7f50 | refs/heads/master | 2022-11-25T23:34:49.744315 | 2020-08-02T08:23:44 | 2020-08-02T08:23:44 | 278,938,559 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 298 | py | class Cheetah:
def __init__(self, name, gender, age):
self.name = name
self.gender = gender
self.age = age
    @staticmethod
    def get_needs():
return 60
def __repr__(self):
return f"Name: {self.name}, Age: {self.age}, Gender: {self.gender}"
| [
"[email protected]"
] | |
cec69055122f7d6681aafa3340f9b1e6c99ab682 | 999879f8d18e041d7fa313132408b252aded47f8 | /01-codes/scipy-master/scipy/linalg/_cython_signature_generator.py | 3e32f4ee3bff69c241712515ab32a5fa027911ff | [
"MIT"
] | permissive | QPanProjects/Surrogate-Model | ebcaf05728e82dcbcd924c2edca1b490ab085173 | 848c7128201218b0819c9665e2cec72e3b1d29ac | refs/heads/master | 2022-10-11T19:03:55.224257 | 2020-06-09T14:37:35 | 2020-06-09T14:37:35 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,372 | py | """
A script that uses f2py to generate the signature files used to make
the Cython BLAS and LAPACK wrappers from the fortran source code for
LAPACK and the reference BLAS.
To generate the BLAS wrapper signatures call:
python _cython_signature_generator.py blas <blas_directory> <out_file>
To generate the LAPACK wrapper signatures call:
python _cython_signature_generator.py lapack <lapack_src_directory> <out_file>
"""
import glob
from numpy.f2py import crackfortran
sig_types = {'integer': 'int',
'complex': 'c',
'double precision': 'd',
'real': 's',
'complex*16': 'z',
'double complex': 'z',
'character': 'char',
'logical': 'bint'}
def get_type(info, arg):
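    # Map an argument's Fortran typespec to its one-letter signature type;
    # a 'complex' with an explicit kind selector is promoted to double complex ('z').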
argtype = sig_types[info['vars'][arg]['typespec']]
if argtype == 'c' and info['vars'][arg].get('kindselector') is not None:
argtype = 'z'
return argtype
def make_signature(filename):
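    # Crack the Fortran source with f2py and build a single C-style signature
    # line of the form "<return type> <name>(<type *arg, ...>)".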
info = crackfortran.crackfortran(filename)[0]
name = info['name']
if info['block'] == 'subroutine':
return_type = 'void'
else:
return_type = get_type(info, name)
arglist = [' *'.join([get_type(info, arg), arg]) for arg in info['args']]
args = ', '.join(arglist)
# Eliminate strange variable naming that replaces rank with rank_bn.
args = args.replace('rank_bn', 'rank')
return '{0} {1}({2})\n'.format(return_type, name, args)
def get_sig_name(line):
return line.split('(')[0].split(' ')[-1]
def sigs_from_dir(directory, outfile, manual_wrappers=None, exclusions=None):
if directory[-1] in ['/', '\\']:
directory = directory[:-1]
files = glob.glob(directory + '/*.f*')
if exclusions is None:
exclusions = []
if manual_wrappers is not None:
exclusions += [get_sig_name(l) for l in manual_wrappers.split('\n')]
signatures = []
for filename in files:
name = filename.split('\\')[-1][:-2]
if name in exclusions:
continue
signatures.append(make_signature(filename))
if manual_wrappers is not None:
signatures += [l + '\n' for l in manual_wrappers.split('\n')]
signatures.sort(key=get_sig_name)
comment = ["# This file was generated by _cython_wrapper_generators.py.\n",
"# Do not edit this file directly.\n\n"]
with open(outfile, 'w') as f:
f.writelines(comment)
f.writelines(signatures)
# The signature that is used for zcgesv in lapack 3.1.0 and 3.1.1 changed
# in version 3.2.0. The version included in the clapack on OSX has the
# more recent signature though.
# slamch and dlamch are not in the lapack src directory, but,since they
# already have Python wrappers, we'll wrap them as well.
# The other manual signatures are used because the signature generating
# functions don't work when function pointer arguments are used.
lapack_manual_wrappers = '''void cgees(char *jobvs, char *sort, cselect1 *select, int *n, c *a, int *lda, int *sdim, c *w, c *vs, int *ldvs, c *work, int *lwork, s *rwork, bint *bwork, int *info)
void cgeesx(char *jobvs, char *sort, cselect1 *select, char *sense, int *n, c *a, int *lda, int *sdim, c *w, c *vs, int *ldvs, s *rconde, s *rcondv, c *work, int *lwork, s *rwork, bint *bwork, int *info)
void cgges(char *jobvsl, char *jobvsr, char *sort, cselect2 *selctg, int *n, c *a, int *lda, c *b, int *ldb, int *sdim, c *alpha, c *beta, c *vsl, int *ldvsl, c *vsr, int *ldvsr, c *work, int *lwork, s *rwork, bint *bwork, int *info)
void cggesx(char *jobvsl, char *jobvsr, char *sort, cselect2 *selctg, char *sense, int *n, c *a, int *lda, c *b, int *ldb, int *sdim, c *alpha, c *beta, c *vsl, int *ldvsl, c *vsr, int *ldvsr, s *rconde, s *rcondv, c *work, int *lwork, s *rwork, int *iwork, int *liwork, bint *bwork, int *info)
void dgees(char *jobvs, char *sort, dselect2 *select, int *n, d *a, int *lda, int *sdim, d *wr, d *wi, d *vs, int *ldvs, d *work, int *lwork, bint *bwork, int *info)
void dgeesx(char *jobvs, char *sort, dselect2 *select, char *sense, int *n, d *a, int *lda, int *sdim, d *wr, d *wi, d *vs, int *ldvs, d *rconde, d *rcondv, d *work, int *lwork, int *iwork, int *liwork, bint *bwork, int *info)
void dgges(char *jobvsl, char *jobvsr, char *sort, dselect3 *selctg, int *n, d *a, int *lda, d *b, int *ldb, int *sdim, d *alphar, d *alphai, d *beta, d *vsl, int *ldvsl, d *vsr, int *ldvsr, d *work, int *lwork, bint *bwork, int *info)
void dggesx(char *jobvsl, char *jobvsr, char *sort, dselect3 *selctg, char *sense, int *n, d *a, int *lda, d *b, int *ldb, int *sdim, d *alphar, d *alphai, d *beta, d *vsl, int *ldvsl, d *vsr, int *ldvsr, d *rconde, d *rcondv, d *work, int *lwork, int *iwork, int *liwork, bint *bwork, int *info)
d dlamch(char *cmach)
void ilaver(int *vers_major, int *vers_minor, int *vers_patch)
void sgees(char *jobvs, char *sort, sselect2 *select, int *n, s *a, int *lda, int *sdim, s *wr, s *wi, s *vs, int *ldvs, s *work, int *lwork, bint *bwork, int *info)
void sgeesx(char *jobvs, char *sort, sselect2 *select, char *sense, int *n, s *a, int *lda, int *sdim, s *wr, s *wi, s *vs, int *ldvs, s *rconde, s *rcondv, s *work, int *lwork, int *iwork, int *liwork, bint *bwork, int *info)
void sgges(char *jobvsl, char *jobvsr, char *sort, sselect3 *selctg, int *n, s *a, int *lda, s *b, int *ldb, int *sdim, s *alphar, s *alphai, s *beta, s *vsl, int *ldvsl, s *vsr, int *ldvsr, s *work, int *lwork, bint *bwork, int *info)
void sggesx(char *jobvsl, char *jobvsr, char *sort, sselect3 *selctg, char *sense, int *n, s *a, int *lda, s *b, int *ldb, int *sdim, s *alphar, s *alphai, s *beta, s *vsl, int *ldvsl, s *vsr, int *ldvsr, s *rconde, s *rcondv, s *work, int *lwork, int *iwork, int *liwork, bint *bwork, int *info)
s slamch(char *cmach)
void zgees(char *jobvs, char *sort, zselect1 *select, int *n, z *a, int *lda, int *sdim, z *w, z *vs, int *ldvs, z *work, int *lwork, d *rwork, bint *bwork, int *info)
void zgeesx(char *jobvs, char *sort, zselect1 *select, char *sense, int *n, z *a, int *lda, int *sdim, z *w, z *vs, int *ldvs, d *rconde, d *rcondv, z *work, int *lwork, d *rwork, bint *bwork, int *info)
void zgges(char *jobvsl, char *jobvsr, char *sort, zselect2 *selctg, int *n, z *a, int *lda, z *b, int *ldb, int *sdim, z *alpha, z *beta, z *vsl, int *ldvsl, z *vsr, int *ldvsr, z *work, int *lwork, d *rwork, bint *bwork, int *info)
void zggesx(char *jobvsl, char *jobvsr, char *sort, zselect2 *selctg, char *sense, int *n, z *a, int *lda, z *b, int *ldb, int *sdim, z *alpha, z *beta, z *vsl, int *ldvsl, z *vsr, int *ldvsr, d *rconde, d *rcondv, z *work, int *lwork, d *rwork, int *iwork, int *liwork, bint *bwork, int *info)'''
if __name__ == '__main__':
from sys import argv
libname, src_dir, outfile = argv[1:]
# Exclude scabs and sisnan since they aren't currently included
# in the scipy-specific ABI wrappers.
if libname.lower() == 'blas':
sigs_from_dir(src_dir, outfile, exclusions=['scabs1', 'xerbla'])
elif libname.lower() == 'lapack':
# Exclude all routines that do not have consistent interfaces from
# LAPACK 3.1.0 through 3.6.0.
# Also exclude routines with string arguments to avoid
# compatibility woes with different standards for string arguments.
# Exclude sisnan and slaneg since they aren't currently included in
# The ABI compatibility wrappers.
exclusions = ['sisnan', 'csrot', 'zdrot', 'ilaenv', 'iparmq', 'lsamen',
'xerbla', 'zcgesv', 'dlaisnan', 'slaisnan', 'dlazq3',
'dlazq4', 'slazq3', 'slazq4', 'dlasq3', 'dlasq4',
'slasq3', 'slasq4', 'dlasq5', 'slasq5', 'slaneg',
# Routines deprecated in LAPACK 3.6.0
'cgegs', 'cgegv', 'cgelsx', 'cgeqpf', 'cggsvd', 'cggsvp',
'clahrd', 'clatzm', 'ctzrqf', 'dgegs', 'dgegv', 'dgelsx',
'dgeqpf', 'dggsvd', 'dggsvp', 'dlahrd', 'dlatzm', 'dtzrqf',
'sgegs', 'sgegv', 'sgelsx', 'sgeqpf', 'sggsvd', 'sggsvp',
'slahrd', 'slatzm', 'stzrqf', 'zgegs', 'zgegv', 'zgelsx',
'zgeqpf', 'zggsvd', 'zggsvp', 'zlahrd', 'zlatzm', 'ztzrqf']
sigs_from_dir(src_dir, outfile, manual_wrappers=lapack_manual_wrappers,
exclusions=exclusions)
| [
"[email protected]"
] | |
e9b4572ab1f8e1c87a7d0030bcf82691a6a035e5 | 880103c6f9bdc9d5a7a8633c3e4d108c964e9b89 | /devil/devil/android/tools/device_monitor.py | 2b3503174c8a364463e242fa2f450a76e5b3047f | [
"BSD-3-Clause"
] | permissive | ateleshev/catapult | c3645f0fb0d4e929b5baebea33307b765225cb2f | faf60eb37f8b9828eddb30c8397b333eb1d89204 | refs/heads/master | 2021-01-22T19:08:47.140355 | 2017-03-16T01:01:54 | 2017-03-16T01:01:54 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,936 | py | #!/usr/bin/env python
# Copyright 2015 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Launches a daemon to monitor android device temperatures & status.
This script will repeatedly poll the given devices for their temperatures and
status every 60 seconds and dump the stats to file on the host.
"""
import argparse
import collections
import json
import logging
import logging.handlers
import os
import re
import socket
import sys
import time
if __name__ == '__main__':
sys.path.append(
os.path.abspath(os.path.join(os.path.dirname(__file__),
'..', '..', '..')))
from devil import devil_env
from devil.android import battery_utils
from devil.android import device_blacklist
from devil.android import device_errors
from devil.android import device_utils
# Various names of sensors used to measure cpu temp
CPU_TEMP_SENSORS = [
# most nexus devices
'tsens_tz_sensor0',
# android one
'mtktscpu',
# nexus 9
'CPU-therm',
]
DEVICE_FILE_VERSION = 1
# TODO(bpastene): Remove the old file once sysmon has been updated to read the
# new status file.
DEVICE_FILES = [
os.path.join(os.path.expanduser('~'), 'android_device_status.json'),
os.path.join(
os.path.expanduser('~'), '.android',
'%s__android_device_status.json' % socket.gethostname().split('.')[0]
),
]
MEM_INFO_REGEX = re.compile(r'.*?\:\s*(\d+)\s*kB') # ex: 'MemTotal: 185735 kB'
def get_device_status(device):
"""Polls the given device for various info.
Returns: A dict of the following format:
{
'battery': {
'level': 100,
'temperature': 123
},
'build': {
'build.id': 'ABC12D',
'product.device': 'chickenofthesea'
},
'mem': {
'avail': 1000000,
'total': 1234567,
},
'processes': 123,
'state': 'good',
'temp': {
'some_sensor': 30
},
'uptime': 1234.56,
}
"""
status = collections.defaultdict(dict)
# Battery
battery = battery_utils.BatteryUtils(device)
battery_info = battery.GetBatteryInfo()
try:
level = int(battery_info.get('level'))
except (KeyError, TypeError, ValueError):
level = None
if level and level >= 0 and level <= 100:
status['battery']['level'] = level
try:
temperature = int(battery_info.get('temperature'))
except (KeyError, TypeError, ValueError):
temperature = None
if temperature:
status['battery']['temperature'] = temperature
# Build
status['build']['build.id'] = device.build_id
status['build']['product.device'] = device.build_product
# Memory
mem_info = ''
try:
mem_info = device.ReadFile('/proc/meminfo')
except device_errors.AdbShellCommandFailedError:
logging.exception('Unable to read /proc/meminfo')
for line in mem_info.splitlines():
match = MEM_INFO_REGEX.match(line)
if match:
try:
value = int(match.group(1))
except ValueError:
continue
key = line.split(':')[0].strip()
if 'MemTotal' == key:
status['mem']['total'] = value
elif 'MemFree' == key:
status['mem']['free'] = value
# Process
try:
lines = device.RunShellCommand('ps', check_return=True)
status['processes'] = len(lines) - 1 # Ignore the header row.
except device_errors.AdbShellCommandFailedError:
logging.exception('Unable to count process list.')
# CPU Temps
# Find a thermal sensor that matches one in CPU_TEMP_SENSORS and read its
# temperature.
files = []
try:
files = device.RunShellCommand(
'grep -lE "%s" /sys/class/thermal/thermal_zone*/type' % '|'.join(
CPU_TEMP_SENSORS), check_return=True)
except device_errors.AdbShellCommandFailedError:
logging.exception('Unable to list thermal sensors.')
for f in files:
try:
sensor_name = device.ReadFile(f).strip()
temp = float(device.ReadFile(f[:-4] + 'temp').strip()) # s/type^/temp
status['temp'][sensor_name] = temp
except (device_errors.AdbShellCommandFailedError, ValueError):
logging.exception('Unable to read thermal sensor %s', f)
# Uptime
try:
uptimes = device.ReadFile('/proc/uptime').split()
status['uptime'] = float(uptimes[0]) # Take the first field (actual uptime)
except (device_errors.AdbShellCommandFailedError, ValueError):
logging.exception('Unable to read /proc/uptime')
status['state'] = 'available'
return status
def get_all_status(blacklist):
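  # Query every healthy device in parallel, then merge in blacklist reasons
  # and a timestamp to build the dict that gets dumped to the status files.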
status_dict = {
'version': DEVICE_FILE_VERSION,
'devices': {},
}
healthy_devices = device_utils.DeviceUtils.HealthyDevices(blacklist)
parallel_devices = device_utils.DeviceUtils.parallel(healthy_devices)
results = parallel_devices.pMap(get_device_status).pGet(None)
status_dict['devices'] = {
device.serial: result for device, result in zip(healthy_devices, results)
}
if blacklist:
for device, reason in blacklist.Read().iteritems():
status_dict['devices'][device] = {
'state': reason.get('reason', 'blacklisted')}
status_dict['timestamp'] = time.time()
return status_dict
def main(argv):
"""Launches the device monitor.
Polls the devices for their battery and cpu temperatures and scans the
blacklist file every 60 seconds and dumps the data to DEVICE_FILE.
"""
parser = argparse.ArgumentParser(
description='Launches the device monitor.')
parser.add_argument('--adb-path', help='Path to adb binary.')
parser.add_argument('--blacklist-file', help='Path to device blacklist file.')
args = parser.parse_args(argv)
logger = logging.getLogger()
logger.setLevel(logging.DEBUG)
handler = logging.handlers.RotatingFileHandler(
'/tmp/device_monitor.log', maxBytes=10 * 1024 * 1024, backupCount=5)
fmt = logging.Formatter('%(asctime)s %(levelname)s %(message)s',
datefmt='%y%m%d %H:%M:%S')
handler.setFormatter(fmt)
logger.addHandler(handler)
devil_dynamic_config = devil_env.EmptyConfig()
if args.adb_path:
devil_dynamic_config['dependencies'].update(
devil_env.LocalConfigItem(
'adb', devil_env.GetPlatform(), args.adb_path))
devil_env.config.Initialize(configs=[devil_dynamic_config])
blacklist = (device_blacklist.Blacklist(args.blacklist_file)
if args.blacklist_file else None)
logging.info('Device monitor running with pid %d, adb: %s, blacklist: %s',
os.getpid(), args.adb_path, args.blacklist_file)
while True:
start = time.time()
status_dict = get_all_status(blacklist)
for device_file in DEVICE_FILES:
with open(device_file, 'wb') as f:
json.dump(status_dict, f, indent=2, sort_keys=True)
logging.info('Got status of all devices in %.2fs.', time.time() - start)
time.sleep(60)
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| [
"[email protected]"
] | |
54dda2fd3569a59243cb0d5b8b7dda99f42080cb | ad6ffe99c0ec781b0bb286ce5cb10ca3735bde97 | /hello-docstring.py | 6af55db124aa30b82bd510f054f9bb043640e19d | [
"MIT"
] | permissive | crunchy-devops/tp-bac-4 | db7f9ac3824bfcf7cd8b3570c6d45284be53b9d0 | 4784d9c0505ad62bcad7b28d536826126ded435c | refs/heads/master | 2023-02-26T09:18:31.182264 | 2021-02-01T09:26:27 | 2021-02-01T09:26:27 | 330,283,640 | 0 | 10 | null | null | null | null | UTF-8 | Python | false | false | 118 | py | #!/usr/bin/env python3
"""
Author: Herve Meftah <[email protected]>
Purpose: Say hello
"""
print('Hello, World!')
| [
"[email protected]"
] | |
544535a12e8aebb81a573d170387a9a53d5e9f99 | 1e21f0939d4c46db8eeca9fa8ef034ed14b7a549 | /PhotonTnP_SampleProduction/crab/tnpDatasetDef.py | 948c5aa42935bdee103b37980cbee893ae9ef5e9 | [] | no_license | Ming-Yan/photonTnp | 4e46286998d4e2806e423e2e27893c0a8675494f | 5468bea3eff51b21eed2701cda4f3e5d2ad9e6bf | refs/heads/master | 2021-10-08T20:33:55.910375 | 2018-10-22T09:12:26 | 2018-10-22T09:12:26 | 162,109,988 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,961 | py | datasetsForTnP = [
{'lumiProcessedFile': None, 'campaign': 'crab_project_Moriond17_v1', 'dataset': 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_DYToLL_madgraph_Spring16_reHLT', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/Moriond17_v1/mc//TnPTree_DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_DYToLL_madgraph_Spring16_reHLT.root', 'lumi': -1, 'nEvts': -1},
{'lumiProcessedFile': None, 'campaign': 'crab_project_Moriond17_v1', 'dataset': 'SingleElectron_2016rereco_RunB', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/Moriond17_v1/data//TnPTree_SingleElectron_2016rereco_RunB.root', 'lumi': 5.767, 'nEvts': -1},
{'lumiProcessedFile': None, 'campaign': 'crab_project_Moriond17_v1', 'dataset': 'SingleElectron_2016rereco_RunC', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/Moriond17_v1/data//TnPTree_SingleElectron_2016rereco_RunC.root', 'lumi': 2.646, 'nEvts': -1},
{'lumiProcessedFile': None, 'campaign': 'crab_project_Moriond17_v1', 'dataset': 'SingleElectron_2016rereco_RunD', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/Moriond17_v1/data//TnPTree_SingleElectron_2016rereco_RunD.root', 'lumi': 4.353, 'nEvts': -1},
{'lumiProcessedFile': None, 'campaign': 'crab_project_Moriond17_v1', 'dataset': 'SingleElectron_2016rereco_RunE', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/Moriond17_v1/data//TnPTree_SingleElectron_2016rereco_RunE.root', 'lumi': 3.985, 'nEvts': -1},
{'lumiProcessedFile': None, 'campaign': 'crab_project_Moriond17_v1', 'dataset': 'SingleElectron_2016rereco_RunF', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/Moriond17_v1/data//TnPTree_SingleElectron_2016rereco_RunF.root', 'lumi': 3.160, 'nEvts': -1},
{'lumiProcessedFile': None, 'campaign': 'crab_project_Moriond17_v1', 'dataset': 'SingleElectron_2016rereco_RunG', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/Moriond17_v1/data//TnPTree_SingleElectron_2016rereco_RunG.root', 'lumi': 7.539, 'nEvts': -1},
{'lumiProcessedFile': None, 'campaign': 'crab_project_Moriond17_v1', 'dataset': 'SingleElectron_2016prompt_RunH', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/Moriond17_v1/data//TnPTree_SingleElectron_2016prompt_RunH.root', 'lumi': 8.762, 'nEvts': -1},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_RECOSFs_2016/crab_DYToLL_madgraph_Winter2017//results/processedLumis.json', 'campaign': 'crab_projects_RECOSFs_2016', 'dataset': 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_DYToLL_madgraph_Winter2017', 'file': '/store/group/phys_egamma/tnp/80X/RecoSF/RECOSFs_2016/mc//TnPTree_DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_DYToLL_madgraph_Winter2017.root', 'lumi': -1, 'nEvts': 49748967},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20_vRECO/src/PhysicsTools/TagAndProbe/crab/crab_projects_RECOSFs_2016/crab_2016rereco_RunB//results/processedLumis.json', 'campaign': 'crab_projects_RECOSFs_2016', 'dataset': 'SingleElectron_2016rereco_RunB', 'file': '/store/group/phys_egamma/tnp/80X/RecoSF/RECOSFs_2016/data//TnPTree_SingleElectron_2016rereco_RunB.root', 'lumi': 5.899, 'nEvts': 238592033},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_RECOSFs_2016/crab_2016rereco_RunC//results/processedLumis.json', 'campaign': 'crab_projects_RECOSFs_2016', 'dataset': 'SingleElectron_2016rereco_RunC', 'file': '/store/group/phys_egamma/tnp/80X/RecoSF/RECOSFs_2016/data//TnPTree_SingleElectron_2016rereco_RunC.root', 'lumi': 2.646, 'nEvts': 93326652},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_RECOSFs_2016/crab_2016rereco_RunD//results/processedLumis.json', 'campaign': 'crab_projects_RECOSFs_2016', 'dataset': 'SingleElectron_2016rereco_RunD', 'file': '/store/group/phys_egamma/tnp/80X/RecoSF/RECOSFs_2016/data//TnPTree_SingleElectron_2016rereco_RunD.root', 'lumi': 4.353, 'nEvts': 146480008},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20_vRECO/src/PhysicsTools/TagAndProbe/crab/crab_projects_RECOSFs_2016/crab_2016rereco_RunE//results/processedLumis.json', 'campaign': 'crab_projects_RECOSFs_2016', 'dataset': 'SingleElectron_2016rereco_RunE', 'file': '/store/group/phys_egamma/tnp/80X/RecoSF/RECOSFs_2016/data//TnPTree_SingleElectron_2016rereco_RunE.root', 'lumi': 4.050, 'nEvts': 113169852},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_RECOSFs_2016/crab_2016rereco_RunF//results/processedLumis.json', 'campaign': 'crab_projects_RECOSFs_2016', 'dataset': 'SingleElectron_2016rereco_RunF', 'file': '/store/group/phys_egamma/tnp/80X/RecoSF/RECOSFs_2016/data//TnPTree_SingleElectron_2016rereco_RunF.root', 'lumi': 3.160, 'nEvts': 70143321},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20_vRECO/src/PhysicsTools/TagAndProbe/crab/crab_projects_RECOSFs_2016/crab_2016rereco_RunG//results/processedLumis.json', 'campaign': 'crab_projects_RECOSFs_2016', 'dataset': 'SingleElectron_2016rereco_RunG', 'file': '/store/group/phys_egamma/tnp/80X/RecoSF/RECOSFs_2016/data//TnPTree_SingleElectron_2016rereco_RunG.root', 'lumi': 7.391, 'nEvts': 148781520},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20_vRECO/src/PhysicsTools/TagAndProbe/crab/crab_projects_RECOSFs_2016/crab_2016prompt_RunHv2//results/processedLumis.json', 'campaign': 'crab_projects_RECOSFs_2016', 'dataset': 'SingleElectron_2016prompt_RunH', 'file': '/store/group/phys_egamma/tnp/80X/RecoSF/RECOSFs_2016/data//TnPTree_SingleElectron_2016prompt_RunH.root', 'lumi': 8.762, 'nEvts': 123900510},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v2/crab_2016rereco_RunB//results/processedLumis.json', 'campaign': 'crab_projects_v2', 'dataset': 'SingleElectron_2016rereco_RunB', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v2/data//TnPTree_SingleElectron_2016rereco_RunB.root', 'lumi': 5.349, 'nEvts': 235522176},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v2/crab_2016rereco_RunC//results/processedLumis.json', 'campaign': 'crab_projects_v2', 'dataset': 'SingleElectron_2016rereco_RunC', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v2/data//TnPTree_SingleElectron_2016rereco_RunC.root', 'lumi': 2.363, 'nEvts': 92067646},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v2/crab_2016rereco_RunD//results/processedLumis.json', 'campaign': 'crab_projects_v2', 'dataset': 'SingleElectron_2016rereco_RunD', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v2/data//TnPTree_SingleElectron_2016rereco_RunD.root', 'lumi': 4.256, 'nEvts': 146495223},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v2/crab_2016rereco_RunE//results/processedLumis.json', 'campaign': 'crab_projects_v2', 'dataset': 'SingleElectron_2016rereco_RunE', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v2/data//TnPTree_SingleElectron_2016rereco_RunE.root', 'lumi': 3.981, 'nEvts': 111208237},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v2/crab_2016rereco_RunF//results/processedLumis.json', 'campaign': 'crab_projects_v2', 'dataset': 'SingleElectron_2016rereco_RunF', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v2/data//TnPTree_SingleElectron_2016rereco_RunF.root', 'lumi': 3.105, 'nEvts': 70143321},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v2/crab_2016rereco_RunG//results/processedLumis.json', 'campaign': 'crab_projects_v2', 'dataset': 'SingleElectron_2016rereco_RunG', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v2/data//TnPTree_SingleElectron_2016rereco_RunG.root', 'lumi': 7.544, 'nEvts': 152098617},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v2/crab_2016prompt_RunH//results/processedLumis.json', 'campaign': 'crab_projects_v2', 'dataset': 'SingleElectron_2016prompt_RunH', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v2/data//TnPTree_SingleElectron_2016prompt_RunH.root', 'lumi': 6.105, 'nEvts': 89863172},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v1/crab_DYToLL_madgraph//results/processedLumis.json', 'campaign': 'crab_projects_v1', 'dataset': 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v1/mc//TnPTree_DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_DYToLL_madgraph.root', 'lumi': -1, 'nEvts': 33584160},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v2/crab_2016prompt_RunB//results/processedLumis.json', 'campaign': 'crab_projects_v2', 'dataset': 'SingleElectron_2016prompt_RunB', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v2/data//TnPTree_SingleElectron_2016prompt_RunB.root', 'lumi': -1, 'nEvts': 7384544},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v2/crab_2016prompt_RunC//results/processedLumis.json', 'campaign': 'crab_projects_v2', 'dataset': 'SingleElectron_2016prompt_RunC', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v2/data//TnPTree_SingleElectron_2016prompt_RunC.root', 'lumi': -1, 'nEvts': 79103372},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v2/crab_2016prompt_RunD//results/processedLumis.json', 'campaign': 'crab_projects_v2', 'dataset': 'SingleElectron_2016prompt_RunD', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v2/data//TnPTree_SingleElectron_2016prompt_RunD.root', 'lumi': -1, 'nEvts': 124968333},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v2/crab_2016prompt_RunF//results/processedLumis.json', 'campaign': 'crab_projects_v2', 'dataset': 'SingleElectron_2016prompt_RunF', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v2/data//TnPTree_SingleElectron_2016prompt_RunF.root', 'lumi': -1, 'nEvts': 64744869},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v2/crab_2016prompt_RunG//results/processedLumis.json', 'campaign': 'crab_projects_v2', 'dataset': 'SingleElectron_2016prompt_RunG', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v2/data//TnPTree_SingleElectron_2016prompt_RunG.root', 'lumi': -1, 'nEvts': 138296792},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_RECOSFs_2016/crab_DYToLL_madgraph//results/processedLumis.json', 'campaign': 'crab_projects_RECOSFs_2016', 'dataset': 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_DYToLL_madgraph', 'file': '/store/group/phys_egamma/tnp/80X/RecoSF/RECOSFs_2016/mc//TnPTree_DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8_DYToLL_madgraph.root', 'lumi': -1, 'nEvts': 44983870},
{'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_RECOSFs_2016/crab_2016rereco_RunB//results/processedLumis.json', 'campaign': 'crab_projects_RECOSFs_2016', 'dataset': 'SingleElectron_2016rereco_RunB', 'file': '/store/group/phys_egamma/tnp/80X/RecoSF/RECOSFs_2016/data//TnPTree_SingleElectron_2016rereco_RunB.root', 'lumi': -1, 'nEvts': 116256313},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v1/crab_2016rereco_RunB//results/processedLumis.json', 'campaign': 'crab_projects_v1', 'dataset': 'SingleElectron_2016rereco_RunB', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v1/data//TnPTree_SingleElectron_2016rereco_RunB.root', 'lumi': 5.364, 'nEvts': 236233675},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v1/crab_2016rereco_RunC//results/processedLumis.json', 'campaign': 'crab_projects_v1', 'dataset': 'SingleElectron_2016rereco_RunC', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v1/data//TnPTree_SingleElectron_2016rereco_RunC.root', 'lumi': 1.810, 'nEvts': 70759545},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v1/crab_2016rereco_RunD//results/processedLumis.json', 'campaign': 'crab_projects_v1', 'dataset': 'SingleElectron_2016rereco_RunD', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v1/data//TnPTree_SingleElectron_2016rereco_RunD.root', 'lumi': 4.241, 'nEvts': 145990095},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v1/crab_2016rereco_RunE//results/processedLumis.json', 'campaign': 'crab_projects_v1', 'dataset': 'SingleElectron_2016rereco_RunE', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v1/data//TnPTree_SingleElectron_2016rereco_RunE.root', 'lumi': 4.054, 'nEvts': 113169852},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v1/crab_2016rereco_RunF//results/processedLumis.json', 'campaign': 'crab_projects_v1', 'dataset': 'SingleElectron_2016rereco_RunF', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v1/data//TnPTree_SingleElectron_2016rereco_RunF.root', 'lumi': 3.085, 'nEvts': 69673589},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_20/src/PhysicsTools/TagAndProbe/crab/crab_projects_v1/crab_2016rereco_RunG//results/processedLumis.json', 'campaign': 'crab_projects_v1', 'dataset': 'SingleElectron_2016rereco_RunG', 'file': '/store/group/phys_egamma/tnp/80X/PhoEleIDs/v1/data//TnPTree_SingleElectron_2016rereco_RunG.root', 'lumi': 7.521, 'nEvts': 151654044},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/new/CMSSW_8_0_10/src/PhysicsTools/TagAndProbe/crab/crab_projects_elev2/crab_2016_RunB/results/processedLumis.json', 'campaign': 'crab_projects_elev2', 'dataset': 'SingleElectron_2016_RunB', 'file': '/store/group/phys_egamma/tnp/80X/Photons_76Xids/elev2/data//TnPTree_SingleElectron_2016_RunB.root', 'lumi': 5.657, 'nEvts': 228669688},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_13_patch1/src/PhysicsTools/TagAndProbe/crab/crab_projects_elev2/crab_2016_RunC//results/processedLumis.json', 'campaign': 'crab_projects_elev2', 'dataset': 'SingleElectron_2016_RunC', 'file': '/store/group/phys_egamma/tnp/80X/Photons_76Xids/elev2/data//TnPTree_SingleElectron_2016_RunC.root', 'lumi': 1.761, 'nEvts': 63419101},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/new/CMSSW_8_0_10/src/PhysicsTools/TagAndProbe/crab/crab_projects_elev2/crab_DYToLL_madgraph/results/processedLumis.json', 'campaign': 'crab_projects_elev2', 'dataset': 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8', 'file': '/store/group/phys_egamma/tnp/80X/Photons_76Xids/elev2/mc//TnPTree_DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8.root', 'lumi': -1, 'nEvts': 36311064},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/new/CMSSW_8_0_10/src/PhysicsTools/TagAndProbe/crab/crab_projects_elev2/crab_DYToLL_mcAtNLO//processedLumis.json', 'campaign': 'crab_projects_elev2', 'dataset': 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8', 'file': '/store/group/phys_egamma/tnp/80X/Photons_76Xids/elev2/mc//TnPTree_DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8.root', 'lumi': -1, 'nEvts': 28696958},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/new/CMSSW_8_0_10/src/PhysicsTools/TagAndProbe/crab/crab_projects_elev2/crab_WJets_madgraph//results/processedLumis.json', 'campaign': 'crab_projects_elev2', 'dataset': 'WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8', 'file': '/store/group/phys_egamma/tnp/80X/Photons_76Xids/elev2/mc//TnPTree_WJetsToLNu_TuneCUETP8M1_13TeV-madgraphMLM-pythia8.root', 'lumi': -1, 'nEvts': 24908024},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/new/CMSSW_8_0_10/src/PhysicsTools/TagAndProbe/crab/crab_projects_phov2/crab_2016_RunB//results/processedLumis.json', 'campaign': 'crab_projects_phov2', 'dataset': 'SingleElectron', 'file': '/store/group/phys_egamma/tnp/80X/Photons_76Xids/phov2/data//TnPTree_SingleElectron.root', 'lumi': 5.848, 'nEvts': 236203597},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/new/CMSSW_8_0_10/src/PhysicsTools/TagAndProbe/crab/crab_projects_phov2/crab_DYToLL_madgraph//results/processedLumis.json', 'campaign': 'crab_projects_phov2', 'dataset': 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8', 'file': '/store/group/phys_egamma/tnp/80X/Photons_76Xids/phov2/mc//TnPTree_DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8.root', 'lumi': -1, 'nEvts': 41253879},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/new/CMSSW_8_0_10/src/PhysicsTools/TagAndProbe/crab/crab_projects_phov2/crab_DYToLL_mcAtNLO//results/processedLumis.json', 'campaign': 'crab_projects_phov2', 'dataset': 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8', 'file': '/store/group/phys_egamma/tnp/80X/Photons_76Xids/phov2/mc//TnPTree_DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8.root', 'lumi': -1, 'nEvts': 28696958},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/new/CMSSW_8_0_10/src/PhysicsTools/TagAndProbe/crab/crab_projects_RECOv3/crab_2016_RunB//results/processedLumis.json', 'campaign': 'crab_projects_RECOv3', 'dataset': 'SingleElectron_2016_RunB', 'file': '/store/group/phys_egamma/tnp/80X/RecoSF/RECOv3/data//TnPTree_SingleElectron_2016_RunB.root', 'lumi': 5.401, 'nEvts': 211415403},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/CMSSW_8_0_13_patch1/src/PhysicsTools/TagAndProbe/crab/crab_projects_RECOv3/crab_2016_RunC//results/processedLumis.json', 'campaign': 'crab_projects_RECOv3', 'dataset': 'SingleElectron_2016_RunC', 'file': '/store/group/phys_egamma/tnp/80X/RecoSF/RECOv3/data//TnPTree_SingleElectron_2016_RunC.root', 'lumi': 1.730, 'nEvts': 62332526},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/new/CMSSW_8_0_10/src/PhysicsTools/TagAndProbe/crab/crab_projects_RECOv3/crab_DYToLL_madgraph//results/processedLumis.json', 'campaign': 'crab_projects_RECOv3', 'dataset': 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8', 'file': '/store/group/phys_egamma/tnp/80X/RecoSF/RECOv3/mc//TnPTree_DYJetsToLL_M-50_TuneCUETP8M1_13TeV-madgraphMLM-pythia8.root', 'lumi': -1, 'nEvts': 49877138},
# {'lumiProcessedFile': '/afs/cern.ch/work/f/fcouderc/public/EGamma/TnP/new/CMSSW_8_0_10/src/PhysicsTools/TagAndProbe/crab/crab_projects_RECOv3/crab_DYToLL_mcAtNLO//results/processedLumis.json', 'campaign': 'crab_projects_RECOv3', 'dataset': 'DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8', 'file': '/store/group/phys_egamma/tnp/80X/RecoSF/RECOv3/mc//TnPTree_DYJetsToLL_M-50_TuneCUETP8M1_13TeV-amcatnloFXFX-pythia8.root', 'lumi': -1, 'nEvts': 28328619}
]
| [
"[email protected]"
] | |
4bfb6408fe3122c020282667a4a2da27d9bce309 | ea2cf796332879d86561f80882da93b672966448 | /configs/csl/rotated_retinanet_obb_csl_gaussian_r50_adamw_fpn_1x_dior_le90.py | 06b08881da2ee813a6c9b31343d7fc13168ee2d2 | [
"Apache-2.0"
] | permissive | yangxue0827/h2rbox-mmrotate | 968c34adf22eca073ab147b670226884ea80ac61 | cfd7f1fef6ae4d4e17cb891d1ec144ece8b5d7f5 | refs/heads/main | 2023-05-23T10:02:58.344148 | 2023-02-14T05:28:38 | 2023-02-14T05:28:38 | 501,580,810 | 68 | 8 | null | null | null | null | UTF-8 | Python | false | false | 669 | py | _base_ = \
['../rotated_retinanet/rotated_retinanet_hbb_r50_adamw_fpn_1x_dior_oc.py']
angle_version = 'le90'
model = dict(
bbox_head=dict(
type='CSLRRetinaHead',
angle_coder=dict(
type='CSLCoder',
angle_version=angle_version,
omega=4,
window='gaussian',
radius=3),
loss_cls=dict(
type='FocalLoss',
use_sigmoid=True,
gamma=2.0,
alpha=0.25,
loss_weight=1.0),
loss_bbox=dict(type='L1Loss', loss_weight=1.0),
loss_angle=dict(
type='SmoothFocalLoss', gamma=2.0, alpha=0.25, loss_weight=0.8)))
| [
"[email protected]"
] | |
628776070784ddd0d523624b6c9462eea95ff6bf | 0d8486c1d55c40bebea7c5428930f18165d2d0e9 | /tests/asp/AllAnswerSets/aggregates/count.example4.test.py | 76a4479b403d3b6ffc18810437b0f4bc40d563f8 | [
"Apache-2.0"
] | permissive | bernardocuteri/wasp | 6f81bf6aa8fb273c91bbf68ecce4ecb195a55953 | 05c8f961776dbdbf7afbf905ee00fc262eba51ad | refs/heads/master | 2021-06-08T11:58:25.080818 | 2020-10-05T16:57:37 | 2020-10-05T16:57:37 | 124,245,808 | 0 | 0 | Apache-2.0 | 2018-03-07T14:13:16 | 2018-03-07T14:13:16 | null | UTF-8 | Python | false | false | 625 | py | input = """
1 2 0 0
1 3 0 0
1 4 0 0
1 5 0 0
1 6 2 1 7 8
1 7 2 1 6 8
1 8 0 0
1 9 2 1 10 11
1 10 2 1 9 11
1 11 0 0
1 12 2 1 13 14
1 13 2 1 12 14
1 14 0 0
1 15 2 1 16 17
1 16 2 1 15 17
1 17 0 0
2 18 2 0 2 12 6
1 1 1 0 18
2 19 2 0 2 15 9
1 1 1 0 19
1 20 1 0 15
1 20 1 0 12
1 21 1 0 9
1 21 1 0 6
1 1 1 1 21
1 1 1 1 20
0
20 ad(a)
21 ad(b)
6 a(b,2)
9 a(b,1)
12 a(a,2)
15 a(a,1)
4 c(a)
5 c(b)
7 na(b,2)
10 na(b,1)
13 na(a,2)
16 na(a,1)
2 b(1)
3 b(2)
0
B+
0
B-
1
0
1
"""
output = """
{b(1), b(2), c(a), c(b), a(b,2), na(b,1), na(a,2), a(a,1), ad(a), ad(b)}
{b(1), b(2), c(a), c(b), na(b,2), a(b,1), a(a,2), na(a,1), ad(a), ad(b)}
"""
| [
"[email protected]"
] | |
f79c9ac3da69afb6f18aca5cfd8be28254cb7292 | 811b67fca9efd7b6a2b95500dfdfbd981a2be9a7 | /Machine Learning For Finance/Lesson5_2.py | c7d259b4d24c65ca989844a257248ee28f058f98 | [] | no_license | inaheaven/Finance_Tool | a978ae534dca646088a12b58e00a8ce995b08920 | adeaf05307dc1d4af002bea3d39c3273e93af347 | refs/heads/master | 2020-05-23T13:41:33.912276 | 2019-07-03T02:06:28 | 2019-07-03T02:06:28 | 186,781,682 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,658 | py | import pandas as pd
import matplotlib.pyplot as plt
import os
def symbol_to_path(symbol, base_dir="Data/data"):
return os.path.join(base_dir, "{}.csv".format(str(symbol)))
def get_data(symbols, dates):
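    """Read 'Adj Close' prices for the given symbols into a DataFrame indexed
    by the given dates; SPY is always included and rows where SPY has no data
    (non-trading days) are dropped."""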
df = pd.DataFrame(index=dates)
if 'SPY' not in symbols:
symbols.insert(0, 'SPY')
for symbol in symbols:
df_tmp = pd.read_csv(symbol_to_path(symbol), usecols=['Date', 'Adj Close'], index_col='Date', parse_dates=True,
na_values=['NaN'])
df_tmp = df_tmp.rename(columns={'Adj Close': symbol})
df = df.join(df_tmp)
df = df.dropna(subset=['SPY'])
print(df)
return df
def normalize_data(df):
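    """Normalize prices by the first row so every series starts at 1.0."""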
return df / df.ix[0, :]
def plot_data(df, title="STOCK PRICE"):
ax = df.plot(title=title, fontsize=12)
ax.set_xlabel("Date")
ax.set_ylabel("Price")
plt.show()
def plot_selected(df, columns, start_index, end_index):
plot_data(df.ix[start_index: end_index, columns], title="STOCK PRICE")
def test_run():
dates = pd.date_range('2012-01-01', '2012-12-31')
symbols = ['SPY']
df = get_data(symbols, dates)
print("df", df)
# df = normalize_data(df)
# plot_selected(df, ['GOOG', 'SPY', 'IBM', 'GLD'], '2010-01-01', '2010-05-01')
# print("MEAN", df.mean())
# print("MEDIAN", df.median())
# print("STD", df.std())
ax = df['SPY'].plot(title="SPY ROLLING MEAN", label='SPY')
rm_SPY = df['SPY'].rolling(20).mean()
rm_SPY.plot(label="Rolling mean", ax = ax)
ax.set_xlabel("DATE")
ax.set_ylabel("PRICE")
ax.legend(loc="upper left")
plt.show()
if __name__ == '__main__':
test_run() | [
"[email protected]"
] | |
a93164796eaa571c517dc3a2993e7377c297e581 | faa54203033398d264c75814b899d253edf71c9b | /pyflux/gas/__init__.py | dfb1ab71874ada0b2da30c57785c375b3036e9ae | [
"BSD-3-Clause"
] | permissive | th3nolo/pyflux | 4a9e646f9ee0e650676b82134d3810c0a98d8963 | 21bc18ddeabce277e4485e75962e5fa5ff3a46ea | refs/heads/master | 2020-12-02T15:08:53.257900 | 2016-07-24T17:47:59 | 2016-07-24T17:47:59 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | from .gas import GAS
from .gasx import GASX
from .gasmodels import GASPoisson, GASNormal, GASLaplace, GASt, GASSkewt, GASExponential
from .gasllm import GASLLEV
from .gasllt import GASLLT
from .gasreg import GASReg
from .scores import BetatScore, SkewtScore | [
"[email protected]"
] | |
8dc0a87dd10e8d1d8503e312210b327d6098d695 | 6ab31b5f3a5f26d4d534abc4b197fe469a68e8e5 | /katas/beta/only_readable_once_list.py | b075f9fe98d07c99e321b7906fe37c77f51fe6d7 | [
"MIT"
] | permissive | mveselov/CodeWars | e4259194bfa018299906f42cd02b8ef4e5ab6caa | 1eafd1247d60955a5dfb63e4882e8ce86019f43a | refs/heads/master | 2021-06-09T04:17:10.053324 | 2017-01-08T06:36:17 | 2017-01-08T06:36:17 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 377 | py | class SecureList(object):
def __init__(self, lst):
self.lst = list(lst)
def __getitem__(self, item):
return self.lst.pop(item)
def __len__(self):
return len(self.lst)
def __repr__(self):
tmp, self.lst = self.lst, []
return repr(tmp)
def __str__(self):
tmp, self.lst = self.lst, []
return str(tmp)
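# Illustrative behaviour (assumed usage for this kata):
#   sl = SecureList([1, 2, 3])
#   sl[0]      # -> 1; the element is removed, sl now holds [2, 3]
#   print(sl)  # -> [2, 3]; printing clears the list, so len(sl) == 0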
| [
"[email protected]"
] | |
905b497b63c36d0df8721fcbe09be8f5bcd07c97 | 7e409f6490957f96a1ea17161a3b791256a2ec4f | /31 - Form Field dan Option/mywebsite/forms.py | 0019079b233c0b91fd26b93c04438c8e64622c04 | [] | no_license | CuteCatCrying/Django | 9fb8fd06f4793ef754e6e3dfd63b9caad03317f8 | 563119a66c81bf57616f62855bc78f448204ba83 | refs/heads/master | 2022-04-02T14:13:23.700165 | 2020-02-05T09:34:46 | 2020-02-05T09:34:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,360 | py | from django import forms
class FormField(forms.Form):
# python data type
integer_field = forms.IntegerField(required=False)
decimal_field = forms.DecimalField(required=False)
float_field = forms.FloatField(required=False)
boolean_field = forms.BooleanField(required=False)
char_field = forms.CharField(max_length=10, required=False)
# string input
email_field = forms.EmailField(required=False)
    regex_field = forms.RegexField(regex=r'(?P<test>)')
slug_field = forms.SlugField()
url_field = forms.URLField()
ip_field = forms.GenericIPAddressField()
# select input
PILIHAN = (
('nilai1', 'Pilihan1'),
('nilai2', 'Pilihan2'),
('nilai3', 'Pilihan3'),
)
choice_field = forms.ChoiceField(choices=PILIHAN)
multi_choice_field = forms.MultipleChoiceField(choices=PILIHAN)
multi_typed_field = forms.TypedMultipleChoiceField(choices=PILIHAN)
null_boolean_field = forms.NullBooleanField()
# date time
date_field = forms.DateField()
datetime_field = forms.DateTimeField()
duration_field = forms.DurationField()
time_field = forms.TimeField()
splidatetime_field = forms.SplitDateTimeField()
# file input
file_field = forms.FileField()
image_field = forms.ImageField() | [
"[email protected]"
] | |
38927db265890ba6187601389420bb4787358329 | 5aef1c2397b96a352f26a9bc3c280bd69171da4c | /Environments/django-env/lib/python3.6/warnings.py | 9425815c0fab76c7114648e38564d2b1a5e2efcd | [] | no_license | FRANKLIU90/Python | ed01cb6aa31c313cdcbb1b514df5db830d0090d3 | b6066bde6e452c5463a4680a688d5628f4a1b898 | refs/heads/master | 2020-04-01T03:08:39.765409 | 2018-10-12T22:44:47 | 2018-10-12T22:44:47 | 152,811,094 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 53 | py | /Users/frankyoung/anaconda3/lib/python3.6/warnings.py | [
"[email protected]"
] | |
ebe79df1ad52ea7869c585362ad2a6af388c05ff | eb4877802021fa9f20962a7cfb176239dfb1e169 | /py/testdir_single_jvm/test_GBMGrid_basic.py | bc789ffa818ccfead0bc492810e7f8731686b00e | [
"Apache-2.0"
] | permissive | jinbochen/h2o | bd6f2b937884a6c51ccd5673310c64d6a9e1839b | 48a5196cc790ed46f7c4a556258f8d2aeb7eb1c1 | refs/heads/master | 2021-01-17T23:33:42.765997 | 2013-11-14T20:06:23 | 2013-11-14T20:08:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,703 | py | import unittest, time, sys
sys.path.extend(['.','..','py'])
import h2o, h2o_cmd, h2o_gbm, h2o_glm, h2o_hosts, h2o_import as h2i, h2o_jobs
DO_CLASSIFICATION = True
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global localhost
localhost = h2o.decide_if_localhost()
if (localhost):
h2o.build_cloud(1)
else:
h2o_hosts.build_cloud_with_hosts(1)
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_GBMGrid_basic_benign(self):
csvFilename = "benign.csv"
print "\nStarting", csvFilename
csvPathname = 'logreg/' + csvFilename
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, hex_key=csvFilename + ".hex", schema='put')
# columns start at 0
# cols 0-13. 3 is output
# no member id in this one
# fails with n_folds
print "Not doing n_folds with benign. Fails with 'unable to solve?'"
# check the first in the models list. It should be the best
colNames = [ 'STR','OBS','AGMT','FNDX','HIGD','DEG','CHK', 'AGP1','AGMN','NLV','LIV','WT','AGLP','MST' ]
modelKey = 'GBMGrid_benign'
# 'cols', 'ignored_cols_by_name', and 'ignored_cols' have to be exclusive
params = {
'destination_key': modelKey,
'ignored_cols_by_name': 'STR',
'learn_rate': .1,
'ntrees': 2,
'max_depth': 8,
'min_rows': 1,
'response': 'FNDX',
'classification': 1 if DO_CLASSIFICATION else 0,
}
kwargs = params.copy()
timeoutSecs = 1800
start = time.time()
GBMFirstResult = h2o_cmd.runGBM(parseResult=parseResult, noPoll=True,**kwargs)
print "\nGBMFirstResult:", h2o.dump_json(GBMFirstResult)
# no pattern waits for all
h2o_jobs.pollWaitJobs(pattern=None, timeoutSecs=300, pollTimeoutSecs=10, retryDelaySecs=5)
elapsed = time.time() - start
print "GBM training completed in", elapsed, "seconds."
gbmTrainView = h2o_cmd.runGBMView(model_key=modelKey)
        # errs from end of list? is that the last tree?
errsLast = gbmTrainView['gbm_model']['errs'][-1]
print "GBM 'errsLast'", errsLast
if DO_CLASSIFICATION:
cm = gbmTrainView['gbm_model']['cm']
pctWrongTrain = h2o_gbm.pp_cm_summary(cm);
print "Last line of this cm might be NAs, not CM"
print "\nTrain\n==========\n"
print h2o_gbm.pp_cm(cm)
else:
print "GBMTrainView:", h2o.dump_json(gbmTrainView['gbm_model']['errs'])
def test_GBMGrid_basic_prostate(self):
csvFilename = "prostate.csv"
print "\nStarting", csvFilename
# columns start at 0
csvPathname = 'logreg/' + csvFilename
parseResult = h2i.import_parse(bucket='smalldata', path=csvPathname, hex_key=csvFilename + ".hex", schema='put')
colNames = ['ID','CAPSULE','AGE','RACE','DPROS','DCAPS','PSA','VOL','GLEASON']
modelKey = 'GBMGrid_prostate'
# 'cols', 'ignored_cols_by_name', and 'ignored_cols' have to be exclusive
params = {
'destination_key': modelKey,
'ignored_cols_by_name': 'ID',
'learn_rate': .1,
'ntrees': 2,
'max_depth': 8,
'min_rows': 1,
'response': 'CAPSULE',
'classification': 1 if DO_CLASSIFICATION else 0,
}
kwargs = params.copy()
timeoutSecs = 1800
start = time.time()
GBMFirstResult = h2o_cmd.runGBM(parseResult=parseResult, noPoll=True,**kwargs)
print "\nGBMFirstResult:", h2o.dump_json(GBMFirstResult)
# no pattern waits for all
h2o_jobs.pollWaitJobs(pattern=None, timeoutSecs=300, pollTimeoutSecs=10, retryDelaySecs=5)
elapsed = time.time() - start
print "GBM training completed in", elapsed, "seconds."
gbmTrainView = h2o_cmd.runGBMView(model_key=modelKey)
        # errs from end of list? is that the last tree?
errsLast = gbmTrainView['gbm_model']['errs'][-1]
print "GBM 'errsLast'", errsLast
if DO_CLASSIFICATION:
cm = gbmTrainView['gbm_model']['cm']
pctWrongTrain = h2o_gbm.pp_cm_summary(cm);
print "Last line of this cm might be NAs, not CM"
print "\nTrain\n==========\n"
print h2o_gbm.pp_cm(cm)
else:
print "GBMTrainView:", h2o.dump_json(gbmTrainView['gbm_model']['errs'])
if __name__ == '__main__':
h2o.unit_main()
| [
"[email protected]"
] | |
f9c8be44958d2e203e046c76d6dfb7572522327f | 1c59524a45a5859d1bff39f83f7b4e6b2f9fdfbb | /lib/mAP/mAP.py | 78f5e3d6d62f015872801f3a79c328360dc00c04 | [] | no_license | fendaq/Arithmetic_Func_detection_for_CTPN | d62087547e863f22df4c219ddd616ced4103a42b | 2bf6e05cd706189918ef892666d151894a049fad | refs/heads/master | 2020-03-30T04:17:10.971584 | 2018-09-28T09:48:27 | 2018-09-28T09:48:27 | 150,734,626 | 2 | 0 | null | 2018-09-28T12:05:15 | 2018-09-28T12:05:15 | null | UTF-8 | Python | false | false | 26,963 | py | import glob
import json
import os
import shutil
import operator
import sys
import argparse
MINOVERLAP = 0.5 # default value (defined in the PASCAL VOC2012 challenge)
parser = argparse.ArgumentParser()
parser.add_argument('-na', '--no-animation', help="no animation is shown.", action="store_true")
parser.add_argument('-np', '--no-plot', help="no plot is shown.", action="store_true")
parser.add_argument('-q', '--quiet', help="minimalistic console output.", action="store_true")
# argparse receiving list of classes to be ignored
parser.add_argument('-i', '--ignore', nargs='+', type=str, help="ignore a list of classes.")
# argparse receiving list of classes with specific IoU
parser.add_argument('--set-class-iou', nargs='+', type=str, help="set IoU for a specific class.")
args = parser.parse_args()
# if there are no classes to ignore then replace None by empty list
if args.ignore is None:
args.ignore = []
specific_iou_flagged = False
if args.set_class_iou is not None:
specific_iou_flagged = True
# if there are no images then no animation can be shown
img_path = 'data/val_img'
if os.path.exists(img_path):
for dirpath, dirnames, files in os.walk(img_path):
if not files:
# no image files found
args.no_animation = True
else:
args.no_animation = True
# try to import OpenCV if the user didn't choose the option --no-animation
show_animation = False
if not args.no_animation:
try:
import cv2
show_animation = True
except ImportError:
print("\"opencv-python\" not found, please install to visualize the results.")
args.no_animation = True
# try to import Matplotlib if the user didn't choose the option --no-plot
draw_plot = False
if not args.no_plot:
try:
import matplotlib.pyplot as plt
draw_plot = True
except ImportError:
print("\"matplotlib\" not found, please install it to get the resulting plots.")
args.no_plot = True
"""
throw error and exit
"""
def error(msg):
print(msg)
sys.exit(0)
"""
check if the number is a float between 0.0 and 1.0
"""
def is_float_between_0_and_1(value):
try:
val = float(value)
if val > 0.0 and val < 1.0:
return True
else:
return False
except ValueError:
return False
"""
Calculate the AP given the recall and precision array
1st) We compute a version of the measured precision/recall curve with
precision monotonically decreasing
2nd) We compute the AP as the area under this curve by numerical integration.
"""
def voc_ap(rec, prec):
"""
--- Official matlab code VOC2012---
mrec=[0 ; rec ; 1];
mpre=[0 ; prec ; 0];
for i=numel(mpre)-1:-1:1
mpre(i)=max(mpre(i),mpre(i+1));
end
i=find(mrec(2:end)~=mrec(1:end-1))+1;
ap=sum((mrec(i)-mrec(i-1)).*mpre(i));
"""
rec.insert(0, 0.0) # insert 0.0 at begining of list
rec.append(1.0) # insert 1.0 at end of list
mrec = rec[:]
prec.insert(0, 0.0) # insert 0.0 at begining of list
prec.append(0.0) # insert 0.0 at end of list
mpre = prec[:]
"""
This part makes the precision monotonically decreasing
(goes from the end to the beginning)
matlab: for i=numel(mpre)-1:-1:1
mpre(i)=max(mpre(i),mpre(i+1));
"""
# matlab indexes start in 1 but python in 0, so I have to do:
# range(start=(len(mpre) - 2), end=0, step=-1)
# also the python function range excludes the end, resulting in:
# range(start=(len(mpre) - 2), end=-1, step=-1)
for i in range(len(mpre)-2, -1, -1):
mpre[i] = max(mpre[i], mpre[i+1])
"""
This part creates a list of indexes where the recall changes
matlab: i=find(mrec(2:end)~=mrec(1:end-1))+1;
"""
i_list = []
for i in range(1, len(mrec)):
if mrec[i] != mrec[i-1]:
i_list.append(i) # if it was matlab would be i + 1
"""
The Average Precision (AP) is the area under the curve
(numerical integration)
matlab: ap=sum((mrec(i)-mrec(i-1)).*mpre(i));
"""
ap = 0.0
for i in i_list:
ap += ((mrec[i]-mrec[i-1])*mpre[i])
return ap, mrec, mpre
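    # Worked micro-example (illustrative numbers): voc_ap([0.5, 1.0], [1.0, 0.5])
    # pads recall to [0, 0.5, 1, 1], builds the monotone precision envelope
    # [1, 1, 0.5, 0] and integrates to ap = 0.5*1.0 + 0.5*0.5 = 0.75.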
"""
Convert the lines of a file to a list
"""
def file_lines_to_list(path):
# open txt file lines to a list
with open(path) as f:
content = f.readlines()
# remove whitespace characters like `\n` at the end of each line
content = [x.strip() for x in content]
return content
"""
Draws text in image
"""
def draw_text_in_image(img, text, pos, color, line_width):
font = cv2.FONT_HERSHEY_PLAIN
fontScale = 1
lineType = 1
bottomLeftCornerOfText = pos
cv2.putText(img, text,
bottomLeftCornerOfText,
font,
fontScale,
color,
lineType)
text_width, _ = cv2.getTextSize(text, font, fontScale, lineType)[0]
return img, (line_width + text_width)
"""
Plot - adjust axes
"""
def adjust_axes(r, t, fig, axes):
# get text width for re-scaling
bb = t.get_window_extent(renderer=r)
text_width_inches = bb.width / fig.dpi
# get axis width in inches
current_fig_width = fig.get_figwidth()
new_fig_width = current_fig_width + text_width_inches
    proportion = new_fig_width / current_fig_width
    # get axis limit
    x_lim = axes.get_xlim()
    axes.set_xlim([x_lim[0], x_lim[1]*proportion])
"""
Draw plot using Matplotlib
"""
def draw_plot_func(dictionary, n_classes, window_title, plot_title, x_label, output_path, to_show, plot_color, true_p_bar):
# sort the dictionary by decreasing value, into a list of tuples
sorted_dic_by_value = sorted(dictionary.items(), key=operator.itemgetter(1))
# unpacking the list of tuples into two lists
sorted_keys, sorted_values = zip(*sorted_dic_by_value)
#
if true_p_bar != "":
"""
Special case to draw in (green=true predictions) & (red=false predictions)
"""
fp_sorted = []
tp_sorted = []
for key in sorted_keys:
fp_sorted.append(dictionary[key] - true_p_bar[key])
tp_sorted.append(true_p_bar[key])
plt.barh(range(n_classes), fp_sorted, align='center', color='crimson', label='False Predictions')
plt.barh(range(n_classes), tp_sorted, align='center', color='forestgreen', label='True Predictions', left=fp_sorted)
# add legend
plt.legend(loc='lower right')
"""
Write number on side of bar
"""
fig = plt.gcf() # gcf - get current figure
axes = plt.gca()
r = fig.canvas.get_renderer()
for i, val in enumerate(sorted_values):
fp_val = fp_sorted[i]
tp_val = tp_sorted[i]
fp_str_val = " " + str(fp_val)
tp_str_val = fp_str_val + " " + str(tp_val)
# trick to paint multicolor with offset:
# first paint everything and then repaint the first number
t = plt.text(val, i, tp_str_val, color='forestgreen', va='center', fontweight='bold')
plt.text(val, i, fp_str_val, color='crimson', va='center', fontweight='bold')
if i == (len(sorted_values)-1): # largest bar
adjust_axes(r, t, fig, axes)
else:
plt.barh(range(n_classes), sorted_values, color=plot_color)
"""
Write number on side of bar
"""
fig = plt.gcf() # gcf - get current figure
axes = plt.gca()
r = fig.canvas.get_renderer()
for i, val in enumerate(sorted_values):
str_val = " " + str(val) # add a space before
if val < 1.0:
str_val = " {0:.2f}".format(val)
t = plt.text(val, i, str_val, color=plot_color, va='center', fontweight='bold')
# re-set axes to show number inside the figure
if i == (len(sorted_values)-1): # largest bar
adjust_axes(r, t, fig, axes)
# set window title
fig.canvas.set_window_title(window_title)
# write classes in y axis
tick_font_size = 12
plt.yticks(range(n_classes), sorted_keys, fontsize=tick_font_size)
"""
Re-scale height accordingly
"""
init_height = fig.get_figheight()
    # compute the matrix height in points and inches
dpi = fig.dpi
height_pt = n_classes * (tick_font_size * 1.4) # 1.4 (some spacing)
height_in = height_pt / dpi
# compute the required figure height
top_margin = 0.15 # in percentage of the figure height
bottom_margin = 0.05 # in percentage of the figure height
figure_height = height_in / (1 - top_margin - bottom_margin)
# set new height
if figure_height > init_height:
fig.set_figheight(figure_height)
# set plot title
plt.title(plot_title, fontsize=14)
# set axis titles
# plt.xlabel('classes')
plt.xlabel(x_label, fontsize='large')
# adjust size of window
fig.tight_layout()
# save the plot
fig.savefig(output_path)
# show image
if to_show:
plt.show()
# close the plot
plt.close()
"""
Create a "tmp_files/" and "results/" directory
"""
tmp_files_path = "data/mAP/tmp_files"
if not os.path.exists(tmp_files_path): # if it doesn't exist already
os.makedirs(tmp_files_path)
results_files_path = "data/mAP/results"
if os.path.exists(results_files_path): # if it exist already
# reset the results directory
shutil.rmtree(results_files_path)
os.makedirs(results_files_path)
if draw_plot:
os.makedirs(results_files_path + "/classes")
if show_animation:
os.makedirs(results_files_path + "/images")
"""
Ground-Truth
Load each of the ground-truth files into a temporary ".json" file.
Create a list of all the class names present in the ground-truth (gt_classes).
"""
# get a list with the ground-truth files
ground_truth_files_list = glob.glob('data/mAP/ground_truth/*.txt')
if len(ground_truth_files_list) == 0:
error("Error: No ground-truth files found!")
ground_truth_files_list.sort()
# dictionary with counter per class
gt_counter_per_class = {}
for txt_file in ground_truth_files_list:
#print(txt_file)
file_id = txt_file.split(".txt",1)[0]
file_id = os.path.basename(os.path.normpath(file_id))
# check if there is a correspondent predicted objects file
if not os.path.exists('data/mAP/predicted/' + file_id + ".txt"):
error_msg = "Error. File not found: predicted/" + file_id + ".txt\n"
error_msg += "(You can avoid this error message by running extra/intersect-gt-and-pred.py)"
error(error_msg)
lines_list = file_lines_to_list(txt_file)
# create ground-truth dictionary
bounding_boxes = []
is_difficult = False
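    # Each ground-truth line is expected to look like (illustrative values):
    #   dog 12 34 56 78
    #   car 10 20 110 220 difficult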
for line in lines_list:
try:
if "difficult" in line:
class_name, left, top, right, bottom, _difficult = line.split()
is_difficult = True
else:
class_name, left, top, right, bottom = line.split()
except ValueError:
error_msg = "Error: File " + txt_file + " in the wrong format.\n"
error_msg += " Expected: <class_name> <left> <top> <right> <bottom> ['difficult']\n"
error_msg += " Received: " + line
error_msg += "\n\nIf you have a <class_name> with spaces between words you should remove them\n"
error_msg += "by running the script \"remove_space.py\" or \"rename_class.py\" in the \"extra/\" folder."
error(error_msg)
# check if class is in the ignore list, if yes skip
if class_name in args.ignore:
continue
bbox = left + " " + top + " " + right + " " +bottom
if is_difficult:
bounding_boxes.append({"class_name":class_name, "bbox":bbox, "used":False, "difficult":True})
is_difficult = False
else:
bounding_boxes.append({"class_name":class_name, "bbox":bbox, "used":False})
# count that object
if class_name in gt_counter_per_class:
gt_counter_per_class[class_name] += 1
else:
# if class didn't exist yet
gt_counter_per_class[class_name] = 1
# dump bounding_boxes into a ".json" file
with open(tmp_files_path + "/" + file_id + "_ground_truth.json", 'w') as outfile:
json.dump(bounding_boxes, outfile)
gt_classes = list(gt_counter_per_class.keys())
# let's sort the classes alphabetically
gt_classes = sorted(gt_classes)
n_classes = len(gt_classes)
#print(gt_classes)
#print(gt_counter_per_class)
"""
Check format of the flag --set-class-iou (if used)
e.g. check if class exists
"""
if specific_iou_flagged:
n_args = len(args.set_class_iou)
error_msg = \
'\n --set-class-iou [class_1] [IoU_1] [class_2] [IoU_2] [...]'
if n_args % 2 != 0:
error('Error, missing arguments. Flag usage:' + error_msg)
# [class_1] [IoU_1] [class_2] [IoU_2]
# specific_iou_classes = ['class_1', 'class_2']
specific_iou_classes = args.set_class_iou[::2] # even
# iou_list = ['IoU_1', 'IoU_2']
iou_list = args.set_class_iou[1::2] # odd
if len(specific_iou_classes) != len(iou_list):
error('Error, missing arguments. Flag usage:' + error_msg)
for tmp_class in specific_iou_classes:
if tmp_class not in gt_classes:
error('Error, unknown class \"' + tmp_class + '\". Flag usage:' + error_msg)
for num in iou_list:
if not is_float_between_0_and_1(num):
error('Error, IoU must be between 0.0 and 1.0. Flag usage:' + error_msg)
"""
Predicted
Load each of the predicted files into a temporary ".json" file.
"""
# get a list with the predicted files
predicted_files_list = glob.glob('data/mAP/predicted/*.txt')
predicted_files_list.sort()
for class_index, class_name in enumerate(gt_classes):
bounding_boxes = []
for txt_file in predicted_files_list:
#print(txt_file)
# the first time it checks if all the corresponding ground-truth files exist
file_id = txt_file.split(".txt",1)[0]
file_id = os.path.basename(os.path.normpath(file_id))
if class_index == 0:
if not os.path.exists('data/mAP/ground_truth/' + file_id + ".txt"):
error_msg = "Error. File not found: ground-truth/" + file_id + ".txt\n"
error_msg += "(You can avoid this error message by running extra/intersect-gt-and-pred.py)"
error(error_msg)
lines = file_lines_to_list(txt_file)
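        # Each prediction line is expected to look like (illustrative values):
        #   dog 0.87 12 34 56 78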
for line in lines:
try:
tmp_class_name, confidence, left, top, right, bottom = line.split()
except ValueError:
error_msg = "Error: File " + txt_file + " in the wrong format.\n"
error_msg += " Expected: <class_name> <confidence> <left> <top> <right> <bottom>\n"
error_msg += " Received: " + line
error(error_msg)
if tmp_class_name == class_name:
#print("match")
bbox = left + " " + top + " " + right + " " +bottom
bounding_boxes.append({"confidence":confidence, "file_id":file_id, "bbox":bbox})
#print(bounding_boxes)
# sort predictions by decreasing confidence
bounding_boxes.sort(key=lambda x:x['confidence'], reverse=True)
with open(tmp_files_path + "/" + class_name + "_predictions.json", 'w') as outfile:
json.dump(bounding_boxes, outfile)
"""
Calculate the AP for each class
"""
sum_AP = 0.0
ap_dictionary = {}
# open file to store the results
with open(results_files_path + "/results.txt", 'w') as results_file:
results_file.write("# AP and precision/recall per class\n")
count_true_positives = {}
for class_index, class_name in enumerate(gt_classes):
count_true_positives[class_name] = 0
"""
Load predictions of that class
"""
predictions_file = tmp_files_path + "/" + class_name + "_predictions.json"
predictions_data = json.load(open(predictions_file))
"""
Assign predictions to ground truth objects
"""
nd = len(predictions_data)
tp = [0] * nd # creates an array of zeros of size nd
fp = [0] * nd
for idx, prediction in enumerate(predictions_data):
file_id = prediction["file_id"]
if show_animation:
# find ground truth image
ground_truth_img = glob.glob1(img_path, file_id + ".*")
#tifCounter = len(glob.glob1(myPath,"*.tif"))
if len(ground_truth_img) == 0:
error("Error. Image not found with id: " + file_id)
elif len(ground_truth_img) > 1:
error("Error. Multiple image with id: " + file_id)
else: # found image
#print(img_path + "/" + ground_truth_img[0])
# Load image
img = cv2.imread(img_path + "/" + ground_truth_img[0])
# Add bottom border to image
bottom_border = 60
BLACK = [0, 0, 0]
img = cv2.copyMakeBorder(img, 0, bottom_border, 0, 0, cv2.BORDER_CONSTANT, value=BLACK)
# assign prediction to ground truth object if any
# open ground-truth with that file_id
gt_file = tmp_files_path + "/" + file_id + "_ground_truth.json"
ground_truth_data = json.load(open(gt_file))
ovmax = -1
gt_match = -1
# load prediction bounding-box
bb = [ float(x) for x in prediction["bbox"].split() ]
for obj in ground_truth_data:
# look for a class_name match
if obj["class_name"] == class_name:
bbgt = [ float(x) for x in obj["bbox"].split() ]
bi = [max(bb[0],bbgt[0]), max(bb[1],bbgt[1]), min(bb[2],bbgt[2]), min(bb[3],bbgt[3])]
iw = bi[2] - bi[0] + 1
ih = bi[3] - bi[1] + 1
if iw > 0 and ih > 0:
# compute overlap (IoU) = area of intersection / area of union
ua = (bb[2] - bb[0] + 1) * (bb[3] - bb[1] + 1) + (bbgt[2] - bbgt[0]
+ 1) * (bbgt[3] - bbgt[1] + 1) - iw * ih
ov = iw * ih / ua
if ov > ovmax:
ovmax = ov
gt_match = obj
# assign prediction as true positive/don't care/false positive
if show_animation:
status = "NO MATCH FOUND!" # status is only used in the animation
# set minimum overlap
min_overlap = MINOVERLAP
if specific_iou_flagged:
if class_name in specific_iou_classes:
index = specific_iou_classes.index(class_name)
min_overlap = float(iou_list[index])
if ovmax >= min_overlap:
if "difficult" not in gt_match:
if not bool(gt_match["used"]):
# true positive
tp[idx] = 1
gt_match["used"] = True
count_true_positives[class_name] += 1
# update the ".json" file
with open(gt_file, 'w') as f:
f.write(json.dumps(ground_truth_data))
if show_animation:
status = "MATCH!"
else:
# false positive (multiple detection)
fp[idx] = 1
if show_animation:
status = "REPEATED MATCH!"
else:
# false positive
fp[idx] = 1
if ovmax > 0:
status = "INSUFFICIENT OVERLAP"
"""
Draw image to show animation
"""
if show_animation:
                height, width = img.shape[:2]
# colors (OpenCV works with BGR)
white = (255,255,255)
light_blue = (255,200,100)
green = (0,255,0)
light_red = (30,30,255)
# 1st line
margin = 10
v_pos = int(height - margin - (bottom_border / 2))
text = "Image: " + ground_truth_img[0] + " "
img, line_width = draw_text_in_image(img, text, (margin, v_pos), white, 0)
text = "Class [" + str(class_index) + "/" + str(n_classes) + "]: " + class_name + " "
img, line_width = draw_text_in_image(img, text, (margin + line_width, v_pos), light_blue, line_width)
if ovmax != -1:
color = light_red
if status == "INSUFFICIENT OVERLAP":
text = "IoU: {0:.2f}% ".format(ovmax*100) + "< {0:.2f}% ".format(min_overlap*100)
else:
text = "IoU: {0:.2f}% ".format(ovmax*100) + ">= {0:.2f}% ".format(min_overlap*100)
color = green
img, _ = draw_text_in_image(img, text, (margin + line_width, v_pos), color, line_width)
# 2nd line
v_pos += int(bottom_border / 2)
rank_pos = str(idx+1) # rank position (idx starts at 0)
text = "Prediction #rank: " + rank_pos + " confidence: {0:.2f}% ".format(float(prediction["confidence"])*100)
img, line_width = draw_text_in_image(img, text, (margin, v_pos), white, 0)
color = light_red
if status == "MATCH!":
color = green
text = "Result: " + status + " "
img, line_width = draw_text_in_image(img, text, (margin + line_width, v_pos), color, line_width)
if ovmax > 0: # if there is intersections between the bounding-boxes
bbgt = [ float(x) for x in gt_match["bbox"].split() ]
cv2.rectangle(img,(int(bbgt[0]),int(bbgt[1])),(int(bbgt[2]),int(bbgt[3])),light_blue,2)
if status == "MATCH!":
cv2.rectangle(img,(int(bb[0]),int(bb[1])),(int(bb[2]),int(bb[3])),green,2)
else:
cv2.rectangle(img,(int(bb[0]),int(bb[1])),(int(bb[2]),int(bb[3])),light_red,2)
cv2.imshow("Animation", img)
cv2.waitKey(20) # show image for 20 ms
# save image to results
output_img_path = results_files_path + "/images/" + class_name + "_prediction" + str(idx) + ".jpg"
cv2.imwrite(output_img_path, img)
#print(tp)
# compute precision/recall
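        # e.g. raw per-detection flags fp = [0, 1, 0], tp = [1, 0, 1] become running
        # totals fp = [0, 1, 1], tp = [1, 1, 2] below; recall and precision are then
        # derived element-wise from these cumulative counts (illustrative numbers).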
cumsum = 0
for idx, val in enumerate(fp):
fp[idx] += cumsum
cumsum += val
cumsum = 0
for idx, val in enumerate(tp):
tp[idx] += cumsum
cumsum += val
#print(tp)
rec = tp[:]
for idx, val in enumerate(tp):
rec[idx] = float(tp[idx]) / gt_counter_per_class[class_name]
#print(rec)
prec = tp[:]
for idx, val in enumerate(tp):
prec[idx] = float(tp[idx]) / (fp[idx] + tp[idx])
#print(prec)
ap, mrec, mprec = voc_ap(rec, prec)
sum_AP += ap
text = "{0:.2f}%".format(ap*100) + " = " + class_name + " AP " #class_name + " AP = {0:.2f}%".format(ap*100)
"""
Write to results.txt
"""
rounded_prec = [ '%.2f' % elem for elem in prec ]
rounded_rec = [ '%.2f' % elem for elem in rec ]
results_file.write(text + "\n Precision: " + str(rounded_prec) + "\n Recall :" + str(rounded_rec) + "\n\n")
if not args.quiet:
print(text)
ap_dictionary[class_name] = ap
"""
Draw plot
"""
if draw_plot:
plt.plot(rec, prec, '-o')
# add a new penultimate point to the list (mrec[-2], 0.0)
# since the last line segment (and respective area) do not affect the AP value
area_under_curve_x = mrec[:-1] + [mrec[-2]] + [mrec[-1]]
area_under_curve_y = mprec[:-1] + [0.0] + [mprec[-1]]
plt.fill_between(area_under_curve_x, 0, area_under_curve_y, alpha=0.2, edgecolor='r')
# set window title
fig = plt.gcf() # gcf - get current figure
fig.canvas.set_window_title('AP ' + class_name)
# set plot title
plt.title('class: ' + text)
#plt.suptitle('This is a somewhat long figure title', fontsize=16)
# set axis titles
plt.xlabel('Recall')
plt.ylabel('Precision')
# optional - set axes
axes = plt.gca() # gca - get current axes
axes.set_xlim([0.0,1.0])
axes.set_ylim([0.0,1.05]) # .05 to give some extra space
# Alternative option -> wait for button to be pressed
#while not plt.waitforbuttonpress(): pass # wait for key display
# Alternative option -> normal display
#plt.show()
# save the plot
fig.savefig(results_files_path + "/classes/" + class_name + ".png")
plt.cla() # clear axes for next plot
if show_animation:
cv2.destroyAllWindows()
results_file.write("\n# mAP of all classes\n")
mAP = sum_AP / n_classes
text = "mAP = {0:.2f}%".format(mAP*100)
results_file.write(text + "\n")
print(text)
# remove the tmp_files directory
# shutil.rmtree(tmp_files_path)
"""
Count total of Predictions
"""
# iterate through all the files
pred_counter_per_class = {}
#all_classes_predicted_files = set([])
for txt_file in predicted_files_list:
# get lines to list
lines_list = file_lines_to_list(txt_file)
for line in lines_list:
class_name = line.split()[0]
# check if class is in the ignore list, if yes skip
if class_name in args.ignore:
continue
# count that object
if class_name in pred_counter_per_class:
pred_counter_per_class[class_name] += 1
else:
# if class didn't exist yet
pred_counter_per_class[class_name] = 1
#print(pred_counter_per_class)
pred_classes = list(pred_counter_per_class.keys())
"""
Plot the total number of occurences of each class in the ground-truth
"""
if draw_plot:
window_title = "Ground-Truth Info"
plot_title = "Ground-Truth\n"
plot_title += "(" + str(len(ground_truth_files_list)) + " files and " + str(n_classes) + " classes)"
x_label = "Number of objects per class"
output_path = results_files_path + "/Ground-Truth Info.png"
to_show = False
plot_color = 'forestgreen'
draw_plot_func(
gt_counter_per_class,
n_classes,
window_title,
plot_title,
x_label,
output_path,
to_show,
plot_color,
'',
)
"""
Write number of ground-truth objects per class to results.txt
"""
with open(results_files_path + "/results.txt", 'a') as results_file:
results_file.write("\n# Number of ground-truth objects per class\n")
for class_name in sorted(gt_counter_per_class):
results_file.write(class_name + ": " + str(gt_counter_per_class[class_name]) + "\n")
"""
Finish counting true positives
"""
for class_name in pred_classes:
# if class exists in predictions but not in ground-truth then there are no true positives in that class
if class_name not in gt_classes:
count_true_positives[class_name] = 0
#print(count_true_positives)
"""
Plot the total number of occurences of each class in the "predicted" folder
"""
if draw_plot:
window_title = "Predicted Objects Info"
# Plot title
plot_title = "Predicted Objects\n"
plot_title += "(" + str(len(predicted_files_list)) + " files and "
count_non_zero_values_in_dictionary = sum(int(x) > 0 for x in list(pred_counter_per_class.values()))
plot_title += str(count_non_zero_values_in_dictionary) + " detected classes)"
# end Plot title
x_label = "Number of objects per class"
output_path = results_files_path + "/Predicted Objects Info.png"
to_show = False
plot_color = 'forestgreen'
true_p_bar = count_true_positives
draw_plot_func(
pred_counter_per_class,
len(pred_counter_per_class),
window_title,
plot_title,
x_label,
output_path,
to_show,
plot_color,
true_p_bar
)
"""
Write number of predicted objects per class to results.txt
"""
with open(results_files_path + "/results.txt", 'a') as results_file:
results_file.write("\n# Number of predicted objects per class\n")
for class_name in sorted(pred_classes):
n_pred = pred_counter_per_class[class_name]
text = class_name + ": " + str(n_pred)
text += " (tp:" + str(count_true_positives[class_name]) + ""
text += ", fp:" + str(n_pred - count_true_positives[class_name]) + ")\n"
results_file.write(text)
"""
Draw mAP plot (Show AP's of all classes in decreasing order)
"""
if draw_plot:
window_title = "mAP"
plot_title = "mAP = {0:.2f}%".format(mAP*100)
x_label = "Average Precision"
output_path = results_files_path + "/mAP.png"
to_show = True
plot_color = 'royalblue'
draw_plot_func(
ap_dictionary,
n_classes,
window_title,
plot_title,
x_label,
output_path,
to_show,
plot_color,
""
)
| [
"[email protected]"
] | |
077fa8b5db26b02abb818582046ec268a8d0215b | 9b9a02657812ea0cb47db0ae411196f0e81c5152 | /repoData/danfolkes-Magnet2Torrent/allPythonContent.py | 0beca59a21a4fb5422f36851ab3bc50601572d49 | [] | no_license | aCoffeeYin/pyreco | cb42db94a3a5fc134356c9a2a738a063d0898572 | 0ac6653219c2701c13c508c5c4fc9bc3437eea06 | refs/heads/master | 2020-12-14T14:10:05.763693 | 2016-06-27T05:15:15 | 2016-06-27T05:15:15 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,287 | py | __FILENAME__ = Magnet_To_Torrent2
#!/usr/bin/env python
'''
Created on Apr 19, 2012
@author: dan, Faless
GNU GENERAL PUBLIC LICENSE - Version 3
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
http://www.gnu.org/licenses/gpl-3.0.txt
'''
import shutil
import tempfile
import os.path as pt
import sys
import libtorrent as lt
from time import sleep
def magnet2torrent(magnet, output_name=None):
if output_name and \
not pt.isdir(output_name) and \
not pt.isdir(pt.dirname(pt.abspath(output_name))):
print("Invalid output folder: " + pt.dirname(pt.abspath(output_name)))
print("")
sys.exit(0)
tempdir = tempfile.mkdtemp()
ses = lt.session()
params = {
'save_path': tempdir,
'duplicate_is_error': True,
'storage_mode': lt.storage_mode_t(2),
'paused': False,
'auto_managed': True,
}
handle = lt.add_magnet_uri(ses, magnet, params)
print("Downloading Metadata (this may take a while)")
while (not handle.has_metadata()):
try:
sleep(1)
except KeyboardInterrupt:
print("Aborting...")
ses.pause()
print("Cleanup dir " + tempdir)
shutil.rmtree(tempdir)
sys.exit(0)
ses.pause()
print("Done")
torinfo = handle.get_torrent_info()
torfile = lt.create_torrent(torinfo)
output = pt.abspath(torinfo.name() + ".torrent")
if output_name:
if pt.isdir(output_name):
output = pt.abspath(pt.join(
output_name, torinfo.name() + ".torrent"))
elif pt.isdir(pt.dirname(pt.abspath(output_name))):
output = pt.abspath(output_name)
print("Saving torrent file here : " + output + " ...")
torcontent = lt.bencode(torfile.generate())
f = open(output, "wb")
    f.write(torcontent)
f.close()
print("Saved! Cleaning up dir: " + tempdir)
ses.remove_torrent(handle)
shutil.rmtree(tempdir)
return output
def showHelp():
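    # Illustrative invocation (the info-hash is elided):
    #   python Magnet_To_Torrent2.py "magnet:?xt=urn:btih:..." ~/torrents/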
print("")
print("USAGE: " + pt.basename(sys.argv[0]) + " MAGNET [OUTPUT]")
print(" MAGNET\t- the magnet url")
print(" OUTPUT\t- the output torrent file name")
print("")
def main():
if len(sys.argv) < 2:
showHelp()
sys.exit(0)
magnet = sys.argv[1]
output_name = None
if len(sys.argv) >= 3:
output_name = sys.argv[2]
magnet2torrent(magnet, output_name)
if __name__ == "__main__":
main()
########NEW FILE########
| [
"[email protected]"
] | |
f90e32623be87b1e57f0764c5dee61fda7509d3a | 514223cfd2815fb91f811787d7994793d8c09019 | /QUANTAXIS/QAMarket/QAShipaneBroker.py | 141cfee13978a11d106b2673c9dd529de95f2819 | [
"MIT"
] | permissive | frosthaoz/QUANTAXIS | 350d9c8f28cecc124ae3f1b5ff1809daed5bd431 | f5f482418e5f6e23ac3530089b8d17300d931b48 | refs/heads/master | 2020-04-01T04:24:17.147637 | 2018-10-12T05:17:16 | 2018-10-12T05:17:16 | 152,862,049 | 1 | 0 | MIT | 2018-10-13T10:44:03 | 2018-10-13T10:44:02 | null | UTF-8 | Python | false | false | 14,363 | py | # coding:utf-8
import base64
import configparser
import json
import os
import urllib
import future
import asyncio
import pandas as pd
import requests
import datetime
from cryptography.hazmat.backends import default_backend
from cryptography.hazmat.primitives.ciphers import Cipher, algorithms, modes
from QUANTAXIS.QAEngine.QAEvent import QA_Event
from QUANTAXIS.QAMarket.common import cn_en_compare, trade_towards_cn_en, order_status_cn_en
from QUANTAXIS.QAMarket.QABroker import QA_Broker
from QUANTAXIS.QAMarket.QAOrderHandler import QA_OrderHandler
from QUANTAXIS.QAUtil.QAParameter import (BROKER_EVENT, ORDER_DIRECTION, BROKER_TYPE,
ORDER_MODEL, ORDER_STATUS)
from QUANTAXIS.QAUtil.QADate_trade import QA_util_get_order_datetime
from QUANTAXIS.QAUtil.QADate import QA_util_date_int2str
from QUANTAXIS.QAUtil.QASetting import setting_path
CONFIGFILE_PATH = '{}{}{}'.format(setting_path, os.sep, 'config.ini')
DEFAULT_SHIPANE_URL = 'http://127.0.0.1:8888'
DEFAULT_SHIPANE_KEY = ''
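# Sketch of the [SPE] section that get_config_SPE() below reads from config.ini
# (and creates when missing); the values shown are just the defaults above:
#   [SPE]
#   uri = http://127.0.0.1:8888
#   key =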
class SPE_CONFIG():
def __init__(self, uri=DEFAULT_SHIPANE_URL, key=DEFAULT_SHIPANE_KEY):
self.key = key
self.uri = uri
def get_config_SPE():
config = configparser.ConfigParser()
if os.path.exists(CONFIGFILE_PATH):
config.read(CONFIGFILE_PATH)
try:
return SPE_CONFIG(config.get('SPE', 'uri'), config.get('SPE', 'key'))
except configparser.NoSectionError:
config.add_section('SPE')
config.set('SPE', 'uri', DEFAULT_SHIPANE_URL)
config.set('SPE', 'key', DEFAULT_SHIPANE_KEY)
return SPE_CONFIG()
except configparser.NoOptionError:
config.set('SPE', 'uri', DEFAULT_SHIPANE_URL)
config.set('SPE', 'key', DEFAULT_SHIPANE_KEY)
return SPE_CONFIG()
finally:
with open(CONFIGFILE_PATH, 'w') as f:
config.write(f)
else:
f = open(CONFIGFILE_PATH, 'w')
config.add_section('SPE')
config.set('SPE', 'uri', DEFAULT_SHIPANE_URL)
config.set('SPE', 'key', DEFAULT_SHIPANE_KEY)
config.write(f)
f.close()
return DEFAULT_SHIPANE_URL
class QA_SPEBroker(QA_Broker):
"""
    1. Query account:
        if the account exists, return available cash and positions
        if the current market does not exist or errors out, return False
    2. Query all orders:
        on success, return a DataFrame
        on failure, return False
    3. Query open (unfilled) orders
        on success, return a DataFrame
        on failure, return False
    4. Query filled orders
        on success, return a DataFrame
        on failure, return False
    5. Place orders: receive_order/send_order
        receive_order (used by QAMARKET):
        takes a QA_Order instance
        if the order is accepted, returns the QA_Order with a realorder_id and status ORDER_STATUS.QUEUED
        if it is rejected, returns the QA_Order with status ORDER_STATUS.FAILED
        send_order (used for testing)
    6. Cancel an order: cancel_order
        on success, return True
        on failure, return the concrete reason as a dict/json
    7. Cancel all orders
        on success, return True
"""
def __init__(self):
super().__init__()
self.name = BROKER_TYPE.SHIPANE
self.order_handler = QA_OrderHandler()
self.setting = get_config_SPE()
self._session = requests
self._endpoint = self.setting.uri
self.key = self.setting.key
#self.account_headers = ['forzen_cash','balance_available','cash_available','pnl_money_today','total_assets','pnl_holding','market_value','money_available']
def run(self, event):
if event.event_type is BROKER_EVENT.RECEIVE_ORDER:
self.order_handler.run(event)
#self.run(QA_Event(event_type=BROKER_EVENT.TRADE, broker=self))
        # elif event.event_type is BROKER_EVENT.TRADE:
        # """Live-trading part!!!!! ATTENTION
        # a worker thread needs to be spawned here to poll whether the order has filled
# ATTENTION
# """
# event = self.order_handler.run(event)
# event.message = 'trade'
# if event.callback:
# event.callback(event)
elif event.event_type is BROKER_EVENT.SETTLE:
self.order_handler.run(event)
if event.callback:
event.callback('settle')
def call(self, func, params=''):
try:
if self.key == '':
uri = '{}/api/v1.0/{}?client={}'.format(
self._endpoint, func, params.pop('client'))
else:
uri = '{}/api/v1.0/{}?key={}&client={}'.format(
self._endpoint, func, self.key, params.pop('client'))
# print(uri)
response = self._session.get(uri, params)
text = response.text
return json.loads(text)
except Exception as e:
#print(e)
if isinstance(e,ConnectionRefusedError):
                print('Lost connection to the host')
print(e)
else:
print(e)
# print(uri)
return None
def call_post(self, func, params={}):
if self.key == '':
uri = '{}/api/v1.0/{}?client={}'.format(
self._endpoint, func, params.pop('client'))
else:
uri = '{}/api/v1.0/{}?key={}&client={}'.format(
self._endpoint, func, self.key, params.pop('client'))
response = self._session.post(uri, json=params)
text = response.text
return json.loads(text)
def call_delete(self, func, params=''):
if self.key == '':
uri = '{}/api/v1.0/{}?client={}'.format(
self._endpoint, func, params.pop('client'))
else:
uri = '{}/api/v1.0/{}?key={}&client={}'.format(
self._endpoint, func, self.key, params.pop('client'))
response = self._session.delete(uri)
text = response.text
# print(text)
try:
            # the Chinese message literally means "getting the prompt dialog timed
            # out because the component is empty"; both cases are treated as success
            if text in ['', '获取提示对话框超时,因为:组件为空']:
print('success')
return True
else:
return json.loads(text)
except:
return text
def data_to_df(self, result):
return pd.DataFrame(data=result)
#------ functions
def ping(self):
return self.call("ping", {})
def query_accounts(self, accounts):
return self.call("accounts", {
'client': accounts
})
    def query_positions(self, accounts):
        """Query cash and positions
Arguments:
accounts {[type]} -- [description]
Returns:
dict-- {'cash':xxx,'position':xxx}
"""
try:
data = self.call("positions", {
'client': accounts
})
# print(data)
if data is not None:
                cash_part = data.get('subAccounts', {}).get('人民币', False)  # '人民币' is the RMB (CNY) sub-account key returned by the client
if cash_part:
                    cash_available = cash_part.get('可用金额',cash_part.get('可用'))  # '可用金额'/'可用' are the "available cash" columns
position_part = data.get('dataTable', False)
if position_part:
res = data.get('dataTable', False)
if res:
hold_headers = res['columns']
hold_headers = [cn_en_compare[item]
for item in hold_headers]
hold_available = pd.DataFrame(
res['rows'], columns=hold_headers)
if len(hold_available)==1 and hold_available.amount[0] in [None, '', 0]:
hold_available=pd.DataFrame(data=None,columns=hold_headers)
return {'cash_available': cash_available, 'hold_available': hold_available.assign(amount=hold_available.amount.apply(float)).loc[:, ['code', 'amount']].set_index('code').amount}
else:
print(data)
return False, 'None ACCOUNT'
except:
return False
def query_clients(self):
return self.call("clients")
    def query_orders(self, accounts, status='filled'):
        """Query orders
Arguments:
accounts {[type]} -- [description]
Keyword Arguments:
            status {str} -- 'open' for pending (unfilled) orders, 'filled' for executed orders (default: {'filled'})
Returns:
[type] -- [description]
"""
try:
data = self.call("orders", {
'client': accounts,
'status': status
})
if data is not None:
orders = data.get('dataTable', False)
order_headers = orders['columns']
                if ('成交状态' in order_headers or '状态说明' in order_headers) and ('备注' in order_headers):
order_headers[order_headers.index('备注')]='废弃'
order_headers = [cn_en_compare[item] for item in order_headers]
order_all = pd.DataFrame(
orders['rows'], columns=order_headers).assign(account_cookie=accounts)
order_all.towards = order_all.towards.apply(
lambda x: trade_towards_cn_en[x])
if 'order_time' in order_headers:
                    # this is the order_status
order_all['status'] = order_all.status.apply(lambda x: order_status_cn_en[x])
if 'order_date' not in order_headers:
order_all.order_time = order_all.order_time.apply(
lambda x: QA_util_get_order_datetime(dt='{} {}'.format(datetime.date.today(), x)))
else:
order_all = order_all.assign(order_time=order_all.order_date.apply(QA_util_date_int2str)+' '+order_all.order_time)
if 'trade_time' in order_headers:
order_all.trade_time = order_all.trade_time.apply(
lambda x: '{} {}'.format(datetime.date.today(), x))
if status is 'filled':
return order_all.loc[:, self.dealstatus_headers].set_index(['account_cookie', 'realorder_id']).sort_index()
else:
return order_all.loc[:, self.orderstatus_headers].set_index(['account_cookie', 'realorder_id']).sort_index()
else:
print('response is None')
return False
except Exception as e:
print(e)
return False
def send_order(self, accounts, code='000001', price=9, amount=100, order_direction=ORDER_DIRECTION.BUY, order_model=ORDER_MODEL.LIMIT):
"""[summary]
Arguments:
accounts {[type]} -- [description]
code {[type]} -- [description]
price {[type]} -- [description]
amount {[type]} -- [description]
Keyword Arguments:
order_direction {[type]} -- [description] (default: {ORDER_DIRECTION.BUY})
order_model {[type]} -- [description] (default: {ORDER_MODEL.LIMIT})
            priceType options: Shanghai Stock Exchange:
                0 - limit order
                4 - best-five-levels immediate or cancel (cancel the remainder)
                6 - best-five-levels immediate, remainder converted to a limit order
                Shenzhen Stock Exchange:
                0 - limit order
                1 - best price on the counterparty side
                2 - best price on our own side
                3 - immediate or cancel
                4 - best-five-levels immediate or cancel (cancel the remainder)
                5 - fill or kill (full fill or cancel)
Returns:
[type] -- [description]
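        Illustrative call (the account id is a made-up value):
            broker.send_order('account:1', code='000001', price=9.5, amount=100,
                              order_direction=ORDER_DIRECTION.BUY,
                              order_model=ORDER_MODEL.LIMIT)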
"""
try:
#print(code, price, amount)
return self.call_post('orders', {
'client': accounts,
"action": 'BUY' if order_direction == 1 else 'SELL',
"symbol": code,
"type": order_model,
"priceType": 0 if order_model == ORDER_MODEL.LIMIT else 4,
"price": price,
"amount": amount
})
except json.decoder.JSONDecodeError:
print(RuntimeError('TRADE ERROR'))
return None
def cancel_order(self, accounts, orderid):
return self.call_delete('orders/{}'.format(orderid), {
'client': accounts
})
def cancel_all(self, accounts):
return self.call_delete('orders', {
'client': accounts
})
def receive_order(self, event):
order = event.order
res = self.send_order(accounts=order.account_cookie, code=order.code, price=order.price,
amount=order.amount, order_direction=order.towards, order_model=order.order_model)
try:
# if res is not None and 'id' in res.keys():
# order.status = ORDER_STATUS.QUEUED
# order.text = 'SUCCESS'
order.queued(realorder_id=res['id'])
print('success receive order {}'.format(order.realorder_id))
return order
# else:
except:
text = 'WRONG' if res is None else res.get(
'message', 'WRONG')
order.failed(text)
print('FAILED FOR CREATE ORDER {} {}'.format(
order.account_cookie, order.status))
print(res)
return order
#self.dealer.deal(order, self.market_data)
if __name__ == '__main__':
a = QA_SPEBroker()
    print('Query account')
acc = 'account:9173'
print(a.query_positions(acc))
    print('Query all orders')
    print(a.query_orders(acc, ''))
    print('Query open (unfilled) orders')
    print(a.query_orders(acc, 'open'))
    print('Query filled orders')
    print(a.query_orders(acc, 'filled'))
    """Test placing orders on several accounts at the same time
    """
    print('Order placement test')
res = a.send_order(acc, price=9)
#a.send_order(acc, price=9)
#a.send_order(acc, price=9)
# print(res)
    print('Query the new open orders')
print(a.query_orders(acc, 'open'))
    print('Cancel the order')
print(a.cancel_order(acc, res['id']))
    print('Query filled orders')
print(a.query_orders(acc, 'filled'))
# print(a.send_order('account:141',price=8.95))
    print('Cancel all orders at once')
print(a.cancel_all(acc))
print(a.cancel_order('account:141', '1703'))
| [
"[email protected]"
] | |
dd47b3c3b12d0394c13cd989e4409d25f90ad2cc | 3e3506f8a9c18744b5e9c1bda2f66315d2ebe753 | /snippets/serializers.py | 4089503add5e5b2f37f19c8d9fb456de701cad2f | [] | no_license | didoogan/drf | 63ad069540124ab057d4f271aa76be650486981a | 2a0446b6d38ef8ce67c031b2ac5bff62c519cf40 | refs/heads/master | 2020-07-31T00:24:19.904525 | 2016-08-24T20:55:11 | 2016-08-24T20:55:11 | 66,281,423 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 804 | py | from rest_framework import serializers
from snippets.models import Snippet, LANGUAGE_CHOICES, STYLE_CHOICES
from django.contrib.auth.models import User
class SnippetSerializer(serializers.HyperlinkedModelSerializer):
owner = serializers.ReadOnlyField(source='owner.username')
highlight = serializers.HyperlinkedIdentityField(view_name='snippet-highlight', format='html')
class Meta:
model = Snippet
fields = ('url', 'pk', 'highlight', 'owner',
'title', 'code', 'linenos', 'language', 'style')
class UserSerializer(serializers.HyperlinkedModelSerializer):
snippets = serializers.HyperlinkedRelatedField(many=True, view_name='snippet-detail', read_only=True)
class Meta:
model = User
fields = ('url', 'pk', 'username', 'snippets') | [
"[email protected]"
] | |
f52f5a4bad8ed31e4cb462e2ed17207a1b3255a4 | e298bf40ae88c2bd8e0a07f3e92f3e08a92edcc6 | /rpc.py | cbe3b0616fbd34a490c87035a6645337e0a63f49 | [] | no_license | KevinKaiQian/polar-bear | 46a814c746246394f76505846166673a049f12f2 | 61d4e0ccd7328a6aa543af3b75e5f7fedf98bf8e | refs/heads/master | 2022-04-29T02:15:35.536039 | 2021-05-19T12:33:07 | 2021-05-19T12:33:07 | 172,068,536 | 2 | 0 | null | 2022-03-29T21:56:51 | 2019-02-22T13:11:58 | Python | UTF-8 | Python | false | false | 16,644 | py | # Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
__all__ = [
'init',
'cleanup',
'set_defaults',
'add_extra_exmods',
'clear_extra_exmods',
'get_allowed_exmods',
'RequestContextSerializer',
'get_client',
'get_server',
'get_notifier',
'TRANSPORT_ALIASES',
]
import functools
from oslo_log import log as logging
import oslo_messaging as messaging
from oslo_serialization import jsonutils
from oslo_service import periodic_task
from oslo_utils import timeutils
import nova.conf
import nova.context
import nova.exception
from nova.i18n import _
from nova import objects
CONF = nova.conf.CONF
LOG = logging.getLogger(__name__)
TRANSPORT = None
LEGACY_NOTIFIER = None
NOTIFICATION_TRANSPORT = None
NOTIFIER = None
ALLOWED_EXMODS = [
nova.exception.__name__,
]
EXTRA_EXMODS = []
# NOTE(markmc): The nova.openstack.common.rpc entries are for backwards compat
# with Havana rpc_backend configuration values. The nova.rpc entries are for
# compat with Essex values.
TRANSPORT_ALIASES = {
'nova.openstack.common.rpc.impl_kombu': 'rabbit',
'nova.openstack.common.rpc.impl_qpid': 'qpid',
'nova.openstack.common.rpc.impl_zmq': 'zmq',
'nova.rpc.impl_kombu': 'rabbit',
'nova.rpc.impl_qpid': 'qpid',
'nova.rpc.impl_zmq': 'zmq',
}
def init(conf):
global TRANSPORT, NOTIFICATION_TRANSPORT, LEGACY_NOTIFIER, NOTIFIER
exmods = get_allowed_exmods()
TRANSPORT = messaging.get_transport(conf,
allowed_remote_exmods=exmods,
aliases=TRANSPORT_ALIASES)
NOTIFICATION_TRANSPORT = messaging.get_notification_transport(
conf, allowed_remote_exmods=exmods, aliases=TRANSPORT_ALIASES)
serializer = RequestContextSerializer(JsonPayloadSerializer())
if conf.notification_format == 'unversioned':
LEGACY_NOTIFIER = messaging.Notifier(NOTIFICATION_TRANSPORT,
serializer=serializer)
NOTIFIER = messaging.Notifier(NOTIFICATION_TRANSPORT,
serializer=serializer, driver='noop')
elif conf.notification_format == 'both':
LEGACY_NOTIFIER = messaging.Notifier(NOTIFICATION_TRANSPORT,
serializer=serializer)
NOTIFIER = messaging.Notifier(NOTIFICATION_TRANSPORT,
serializer=serializer,
topics=['versioned_notifications'])
else:
LEGACY_NOTIFIER = messaging.Notifier(NOTIFICATION_TRANSPORT,
serializer=serializer,
driver='noop')
NOTIFIER = messaging.Notifier(NOTIFICATION_TRANSPORT,
serializer=serializer,
topics=['versioned_notifications'])
def cleanup():
global TRANSPORT, NOTIFICATION_TRANSPORT, LEGACY_NOTIFIER, NOTIFIER
assert TRANSPORT is not None
assert NOTIFICATION_TRANSPORT is not None
assert LEGACY_NOTIFIER is not None
assert NOTIFIER is not None
TRANSPORT.cleanup()
NOTIFICATION_TRANSPORT.cleanup()
TRANSPORT = NOTIFICATION_TRANSPORT = LEGACY_NOTIFIER = NOTIFIER = None
def set_defaults(control_exchange):
messaging.set_transport_defaults(control_exchange)
def add_extra_exmods(*args):
EXTRA_EXMODS.extend(args)
def clear_extra_exmods():
del EXTRA_EXMODS[:]
def get_allowed_exmods():
return ALLOWED_EXMODS + EXTRA_EXMODS
class JsonPayloadSerializer(messaging.NoOpSerializer):
@staticmethod
def serialize_entity(context, entity):
return jsonutils.to_primitive(entity, convert_instances=True)
class RequestContextSerializer(messaging.Serializer):
def __init__(self, base):
self._base = base
def serialize_entity(self, context, entity):
if not self._base:
return entity
return self._base.serialize_entity(context, entity)
def deserialize_entity(self, context, entity):
if not self._base:
return entity
return self._base.deserialize_entity(context, entity)
def serialize_context(self, context):
return context.to_dict()
def deserialize_context(self, context):
return nova.context.RequestContext.from_dict(context)
def get_transport_url(url_str=None):
return messaging.TransportURL.parse(CONF, url_str, TRANSPORT_ALIASES)
def get_client(target, version_cap=None, serializer=None):
assert TRANSPORT is not None
serializer = RequestContextSerializer(serializer)
return messaging.RPCClient(TRANSPORT,
target,
version_cap=version_cap,
serializer=serializer)
def get_server(target, endpoints, serializer=None):
assert TRANSPORT is not None
serializer = RequestContextSerializer(serializer)
return messaging.get_rpc_server(TRANSPORT,
target,
endpoints,
executor='eventlet',
serializer=serializer)
def get_notifier(service, host=None, publisher_id=None):
assert LEGACY_NOTIFIER is not None
if not publisher_id:
publisher_id = "%s.%s" % (service, host or CONF.host)
return LegacyValidatingNotifier(
LEGACY_NOTIFIER.prepare(publisher_id=publisher_id))
def get_versioned_notifier(publisher_id):
assert NOTIFIER is not None
return NOTIFIER.prepare(publisher_id=publisher_id)
def create_transport(url):
exmods = get_allowed_exmods()
return messaging.get_transport(CONF,
url=url,
allowed_remote_exmods=exmods,
aliases=TRANSPORT_ALIASES)
class LegacyValidatingNotifier(object):
"""Wraps an oslo.messaging Notifier and checks for allowed event_types."""
    # If True, an exception is thrown if the event_type is not allowed; if False,
    # only a WARNING is logged
fatal = False
# This list contains the already existing therefore allowed legacy
# notification event_types. New items shall not be added to the list as
# Nova does not allow new legacy notifications any more. This list will be
# removed when all the notification is transformed to versioned
# notifications.
allowed_legacy_notification_event_types = [
'aggregate.addhost.end',
'aggregate.addhost.start',
'aggregate.create.end',
'aggregate.create.start',
'aggregate.delete.end',
'aggregate.delete.start',
'aggregate.removehost.end',
'aggregate.removehost.start',
'aggregate.updatemetadata.end',
'aggregate.updatemetadata.start',
'aggregate.updateprop.end',
'aggregate.updateprop.start',
'api.fault',
'compute.instance.create.end',
'compute.instance.create.error',
'compute.instance.create_ip.end',
'compute.instance.create_ip.start',
'compute.instance.create.start',
'compute.instance.delete.end',
'compute.instance.delete_ip.end',
'compute.instance.delete_ip.start',
'compute.instance.delete.start',
'compute.instance.evacuate',
'compute.instance.exists',
'compute.instance.finish_resize.end',
'compute.instance.finish_resize.start',
'compute.instance.live.migration.abort.start',
'compute.instance.live.migration.abort.end',
'compute.instance.live.migration.force.complete.start',
'compute.instance.live.migration.force.complete.end',
'compute.instance.live_migration.post.dest.end',
'compute.instance.live_migration.post.dest.start',
'compute.instance.live_migration._post.end',
'compute.instance.live_migration._post.start',
'compute.instance.live_migration.pre.end',
'compute.instance.live_migration.pre.start',
'compute.instance.live_migration.rollback.dest.end',
'compute.instance.live_migration.rollback.dest.start',
'compute.instance.live_migration._rollback.end',
'compute.instance.live_migration._rollback.start',
'compute.instance.pause.end',
'compute.instance.pause.start',
'compute.instance.power_off.end',
'compute.instance.power_off.start',
'compute.instance.power_on.end',
'compute.instance.power_on.start',
'compute.instance.reboot.end',
'compute.instance.reboot.start',
'compute.instance.rebuild.end',
'compute.instance.rebuild.error',
'compute.instance.rebuild.scheduled',
'compute.instance.rebuild.start',
'compute.instance.rescue.end',
'compute.instance.rescue.start',
'compute.instance.resize.confirm.end',
'compute.instance.resize.confirm.start',
'compute.instance.resize.end',
'compute.instance.resize.error',
'compute.instance.resize.prep.end',
'compute.instance.resize.prep.start',
'compute.instance.resize.revert.end',
'compute.instance.resize.revert.start',
'compute.instance.resize.start',
'compute.instance.restore.end',
'compute.instance.restore.start',
'compute.instance.resume.end',
'compute.instance.resume.start',
'compute.instance.shelve.end',
'compute.instance.shelve_offload.end',
'compute.instance.shelve_offload.start',
'compute.instance.shelve.start',
'compute.instance.shutdown.end',
'compute.instance.shutdown.start',
'compute.instance.snapshot.end',
'compute.instance.snapshot.start',
'compute.instance.soft_delete.end',
'compute.instance.soft_delete.start',
'compute.instance.suspend.end',
'compute.instance.suspend.start',
'compute.instance.trigger_crash_dump.end',
'compute.instance.trigger_crash_dump.start',
'compute.instance.unpause.end',
'compute.instance.unpause.start',
'compute.instance.unrescue.end',
'compute.instance.unrescue.start',
'compute.instance.unshelve.start',
'compute.instance.unshelve.end',
'compute.instance.update',
'compute.instance.volume.attach',
'compute.instance.volume.detach',
'compute.libvirt.error',
'compute.metrics.update',
'compute_task.build_instances',
'compute_task.migrate_server',
'compute_task.rebuild_server',
'HostAPI.power_action.end',
'HostAPI.power_action.start',
'HostAPI.set_enabled.end',
'HostAPI.set_enabled.start',
'HostAPI.set_maintenance.end',
'HostAPI.set_maintenance.start',
'keypair.create.start',
'keypair.create.end',
'keypair.delete.start',
'keypair.delete.end',
'keypair.import.start',
'keypair.import.end',
'network.floating_ip.allocate',
'network.floating_ip.associate',
'network.floating_ip.deallocate',
'network.floating_ip.disassociate',
'scheduler.select_destinations.end',
'scheduler.select_destinations.start',
'servergroup.addmember',
'servergroup.create',
'servergroup.delete',
'volume.usage',
]
message = _('%(event_type)s is not a versioned notification and not '
'whitelisted. See ./doc/source/notification.rst')
def __init__(self, notifier):
self.notifier = notifier
for priority in ['debug', 'info', 'warn', 'error', 'critical']:
setattr(self, priority,
functools.partial(self._notify, priority))
def _is_wrap_exception_notification(self, payload):
# nova.exception_wrapper.wrap_exception decorator emits notification
# where the event_type is the name of the decorated function. This
# is used in many places but it will be converted to versioned
        # notification in one run by updating the decorator, so it is pointless
        # to white list all the function names here; we white list the
        # notification itself, detected by the special payload keys.
return {'exception', 'args'} == set(payload.keys())
def _notify(self, priority, ctxt, event_type, payload):
if (event_type not in self.allowed_legacy_notification_event_types and
not self._is_wrap_exception_notification(payload)):
if self.fatal:
raise AssertionError(self.message % {'event_type': event_type})
else:
LOG.warning(self.message, {'event_type': event_type})
getattr(self.notifier, priority)(ctxt, event_type, payload)
class ClientWrapper(object):
def __init__(self, client):
self._client = client
self.last_access_time = timeutils.utcnow()
@property
def client(self):
self.last_access_time = timeutils.utcnow()
return self._client
class ClientRouter(periodic_task.PeriodicTasks):
"""Creates and caches RPC clients that route to cells or the default.
The default client connects to the API cell message queue. The rest of the
clients connect to compute cell message queues.
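    Illustrative use (a sketch; ctxt, instance and 'compute-1' are made-up
    arguments): build router = ClientRouter(default_client), then
    router.by_host(ctxt, 'compute-1') or router.by_instance(ctxt, instance)
    returns the RPC client for the matching cell, falling back to the default.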
"""
def __init__(self, default_client):
super(ClientRouter, self).__init__(CONF)
self.clients = {}
self.clients['default'] = ClientWrapper(default_client)
self.target = default_client.target
self.version_cap = default_client.version_cap
# NOTE(melwitt): Cells v1 does its own serialization and won't
# have a serializer available on the client object.
self.serializer = getattr(default_client, 'serializer', None)
# Prevent this empty context from overwriting the thread local copy
self.run_periodic_tasks(nova.context.RequestContext(overwrite=False))
def _client(self, context, cell_mapping=None):
if cell_mapping:
client_id = cell_mapping.uuid
else:
client_id = 'default'
try:
client = self.clients[client_id].client
except KeyError:
transport = create_transport(cell_mapping.transport_url)
client = messaging.RPCClient(transport, self.target,
version_cap=self.version_cap,
serializer=self.serializer)
self.clients[client_id] = ClientWrapper(client)
return client
@periodic_task.periodic_task
def _remove_stale_clients(self, context):
timeout = 60
def stale(client_id, last_access_time):
if timeutils.is_older_than(last_access_time, timeout):
LOG.debug('Removing stale RPC client: %s as it was last '
'accessed at %s', client_id, last_access_time)
return True
return False
# Never expire the default client
items_copy = list(self.clients.items())
for client_id, client_wrapper in items_copy:
if (client_id != 'default' and
stale(client_id, client_wrapper.last_access_time)):
del self.clients[client_id]
def by_instance(self, context, instance):
try:
cell_mapping = objects.InstanceMapping.get_by_instance_uuid(
context, instance.uuid).cell_mapping
except nova.exception.InstanceMappingNotFound:
# Not a cells v2 deployment
cell_mapping = None
return self._client(context, cell_mapping=cell_mapping)
def by_host(self, context, host):
try:
cell_mapping = objects.HostMapping.get_by_host(
context, host).cell_mapping
except nova.exception.HostMappingNotFound:
# Not a cells v2 deployment
cell_mapping = None
return self._client(context, cell_mapping=cell_mapping)
| [
"[email protected]"
] | |
2c3f9bf1eb7af4abf52697eb26f2d9fc8262ce2d | 23631af0987b3f1d30b0bf8bfcea1bd63159eeba | /gate_api/configuration.py | 4b19c91ec5f25ad45dffcb2880c0358f4580b286 | [] | no_license | xuvw/gateapi-python | 08c3c72ff0e2c4713bf3a2ffe0b15d05e57491ca | 1a3f3551cba4a756f76f17b070c3e0c5ff2e88ea | refs/heads/master | 2020-05-25T14:33:35.592775 | 2019-04-02T08:50:25 | 2019-04-02T08:50:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 8,144 | py | # coding: utf-8
"""
Gate API v4
APIv4 provides spot, margin and futures trading operations. There are public APIs to retrieve the real-time market statistics, and private APIs which needs authentication to trade on user's behalf. # noqa: E501
OpenAPI spec version: 4.6.1
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
from __future__ import absolute_import
import copy
import logging
import multiprocessing
import sys
import urllib3
import six
from six.moves import http_client as httplib
class TypeWithDefault(type):
def __init__(cls, name, bases, dct):
super(TypeWithDefault, cls).__init__(name, bases, dct)
cls._default = None
def __call__(cls):
if cls._default is None:
cls._default = type.__call__(cls)
return copy.copy(cls._default)
def set_default(cls, default):
cls._default = copy.copy(default)
class Configuration(six.with_metaclass(TypeWithDefault, object)):
"""NOTE: This class is auto generated by OpenAPI Generator
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
def __init__(self):
"""Constructor"""
# Default Base url
self.host = "https://api.gateio.ws/api/v4"
# Temp file folder for downloading files
self.temp_folder_path = None
# Authentication Settings
# dict to store API key(s)
self.api_key = {}
# dict to store API prefix (e.g. Bearer)
self.api_key_prefix = {}
# Username for HTTP basic authentication
self.username = ""
# Password for HTTP basic authentication
self.password = ""
# Logging Settings
self.logger = {}
self.logger["package_logger"] = logging.getLogger("gate_api")
self.logger["urllib3_logger"] = logging.getLogger("urllib3")
# Log format
self.logger_format = '%(asctime)s %(levelname)s %(message)s'
# Log stream handler
self.logger_stream_handler = None
# Log file handler
self.logger_file_handler = None
# Debug file location
self.logger_file = None
# Debug switch
self.debug = False
# SSL/TLS verification
# Set this to false to skip verifying SSL certificate when calling API
# from https server.
self.verify_ssl = True
# Set this to customize the certificate file to verify the peer.
self.ssl_ca_cert = None
# client certificate file
self.cert_file = None
# client key file
self.key_file = None
# Set this to True/False to enable/disable SSL hostname verification.
self.assert_hostname = None
# urllib3 connection pool's maximum number of connections saved
# per pool. urllib3 uses 1 connection as default value, but this is
# not the best value when you are making a lot of possibly parallel
# requests to the same host, which is often the case here.
# cpu_count * 5 is used as default value to increase performance.
self.connection_pool_maxsize = multiprocessing.cpu_count() * 5
# Proxy URL
self.proxy = None
# Safe chars for path_param
self.safe_chars_for_path_param = ''
# API key and secret
self.key = ""
self.secret = ""
@property
def logger_file(self):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
return self.__logger_file
@logger_file.setter
def logger_file(self, value):
"""The logger file.
If the logger_file is None, then add stream handler and remove file
handler. Otherwise, add file handler and remove stream handler.
:param value: The logger_file path.
:type: str
"""
self.__logger_file = value
if self.__logger_file:
# If set logging file,
# then add file handler and remove stream handler.
self.logger_file_handler = logging.FileHandler(self.__logger_file)
self.logger_file_handler.setFormatter(self.logger_formatter)
for _, logger in six.iteritems(self.logger):
logger.addHandler(self.logger_file_handler)
@property
def debug(self):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
return self.__debug
@debug.setter
def debug(self, value):
"""Debug status
:param value: The debug status, True or False.
:type: bool
"""
self.__debug = value
if self.__debug:
# if debug status is True, turn on debug logging
for _, logger in six.iteritems(self.logger):
logger.setLevel(logging.DEBUG)
# turn on httplib debug
httplib.HTTPConnection.debuglevel = 1
else:
# if debug status is False, turn off debug logging,
# setting log level to default `logging.WARNING`
for _, logger in six.iteritems(self.logger):
logger.setLevel(logging.WARNING)
# turn off httplib debug
httplib.HTTPConnection.debuglevel = 0
@property
def logger_format(self):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
return self.__logger_format
@logger_format.setter
def logger_format(self, value):
"""The logger format.
The logger_formatter will be updated when sets logger_format.
:param value: The format string.
:type: str
"""
self.__logger_format = value
self.logger_formatter = logging.Formatter(self.__logger_format)
def get_api_key_with_prefix(self, identifier):
"""Gets API key (with prefix if set).
:param identifier: The identifier of apiKey.
:return: The token for api key authentication.
"""
if (self.api_key.get(identifier) and
self.api_key_prefix.get(identifier)):
return self.api_key_prefix[identifier] + ' ' + self.api_key[identifier] # noqa: E501
elif self.api_key.get(identifier):
return self.api_key[identifier]
def get_basic_auth_token(self):
"""Gets HTTP basic authentication header (string).
:return: The token for basic HTTP authentication.
"""
return urllib3.util.make_headers(
basic_auth=self.username + ':' + self.password
).get('authorization')
def auth_settings(self):
"""Gets Auth Settings dict for api client.
:return: The Auth Settings information dict.
"""
return {
'api_key':
{
'type': 'api_key',
'in': 'header',
'key': 'KEY',
'value': self.get_api_key_with_prefix('KEY')
},
'api_sign':
{
'type': 'api_key',
'in': 'header',
'key': 'SIGN',
'value': self.get_api_key_with_prefix('SIGN')
},
'api_timestamp':
{
'type': 'api_key',
'in': 'header',
'key': 'Timestamp',
'value': self.get_api_key_with_prefix('Timestamp')
},
}
def to_debug_report(self):
"""Gets the essential information for debugging.
:return: The report for debugging.
"""
return "Python SDK Debug Report:\n"\
"OS: {env}\n"\
"Python Version: {pyversion}\n"\
"Version of the API: 4.6.1\n"\
"SDK Package Version: 4.6.1".\
format(env=sys.platform, pyversion=sys.version)
| [
"[email protected]"
] | |
4e8a9a0d46ec6fd7a0d4e4a3469d3697125dc00a | 11c24617b0f62bc55b7d2f34eb65fa63e3e3ec06 | /Comprehension-Exercise/02. Words Lengths.py | aea8d7a42100485f584a4451e6753d1751ba87ad | [] | no_license | SilviaKoynova/Python-Advanced | 2d1750a4943b82a82ec910d29241bd3fc473289e | 0a94556592bca60b29a85849a5e694f2eeeda52b | refs/heads/main | 2023-07-18T05:41:33.641250 | 2021-08-26T21:15:13 | 2021-08-26T21:15:13 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 89 | py | text = input().split(', ')
print(*[f"{name} -> {len(name)}" for name in text], sep=', ') | [
"[email protected]"
] | |
891825cba08187b7841fc95a4b003d692a9c30e3 | fa4618ab323f37137e2cf3044a773085f4737c03 | /yabgp/tests/unit/message/attribute/test_mpreachnlri.py | f17a5fd14f4ab18c6bdc0be08a03e93b45dd4e75 | [
"Apache-2.0"
] | permissive | heidinet/yabgp | 5ebe5dfe93ad34baef71a1a9f8493649fd479be6 | 912e0cd71d3d95089556e421e5499d6bed299414 | refs/heads/master | 2021-01-01T19:45:52.508388 | 2017-07-21T08:29:11 | 2017-07-21T08:29:11 | 98,679,210 | 1 | 0 | null | 2017-07-28T18:57:11 | 2017-07-28T18:57:11 | null | UTF-8 | Python | false | false | 12,557 | py | # Copyright 2015 Cisco Systems, Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
""" Unittest for MPReach NLRI"""
import unittest
from yabgp.message.attribute.mpreachnlri import MpReachNLRI
class TestMpReachNLRI(unittest.TestCase):
def setUp(self):
self.maxDiff = None
def test_ipv4_mpls_vpn_parse(self):
data_bin = b'\x80\x0e\x21\x00\x01\x80\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x02\x02\x02\x02' \
b'\x00\x78\x00\x01\x91\x00\x00\x00\x64\x00\x00\x00\x64\xaa\x00\x00\x00'
data_hoped = {'afi_safi': (1, 128),
'nexthop': {'rd': '0:0', 'str': '2.2.2.2'},
'nlri': [{'label': [25],
'rd': '100:100',
'prefix': '170.0.0.0/32'}]}
self.assertEqual(data_hoped, MpReachNLRI.parse(data_bin[3:]))
def test_ipv4_mpsl_vpn_construct_nexthop(self):
nexthop = {'rd': '0:0', 'str': '2.2.2.2'}
nexthop_bin = b'\x00\x00\x00\x00\x00\x00\x00\x00\x02\x02\x02\x02'
self.assertEqual(nexthop_bin, MpReachNLRI.construct_mpls_vpn_nexthop(nexthop))
def test_ipv6_mpls_vpn_construct_nexthop(self):
nexthop = {'rd': '0:0', 'str': '::ffff:172.16.4.12'}
nexthop_bin = b'\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' \
b'\x00\x00\x00\x00\x00\x00\xff\xff\xac\x10\x04\x0c'
self.assertEqual(nexthop_bin, MpReachNLRI.construct_mpls_vpn_nexthop(nexthop))
def test_ipv4_mpls_vpn_construct(self):
data_bin = b'\x80\x0e\x21\x00\x01\x80\x0c\x00\x00\x00\x00\x00\x00\x00\x00\x02\x02\x02\x02' \
b'\x00\x78\x00\x01\x91\x00\x00\x00\x64\x00\x00\x00\x64\xaa\x00\x00\x00'
data_parsed = {'afi_safi': (1, 128),
'nexthop': {'rd': '0:0', 'str': '2.2.2.2'},
'nlri': [{'label': [25],
'rd': '100:100',
'prefix': '170.0.0.0/32'}]}
self.assertEqual(data_bin, MpReachNLRI.construct(data_parsed))
def test_ipv6_unicast(self):
data_bin = b"\x00\x02\x01\x10\x20\x01\x32\x32\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" \
b"\x01\x00\x80\x20\x01\x32\x32\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x01" \
b"\x40\x20\x01\x32\x32\x00\x01\x00\x00\x7f\x20\x01\x48\x37\x16\x32\x00\x00\x00" \
b"\x00\x00\x00\x00\x00\x00\x02"
data_hoped = {
'afi_safi': (2, 1),
'nexthop': '2001:3232::1',
'nlri': ['2001:3232::1/128', '::2001:3232:1:0/64', '2001:4837:1632::2/127']}
self.assertEqual(data_hoped, MpReachNLRI.parse(data_bin))
def test_ipv6_unicast_with_linklocal_nexthop(self):
data_bin = b"\x00\x02\x01\x20\x20\x01\x0d\xb8\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" \
b"\x02\xfe\x80\x00\x00\x00\x00\x00\x00\xc0\x02\x0b\xff\xfe\x7e\x00\x00\x00\x40" \
b"\x20\x01\x0d\xb8\x00\x02\x00\x02\x40\x20\x01\x0d\xb8\x00\x02\x00\x01\x40\x20" \
b"\x01\x0d\xb8\x00\x02\x00\x00"
data_hoped = {
'afi_safi': (2, 1),
'linklocal_nexthop': 'fe80::c002:bff:fe7e:0',
'nexthop': '2001:db8::2',
'nlri': ['::2001:db8:2:2/64', '::2001:db8:2:1/64', '::2001:db8:2:0/64']}
self.assertEqual(data_hoped, MpReachNLRI.parse(data_bin))
def test_ipv6_unicast_construct(self):
data_parsed = {
'afi_safi': (2, 1),
'nexthop': '2001:3232::1',
'nlri': ['2001:3232::1/128', '::2001:3232:1:0/64', '2001:4837:1632::2/127']}
self.assertEqual(data_parsed, MpReachNLRI.parse(MpReachNLRI.construct(data_parsed)[3:]))
def test_ipv6_unicast_with_locallink_nexthop_construct(self):
data_hoped = {
'afi_safi': (2, 1),
'linklocal_nexthop': 'fe80::c002:bff:fe7e:0',
'nexthop': '2001:db8::2',
'nlri': ['::2001:db8:2:2/64', '::2001:db8:2:1/64', '::2001:db8:2:0/64']}
self.assertEqual(data_hoped, MpReachNLRI.parse(MpReachNLRI.construct(data_hoped)[3:]))
def test_ipv6_mpls_vpn_parse(self):
data_bin = b'\x80\x0e\x45\x00\x02\x80\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' \
b'\x00\x00\x00\x00\x00\x00\xff\xff\xac\x10\x04\x0c\x00\x98\x00\x03\x61\x00\x00' \
b'\x00\x64\x00\x00\x00\x0c\x20\x10\x00\x00\x00\x12\x00\x04\x98\x00\x03\x71\x00' \
b'\x00\x00\x64\x00\x00\x00\x0c\x20\x10\x00\x01\x00\x12\x00\x00'
data_hoped = {
'afi_safi': (2, 128),
'nexthop': {'rd': '0:0', 'str': '::ffff:172.16.4.12'},
'nlri': [
{'label': [54], 'rd': '100:12', 'prefix': '2010:0:12:4::/64'},
{'label': [55], 'rd': '100:12', 'prefix': '2010:1:12::/64'}
]
}
self.assertEqual(data_hoped, MpReachNLRI.parse(data_bin[3:]))
def test_ipv6_mpls_vpn_construct(self):
data_bin = b'\x80\x0e\x45\x00\x02\x80\x18\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00' \
b'\x00\x00\x00\x00\x00\x00\xff\xff\xac\x10\x04\x0c\x00\x98\x00\x03\x61\x00\x00' \
b'\x00\x64\x00\x00\x00\x0c\x20\x10\x00\x00\x00\x12\x00\x04\x98\x00\x03\x71\x00' \
b'\x00\x00\x64\x00\x00\x00\x0c\x20\x10\x00\x01\x00\x12\x00\x00'
data_hoped = {
'afi_safi': (2, 128),
'nexthop': {'rd': '0:0', 'str': '::ffff:172.16.4.12'},
'nlri': [
{'label': [54], 'rd': '100:12', 'prefix': '2010:0:12:4::/64'},
{'label': [55], 'rd': '100:12', 'prefix': '2010:1:12::/64'}
]
}
self.assertEqual(data_bin, MpReachNLRI.construct(data_hoped))
def test_ipv4_flowspec_parse_multi_nlri_with_nexthop(self):
data_bin = b'\x0e\x00\x1b\x00\x01\x85\x00\x00\x0a\x01\x18\xc0\x58\x03\x02\x18\xc0\x59\x03\x0a' \
b'\x01\x18\xc0\x58\x04\x02\x18\xc0\x59\x04'
data_dict = {
'afi_safi': (1, 133),
'nexthop': '',
'nlri': [
{1: '192.88.3.0/24', 2: '192.89.3.0/24'},
{1: '192.88.4.0/24', 2: '192.89.4.0/24'}
]}
self.assertEqual(data_dict, MpReachNLRI.parse(data_bin[3:]))
def test_ipv4_flowspec_construct(self):
data_bin = b'\x80\x0e\x10\x00\x01\x85\x00\x00\x0a\x01\x18\xc0\x55\x02\x02\x18\xc0\x55\x01'
data_dict = {'afi_safi': (1, 133), 'nexthop': '', 'nlri': [{1: '192.85.2.0/24', 2: '192.85.1.0/24'}]}
self.assertEqual(data_bin, MpReachNLRI.construct(data_dict))
def test_ipv4_flowspec_construct_multi_nlri(self):
data_dict = {
'afi_safi': (1, 133),
'nexthop': '',
'nlri': [
{1: '192.88.3.0/24', 2: '192.89.3.0/24'},
{1: '192.88.4.0/24', 2: '192.89.4.0/24'}
]}
data_bin_cons = MpReachNLRI.construct(data_dict)
self.assertEqual(data_dict, MpReachNLRI.parse(data_bin_cons[3:]))
def test_l2vpn_evpn_parse_construct_route_type1(self):
data_dict = {
"afi_safi": (25, 70),
"nexthop": "10.75.44.254",
"nlri": [{
"type": 1,
"value": {
"rd": "1.1.1.1:32867",
"esi": 0,
"eth_tag_id": 100,
"label": [10]
}
}]
}
self.assertEqual(data_dict, MpReachNLRI.parse(MpReachNLRI.construct(data_dict)[3:]))
def test_l2vpn_evpn_parse_route_type2(self):
data_bin = b'\x80\x0e\x30\x00\x19\x46\x04\xac\x11\x00\x03\x00\x02\x25\x00\x01\xac\x11' \
b'\x00\x03\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x6c' \
b'\x30\x00\x11\x22\x33\x44\x55\x20\x0b\x0b\x0b\x01\x00\x00\x01'
data_dict = {
'afi_safi': (25, 70),
'nexthop': '172.17.0.3',
'nlri': [
{
'type': 2,
'value': {
'eth_tag_id': 108,
'ip': '11.11.11.1',
'label': [0],
'rd': '172.17.0.3:2',
'mac': '00-11-22-33-44-55',
'esi': 0}}]
}
self.assertEqual(data_dict, MpReachNLRI.parse(data_bin[3:]))
def test_l2vpn_evpn_construct_route_type2(self):
data_bin = b'\x80\x0e\x30\x00\x19\x46\x04\xac\x11\x00\x03\x00\x02\x25\x00\x01\xac\x11' \
b'\x00\x03\x00\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x6c' \
b'\x30\x00\x11\x22\x33\x44\x55\x20\x0b\x0b\x0b\x01\x00\x00\x00'
data_dict = {
'afi_safi': (25, 70),
'nexthop': '172.17.0.3',
'nlri': [
{
'type': 2,
'value': {
'eth_tag_id': 108,
'ip': '11.11.11.1',
'label': [0],
'rd': '172.17.0.3:2',
'mac': '00-11-22-33-44-55',
'esi': 0}}]
}
self.assertEqual(data_bin, MpReachNLRI.construct(data_dict))
def test_l2vpn_evpn_parse_construct_route_type3(self):
data_dict = {
"afi_safi": (25, 70),
"nexthop": "10.75.44.254",
"nlri": [
{
"type": 3,
"value": {
"rd": "172.16.0.1:5904",
"eth_tag_id": 100,
"ip": "192.168.0.1"
}
}
]
}
self.assertEqual(data_dict, MpReachNLRI.parse(MpReachNLRI.construct(data_dict)[3:]))
def test_l2vpn_evpn_parse_construct_route_type4(self):
data_dict = {
"afi_safi": (25, 70),
"nexthop": "10.75.44.254",
"nlri": [
{
"type": 4,
"value": {
"rd": "172.16.0.1:8888",
"esi": 0,
"ip": "192.168.0.1"
}
}
]
}
self.assertEqual(data_dict, MpReachNLRI.parse(MpReachNLRI.construct(data_dict)[3:]))
def test_linkstate(self):
self.maxDiff = None
data = b"\x90\x0e\x00\x62\x40\x04\x47\x04\x0a\x7c\x01\x7e\x00\x00\x02\x00" \
b"\x55\x02\x00\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x1a\x02\x00" \
b"\x00\x04\x00\x00\xff\xfe\x02\x01\x00\x04\x00\x00\x00\x00\x02\x03" \
b"\x00\x06\x00\x00\x00\x00\x00\x01\x01\x01\x00\x1a\x02\x00\x00\x04" \
b"\x00\x00\xff\xfe\x02\x01\x00\x04\x00\x00\x00\x00\x02\x03\x00\x06" \
b"\x00\x00\x00\x00\x00\x03\x01\x03\x00\x04\x01\x03\x00\x01\x01\x04" \
b"\x00\x04\x01\x03\x00\x02"
data_dict = {
'afi_safi': (16388, 71),
'nexthop': '10.124.1.126',
'nlri': [
{
'type': 'link',
'value': [
{
'type': 'local-node',
'value': {
'as': 65534,
'bgpls-id': '0.0.0.0',
'igp-id': '0.0.0.1'}},
{
'type': 'remote-node',
'value': {
'as': 65534,
'bgpls-id': '0.0.0.0',
'igp-id': '0.0.0.3'}},
{'type': 'link-local-ipv4', 'value': '1.3.0.1'},
{'type': 'link-remote-ipv4', 'value': '1.3.0.2'}]}]}
self.assertEqual(data_dict, MpReachNLRI.parse(data[4:]))
if __name__ == '__main__':
unittest.main()
| [
"[email protected]"
] | |
65c2e9073be62766e95cebe8a13c9ff565514568 | 3b6ba8d4dc4dd8fe572c1419709facc7bdc2274e | /ai4water/et/utils.py | 52ff6c28677cbbd9cbc4a5bdf178ee7c3ed62076 | [
"MIT"
] | permissive | AtrCheema/AI4Water | fd5bfda1eee530e7bc9ed1b2130ed49dd0d5bf89 | ec2a4a426673b11e3589b64cef9d7160b1de28d4 | refs/heads/master | 2023-09-04T10:59:55.902200 | 2023-02-10T15:55:32 | 2023-02-10T15:55:32 | 284,684,202 | 47 | 17 | MIT | 2023-02-10T15:56:43 | 2020-08-03T11:39:22 | Python | UTF-8 | Python | false | false | 54,339 | py | import re
import math
from easy_mpl.utils import process_axis
from .converter import Temp, Speed, Pressure
from .global_variables import ALLOWED_COLUMNS, SOLAR_CONSTANT, LAMBDA
from .global_variables import default_constants, SB_CONS
from ai4water.backend import np, pd, plt
class AttributeChecker:
def __init__(self, input_df):
self.input = self.check_in_df(input_df)
self.output = {}
self.allowed_columns = ALLOWED_COLUMNS
self.no_of_hours = None
def check_in_df(self, data_frame) -> pd.DataFrame:
if not isinstance(data_frame, pd.DataFrame):
raise TypeError("input must be a pandas dataframe")
for col in data_frame.columns:
if col not in ALLOWED_COLUMNS:
                raise ValueError("""col {} given in input dataframe is not allowed. Allowed column names are {}"""
.format(col, ALLOWED_COLUMNS))
if not isinstance(data_frame.index, pd.DatetimeIndex):
index = pd.to_datetime(data_frame.index)
if not isinstance(index, pd.DatetimeIndex):
raise TypeError("index of input dataframe must be convertible to pd.DatetimeIndex")
if data_frame.shape[0] > 1:
data_frame.index.freq = pd.infer_freq(data_frame.index)
else:
setattr(self, 'single_vale', True)
setattr(self, 'in_freq', data_frame.index.freqstr)
return data_frame
class PlotData(AttributeChecker):
"""
Methods:
plot_inputs
plot_outputs
"""
def __init__(self, input_df, units):
super(PlotData, self).__init__(input_df)
self.units = units
def plot_inputs(self, _name=False):
no_of_plots = len(self.input.columns)
plt.close('all')
fig, axis = plt.subplots(no_of_plots, sharex='all')
fig.set_figheight(no_of_plots+2)
fig.set_figwidth(10.48)
idx = 0
for ax, col in zip(axis, self.input.columns):
show_xaxis = False
if idx > no_of_plots-2:
show_xaxis = True
if col in self.units:
yl = self.units[col]
else:
yl = ' '
data = self.input[col]
process_axis(ax, data, label=col, show_xaxis=show_xaxis, ylabel=yl,
legend_kws={'markerscale':8},
max_xticks=4)
idx += 1
plt.subplots_adjust(wspace=0.001, hspace=0.001)
if _name:
plt.savefig(_name, dpi=300, bbox_inches='tight')
plt.show()
def plot_outputs(self, name='', _name=False):
def marker_scale(_col):
if 'Monthly' in _col:
return 4
elif 'Yearly' in _col:
return 10
else:
return 0.5
to_plot = []
for key in self.output.keys():
if name in key:
to_plot.append(key)
no_of_plots = len(to_plot)
plt.close('all')
fig, axis = plt.subplots(no_of_plots, sharex='all')
if no_of_plots==1:
axis = [axis]
fig.set_figheight(no_of_plots+4)
fig.set_figwidth(10.48)
idx = 0
for ax, col in zip(axis, self.output.keys()):
show_xaxis = False
if idx > no_of_plots-2:
show_xaxis = True
data = self.output[col]
process_axis(ax, data, ms=marker_scale(col), label=col, show_xaxis=show_xaxis, ylabel='mm',
legend_kws={'markerscale': 8}, max_xticks=4)
idx += 1
plt.subplots_adjust(wspace=0.001, hspace=0.001)
if _name:
plt.savefig(_name, dpi=300, bbox_inches='tight')
plt.show()
class PreProcessing(PlotData):
"""
Attributes
freq_str: str
daily_index: pd.DatetimeIndex
freq_in_mins: int
"""
def __init__(self, input_df, units, constants, calculate_at='same', verbosity=1):
super(PreProcessing, self).__init__(input_df, units)
self.units = units
self.default_cons = default_constants
self.cons = constants
self.freq_in_mins = calculate_at
self.sb_cons = self.freq_in_mins
self.lat_rad = self.cons
self._check_compatability()
self.verbosity = verbosity
@property
def seconds(self):
"""finds number of seconds between two steps of input data"""
if len(self.input) > 1:
return (self.input.index[1]-self.input.index[0])/np.timedelta64(1, 's')
@property
def sb_cons(self):
return self._sb_cons
@sb_cons.setter
def sb_cons(self, freq_in_mins):
self._sb_cons = freq_in_mins * SB_CONS
@property
def lat_rad(self):
return self._lat_rad
@lat_rad.setter
def lat_rad(self, constants):
if 'lat_rad' in constants:
self._lat_rad = constants['lat_rad']
elif 'lat_dec_deg' in constants:
self._lat_rad = constants['lat_dec_deg'] * 0.0174533 # # degree to radians
else:
            raise ValueError("Provide latitude information as lat_rad or as lat_dec_deg in constants")
@property
def freq_in_mins(self):
return self._freq_in_mins
@freq_in_mins.setter
def freq_in_mins(self, calculate_at):
if calculate_at is not None and calculate_at != 'same':
if isinstance(calculate_at, str):
in_minutes = freq_in_mins_from_string(calculate_at)
else:
raise TypeError("invalid type of frequency demanded", calculate_at)
else:
in_minutes = freq_in_mins_from_string(self.input.index.freqstr)
self._freq_in_mins = in_minutes
@property
def freq_str(self) -> str:
minutes = self.freq_in_mins
freq_str = min_to_str(minutes)
return freq_str
def daily_index(self) -> pd.DatetimeIndex:
start_year = justify_len(str(self.input.index[0].year))
end_year = justify_len(str(self.input.index[-1].year))
start_month = justify_len(str(self.input.index[0].month))
        end_month = justify_len(str(self.input.index[-1].month))
start_day = justify_len(str(self.input.index[0].day))
        end_day = justify_len(str(self.input.index[-1].day))
st = start_year + start_month + start_day
en = end_year + end_month + end_day
return pd.date_range(st, en, freq='D')
def _check_compatability(self):
self._preprocess_temp()
self._preprocess_rh()
self._check_wind_units()
self._cehck_pressure_units()
self._check_rad_units()
# getting julian day
self.input['jday'] = self.input.index.dayofyear
if self.freq_in_mins == 60:
a = self.input.index.hour
ma = np.convolve(a, np.ones((2,)) / 2, mode='same')
ma[0] = ma[1] - (ma[2] - ma[1])
self.input['half_hr'] = ma
freq = self.input.index.freqstr
if len(freq) > 1:
setattr(self, 'no_of_hours', int(freq[0]))
else:
setattr(self, 'no_of_hours', 1)
self.input['t1'] = np.zeros(len(self.input)) + self.no_of_hours
elif self.freq_in_mins < 60:
a = self.input.index.hour
b = (self.input.index.minute + self.freq_in_mins / 2.0) / 60.0
self.input['half_hr'] = a + b
self.input['t1'] = np.zeros(len(self.input)) + self.freq_in_mins / 60.0
for val in ['sol_rad', 'rn']:
if val in self.input:
if self.freq_in_mins <= 60:
self.input['is_day'] = np.where(self.input[val].values > 0.1, 1, 0)
return
def _preprocess_rh(self):
# make sure that we mean relative humidity calculated if possible
if 'rel_hum' in self.input.columns:
rel_hum = self.input['rel_hum']
rel_hum = np.where(rel_hum < 0.0, 0.0, rel_hum)
rel_hum = np.where(rel_hum >= 100.0, 100.0, rel_hum)
self.input['rh_mean'] = rel_hum
self.input['rel_hum'] = rel_hum
else:
if 'rh_min' in self.input.columns:
self.input['rh_mean'] = np.mean(np.array([self.input['rh_min'].values, self.input['rh_max'].values]),
axis=0)
return
def _preprocess_temp(self):
""" converts temperature related input to units of Centigrade if required. """
# converting temperature units to celsius
for val in ['tmin', 'tmax', 'temp', 'tdew']:
if val in self.input:
t = Temp(self.input[val].values, self.units[val])
temp = t.Centigrade
self.input[val] = np.where(temp < -30, -30, temp)
# if 'temp' is given, it is assumed to be mean otherwise calculate mean and put it as `temp` in input dataframe.
if 'temp' not in self.input.columns:
if 'tmin' in self.input.columns and 'tmax' in self.input.columns:
self.input['temp'] = np.mean(np.array([self.input['tmin'].values, self.input['tmax'].values]), axis=0)
return
def _check_wind_units(self):
# check units of wind speed and convert if needed
if 'wind_speed' in self.input:
wind = self.input['wind_speed'].values
wind = np.where(wind < 0.0, 0.0, wind)
w = Speed(wind, self.units['wind_speed'])
self.input['wind_speed'] = w.MeterPerSecond
return
def _cehck_pressure_units(self):
""" converts pressure related input to units of KiloPascal if required. """
for pres in ['ea', 'es', 'vp_def']:
if pres in self.input:
p = Pressure(self.input[pres].values, self.units[pres])
self.input[pres] = p.KiloPascal
def _check_rad_units(self):
"""
        Currently it does not convert radiation units; it only makes sure that they are > 0.0.
"""
for val in ['rn', 'sol_rad']:
if val in self.input:
rad = self.input[val].values
rad = np.where(rad < 0.0, 0.0, rad)
self.input[val] = rad
class TransFormData(PreProcessing):
"""
transforms input or output data to different frequencies.
"""
def __init__(self, input_df, units, constants, calculate_at='same', verbosity=1):
self.verbosity = verbosity
input_df = self.freq_check(input_df, calculate_at)
input_df = self.transform_data(input_df, calculate_at)
super(TransFormData, self).__init__(input_df, units, constants, calculate_at, verbosity)
def freq_check(self, input_df, freq: str):
"""
        Makes sure that the input dataframe.index has a frequency. If the frequency is not there, it means the data
        contains missing values or time-steps. In that case this method fills the missing values, and the argument
        freq must not be `same`.
"""
if input_df.shape[0] > 1:
input_df.index.freq = pd.infer_freq(input_df.index)
if input_df.index.freq is None:
if freq == 'same' or freq is None:
raise ValueError("input data does not have uniform time-step. Provide a value for argument"
" `calculate_at` ")
else:
new_freq = freq_in_mins_from_string(freq)
try:
input_df.index.freq = freq
except ValueError:
input_df = self.fill_missing_data(input_df, str(new_freq) + 'min')
return input_df
def fill_missing_data(self, df: pd.DataFrame, new_freq: str):
if self.verbosity > 0:
print("input contains missing values or time-steps")
df = force_freq(df.copy(), new_freq, 'input', 'nearest')
assert df.index.freqstr is not None
return df
def transform_data(self, input_df, calculate_at):
if calculate_at == 'same' or calculate_at is None:
df = input_df
else:
new_freq_mins = freq_in_mins_from_string(calculate_at)
old_freq_mins = freq_in_mins_from_string(input_df.index.freqstr)
if new_freq_mins == old_freq_mins:
df = input_df
elif new_freq_mins > old_freq_mins:
# we want to calculate at higher/larger time-step
print('downsampling input data from {} to {}'.format(old_freq_mins, new_freq_mins))
df = self.downsample_input(input_df, new_freq_mins)
else:
print('upsampling input data from {} to {}'.format(old_freq_mins, new_freq_mins))
# we want to calculate at smaller time-step
df = self.upsample_input(input_df, new_freq_mins)
return df
def upsample_input(self, df, out_freq):
# from larger timestep to smaller timestep, such as from daily to hourly
for col in df.columns:
df[col] = self.upsample_df(pd.DataFrame(df[col]), col, out_freq)
return df
def downsample_input(self, df, out_freq):
# from low timestep to high timestep i.e from 1 hour to 24 hour
# from hourly to daily
for col in df.columns:
df[col] = self.downsample_df(pd.DataFrame(df[col]), col, out_freq)
return df
def transform_etp(self, name):
freq_to_trans = self.get_freq()
down_sample = freq_to_trans['up_sample']
up_sample = freq_to_trans['down_sample']
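        # NOTE: in get_freq() the 'down_sample' key holds the finer target
        # frequencies (reached with upsample_df below) and 'up_sample' holds the
        # coarser ones (reached with downsample_df), hence the swapped names here.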
for freq in up_sample:
in_col_name = 'et_' + name + '_' + self.freq_str
freq_str = min_to_str(freq)
out_col_name = 'et_' + name + '_' + freq_str
self.output[out_col_name] = self.upsample_df(pd.DataFrame(self.output[in_col_name]), 'et', freq)
for freq in down_sample:
in_col_name = 'et_' + name + '_' + self.freq_str
freq_str = min_to_str(freq)
out_col_name = 'et_' + name + '_' + freq_str
self.output[out_col_name] = self.downsample_df(pd.DataFrame(self.output[in_col_name]), 'et', freq)
def downsample_df(self, data_frame: pd.DataFrame, data_name: str, out_freq: int):
# from low timestep to high timestep i.e from 1 hour to 24 hour
# from hourly to daily
col_name = data_frame.columns[0]
data_frame = data_frame.copy()
old_freq = data_frame.index.freq
if self.verbosity > 1:
print('downsampling {} data from {} to {}'.format(col_name, old_freq, min_to_str(out_freq)))
out_freq = str(out_freq) + 'min'
# e.g. from hourly to daily
if data_name in ['temp', 'rel_hum', 'rh_min', 'rh_max', 'uz', 'u2', 'wind_speed_kph', 'q_lps']:
return data_frame.resample(out_freq).mean()
elif data_name in ['rain_mm', 'ss_gpl', 'sol_rad', 'etp', 'et']:
return data_frame.resample(out_freq).sum()
def upsample_df(self, data_frame, data_name, out_freq_int):
# from larger timestep to smaller timestep, such as from daily to hourly
out_freq = str(out_freq_int) + 'min'
col_name = data_frame.columns[0]
old_freq = data_frame.index.freqstr
nan_idx = data_frame.isna() # preserving indices with nan values
nan_idx_r = nan_idx.resample(out_freq).ffill()
        nan_idx_r = nan_idx_r.fillna(False)  # the first value was being filled with NaN, reason unknown
data_frame = data_frame.copy()
if self.verbosity > 1:
print('upsampling {} data from {} to {}'.format(data_name, old_freq, min_to_str(out_freq_int)))
# e.g from monthly to daily or from hourly to sub_hourly
if data_name in ['temp', 'rel_hum', 'rh_min', 'rh_max', 'uz', 'u2', 'q_lps']:
data_frame = data_frame.resample(out_freq).interpolate(method='linear')
# filling those interpolated values with NaNs which were NaN before interpolation
data_frame[nan_idx_r] = np.nan
elif data_name in ['rain_mm', 'ss_gpl', 'sol_rad', 'pet', 'pet_hr', 'et', 'etp']:
# distribute rainfall equally to smaller time steps. like hourly 17.4 will be 1.74 at 6 min resolution
idx = data_frame.index[-1] + get_offset(data_frame.index.freqstr)
data_frame = data_frame.append(data_frame.iloc[[-1]].rename({data_frame.index[-1]: idx}))
data_frame = add_freq(data_frame)
df1 = data_frame.resample(out_freq).ffill().iloc[:-1]
df1[col_name] /= df1.resample(data_frame.index.freqstr)[col_name].transform('size')
data_frame = df1.copy()
# filling those interpolated values with NaNs which were NaN before interpolation
data_frame[nan_idx_r] = np.nan
return data_frame
    def get_freq(self) -> dict:
        """ decides which frequencies the calculated ET should be up- and down-sampled to """
all_freqs = {'Sub_hourly': {'down_sample': [1], 'up_sample': [60, 1440, 43200, 525600]},
'Hourly': {'down_sample': [1], 'up_sample': [1440, 43200, 525600]},
                     'Sub-daily': {'down_sample': [1, 60], 'up_sample': [1440, 43200, 525600]},
'Daily': {'down_sample': [1, 60], 'up_sample': [43200, 525600]},
                     'Sub-monthly': {'down_sample': [1, 60, 1440], 'up_sample': [43200, 525600]},
'Monthly': {'down_sample': [1, 60, 1440], 'up_sample': [525600]},
                     'Yearly': {'down_sample': [1, 60, 1440, 43200], 'up_sample': []}
}
return all_freqs[self.freq_str]
class Utils(TransFormData):
"""
Contains functions methods for calculation of ETP with various methods.
Methods:
net_rad
atm_pressure
_wind_2m
"""
def __init__(self, input_df, units, constants, calculate_at=None, verbosity: bool=1):
"""
Arguments:
calculate_at :a valid pandas dataframe frequency
verbosity :
"""
super(Utils, self).__init__(input_df, units, constants, calculate_at=calculate_at, verbosity=verbosity)
@property
def seasonal_correction(self):
"""Seasonal correction for solar time (Eqs. 57 & 58)
uses
----------
doy : scalar or array_like of shape(M, )
Day of year.
Returns
------
ndarray
Seasonal correction [hour]
"""
doy = self.input['jday']
b = 2 * math.pi * (doy - 81.) / 364.
return 0.1645 * np.sin(2 * b) - 0.1255 * np.cos(b) - 0.0250 * np.sin(b)
def net_rad(self, ea, rs=None):
"""
Calculate daily net radiation at the crop surface, assuming a grass reference crop.
Net radiation is the difference between the incoming net shortwave (or solar) radiation and the outgoing net
longwave radiation. Output can be converted to equivalent evaporation [mm day-1] using ``energy2evap()``.
Based on equation 40 in Allen et al (1998).
:uses rns: Net incoming shortwave radiation [MJ m-2 day-1]. Can be
estimated using ``net_in_sol_rad()``.
rnl: Net outgoing longwave radiation [MJ m-2 day-1]. Can be
estimated using ``net_out_lw_rad()``.
:return: net radiation [MJ m-2 timestep-1].
:rtype: float
"""
if 'rn' not in self.input:
if rs is None:
rs = self.rs()
if 'rns' not in self.input:
rns = self.net_in_sol_rad(rs)
else:
rns = self.input['rns']
rnl = self.net_out_lw_rad(rs=rs, ea=ea)
rn = np.subtract(rns, rnl)
self.input['rn'] = rn # for future use
else:
rn = self.input['rn']
return rn
def rs(self):
"""
        calculate solar radiation either from daily sunshine hours (as first preference) or from temperature (as
        second preference, since it is less accurate). Sunshine hours data is difficult to obtain and, if it is not
        actually measured, will stay the same for all years, whereas temperature data is easy to obtain and thus will
        be different for different years"""
if 'sol_rad' not in self.input.columns:
if 'sunshine_hrs' in self.input.columns:
rs = self.sol_rad_from_sun_hours()
if self.verbosity > 0:
print("Sunshine hour data is used for calculating incoming solar radiation")
elif 'tmin' in self.input.columns and 'tmax' in self.input.columns:
rs = self._sol_rad_from_t()
if self.verbosity > 0:
print("solar radiation is calculated from temperature")
else:
raise ValueError("""Unable to calculate solar radiation. Provide either of following inputs:
sol_rad, sunshine_hrs or tmin and tmax""")
else:
rs = self.input['sol_rad']
self.input['sol_rad'] = rs
return rs
def net_in_sol_rad(self, rs):
"""
Calculate net incoming solar (or shortwave) radiation (*Rns*) from gross incoming solar radiation, assuming a
grass reference crop.
Net incoming solar radiation is the net shortwave radiation resulting from the balance between incoming and
reflected solar radiation. The output can be converted to equivalent evaporation [mm day-1] using
``energy2evap()``.
Based on FAO equation 38 in Allen et al (1998).
Rns = (1-a)Rs
uses Gross incoming solar radiation [MJ m-2 day-1]. If necessary this can be estimated using functions whose
name begins with 'solar_rad_from'.
:param rs: solar radiation
albedo: Albedo of the crop as the proportion of gross incoming solar
radiation that is reflected by the surface. Default value is 0.23,
which is the value used by the FAO for a short grass reference crop.
Albedo can be as high as 0.95 for freshly fallen snow and as low as
0.05 for wet bare soil. A green vegetation over has an albedo of
about 0.20-0.25 (Allen et al, 1998).
:return: Net incoming solar (or shortwave) radiation [MJ m-2 day-1].
:rtype: float
"""
return np.multiply((1 - self.cons['albedo']), rs)
def net_out_lw_rad(self, rs, ea):
"""
Estimate net outgoing longwave radiation.
This is the net longwave energy (net energy flux) leaving the earth's surface. It is proportional to the
absolute temperature of the surface raised to the fourth power according to the Stefan-Boltzmann law. However,
water vapour, clouds, carbon dioxide and dust are absorbers and emitters of longwave radiation. This function
corrects the Stefan- Boltzmann law for humidity (using actual vapor pressure) and cloudiness (using solar
radiation and clear sky radiation). The concentrations of all other absorbers are assumed to be constant.
The output can be converted to equivalent evaporation [mm timestep-1] using ``energy2evap()``.
Based on FAO equation 39 in Allen et al (1998).
uses: Absolute daily minimum temperature [degrees Kelvin]
Absolute daily maximum temperature [degrees Kelvin]
Solar radiation [MJ m-2 day-1]. If necessary this can be estimated using ``sol+rad()``.
Clear sky radiation [MJ m-2 day-1]. Can be estimated using ``cs_rad()``.
Actual vapour pressure [kPa]. Can be estimated using functions with names beginning with 'avp_from'.
:param ea: actual vapour pressure, can be calculated using method avp_from
:param rs: solar radiation
:return: Net outgoing longwave radiation [MJ m-2 timestep-1]
:rtype: float
"""
if 'tmin' in self.input.columns and 'tmax' in self.input.columns:
added = np.add(np.power(self.input['tmax'].values+273.16, 4), np.power(self.input['tmin'].values+273.16, 4))
divided = np.divide(added, 2.0)
else:
divided = np.power(self.input['temp'].values+273.16, 4.0)
tmp1 = np.multiply(self.sb_cons, divided)
tmp2 = np.subtract(0.34, np.multiply(0.14, np.sqrt(ea)))
tmp3 = np.subtract(np.multiply(1.35, np.divide(rs, self._cs_rad())), 0.35)
return np.multiply(tmp1, np.multiply(tmp2, tmp3)) # eq 39
def sol_rad_from_sun_hours(self):
"""
Calculate incoming solar (or shortwave) radiation, *Rs* (radiation hitting a horizontal plane after
scattering by the atmosphere) from relative sunshine duration.
If measured radiation data are not available this method is preferable to calculating solar radiation from
temperature. If a monthly mean is required then divide the monthly number of sunshine hours by number of
days in the month and ensure that *et_rad* and *daylight_hours* was calculated using the day of the year
that corresponds to the middle of the month.
Based on equations 34 and 35 in Allen et al (1998).
uses: Number of daylight hours [hours]. Can be calculated using ``daylight_hours()``.
Sunshine duration [hours]. Can be calculated using ``sunshine_hours()``.
Extraterrestrial radiation [MJ m-2 day-1]. Can be estimated using ``et_rad()``.
:return: Incoming solar (or shortwave) radiation [MJ m-2 day-1]
:rtype: float
"""
# 0.5 and 0.25 are default values of regression constants (Angstrom values)
# recommended by FAO when calibrated values are unavailable.
ss_hrs = self.input['sunshine_hrs'] # sunshine_hours
dl_hrs = self.daylight_fao56() # daylight_hours
return np.multiply(np.add(self.cons['a_s'], np.multiply(np.divide(ss_hrs, dl_hrs), self.cons['b_s'])),
self._et_rad())
def _sol_rad_from_t(self, coastal=False):
"""
Estimate incoming solar (or shortwave) radiation [Mj m-2 day-1] , *Rs*, (radiation hitting a horizontal
plane after scattering by the atmosphere) from min and max temperature together with an empirical adjustment
coefficient for 'interior' and 'coastal' regions.
The formula is based on equation 50 in Allen et al (1998) which is the Hargreaves radiation formula (Hargreaves
and Samani, 1982, 1985). This method should be used only when solar radiation or sunshine hours data are not
available. It is only recommended for locations where it is not possible to use radiation data from a regional
station (either because climate conditions are heterogeneous or data are lacking).
**NOTE**: this method is not suitable for island locations due to the
moderating effects of the surrounding water. """
# Determine value of adjustment coefficient [deg C-0.5] for
# coastal/interior locations
        if coastal:     # for 'coastal' locations, situated on or adjacent to the coast of a large land
            adj = 0.19      # mass and where air masses are influenced by a nearby water body,
else: # for 'interior' locations, where land mass dominates and air
adj = 0.16 # masses are not strongly influenced by a large water body
et_rad = None
cs_rad = None
if 'et_rad' not in self.input:
et_rad = self._et_rad()
self.input['et_rad'] = et_rad
if 'cs_rad' not in self.input:
cs_rad = self._cs_rad()
self.input['cs_rad'] = cs_rad
sol_rad = np.multiply(adj, np.multiply(np.sqrt(np.subtract(self.input['tmax'].values,
self.input['tmin'].values)), et_rad))
# The solar radiation value is constrained by the clear sky radiation
return np.min(np.array([sol_rad, cs_rad]), axis=0)
def _cs_rad(self, method='asce'):
"""
Estimate clear sky radiation from altitude and extraterrestrial radiation.
Based on equation 37 in Allen et al (1998) which is recommended when calibrated Angstrom values are not
available. et_rad is Extraterrestrial radiation [MJ m-2 day-1]. Can be estimated using ``et_rad()``.
:return: Clear sky radiation [MJ m-2 day-1]
:rtype: float
"""
if method.upper() == 'ASCE':
return (0.00002 * self.cons['altitude'] + 0.75) * self._et_rad()
elif method.upper() == 'REFET':
            sc = self.seasonal_correction  # a property, so no call
            _omega = omega(solar_time_rad(self.cons['long_dec_deg'], self.input['half_hr'], sc))
            # NOTE: the 'refet' branch is incomplete and does not yet return clear sky radiation
else:
raise ValueError
def daylight_fao56(self):
"""get number of maximum hours of sunlight for a given latitude using equation 34 in Fao56.
Annual variation of sunlight hours on earth are plotted in figre 14 in ref 1.
dr = pd.date_range('20110903 00:00', '20110903 23:59', freq='H')
sol_rad = np.array([0.45 for _ in range(len(dr))])
df = pd.DataFrame(np.stack([sol_rad],axis=1), columns=['sol_rad'], index=dr)
        constants = {'lat_dec_deg': -20}
        units={'sol_rad': 'MegaJoulePerMeterSquarePerHour'}
eto = ReferenceET(df,units,constants=constants)
N = np.unique(eto.daylight_fao56())
array([11.66])
1) http://www.fao.org/3/X0490E/x0490e07.htm"""
ws = self.sunset_angle()
hrs = (24/3.14) * ws
# if self.input_freq == 'Monthly':
# df = pd.DataFrame(hrs, index=self.daily_index)
# hrs = df.resample('M').mean().values.reshape(-1,)
return hrs
def _et_rad(self):
"""
Estimate extraterrestrial radiation (*Ra*, 'top of the atmosphere radiation').
For daily, it is based on equation 21 in Allen et al (1998). If monthly mean radiation is required make
sure *sol_dec*. *sha* and *irl* have been calculated using the day of the year that corresponds to the middle
of the month.
**Note**: From Allen et al (1998): "For the winter months in latitudes greater than 55 degrees (N or S),
the equations have limited validity. Reference should be made to the Smithsonian Tables to assess possible
deviations."
:return: extraterrestrial radiation [MJ m-2 timestep-1]
:rtype: float
dr = pd.date_range('20110903 00:00', '20110903 23:59', freq='D')
sol_rad = np.array([0.45 ])
df = pd.DataFrame(np.stack([sol_rad],axis=1), columns=['sol_rad'], index=dr)
        constants = {'lat_dec_deg': -20}
units={'sol_rad': 'MegaJoulePerMeterSquarePerHour'}
eto = ReferenceET(df,units,constants=constants)
ra = eto._et_rad()
[32.27]
"""
if self.freq_in_mins < 1440: # TODO should sub_hourly be different from Hourly?
j = (3.14/180) * self.cons['lat_dec_deg'] # eq 22 phi
dr = self.inv_rel_dist_earth_sun() # eq 23
sol_dec = self.dec_angle() # eq 24 # gamma
w1, w2 = self.solar_time_angle()
t1 = (12*60)/math.pi
t2 = np.multiply(t1, np.multiply(SOLAR_CONSTANT, dr))
t3 = np.multiply(np.subtract(w2, w1), np.multiply(np.sin(j), np.sin(sol_dec)))
t4 = np.subtract(np.sin(w2), np.sin(w1))
t5 = np.multiply(np.multiply(np.cos(j), np.cos(sol_dec)), t4)
t6 = np.add(t5, t3)
ra = np.multiply(t2, t6) # eq 28
elif self.freq_in_mins == 1440: # daily frequency
sol_dec = self.dec_angle() # based on julian day
sha = self.sunset_angle() # sunset hour angle[radians], based on latitude
ird = self.inv_rel_dist_earth_sun()
tmp1 = (24.0 * 60.0) / math.pi
tmp2 = np.multiply(sha, np.multiply(math.sin(self.lat_rad), np.sin(sol_dec)))
tmp3 = np.multiply(math.cos(self.lat_rad), np.multiply(np.cos(sol_dec), np.sin(sha)))
ra = np.multiply(tmp1, np.multiply(SOLAR_CONSTANT, np.multiply(ird, np.add(tmp2, tmp3)))) # eq 21
else:
raise NotImplementedError
self.input['ra'] = ra
return ra
def sunset_angle(self):
"""
calculates sunset hour angle in radians given by Equation 25 in Fao56 (1)
1): http://www.fao.org/3/X0490E/x0490e07.htm"""
if 'sha' not in self.input:
j = (3.14/180.0) * self.cons['lat_dec_deg'] # eq 22
d = self.dec_angle() # eq 24, declination angle
angle = np.arccos(-np.tan(j)*np.tan(d)) # eq 25
self.input['sha'] = angle
else:
angle = self.input['sha'].values
return angle
def inv_rel_dist_earth_sun(self):
"""
Calculate the inverse relative distance between earth and sun from day of the year.
Based on FAO equation 23 in Allen et al (1998).
ird = 1.0 + 0.033 * cos( [2pi/365] * j )
:return: Inverse relative distance between earth and the sun
:rtype: np array
"""
if 'ird' not in self.input:
inv1 = np.multiply(2*math.pi/365.0, self.input['jday'].values)
inv2 = np.cos(inv1)
inv3 = np.multiply(0.033, inv2)
ird = np.add(1.0, inv3)
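            # e.g. (FAO-56 Example 8) for day of year 246 this gives
            # 1 + 0.033*cos(2*pi*246/365), which is roughly 0.985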
self.input['ird'] = ird
else:
ird = self.input['ird']
return ird
def dec_angle(self):
"""
finds solar declination angle
"""
if 'sol_dec' not in self.input:
            if self.freq_str == 'Monthly':   # min_to_str() returns capitalised frequency names
solar_dec = np.array(0.409 * np.sin(2*3.14 * self.daily_index().dayofyear/365 - 1.39))
else:
solar_dec = 0.409 * np.sin(2*3.14 * self.input['jday'].values/365 - 1.39) # eq 24, declination angle
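                # e.g. (FAO-56 Example 8) day of year 246 gives a declination of
                # 0.409*sin(2*pi*246/365 - 1.39), which is roughly 0.12 radians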
self.input['solar_dec'] = solar_dec
else:
solar_dec = self.input['solar_dec']
return solar_dec
def solar_time_angle(self):
"""
returns solar time angle at start, mid and end of period using equation 29, 31 and 30 respectively in Fao
w = pi/12 [(t + 0.06667 ( lz-lm) + Sc) -12]
t =standard clock time at the midpoint of the period [hour]. For example for a period between 14.00 and 15.00
hours, t = 14.5
lm = longitude of the measurement site [degrees west of Greenwich]
lz = longitude of the centre of the local time zone [degrees west of Greenwich]
w1 = w - pi*t1/24
w2 = w + pi*t1/24
where:
w = solar time angle at midpoint of hourly or shorter period [rad]
t1 = length of the calculation period [hour]: i.e., 1 for hourly period or 0.5 for a 30-minute period
www.fao.org/3/X0490E/x0490e07.htm
"""
# TODO find out how to calculate lz
# https://github.com/djlampert/PyHSPF/blob/c3c123acf7dba62ed42336f43962a5e4db922422/src/pyhspf/preprocessing/etcalculator.py#L610
lz = np.abs(15 * round(self.cons['long_dec_deg'] / 15.0))
lm = np.abs(self.cons['long_dec_deg'])
t1 = 0.0667*(lz-lm)
t2 = self.input['half_hr'].values + t1 + self.solar_time_cor()
t3 = np.subtract(t2, 12)
w = np.multiply((math.pi/12.0), t3) # eq 31, in rad
w1 = np.subtract(w, np.divide(np.multiply(math.pi, self.input['t1']).values, 24.0)) # eq 29
w2 = np.add(w, np.divide(np.multiply(math.pi, self.input['t1']).values, 24.0)) # eq 30
return w1, w2
def solar_time_cor(self):
"""seasonal correction for solar time by implementation of eqation 32 in hour, `Sc`"""
upar = np.multiply((2*math.pi), np.subtract(self.input['jday'].values, 81))
b = np.divide(upar, 364) # eq 33
t1 = np.multiply(0.1645, np.sin(np.multiply(2, b)))
t2 = np.multiply(0.1255, np.cos(b))
t3 = np.multiply(0.025, np.sin(b))
return t1-t2-t3 # eq 32
def avp_from_rel_hum(self):
"""
Estimate actual vapour pressure (*ea*) from saturation vapour pressure and relative humidity.
Based on FAO equation 17 in Allen et al (1998).
ea = [ e_not(tmin)RHmax/100 + e_not(tmax)RHmin/100 ] / 2
uses Saturation vapour pressure at daily minimum temperature [kPa].
Saturation vapour pressure at daily maximum temperature [kPa].
Minimum relative humidity [%]
Maximum relative humidity [%]
:return: Actual vapour pressure [kPa]
:rtype: float
http://www.fao.org/3/X0490E/x0490e07.htm#TopOfPage
"""
if 'ea' in self.input:
avp = self.input['ea']
else:
avp = 0.0
# TODO `shub_hourly` calculation should be different from `Hourly`
# use equation 54 in http://www.fao.org/3/X0490E/x0490e08.htm#TopOfPage
if self.freq_in_mins <= 60: # for hourly or sub_hourly
avp = np.multiply(self.sat_vp_fao56(self.input['temp'].values),
np.divide(self.input['rel_hum'].values, 100.0))
elif self.freq_in_mins == 1440:
if 'rh_min' in self.input.columns and 'rh_max' in self.input.columns:
tmp1 = np.multiply(self.sat_vp_fao56(self.input['tmin'].values),
np.divide(self.input['rh_max'].values, 100.0))
tmp2 = np.multiply(self.sat_vp_fao56(self.input['tmax'].values),
np.divide(self.input['rh_min'].values, 100.0))
avp = np.divide(np.add(tmp1, tmp2), 2.0)
elif 'rel_hum' in self.input.columns:
# calculation actual vapor pressure from mean humidity
# equation 19
t1 = np.divide(self.input['rel_hum'].values, 100)
t2 = np.divide(np.add(self.sat_vp_fao56(self.input['tmax'].values),
self.sat_vp_fao56(self.input['tmin'].values)), 2.0)
avp = np.multiply(t1, t2)
else:
raise NotImplementedError(" for frequency of {} minutes, actual vapour pressure can not be calculated"
.format(self.freq_in_mins))
self.input['ea'] = avp
return avp
def sat_vp_fao56(self, temp):
"""calculates saturation vapor pressure (*e_not*) as given in eq 11 of FAO 56 at a given temp which must be in
units of centigrade.
using Tetens equation
es = 0.6108 * exp((17.26*temp)/(temp+273.3))
where es is in KiloPascal units.
Murray, F. W., On the computation of saturation vapor pressure, J. Appl. Meteorol., 6, 203-204, 1967.
"""
# e_not_t = multiply(0.6108, np.exp( multiply(17.26939, temp) / add(temp , 237.3)))
e_not_t = np.multiply(0.6108, np.exp(np.multiply(17.27, np.divide(temp, np.add(temp, 237.3)))))
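        # e.g. at 25 degC this gives 0.6108*exp(17.27*25/262.3), roughly 3.17 kPa,
        # in line with the saturation vapour pressure tabulated in FAO-56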
return e_not_t
def soil_heat_flux(self, rn=None):
if self.freq_in_mins == 1440:
return 0.0
elif self.freq_in_mins <= 60:
gd = np.multiply(0.1, rn)
gn = np.multiply(0.5, rn)
return np.where(self.input['is_day'] == 1, gd, gn)
elif self.freq_in_mins > 1440:
raise NotImplementedError
    def mean_sat_vp_fao56(self):
        """ calculates mean saturation vapor pressure (*es*) for a day, week or month according to eq 12 of FAO 56 using
tmin and tmax which must be in centigrade units
"""
es = None
# for case when tmax and tmin are not given and only `temp` is given
if 'tmax' not in self.input:
if 'temp' in self.input:
es = self.sat_vp_fao56(self.input['temp'])
# for case when `tmax` and `tmin` are provided
elif 'tmax' in self.input:
es_tmax = self.sat_vp_fao56(self.input['tmax'].values)
es_tmin = self.sat_vp_fao56(self.input['tmin'].values)
es = np.mean(np.array([es_tmin, es_tmax]), axis=0)
else:
raise NotImplementedError
return es
def psy_const(self) -> float:
"""
Calculate the psychrometric constant.
This method assumes that the air is saturated with water vapour at the minimum daily temperature. This
assumption may not hold in arid areas.
Based on equation 8, page 95 in Allen et al (1998).
uses Atmospheric pressure [kPa].
:return: Psychrometric constant [kPa degC-1].
:rtype: array
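        e.g. at sea level (P of about 101.3 kPa) this gives roughly 0.000665 * 101.3 = 0.067 kPa degC-1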
"""
return np.multiply(0.000665, self.atm_pressure())
def slope_sat_vp(self, t):
"""
slope of the relationship between saturation vapour pressure and temperature for a given temperature
according to equation 13 in Fao56[1].
delta = 4098 [0.6108 exp(17.27T/T+237.3)] / (T+237.3)^2
:param t: Air temperature [deg C]. Use mean air temperature for use in Penman-Monteith.
        :return: Slope of the saturation vapour pressure curve [kPa degC-1]
[1]: http://www.fao.org/3/X0490E/x0490e07.htm#TopOfPage
"""
to_exp = np.divide(np.multiply(17.27, t), np.add(t, 237.3))
tmp = np.multiply(4098, np.multiply(0.6108, np.exp(to_exp)))
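        # e.g. at T = 25 degC the slope works out to roughly 0.189 kPa/degC,
        # in line with the values tabulated in FAO-56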
return np.divide(tmp, np.power(np.add(t, 237.3), 2))
def _wind_2m(self, method='fao56', z_o=0.001):
"""
converts wind speed (m/s) measured at height z to 2m using either FAO 56 equation 47 or McMohan eq S4.4.
u2 = uz [ 4.87/ln(67.8z-5.42) ] eq 47 in [1], eq S5.20 in [2].
u2 = uz [ln(2/z_o) / ln(z/z_o)] eq S4.4 in [2]
:param `method` string, either of `fao56` or `mcmohan2013`. if `mcmohan2013` is chosen then `z_o` is used
:param `z_o` float, roughness height. Default value is from [2]
:return: Wind speed at 2 m above the surface [m s-1]
[1] http://www.fao.org/3/X0490E/x0490e07.htm
[2] McMahon, T., Peel, M., Lowe, L., Srikanthan, R. & McVicar, T. 2012. Estimating actual, potential,
reference crop and pan evaporation using standard meteorological data: a pragmatic synthesis. Hydrology and
Earth System Sciences Discussions, 9, 11829-11910.
https://www.hydrol-earth-syst-sci.net/17/1331/2013/hess-17-1331-2013-supplement.pdf
"""
# if value of height at which wind is measured is not given, then don't convert
if 'wind_z' in self.cons:
wind_z = self.cons['wind_z']
else:
wind_z = None
if wind_z is None:
if self.verbosity > 0:
                print("""WARNING: the height at which wind speed was measured (`wind_z`) is not given, so assuming
                the given wind data was measured at 2 m height""")
return self.input['wind_speed'].values
else:
if method == 'fao56':
return np.multiply(self.input['wind_speed'], (4.87 / math.log((67.8 * wind_z) - 5.42)))
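                # e.g. (FAO-56 Example 14) 3.2 m/s measured at 10 m converts to
                # about 3.2 * 4.87/ln(67.8*10 - 5.42), i.e. roughly 2.4 m/s at 2 m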
else:
return np.multiply(self.input['wind_speed'].values, math.log(2/z_o) / math.log(wind_z/z_o))
def atm_pressure(self) -> float:
"""
Estimate atmospheric pressure from altitude.
Calculated using a simplification of the ideal gas law, assuming 20 degrees Celsius for a standard atmosphere.
Based on equation 7, page 62 in Allen et al (1998).
:return: atmospheric pressure [kPa]
:rtype: float
"""
tmp = (293.0 - (0.0065 * self.cons['altitude'])) / 293.0
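        # e.g. (FAO-56 Example 2) an altitude of 1800 m gives roughly 81.8 kPa,
        # while an altitude of 0 m returns the standard 101.3 kPa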
return math.pow(tmp, 5.26) * 101.3
def tdew_from_t_rel_hum(self):
"""
Calculates the dew point temperature given temperature and relative humidity.
Following formulation given at https://goodcalculators.com/dew-point-calculator/
The formula is
Tdew = (237.3 × [ln(RH/100) + ( (17.27×T) / (237.3+T) )]) / (17.27 - [ln(RH/100) + ( (17.27×T) / (237.3+T) )])
Where:
Tdew = dew point temperature in degrees Celsius (°C),
T = air temperature in degrees Celsius (°C),
RH = relative humidity (%),
ln = natural logarithm.
The formula also holds true as calculations shown at http://www.decatur.de/javascript/dew/index.html
"""
temp = self.input['temp']
neum = (237.3 * (np.log(self.input['rel_hum'] / 100.0) + ((17.27 * temp) / (237.3 + temp))))
denom = (17.27 - (np.log(self.input['rel_hum'] / 100.0) + ((17.27 * temp) / (237.3 + temp))))
td = neum / denom
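        # e.g. temp = 20 degC and rel_hum = 50 % give a dew point of roughly 9.3 degC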
self.input['tdew'] = td
return
def evap_pan(self):
"""
pan evaporation which is used in almost all penman related methods
"""
ap = self.cons['pen_ap']
lat = self.cons['lat_dec_deg']
rs = self.rs()
delta = self.slope_sat_vp(self.input['temp'].values)
gamma = self.psy_const()
vabar = self.avp_from_rel_hum() # Vapour pressure
vas = self.mean_sat_vp_fao56()
u2 = self._wind_2m()
r_nl = self.net_out_lw_rad(rs=rs, ea=vabar) # net outgoing longwave radiation
ra = self._et_rad()
# eq 34 in Thom et al., 1981
f_pan_u = np.add(1.201, np.multiply(1.621, u2))
# eq 4 and 5 in Rotstayn et al., 2006
p_rad = np.add(1.32, np.add(np.multiply(4e-4, lat), np.multiply(8e-5, lat**2)))
f_dir = np.add(-0.11, np.multiply(1.31, np.divide(rs, ra)))
rs_pan = np.multiply(np.add(np.add(np.multiply(f_dir, p_rad), np.multiply(1.42,
np.subtract(1, f_dir))),
np.multiply(0.42, self.cons['albedo'])), rs)
rn_pan = np.subtract(np.multiply(1-self.cons['alphaA'], rs_pan), r_nl)
# S6.1 in McMohan et al 2013
tmp1 = np.multiply(np.divide(delta, np.add(delta, np.multiply(ap, gamma))), np.divide(rn_pan, LAMBDA))
tmp2 = np.divide(np.multiply(ap, gamma), np.add(delta, np.multiply(ap, gamma)))
tmp3 = np.multiply(f_pan_u, np.subtract(vas, vabar))
tmp4 = np.multiply(tmp2, tmp3)
epan = np.add(tmp1, tmp4)
return epan
def rad_to_evap(self):
"""
converts solar radiation to equivalent inches of water evaporation
SRadIn[in/day] = SolRad[Ley/day] / ((597.3-0.57) * temp[centigrade]) * 2.54) [1]
or using equation 20 of FAO chapter 3
from TABLE 3 in FAO chap 3.
SRadIn[mm/day] = 0.408 * Radiation[MJ m-2 day-1]
SRadIn[mm/day] = 0.035 * Radiation[Wm-2]
SRadIn[mm/day] = Radiation[MJ m-2 day-1] / 2.45
SRadIn[mm/day] = Radiation[J cm-2 day-1] / 245
SRadIn[mm/day] = Radiation[Wm-2] / 28.4
[1] https://github.com/respec/BASINS/blob/4356aa9481eb7217cb2cbc5131a0b80a932907bf/atcMetCmp/modMetCompute.vb#L1251
https://github.com/DanluGuo/Evapotranspiration/blob/8efa0a2268a3c9fedac56594b28ac4b5197ea3fe/R/Evapotranspiration.R
http://www.fao.org/3/X0490E/x0490e07.htm
"""
        # TODO: the following equation assumes radiation in langleys/day and output in inches
tmp1 = np.multiply(np.subtract(597.3, np.multiply(0.57, self.input['temp'].values)), 2.54)
rad_in = np.divide(self.input['sol_rad'].values, tmp1)
return rad_in
def equil_temp(self, et_daily):
# equilibrium temperature T_e
t_e = self.input['temp'].copy()
ta = self.input['temp']
vabar = self.avp_from_rel_hum()
r_n = self.net_rad(vabar) # net radiation
gamma = self.psy_const()
for i in range(9999):
v_e = 0.6108 * np.exp(17.27 * t_e/(t_e + 237.3)) # saturated vapour pressure at T_e (S2.5)
t_e_new = ta - 1 / gamma * (1 - r_n / (LAMBDA * et_daily)) * (v_e - vabar) # rearranged from S8.8
delta_t_e = t_e_new - t_e
maxdelta_t_e = np.abs(np.max(delta_t_e))
t_e = t_e_new
if maxdelta_t_e < 0.01:
break
return t_e
def freq_in_mins_from_string(input_string: str) -> int:
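    # e.g. '30min' -> 30, '3H' -> 180, 'D' -> 1440 (returned value is in minutes)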
if has_numbers(input_string):
in_minutes = split_freq(input_string)
elif input_string.upper() in ['D', 'H', 'M', 'DAILY', 'HOURLY', 'MONTHLY', 'YEARLY', 'MIN', 'MINUTE']:
in_minutes = str_to_mins(input_string.upper())
else:
raise TypeError("invalid input string", input_string)
return int(in_minutes)
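# Illustrative calls (not part of the module):
#   freq_in_mins_from_string('30min') -> 30    (contains digits, parsed by split_freq)
#   freq_in_mins_from_string('D')     -> 1440  (keyword, looked up by str_to_mins)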
def str_to_mins(input_string: str) -> int:
d = {'MIN': 1,
'MINUTE': 1,
'DAILY': 1440,
'D': 1440,
'HOURLY': 60,
'HOUR': 60,
'H': 60,
'MONTHLY': 43200,
'M': 43200,
'YEARLY': 525600
}
return d[input_string]
def split_freq(freq_str: str) -> int:
match = re.match(r"([0-9]+)([a-z]+)", freq_str, re.I)
if match:
minutes, freq = match.groups()
if freq.upper() in ['H', 'HOURLY', 'HOURS', 'HOUR']:
minutes = int(minutes) * 60
elif freq.upper() in ['D', 'DAILY', 'DAY', 'DAYS']:
minutes = int(minutes) * 1440
return int(minutes)
else:
raise NotImplementedError
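# Note: unit strings that are neither hourly nor daily (e.g. '30min') fall through both branches
# above, so the numeric prefix is returned directly as a minute count.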
def has_numbers(input_string: str) -> bool:
return bool(re.search(r'\d', input_string))
def justify_len(string: str, length: int = 2, pad: str = '0') -> str:
if len(string) < length:
zeros_to_pad = pad * (length - len(string))
new_string = zeros_to_pad + string
else:
new_string = string
return new_string
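# e.g. justify_len('7') -> '07' and justify_len('12') -> '12' (already at the target length).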
def add_freq(dataframe, name=None, _force_freq=None, method=None):
"""Add a frequency attribute to idx, through inference or directly.
Returns a copy. If `freq` is None, it is inferred.
"""
idx = dataframe.index
idx = idx.copy()
# if freq is None:
if idx.freq is None:
freq = pd.infer_freq(idx)
idx.freq = freq
if idx.freq is None:
if _force_freq is not None:
dataframe = force_freq(dataframe, _force_freq, name, method=method)
else:
raise AttributeError('no discernible frequency found in the index of {}. Specify'
' a frequency with `_force_freq`.'.format(name))
else:
print('frequency {} is assigned to {}'.format(idx.freq, name))
dataframe.index = idx
return dataframe
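# A typical call (a sketch; `df` is a hypothetical datetime-indexed dataframe):
#   df = add_freq(df, name='temperature', _force_freq='30min', method='nearest')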
def force_freq(data_frame, freq_to_force, name, method=None):
old_nan_counts = data_frame.isna().sum()
old_shape = data_frame.shape
dr = pd.date_range(data_frame.index[0], data_frame.index[-1], freq=freq_to_force)
df_unique = data_frame[~data_frame.index.duplicated(keep='first')] # first remove duplicate indices if present
if method:
df_idx_sorted = df_unique.sort_index()
df_reindexed = df_idx_sorted.reindex(dr, method='nearest')
else:
df_reindexed = df_unique.reindex(dr, fill_value=np.nan)
df_reindexed.index.freq = pd.infer_freq(df_reindexed.index)
new_nan_counts = df_reindexed.isna().sum()
print('Frequency {} is forced to {} dataframe, NaN counts changed from {} to {}, shape changed from {} to {}'
.format(df_reindexed.index.freq, name, old_nan_counts.values, new_nan_counts.values,
old_shape, df_reindexed.shape))
return df_reindexed
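# force_freq builds a complete date_range at the requested frequency and reindexes onto it:
# if `method` is truthy the index is sorted and filled from the nearest existing observation,
# otherwise the new time stamps are introduced as NaN rows.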
def min_to_str(minutes: int) -> str:
if minutes == 1:
freq_str = 'Minute'
elif 60 > minutes > 1:
freq_str = 'Sub_hourly'
elif minutes == 60:
freq_str = 'Hourly'
elif 1440 > minutes > 60:
freq_str = 'Sub-daily'
elif minutes == 1440:
freq_str = 'Daily'
elif 43200 > minutes > 1440:
freq_str = 'Sub-monthly'
elif minutes == 43200:
freq_str = 'Monthly'
elif 525600 > minutes > 43200:
freq_str = 'Sub-yearly'
elif minutes == 525600:
freq_str = 'Yearly'
else:
raise ValueError("Can not calculate frequency string from given frequency in minutes ", minutes)
return freq_str
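# e.g. min_to_str(30) -> 'Sub_hourly', min_to_str(1440) -> 'Daily', min_to_str(43200) -> 'Monthly'.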
time_step = {'D': 'Day', 'H': 'Hour', 'M': 'MonthEnd'}
def get_offset(freqstr: str) -> str:
offset_step = 1
if freqstr in time_step:
freqstr = time_step[freqstr]
elif has_numbers(freqstr):
in_minutes = split_freq(freqstr)
freqstr = 'Minute'
offset_step = int(in_minutes)
offset = getattr(pd.offsets, freqstr)(offset_step)
return offset
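# Illustrative results: get_offset('H') -> pandas Hour(1) offset; get_offset('30min') -> Minute(30).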
def _wrap(x, x_min, x_max):
"""Wrap floating point values into range
Parameters
----------
x : ndarray
Values to wrap.
x_min : float
Minimum value in output range.
x_max : float
Maximum value in output range.
Returns
-------
ndarray
"""
return np.mod((x - x_min), (x_max - x_min)) + x_min
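# e.g. _wrap(3*math.pi/2, -math.pi, math.pi) returns -math.pi/2, the wrapping used by omega() below.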
def omega(solar_time):
"""Solar hour angle (Eq. 55)
Parameters
----------
solar_time : scalar or array_like of shape(M, )
Solar time (i.e. noon is 0) [hours].
Returns
-------
omega : ndarray
Hour angle [radians].
"""
_omega = (2 * math.pi / 24.0) * solar_time
# Need to adjust omega so that the values go from -pi to pi
# Values outside this range are wrapped (i.e. -3*pi/2 -> pi/2)
_omega = _wrap(_omega, -math.pi, math.pi)
return _omega
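# e.g. omega(6.0) (six hours after solar noon) -> pi/2; omega(18.0) wraps around to -pi/2.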
def solar_time_rad(lon, time_mid, sc):
"""Solar time (i.e. noon is 0) (Eq. 55)
Parameters
----------
lon : scalar or array_like of shape(M, )
Longitude [radians].
time_mid : scalar or array_like of shape(M, )
UTC time at midpoint of period [hours].
sc : scalar or array_like of shape(M, )
Seasonal correction [hours].
Returns
-------
ndarray
Solar time [hours].
Notes
-----
This function could be integrated into the _omega() function since they are
always called together (i.e. _omega(_solar_time_rad()). It was built
independently from _omega to eventually support having a separate
solar_time functions for longitude in degrees.
"""
return time_mid + (lon * 24 / (2 * math.pi)) + sc - 12
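# Illustrative call (values are hypothetical): for lon = -1.91 rad (~ -109 deg), time_mid = 18.0 UTC
# and sc ~ 0, solar_time_rad(-1.91, 18.0, 0.0) ~= 18 - 7.3 - 12 ~= -1.3 hours, i.e. shortly before
# local solar noon.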
# Source: /ixnetwork_restpy/testplatform/sessions/ixnetwork/vport/protocolstack/dcbxtlv_37d6aa3b470efb288cbc76a9c77c1804.py (OpenIxia/ixnetwork_restpy, MIT)
# MIT LICENSE
#
# Copyright 1997 - 2020 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
import sys
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
if sys.version_info >= (3, 5):
from typing import List, Any, Union
class DcbxTlv(Base):
"""DCBX TLV (Type-Length-Value)
The DcbxTlv class encapsulates a list of dcbxTlv resources that are managed by the user.
A list of resources can be retrieved from the server using the DcbxTlv.find() method.
The list can be managed by using the DcbxTlv.add() and DcbxTlv.remove() methods.
"""
__slots__ = ()
_SDM_NAME = "dcbxTlv"
_SDM_ATT_MAP = {
"Enabled": "enabled",
"Error": "error",
"ErrorOverride": "errorOverride",
"FeatureEnable": "featureEnable",
"FeatureType": "featureType",
"MaxVersion": "maxVersion",
"Name": "name",
"ObjectId": "objectId",
"SubType": "subType",
"Willing": "willing",
}
_SDM_ENUM_MAP = {}
def __init__(self, parent, list_op=False):
super(DcbxTlv, self).__init__(parent, list_op)
@property
def TlvSettings(self):
"""
Returns
-------
- obj(ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocolstack.tlvsettings_9ee7f0bbd6252892487709b1e2bd344a.TlvSettings): An instance of the TlvSettings class
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
from ixnetwork_restpy.testplatform.sessions.ixnetwork.vport.protocolstack.tlvsettings_9ee7f0bbd6252892487709b1e2bd344a import (
TlvSettings,
)
if len(self._object_properties) > 0:
if self._properties.get("TlvSettings", None) is not None:
return self._properties.get("TlvSettings")
return TlvSettings(self)._select()
@property
def Enabled(self):
# type: () -> bool
"""
Returns
-------
- bool: Specifies if this TLV is used in the configuration.
"""
return self._get_attribute(self._SDM_ATT_MAP["Enabled"])
@Enabled.setter
def Enabled(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["Enabled"], value)
@property
def Error(self):
# type: () -> bool
"""
Returns
-------
- bool: Indicates that an error has occurred during the configuration exchange with the peer.
"""
return self._get_attribute(self._SDM_ATT_MAP["Error"])
@Error.setter
def Error(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["Error"], value)
@property
def ErrorOverride(self):
# type: () -> bool
"""
Returns
-------
- bool: True to override the error bit.
"""
return self._get_attribute(self._SDM_ATT_MAP["ErrorOverride"])
@ErrorOverride.setter
def ErrorOverride(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["ErrorOverride"], value)
@property
def FeatureEnable(self):
# type: () -> bool
"""
Returns
-------
- bool: Indicates whether the DCB feature is enabled or not.
"""
return self._get_attribute(self._SDM_ATT_MAP["FeatureEnable"])
@FeatureEnable.setter
def FeatureEnable(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["FeatureEnable"], value)
@property
def FeatureType(self):
# type: () -> int
"""
Returns
-------
- number: Type code of the DCB Feature. The codes translate to: 2 - Priority Group 3 - PFC 4 - Application (IEEE 1.01) / Custom(BCN) (Intel 1.0) 5 - Custom (IEEE 1.01) / FCoE (Intel 1.0) 6 - Custom (IEEE 1.01) / Logical Link (Intel 1.0) 7 - NIV 8 - Custom (IEEE 1.01 / Intel 1.0) 9/10 - Custom (IEEE 1.01 / Intel 1.0) / ETS Configuration/Recommendation (802.1Qaz) 11 - Custom (IEEE 1.01 / Intel 1.0) / PFC (802.1Qaz) 12 - Custom (IEEE 1.01 / Intel 1.0) / Application Priority (802.1Qaz) 13 to 127 - Custom (IEEE 1.01 / Intel 1.0)
"""
return self._get_attribute(self._SDM_ATT_MAP["FeatureType"])
@FeatureType.setter
def FeatureType(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["FeatureType"], value)
@property
def MaxVersion(self):
# type: () -> int
"""
Returns
-------
- number: Highest feature version supported by the system.
"""
return self._get_attribute(self._SDM_ATT_MAP["MaxVersion"])
@MaxVersion.setter
def MaxVersion(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["MaxVersion"], value)
@property
def Name(self):
# type: () -> str
"""
Returns
-------
- str: Name of TLV
"""
return self._get_attribute(self._SDM_ATT_MAP["Name"])
@Name.setter
def Name(self, value):
# type: (str) -> None
self._set_attribute(self._SDM_ATT_MAP["Name"], value)
@property
def ObjectId(self):
# type: () -> str
"""
Returns
-------
- str: Unique identifier for this object
"""
return self._get_attribute(self._SDM_ATT_MAP["ObjectId"])
@property
def SubType(self):
# type: () -> int
"""
Returns
-------
- number: Indicates specific types of network traffic.
"""
return self._get_attribute(self._SDM_ATT_MAP["SubType"])
@SubType.setter
def SubType(self, value):
# type: (int) -> None
self._set_attribute(self._SDM_ATT_MAP["SubType"], value)
@property
def Willing(self):
# type: () -> bool
"""
Returns
-------
- bool: Indicates whether this feature accepts its configuration from the peer or not.
"""
return self._get_attribute(self._SDM_ATT_MAP["Willing"])
@Willing.setter
def Willing(self, value):
# type: (bool) -> None
self._set_attribute(self._SDM_ATT_MAP["Willing"], value)
def update(
self,
Enabled=None,
Error=None,
ErrorOverride=None,
FeatureEnable=None,
FeatureType=None,
MaxVersion=None,
Name=None,
SubType=None,
Willing=None,
):
# type: (bool, bool, bool, bool, int, int, str, int, bool) -> DcbxTlv
"""Updates dcbxTlv resource on the server.
Args
----
- Enabled (bool): Specifies if this TLV is used in the configuration.
- Error (bool): Indicates that an error has occurred during the configuration exchange with the peer.
- ErrorOverride (bool): True to override the error bit.
- FeatureEnable (bool): Indicates whether the DCB feature is enabled or not.
- FeatureType (number): Type code of the DCB Feature. The codes translate to: 2 - Priority Group 3 - PFC 4 - Application (IEEE 1.01) / Custom(BCN) (Intel 1.0) 5 - Custom (IEEE 1.01) / FCoE (Intel 1.0) 6 - Custom (IEEE 1.01) / Logical Link (Intel 1.0) 7 - NIV 8 - Custom (IEEE 1.01 / Intel 1.0) 9/10 - Custom (IEEE 1.01 / Intel 1.0) / ETS Configuration/Recommendation (802.1Qaz) 11 - Custom (IEEE 1.01 / Intel 1.0) / PFC (802.1Qaz) 12 - Custom (IEEE 1.01 / Intel 1.0) / Application Priority (802.1Qaz) 13 to 127 - Custom (IEEE 1.01 / Intel 1.0)
- MaxVersion (number): Highest feature version supported by the system.
- Name (str): Name of TLV
- SubType (number): Indicates specific types of network traffic.
- Willing (bool): Indicates whether this feature accepts its configuration from the peer or not.
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._update(self._map_locals(self._SDM_ATT_MAP, locals()))
def add(
self,
Enabled=None,
Error=None,
ErrorOverride=None,
FeatureEnable=None,
FeatureType=None,
MaxVersion=None,
Name=None,
SubType=None,
Willing=None,
):
# type: (bool, bool, bool, bool, int, int, str, int, bool) -> DcbxTlv
"""Adds a new dcbxTlv resource on the server and adds it to the container.
Args
----
- Enabled (bool): Specifies if this TLV is used in the configuration.
- Error (bool): Indicates that an error has occurred during the configuration exchange with the peer.
- ErrorOverride (bool): True to override the error bit.
- FeatureEnable (bool): Indicates whether the DCB feature is enabled or not.
- FeatureType (number): Type code of the DCB Feature. The codes translate to: 2 - Priority Group 3 - PFC 4 - Application (IEEE 1.01) / Custom(BCN) (Intel 1.0) 5 - Custom (IEEE 1.01) / FCoE (Intel 1.0) 6 - Custom (IEEE 1.01) / Logical Link (Intel 1.0) 7 - NIV 8 - Custom (IEEE 1.01 / Intel 1.0) 9/10 - Custom (IEEE 1.01 / Intel 1.0) / ETS Configuration/Recommendation (802.1Qaz) 11 - Custom (IEEE 1.01 / Intel 1.0) / PFC (802.1Qaz) 12 - Custom (IEEE 1.01 / Intel 1.0) / Application Priority (802.1Qaz) 13 to 127 - Custom (IEEE 1.01 / Intel 1.0)
- MaxVersion (number): Highest feature version supported by the system.
- Name (str): Name of TLV
- SubType (number): Indicates specific types of network traffic.
- Willing (bool): Indicates whether this feature accepts its configuration from the peer or not.
Returns
-------
- self: This instance with all currently retrieved dcbxTlv resources using find and the newly added dcbxTlv resources available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._create(self._map_locals(self._SDM_ATT_MAP, locals()))
def remove(self):
"""Deletes all the contained dcbxTlv resources in this instance from the server.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
self._delete()
def find(
self,
Enabled=None,
Error=None,
ErrorOverride=None,
FeatureEnable=None,
FeatureType=None,
MaxVersion=None,
Name=None,
ObjectId=None,
SubType=None,
Willing=None,
):
# type: (bool, bool, bool, bool, int, int, str, str, int, bool) -> DcbxTlv
"""Finds and retrieves dcbxTlv resources from the server.
All named parameters are evaluated on the server using regex. The named parameters can be used to selectively retrieve dcbxTlv resources from the server.
To retrieve an exact match ensure the parameter value starts with ^ and ends with $
By default the find method takes no parameters and will retrieve all dcbxTlv resources from the server.
Args
----
- Enabled (bool): Specifies if this TLV is used in the configuration.
- Error (bool): Indicates that an error has occurred during the configuration exchange with the peer.
- ErrorOverride (bool): True to override the error bit.
- FeatureEnable (bool): Indicates whether the DCB feature is enabled or not.
- FeatureType (number): Type code of the DCB Feature. The codes translate to: 2 - Priority Group 3 - PFC 4 - Application (IEEE 1.01) / Custom(BCN) (Intel 1.0) 5 - Custom (IEEE 1.01) / FCoE (Intel 1.0) 6 - Custom (IEEE 1.01) / Logical Link (Intel 1.0) 7 - NIV 8 - Custom (IEEE 1.01 / Intel 1.0) 9/10 - Custom (IEEE 1.01 / Intel 1.0) / ETS Configuration/Recommendation (802.1Qaz) 11 - Custom (IEEE 1.01 / Intel 1.0) / PFC (802.1Qaz) 12 - Custom (IEEE 1.01 / Intel 1.0) / Application Priority (802.1Qaz) 13 to 127 - Custom (IEEE 1.01 / Intel 1.0)
- MaxVersion (number): Highest feature version supported by the system.
- Name (str): Name of TLV
- ObjectId (str): Unique identifier for this object
- SubType (number): Indicates specific types of network traffic.
- Willing (bool): Indicates whether this feature accepts its configuration from the peer or not.
Returns
-------
- self: This instance with matching dcbxTlv resources retrieved from the server available through an iterator or index
Raises
------
- ServerError: The server has encountered an uncategorized error condition
"""
return self._select(self._map_locals(self._SDM_ATT_MAP, locals()))
def read(self, href):
"""Retrieves a single instance of dcbxTlv data from the server.
Args
----
- href (str): An href to the instance to be retrieved
Returns
-------
- self: This instance with the dcbxTlv resources from the server available through an iterator or index
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
return self._read(href)
def CustomProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> None
"""Executes the customProtocolStack operation on the server.
Create custom protocol stack under /vport/protocolStack
customProtocolStack(Arg2=list, Arg3=enum, async_operation=bool)
---------------------------------------------------------------
- Arg2 (list(str)): List of plugin types to be added in the new custom stack
- Arg3 (str(kAppend | kMerge | kOverwrite)): Append, merge or overwrite existing protocol stack
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute(
"customProtocolStack", payload=payload, response_object=None
)
def DisableProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> Union[str, None]
"""Executes the disableProtocolStack operation on the server.
Disable a protocol under protocolStack using the class name
disableProtocolStack(Arg2=string, async_operation=bool)string
-------------------------------------------------------------
- Arg2 (str): Protocol class name to disable
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns str: Status of the exec
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self.href}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute(
"disableProtocolStack", payload=payload, response_object=None
)
def EnableProtocolStack(self, *args, **kwargs):
# type: (*Any, **Any) -> Union[str, None]
"""Executes the enableProtocolStack operation on the server.
Enable a protocol under protocolStack using the class name
enableProtocolStack(Arg2=string, async_operation=bool)string
------------------------------------------------------------
- Arg2 (str): Protocol class name to enable
- async_operation (bool=False): True to execute the operation asynchronously. Any subsequent rest api calls made through the Connection class will block until the operation is complete.
- Returns str: Status of the exec
Raises
------
- NotFoundError: The requested resource does not exist on the server
- ServerError: The server has encountered an uncategorized error condition
"""
payload = {"Arg1": self.href}
for i in range(len(args)):
payload["Arg%s" % (i + 2)] = args[i]
for item in kwargs.items():
payload[item[0]] = item[1]
return self._execute(
"enableProtocolStack", payload=payload, response_object=None
)
# Source: /Gse/generated/Ref/channels/SG1_SignalGen_Output.py (dstockhouse/eaglesat-fprime, Apache-2.0)
'''
Created on Wednesday, 10 April 2019
@author: David
THIS FILE IS AUTOMATICALLY GENERATED - DO NOT EDIT!!!
XML Source: /cygdrive/c/Users/David/Documents/eaglesat/eaglesat-fprime/Ref/SignalGen/SignalGenComponentAi.xml
'''
# Import the types this way so they do not need prefixing for execution.
from models.serialize.type_exceptions import *
from models.serialize.type_base import *
from models.serialize.bool_type import *
from models.serialize.enum_type import *
from models.serialize.f32_type import *
from models.serialize.f64_type import *
from models.serialize.u8_type import *
from models.serialize.u16_type import *
from models.serialize.u32_type import *
from models.serialize.u64_type import *
from models.serialize.i8_type import *
from models.serialize.i16_type import *
from models.serialize.i32_type import *
from models.serialize.i64_type import *
from models.serialize.string_type import *
from models.serialize.serializable_type import *
from models.common import channel_telemetry
# Each file represents the information for a single event
# These module variables are used to instance the channel object within the Gse
COMPONENT = "Ref::SignalGen"
NAME = "SG1_SignalGen_Output"
ID = 0xb5
CHANNEL_DESCRIPTION = "SignalGen Output"
TYPE = F32Type()
FORMAT_STRING = None
LOW_RED = None
LOW_ORANGE = None
LOW_YELLOW = None
HIGH_YELLOW = None
HIGH_ORANGE = None
HIGH_RED = None
# Source: /pybind/nos/v6_0_2f/interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/__init__.py (b2220333/pybind, Apache-2.0)
from operator import attrgetter
import pyangbind.lib.xpathhelper as xpathhelper
from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType
from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType
from pyangbind.lib.base import PybindBase
from decimal import Decimal
from bitarray import bitarray
import __builtin__
import virtual_ip
import track
import short_path_forwarding
class vrrpv3e(PybindBase):
"""
This class was auto-generated by the PythonClass plugin for PYANG
from YANG module brocade-interface - based on the path /interface/port-channel/ipv6/hide-vrrpv3-holder/vrrpv3e. Each member element of
the container is represented as a class variable - with a specific
YANG type.
"""
__slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__vrid','__virtual_ip','__track','__enable','__hold_time','__preempt_mode','__priority','__description','__advertise_backup','__nd_advertisement_timer','__advertisement_interval_scale','__backup_advertisement_interval','__vrrpe_advertisement_interval','__short_path_forwarding',)
_yang_name = 'vrrpv3e'
_rest_name = 'vrrp-extended-group'
_pybind_generated_by = 'container'
def __init__(self, *args, **kwargs):
path_helper_ = kwargs.pop("path_helper", None)
if path_helper_ is False:
self._path_helper = False
elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper):
self._path_helper = path_helper_
elif hasattr(self, "_parent"):
path_helper_ = getattr(self._parent, "_path_helper", False)
self._path_helper = path_helper_
else:
self._path_helper = False
extmethods = kwargs.pop("extmethods", None)
if extmethods is False:
self._extmethods = False
elif extmethods is not None and isinstance(extmethods, dict):
self._extmethods = extmethods
elif hasattr(self, "_parent"):
extmethods = getattr(self._parent, "_extmethods", None)
self._extmethods = extmethods
else:
self._extmethods = False
self.__enable = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
self.__description = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface specific description', u'cli-multi-value': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='string', is_config=True)
self.__track = YANGDynClass(base=track.track, is_container='container', presence=False, yang_name="track", rest_name="track", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface to be tracked', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='container', is_config=True)
self.__nd_advertisement_timer = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..3600']}), is_leaf=True, yang_name="nd-advertisement-timer", rest_name="nd-advertisement-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
self.__advertisement_interval_scale = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..10']}), is_leaf=True, yang_name="advertisement-interval-scale", rest_name="advertisement-interval-scale", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Ipv6 session advertisement interval scale factor <1|2|5|10>'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
self.__vrid = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..255']}), is_leaf=True, yang_name="vrid", rest_name="vrid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-range': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='vrid-type', is_config=True)
self.__advertise_backup = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="advertise-backup", rest_name="advertise-backup", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable periodic backup advertisement messages'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
self.__short_path_forwarding = YANGDynClass(base=short_path_forwarding.short_path_forwarding, is_container='container', presence=False, yang_name="short-path-forwarding", rest_name="short-path-forwarding", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable backup router to send traffic', u'cli-compact-syntax': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='container', is_config=True)
self.__priority = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..254']}), is_leaf=True, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set router priority within virtual router'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint8', is_config=True)
self.__virtual_ip = YANGDynClass(base=YANGListType("virtual_ipaddr",virtual_ip.virtual_ip, yang_name="virtual-ip", rest_name="virtual-ip", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='virtual-ipaddr', extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3eVirtualIPPo'}}), is_container='list', yang_name="virtual-ip", rest_name="virtual-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3eVirtualIPPo'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='list', is_config=True)
self.__vrrpe_advertisement_interval = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..255']}), is_leaf=True, yang_name="vrrpe-advertisement-interval", rest_name="advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set advertisement interval', u'alt-name': u'advertisement-interval'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
self.__hold_time = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..3600']}), is_leaf=True, yang_name="hold-time", rest_name="hold-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure hold time for this session'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
self.__preempt_mode = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="preempt-mode", rest_name="preempt-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set preempt mode for the session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
self.__backup_advertisement_interval = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'60..3600']}), is_leaf=True, yang_name="backup-advertisement-interval", rest_name="backup-advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set Backup advertisement interval'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
load = kwargs.pop("load", None)
if args:
if len(args) > 1:
raise TypeError("cannot create a YANG container with >1 argument")
all_attr = True
for e in self._pyangbind_elements:
if not hasattr(args[0], e):
all_attr = False
break
if not all_attr:
raise ValueError("Supplied object did not have the correct attributes")
for e in self._pyangbind_elements:
nobj = getattr(args[0], e)
if nobj._changed() is False:
continue
setmethod = getattr(self, "_set_%s" % e)
if load is None:
setmethod(getattr(args[0], e))
else:
setmethod(getattr(args[0], e), load=load)
def _path(self):
if hasattr(self, "_parent"):
return self._parent._path()+[self._yang_name]
else:
return [u'interface', u'port-channel', u'ipv6', u'hide-vrrpv3-holder', u'vrrpv3e']
def _rest_path(self):
if hasattr(self, "_parent"):
if self._rest_name:
return self._parent._rest_path()+[self._rest_name]
else:
return self._parent._rest_path()
else:
return [u'interface', u'Port-channel', u'ipv6', u'vrrp-extended-group']
def _get_vrid(self):
"""
Getter method for vrid, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/vrid (vrid-type)
"""
return self.__vrid
def _set_vrid(self, v, load=False):
"""
Setter method for vrid, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/vrid (vrid-type)
If this variable is read-only (config: false) in the
source YANG file, then _set_vrid is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vrid() directly.
"""
parent = getattr(self, "_parent", None)
if parent is not None and load is False:
raise AttributeError("Cannot set keys directly when" +
" within an instantiated list")
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..255']}), is_leaf=True, yang_name="vrid", rest_name="vrid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-range': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='vrid-type', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vrid must be of a type compatible with vrid-type""",
'defined-type': "brocade-vrrpv3:vrid-type",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..255']}), is_leaf=True, yang_name="vrid", rest_name="vrid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-range': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='vrid-type', is_config=True)""",
})
self.__vrid = t
if hasattr(self, '_set'):
self._set()
def _unset_vrid(self):
self.__vrid = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..255']}), is_leaf=True, yang_name="vrid", rest_name="vrid", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'cli-suppress-range': None}}, is_keyval=True, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='vrid-type', is_config=True)
def _get_virtual_ip(self):
"""
Getter method for virtual_ip, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/virtual_ip (list)
"""
return self.__virtual_ip
def _set_virtual_ip(self, v, load=False):
"""
Setter method for virtual_ip, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/virtual_ip (list)
If this variable is read-only (config: false) in the
source YANG file, then _set_virtual_ip is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_virtual_ip() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGListType("virtual_ipaddr",virtual_ip.virtual_ip, yang_name="virtual-ip", rest_name="virtual-ip", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='virtual-ipaddr', extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3eVirtualIPPo'}}), is_container='list', yang_name="virtual-ip", rest_name="virtual-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3eVirtualIPPo'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='list', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """virtual_ip must be of a type compatible with list""",
'defined-type': "list",
'generated-type': """YANGDynClass(base=YANGListType("virtual_ipaddr",virtual_ip.virtual_ip, yang_name="virtual-ip", rest_name="virtual-ip", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='virtual-ipaddr', extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3eVirtualIPPo'}}), is_container='list', yang_name="virtual-ip", rest_name="virtual-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3eVirtualIPPo'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='list', is_config=True)""",
})
self.__virtual_ip = t
if hasattr(self, '_set'):
self._set()
def _unset_virtual_ip(self):
self.__virtual_ip = YANGDynClass(base=YANGListType("virtual_ipaddr",virtual_ip.virtual_ip, yang_name="virtual-ip", rest_name="virtual-ip", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='virtual-ipaddr', extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3eVirtualIPPo'}}), is_container='list', yang_name="virtual-ip", rest_name="virtual-ip", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set virtual IPv6 address', u'cli-suppress-mode': None, u'cli-suppress-list-no': None, u'cli-full-no': None, u'cli-no-match-completion': None, u'callpoint': u'vrrpv3eVirtualIPPo'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='list', is_config=True)
def _get_track(self):
"""
Getter method for track, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/track (container)
YANG Description: Interface to be tracked
"""
return self.__track
def _set_track(self, v, load=False):
"""
Setter method for track, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/track (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_track is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_track() directly.
YANG Description: Interface to be tracked
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=track.track, is_container='container', presence=False, yang_name="track", rest_name="track", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface to be tracked', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """track must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=track.track, is_container='container', presence=False, yang_name="track", rest_name="track", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface to be tracked', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='container', is_config=True)""",
})
self.__track = t
if hasattr(self, '_set'):
self._set()
def _unset_track(self):
self.__track = YANGDynClass(base=track.track, is_container='container', presence=False, yang_name="track", rest_name="track", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface to be tracked', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='container', is_config=True)
def _get_enable(self):
"""
Getter method for enable, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/enable (empty)
YANG Description: Enable Session
"""
return self.__enable
def _set_enable(self, v, load=False):
"""
Setter method for enable, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/enable (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_enable is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_enable() directly.
YANG Description: Enable Session
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """enable must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)""",
})
self.__enable = t
if hasattr(self, '_set'):
self._set()
def _unset_enable(self):
self.__enable = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="enable", rest_name="enable", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable Session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
def _get_hold_time(self):
"""
Getter method for hold_time, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/hold_time (uint32)
YANG Description: Configure hold time for this session
"""
return self.__hold_time
def _set_hold_time(self, v, load=False):
"""
Setter method for hold_time, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/hold_time (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_hold_time is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_hold_time() directly.
YANG Description: Configure hold time for this session
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..3600']}), is_leaf=True, yang_name="hold-time", rest_name="hold-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure hold time for this session'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """hold_time must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..3600']}), is_leaf=True, yang_name="hold-time", rest_name="hold-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure hold time for this session'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)""",
})
self.__hold_time = t
if hasattr(self, '_set'):
self._set()
def _unset_hold_time(self):
self.__hold_time = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..3600']}), is_leaf=True, yang_name="hold-time", rest_name="hold-time", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Configure hold time for this session'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
def _get_preempt_mode(self):
"""
Getter method for preempt_mode, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/preempt_mode (empty)
YANG Description: Set preempt mode for the session
"""
return self.__preempt_mode
def _set_preempt_mode(self, v, load=False):
"""
Setter method for preempt_mode, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/preempt_mode (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_preempt_mode is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_preempt_mode() directly.
YANG Description: Set preempt mode for the session
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="preempt-mode", rest_name="preempt-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set preempt mode for the session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """preempt_mode must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="preempt-mode", rest_name="preempt-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set preempt mode for the session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)""",
})
self.__preempt_mode = t
if hasattr(self, '_set'):
self._set()
def _unset_preempt_mode(self):
self.__preempt_mode = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="preempt-mode", rest_name="preempt-mode", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set preempt mode for the session', u'cli-show-no': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
def _get_priority(self):
"""
Getter method for priority, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/priority (uint8)
YANG Description: Set router priority within virtual router
"""
return self.__priority
def _set_priority(self, v, load=False):
"""
Setter method for priority, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/priority (uint8)
If this variable is read-only (config: false) in the
source YANG file, then _set_priority is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_priority() directly.
YANG Description: Set router priority within virtual router
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..254']}), is_leaf=True, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set router priority within virtual router'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint8', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """priority must be of a type compatible with uint8""",
'defined-type': "uint8",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..254']}), is_leaf=True, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set router priority within virtual router'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint8', is_config=True)""",
})
self.__priority = t
if hasattr(self, '_set'):
self._set()
def _unset_priority(self):
self.__priority = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=int, restriction_dict={'range': ['0..255']}, int_size=8), restriction_dict={'range': [u'1..254']}), is_leaf=True, yang_name="priority", rest_name="priority", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set router priority within virtual router'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint8', is_config=True)
def _get_description(self):
"""
Getter method for description, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/description (string)
YANG Description: Interface specific description
"""
return self.__description
def _set_description(self, v, load=False):
"""
Setter method for description, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/description (string)
If this variable is read-only (config: false) in the
source YANG file, then _set_description is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_description() directly.
YANG Description: Interface specific description
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface specific description', u'cli-multi-value': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='string', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """description must be of a type compatible with string""",
'defined-type': "string",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface specific description', u'cli-multi-value': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='string', is_config=True)""",
})
self.__description = t
if hasattr(self, '_set'):
self._set()
def _unset_description(self):
self.__description = YANGDynClass(base=RestrictedClassType(base_type=unicode, restriction_dict={'length': [u'1..64']}), is_leaf=True, yang_name="description", rest_name="description", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Interface specific description', u'cli-multi-value': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='string', is_config=True)
def _get_advertise_backup(self):
"""
Getter method for advertise_backup, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/advertise_backup (empty)
YANG Description: Enable periodic backup advertisement messages
"""
return self.__advertise_backup
def _set_advertise_backup(self, v, load=False):
"""
Setter method for advertise_backup, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/advertise_backup (empty)
If this variable is read-only (config: false) in the
source YANG file, then _set_advertise_backup is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_advertise_backup() directly.
YANG Description: Enable periodic backup advertisement messages
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=YANGBool, is_leaf=True, yang_name="advertise-backup", rest_name="advertise-backup", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable periodic backup advertisement messages'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """advertise_backup must be of a type compatible with empty""",
'defined-type': "empty",
'generated-type': """YANGDynClass(base=YANGBool, is_leaf=True, yang_name="advertise-backup", rest_name="advertise-backup", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable periodic backup advertisement messages'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)""",
})
self.__advertise_backup = t
if hasattr(self, '_set'):
self._set()
def _unset_advertise_backup(self):
self.__advertise_backup = YANGDynClass(base=YANGBool, is_leaf=True, yang_name="advertise-backup", rest_name="advertise-backup", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable periodic backup advertisement messages'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='empty', is_config=True)
def _get_nd_advertisement_timer(self):
"""
Getter method for nd_advertisement_timer, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/nd_advertisement_timer (uint32)
"""
return self.__nd_advertisement_timer
def _set_nd_advertisement_timer(self, v, load=False):
"""
Setter method for nd_advertisement_timer, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/nd_advertisement_timer (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_nd_advertisement_timer is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_nd_advertisement_timer() directly.
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..3600']}), is_leaf=True, yang_name="nd-advertisement-timer", rest_name="nd-advertisement-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """nd_advertisement_timer must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..3600']}), is_leaf=True, yang_name="nd-advertisement-timer", rest_name="nd-advertisement-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)""",
})
self.__nd_advertisement_timer = t
if hasattr(self, '_set'):
self._set()
def _unset_nd_advertisement_timer(self):
self.__nd_advertisement_timer = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'0..3600']}), is_leaf=True, yang_name="nd-advertisement-timer", rest_name="nd-advertisement-timer", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
def _get_advertisement_interval_scale(self):
"""
Getter method for advertisement_interval_scale, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/advertisement_interval_scale (uint32)
YANG Description: Ipv6 session advertisement interval scale factor <1|2|5|10>
"""
return self.__advertisement_interval_scale
def _set_advertisement_interval_scale(self, v, load=False):
"""
Setter method for advertisement_interval_scale, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/advertisement_interval_scale (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_advertisement_interval_scale is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_advertisement_interval_scale() directly.
YANG Description: Ipv6 session advertisement interval scale factor <1|2|5|10>
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..10']}), is_leaf=True, yang_name="advertisement-interval-scale", rest_name="advertisement-interval-scale", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Ipv6 session advertisement interval scale factor <1|2|5|10>'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """advertisement_interval_scale must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..10']}), is_leaf=True, yang_name="advertisement-interval-scale", rest_name="advertisement-interval-scale", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Ipv6 session advertisement interval scale factor <1|2|5|10>'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)""",
})
self.__advertisement_interval_scale = t
if hasattr(self, '_set'):
self._set()
def _unset_advertisement_interval_scale(self):
self.__advertisement_interval_scale = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..10']}), is_leaf=True, yang_name="advertisement-interval-scale", rest_name="advertisement-interval-scale", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Ipv6 session advertisement interval scale factor <1|2|5|10>'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
def _get_backup_advertisement_interval(self):
"""
Getter method for backup_advertisement_interval, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/backup_advertisement_interval (uint32)
YANG Description: Set Backup advertisement interval
"""
return self.__backup_advertisement_interval
def _set_backup_advertisement_interval(self, v, load=False):
"""
Setter method for backup_advertisement_interval, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/backup_advertisement_interval (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_backup_advertisement_interval is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_backup_advertisement_interval() directly.
YANG Description: Set Backup advertisement interval
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'60..3600']}), is_leaf=True, yang_name="backup-advertisement-interval", rest_name="backup-advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set Backup advertisement interval'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """backup_advertisement_interval must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'60..3600']}), is_leaf=True, yang_name="backup-advertisement-interval", rest_name="backup-advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set Backup advertisement interval'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)""",
})
self.__backup_advertisement_interval = t
if hasattr(self, '_set'):
self._set()
def _unset_backup_advertisement_interval(self):
self.__backup_advertisement_interval = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'60..3600']}), is_leaf=True, yang_name="backup-advertisement-interval", rest_name="backup-advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set Backup advertisement interval'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
def _get_vrrpe_advertisement_interval(self):
"""
Getter method for vrrpe_advertisement_interval, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/vrrpe_advertisement_interval (uint32)
YANG Description: Set advertisement interval
"""
return self.__vrrpe_advertisement_interval
def _set_vrrpe_advertisement_interval(self, v, load=False):
"""
Setter method for vrrpe_advertisement_interval, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/vrrpe_advertisement_interval (uint32)
If this variable is read-only (config: false) in the
source YANG file, then _set_vrrpe_advertisement_interval is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_vrrpe_advertisement_interval() directly.
YANG Description: Set advertisement interval
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..255']}), is_leaf=True, yang_name="vrrpe-advertisement-interval", rest_name="advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set advertisement interval', u'alt-name': u'advertisement-interval'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """vrrpe_advertisement_interval must be of a type compatible with uint32""",
'defined-type': "uint32",
'generated-type': """YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..255']}), is_leaf=True, yang_name="vrrpe-advertisement-interval", rest_name="advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set advertisement interval', u'alt-name': u'advertisement-interval'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)""",
})
self.__vrrpe_advertisement_interval = t
if hasattr(self, '_set'):
self._set()
def _unset_vrrpe_advertisement_interval(self):
self.__vrrpe_advertisement_interval = YANGDynClass(base=RestrictedClassType(base_type=RestrictedClassType(base_type=long, restriction_dict={'range': ['0..4294967295']}, int_size=32), restriction_dict={'range': [u'1..255']}), is_leaf=True, yang_name="vrrpe-advertisement-interval", rest_name="advertisement-interval", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Set advertisement interval', u'alt-name': u'advertisement-interval'}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='uint32', is_config=True)
def _get_short_path_forwarding(self):
"""
Getter method for short_path_forwarding, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/short_path_forwarding (container)
YANG Description: Enable backup router to send traffic
"""
return self.__short_path_forwarding
def _set_short_path_forwarding(self, v, load=False):
"""
Setter method for short_path_forwarding, mapped from YANG variable /interface/port_channel/ipv6/hide_vrrpv3_holder/vrrpv3e/short_path_forwarding (container)
If this variable is read-only (config: false) in the
source YANG file, then _set_short_path_forwarding is considered as a private
method. Backends looking to populate this variable should
do so via calling thisObj._set_short_path_forwarding() directly.
YANG Description: Enable backup router to send traffic
"""
if hasattr(v, "_utype"):
v = v._utype(v)
try:
t = YANGDynClass(v,base=short_path_forwarding.short_path_forwarding, is_container='container', presence=False, yang_name="short-path-forwarding", rest_name="short-path-forwarding", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable backup router to send traffic', u'cli-compact-syntax': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='container', is_config=True)
except (TypeError, ValueError):
raise ValueError({
'error-string': """short_path_forwarding must be of a type compatible with container""",
'defined-type': "container",
'generated-type': """YANGDynClass(base=short_path_forwarding.short_path_forwarding, is_container='container', presence=False, yang_name="short-path-forwarding", rest_name="short-path-forwarding", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable backup router to send traffic', u'cli-compact-syntax': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='container', is_config=True)""",
})
self.__short_path_forwarding = t
if hasattr(self, '_set'):
self._set()
def _unset_short_path_forwarding(self):
self.__short_path_forwarding = YANGDynClass(base=short_path_forwarding.short_path_forwarding, is_container='container', presence=False, yang_name="short-path-forwarding", rest_name="short-path-forwarding", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Enable backup router to send traffic', u'cli-compact-syntax': None}}, namespace='urn:brocade.com:mgmt:brocade-vrrpv3', defining_module='brocade-vrrpv3', yang_type='container', is_config=True)
vrid = __builtin__.property(_get_vrid, _set_vrid)
virtual_ip = __builtin__.property(_get_virtual_ip, _set_virtual_ip)
track = __builtin__.property(_get_track, _set_track)
enable = __builtin__.property(_get_enable, _set_enable)
hold_time = __builtin__.property(_get_hold_time, _set_hold_time)
preempt_mode = __builtin__.property(_get_preempt_mode, _set_preempt_mode)
priority = __builtin__.property(_get_priority, _set_priority)
description = __builtin__.property(_get_description, _set_description)
advertise_backup = __builtin__.property(_get_advertise_backup, _set_advertise_backup)
nd_advertisement_timer = __builtin__.property(_get_nd_advertisement_timer, _set_nd_advertisement_timer)
advertisement_interval_scale = __builtin__.property(_get_advertisement_interval_scale, _set_advertisement_interval_scale)
backup_advertisement_interval = __builtin__.property(_get_backup_advertisement_interval, _set_backup_advertisement_interval)
vrrpe_advertisement_interval = __builtin__.property(_get_vrrpe_advertisement_interval, _set_vrrpe_advertisement_interval)
short_path_forwarding = __builtin__.property(_get_short_path_forwarding, _set_short_path_forwarding)
_pyangbind_elements = {'vrid': vrid, 'virtual_ip': virtual_ip, 'track': track, 'enable': enable, 'hold_time': hold_time, 'preempt_mode': preempt_mode, 'priority': priority, 'description': description, 'advertise_backup': advertise_backup, 'nd_advertisement_timer': nd_advertisement_timer, 'advertisement_interval_scale': advertisement_interval_scale, 'backup_advertisement_interval': backup_advertisement_interval, 'vrrpe_advertisement_interval': vrrpe_advertisement_interval, 'short_path_forwarding': short_path_forwarding, }
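# Illustrative note (not part of the generated module): as the setter docstrings
# above explain, backends populate these leaves through the private setters of a
# hypothetical instance `cfg` of this class, e.g.
#   cfg._set_description("uplink VRRPv3 group")       # string, length 1..64
#   cfg._set_vrrpe_advertisement_interval(30)         # uint32, range 1..255
# while normal reads go through the generated properties such as cfg.description.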
| [
"[email protected]"
] | |
6a4b9b33ccb4907b8e2d6194e8a505fcd0aaaeb0 | 523f8f5febbbfeb6d42183f2bbeebc36f98eadb5 | /539.py | 3f49db2b3056bb0ea18a9558663dea67f0a5b806 | [] | no_license | saleed/LeetCode | 655f82fdfcc3000400f49388e97fc0560f356af0 | 48b43999fb7e2ed82d922e1f64ac76f8fabe4baa | refs/heads/master | 2022-06-15T21:54:56.223204 | 2022-05-09T14:05:50 | 2022-05-09T14:05:50 | 209,430,056 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 580 | py | class Solution(object):
def findMinDifference(self, timePoints):
"""
:type timePoints: List[str]
:rtype: int
"""
timePoints.sort()
diff=float("inf")
        # go one step past the end so the modulo indices also compare the last
        # time against the first one, i.e. the gap that wraps across midnight
        for i in range(1,len(timePoints)+1):
time1=timePoints[i%len(timePoints)]
time2=timePoints[(i-1)%len(timePoints)]
hour1=int(time1[:2])
hour2=int(time2[:2])
minute1=int(time1[3:])
minute2=int(time2[3:])
diff=min(diff,((hour1-hour2)*60+minute1-minute2+(24*60))%(24*60))
return diff
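# Example usage (hypothetical input):
#   Solution().findMinDifference(["23:59", "00:00"]) evaluates to 1,
#   the one-minute gap obtained by wrapping past midnight.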
| [
"[email protected]"
] | |
4dd41f91081d4649e198f6707d9523f035a40245 | bad62c2b0dfad33197db55b44efeec0bab405634 | /sdk/authorization/azure-mgmt-authorization/azure/mgmt/authorization/v2021_12_01_preview/aio/operations/_access_review_instance_operations.py | 378596af3f8ac6f043cf6a4178c98399aedc4ea8 | [
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] | permissive | test-repo-billy/azure-sdk-for-python | 20c5a2486456e02456de17515704cb064ff19833 | cece86a8548cb5f575e5419864d631673be0a244 | refs/heads/master | 2022-10-25T02:28:39.022559 | 2022-10-18T06:05:46 | 2022-10-18T06:05:46 | 182,325,031 | 0 | 0 | MIT | 2019-07-25T22:28:52 | 2019-04-19T20:59:15 | Python | UTF-8 | Python | false | false | 15,208 | py | # pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, Callable, Dict, Optional, TypeVar
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._access_review_instance_operations import (
build_accept_recommendations_request,
build_apply_decisions_request,
build_reset_decisions_request,
build_send_reminders_request,
build_stop_request,
)
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class AccessReviewInstanceOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.authorization.v2021_12_01_preview.aio.AuthorizationManagementClient`'s
:attr:`access_review_instance` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
@distributed_trace_async
async def stop( # pylint: disable=inconsistent-return-statements
self, schedule_definition_id: str, id: str, **kwargs: Any
) -> None:
"""An action to stop an access review instance.
:param schedule_definition_id: The id of the access review schedule definition. Required.
:type schedule_definition_id: str
:param id: The id of the access review instance. Required.
:type id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-12-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_stop_request(
schedule_definition_id=schedule_definition_id,
id=id,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.stop.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDefinition, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
stop.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/accessReviewScheduleDefinitions/{scheduleDefinitionId}/instances/{id}/stop"} # type: ignore
@distributed_trace_async
async def reset_decisions( # pylint: disable=inconsistent-return-statements
self, schedule_definition_id: str, id: str, **kwargs: Any
) -> None:
"""An action to reset all decisions for an access review instance.
:param schedule_definition_id: The id of the access review schedule definition. Required.
:type schedule_definition_id: str
:param id: The id of the access review instance. Required.
:type id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-12-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_reset_decisions_request(
schedule_definition_id=schedule_definition_id,
id=id,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.reset_decisions.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDefinition, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
reset_decisions.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/accessReviewScheduleDefinitions/{scheduleDefinitionId}/instances/{id}/resetDecisions"} # type: ignore
@distributed_trace_async
async def apply_decisions( # pylint: disable=inconsistent-return-statements
self, schedule_definition_id: str, id: str, **kwargs: Any
) -> None:
"""An action to apply all decisions for an access review instance.
:param schedule_definition_id: The id of the access review schedule definition. Required.
:type schedule_definition_id: str
:param id: The id of the access review instance. Required.
:type id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-12-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_apply_decisions_request(
schedule_definition_id=schedule_definition_id,
id=id,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.apply_decisions.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDefinition, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
apply_decisions.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/accessReviewScheduleDefinitions/{scheduleDefinitionId}/instances/{id}/applyDecisions"} # type: ignore
@distributed_trace_async
async def send_reminders( # pylint: disable=inconsistent-return-statements
self, schedule_definition_id: str, id: str, **kwargs: Any
) -> None:
"""An action to send reminders for an access review instance.
:param schedule_definition_id: The id of the access review schedule definition. Required.
:type schedule_definition_id: str
:param id: The id of the access review instance. Required.
:type id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-12-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_send_reminders_request(
schedule_definition_id=schedule_definition_id,
id=id,
subscription_id=self._config.subscription_id,
api_version=api_version,
template_url=self.send_reminders.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDefinition, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
send_reminders.metadata = {"url": "/subscriptions/{subscriptionId}/providers/Microsoft.Authorization/accessReviewScheduleDefinitions/{scheduleDefinitionId}/instances/{id}/sendReminders"} # type: ignore
@distributed_trace_async
async def accept_recommendations( # pylint: disable=inconsistent-return-statements
self, schedule_definition_id: str, id: str, **kwargs: Any
) -> None:
"""An action to accept recommendations for decision in an access review instance.
:param schedule_definition_id: The id of the access review schedule definition. Required.
:type schedule_definition_id: str
:param id: The id of the access review instance. Required.
:type id: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: None or the result of cls(response)
:rtype: None
:raises ~azure.core.exceptions.HttpResponseError:
"""
error_map = {
401: ClientAuthenticationError,
404: ResourceNotFoundError,
409: ResourceExistsError,
304: ResourceNotModifiedError,
}
error_map.update(kwargs.pop("error_map", {}) or {})
_headers = kwargs.pop("headers", {}) or {}
_params = case_insensitive_dict(kwargs.pop("params", {}) or {})
api_version = kwargs.pop("api_version", _params.pop("api-version", "2021-12-01-preview")) # type: str
cls = kwargs.pop("cls", None) # type: ClsType[None]
request = build_accept_recommendations_request(
schedule_definition_id=schedule_definition_id,
id=id,
api_version=api_version,
template_url=self.accept_recommendations.metadata["url"],
headers=_headers,
params=_params,
)
request = _convert_request(request)
request.url = self._client.format_url(request.url) # type: ignore
pipeline_response = await self._client._pipeline.run( # type: ignore # pylint: disable=protected-access
request, stream=False, **kwargs
)
response = pipeline_response.http_response
if response.status_code not in [204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
error = self._deserialize.failsafe_deserialize(_models.ErrorDefinition, pipeline_response)
raise HttpResponseError(response=response, model=error, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
accept_recommendations.metadata = {"url": "/providers/Microsoft.Authorization/accessReviewScheduleDefinitions/{scheduleDefinitionId}/instances/{id}/acceptRecommendations"} # type: ignore
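# Illustrative usage sketch (credential and ids are placeholders, not part of the
# generated file): these operations are reached through the async management
# client as the class docstring above describes, e.g.
#   client = AuthorizationManagementClient(credential, subscription_id)
#   await client.access_review_instance.stop(
#       schedule_definition_id="<definition-id>", id="<instance-id>")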
| [
"[email protected]"
] | |
f6be5f348da2ecdc3c2f5549a9bb3406e3276280 | 5da5473ff3026165a47f98744bac82903cf008e0 | /packages/google-cloud-securitycenter/google/cloud/securitycenter_v1/types/vulnerability.py | a9fea0e6bca2b8199f9e1f0237c0ce0083326912 | [
"Apache-2.0"
] | permissive | googleapis/google-cloud-python | ed61a5f03a476ab6053870f4da7bc5534e25558b | 93c4e63408c65129422f65217325f4e7d41f7edf | refs/heads/main | 2023-09-04T09:09:07.852632 | 2023-08-31T22:49:26 | 2023-08-31T22:49:26 | 16,316,451 | 2,792 | 917 | Apache-2.0 | 2023-09-14T21:45:18 | 2014-01-28T15:51:47 | Python | UTF-8 | Python | false | false | 12,429 | py | # -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from __future__ import annotations
from typing import MutableMapping, MutableSequence
import proto # type: ignore
__protobuf__ = proto.module(
package="google.cloud.securitycenter.v1",
manifest={
"Vulnerability",
"Cve",
"Reference",
"Cvssv3",
},
)
class Vulnerability(proto.Message):
r"""Refers to common vulnerability fields e.g. cve, cvss, cwe
etc.
Attributes:
cve (google.cloud.securitycenter_v1.types.Cve):
CVE stands for Common Vulnerabilities and
Exposures (https://cve.mitre.org/about/)
"""
cve: "Cve" = proto.Field(
proto.MESSAGE,
number=1,
message="Cve",
)
class Cve(proto.Message):
r"""CVE stands for Common Vulnerabilities and Exposures.
More information: https://cve.mitre.org
Attributes:
id (str):
The unique identifier for the vulnerability.
e.g. CVE-2021-34527
references (MutableSequence[google.cloud.securitycenter_v1.types.Reference]):
Additional information about the CVE.
e.g.
https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-34527
cvssv3 (google.cloud.securitycenter_v1.types.Cvssv3):
Describe Common Vulnerability Scoring System
specified at
https://www.first.org/cvss/v3.1/specification-document
upstream_fix_available (bool):
Whether upstream fix is available for the
CVE.
"""
id: str = proto.Field(
proto.STRING,
number=1,
)
references: MutableSequence["Reference"] = proto.RepeatedField(
proto.MESSAGE,
number=2,
message="Reference",
)
cvssv3: "Cvssv3" = proto.Field(
proto.MESSAGE,
number=3,
message="Cvssv3",
)
upstream_fix_available: bool = proto.Field(
proto.BOOL,
number=4,
)
class Reference(proto.Message):
r"""Additional Links
Attributes:
source (str):
Source of the reference e.g. NVD
uri (str):
Uri for the mentioned source e.g.
https://cve.mitre.org/cgi-bin/cvename.cgi?name=CVE-2021-34527.
"""
source: str = proto.Field(
proto.STRING,
number=1,
)
uri: str = proto.Field(
proto.STRING,
number=2,
)
class Cvssv3(proto.Message):
r"""Common Vulnerability Scoring System version 3.
Attributes:
base_score (float):
The base score is a function of the base
metric scores.
attack_vector (google.cloud.securitycenter_v1.types.Cvssv3.AttackVector):
Base Metrics
Represents the intrinsic characteristics of a
vulnerability that are constant over time and
across user environments. This metric reflects
the context by which vulnerability exploitation
is possible.
attack_complexity (google.cloud.securitycenter_v1.types.Cvssv3.AttackComplexity):
This metric describes the conditions beyond
the attacker's control that must exist in order
to exploit the vulnerability.
privileges_required (google.cloud.securitycenter_v1.types.Cvssv3.PrivilegesRequired):
This metric describes the level of privileges
an attacker must possess before successfully
exploiting the vulnerability.
user_interaction (google.cloud.securitycenter_v1.types.Cvssv3.UserInteraction):
This metric captures the requirement for a
human user, other than the attacker, to
participate in the successful compromise of the
vulnerable component.
scope (google.cloud.securitycenter_v1.types.Cvssv3.Scope):
The Scope metric captures whether a
vulnerability in one vulnerable component
impacts resources in components beyond its
security scope.
confidentiality_impact (google.cloud.securitycenter_v1.types.Cvssv3.Impact):
This metric measures the impact to the
confidentiality of the information resources
managed by a software component due to a
successfully exploited vulnerability.
integrity_impact (google.cloud.securitycenter_v1.types.Cvssv3.Impact):
This metric measures the impact to integrity
of a successfully exploited vulnerability.
availability_impact (google.cloud.securitycenter_v1.types.Cvssv3.Impact):
This metric measures the impact to the
availability of the impacted component resulting
from a successfully exploited vulnerability.
"""
class AttackVector(proto.Enum):
r"""This metric reflects the context by which vulnerability
exploitation is possible.
Values:
ATTACK_VECTOR_UNSPECIFIED (0):
Invalid value.
ATTACK_VECTOR_NETWORK (1):
The vulnerable component is bound to the
network stack and the set of possible attackers
extends beyond the other options listed below,
up to and including the entire Internet.
ATTACK_VECTOR_ADJACENT (2):
The vulnerable component is bound to the
network stack, but the attack is limited at the
protocol level to a logically adjacent topology.
ATTACK_VECTOR_LOCAL (3):
The vulnerable component is not bound to the
network stack and the attacker's path is via
read/write/execute capabilities.
ATTACK_VECTOR_PHYSICAL (4):
The attack requires the attacker to
physically touch or manipulate the vulnerable
component.
"""
ATTACK_VECTOR_UNSPECIFIED = 0
ATTACK_VECTOR_NETWORK = 1
ATTACK_VECTOR_ADJACENT = 2
ATTACK_VECTOR_LOCAL = 3
ATTACK_VECTOR_PHYSICAL = 4
class AttackComplexity(proto.Enum):
r"""This metric describes the conditions beyond the attacker's
control that must exist in order to exploit the vulnerability.
Values:
ATTACK_COMPLEXITY_UNSPECIFIED (0):
Invalid value.
ATTACK_COMPLEXITY_LOW (1):
Specialized access conditions or extenuating
circumstances do not exist. An attacker can
expect repeatable success when attacking the
vulnerable component.
ATTACK_COMPLEXITY_HIGH (2):
A successful attack depends on conditions
beyond the attacker's control. That is, a
successful attack cannot be accomplished at
will, but requires the attacker to invest in
some measurable amount of effort in preparation
or execution against the vulnerable component
before a successful attack can be expected.
"""
ATTACK_COMPLEXITY_UNSPECIFIED = 0
ATTACK_COMPLEXITY_LOW = 1
ATTACK_COMPLEXITY_HIGH = 2
class PrivilegesRequired(proto.Enum):
r"""This metric describes the level of privileges an attacker
must possess before successfully exploiting the vulnerability.
Values:
PRIVILEGES_REQUIRED_UNSPECIFIED (0):
Invalid value.
PRIVILEGES_REQUIRED_NONE (1):
The attacker is unauthorized prior to attack,
and therefore does not require any access to
settings or files of the vulnerable system to
carry out an attack.
PRIVILEGES_REQUIRED_LOW (2):
The attacker requires privileges that provide
basic user capabilities that could normally
affect only settings and files owned by a user.
Alternatively, an attacker with Low privileges
has the ability to access only non-sensitive
resources.
PRIVILEGES_REQUIRED_HIGH (3):
The attacker requires privileges that provide
significant (e.g., administrative) control over
the vulnerable component allowing access to
component-wide settings and files.
"""
PRIVILEGES_REQUIRED_UNSPECIFIED = 0
PRIVILEGES_REQUIRED_NONE = 1
PRIVILEGES_REQUIRED_LOW = 2
PRIVILEGES_REQUIRED_HIGH = 3
class UserInteraction(proto.Enum):
r"""This metric captures the requirement for a human user, other
than the attacker, to participate in the successful compromise
of the vulnerable component.
Values:
USER_INTERACTION_UNSPECIFIED (0):
Invalid value.
USER_INTERACTION_NONE (1):
The vulnerable system can be exploited
without interaction from any user.
USER_INTERACTION_REQUIRED (2):
Successful exploitation of this vulnerability
requires a user to take some action before the
vulnerability can be exploited.
"""
USER_INTERACTION_UNSPECIFIED = 0
USER_INTERACTION_NONE = 1
USER_INTERACTION_REQUIRED = 2
class Scope(proto.Enum):
r"""The Scope metric captures whether a vulnerability in one
vulnerable component impacts resources in components beyond its
security scope.
Values:
SCOPE_UNSPECIFIED (0):
Invalid value.
SCOPE_UNCHANGED (1):
An exploited vulnerability can only affect
resources managed by the same security
authority.
SCOPE_CHANGED (2):
An exploited vulnerability can affect
resources beyond the security scope managed by
the security authority of the vulnerable
component.
"""
SCOPE_UNSPECIFIED = 0
SCOPE_UNCHANGED = 1
SCOPE_CHANGED = 2
class Impact(proto.Enum):
r"""The Impact metrics capture the effects of a successfully
exploited vulnerability on the component that suffers the worst
outcome that is most directly and predictably associated with
the attack.
Values:
IMPACT_UNSPECIFIED (0):
Invalid value.
IMPACT_HIGH (1):
High impact.
IMPACT_LOW (2):
Low impact.
IMPACT_NONE (3):
No impact.
"""
IMPACT_UNSPECIFIED = 0
IMPACT_HIGH = 1
IMPACT_LOW = 2
IMPACT_NONE = 3
base_score: float = proto.Field(
proto.DOUBLE,
number=1,
)
attack_vector: AttackVector = proto.Field(
proto.ENUM,
number=5,
enum=AttackVector,
)
attack_complexity: AttackComplexity = proto.Field(
proto.ENUM,
number=6,
enum=AttackComplexity,
)
privileges_required: PrivilegesRequired = proto.Field(
proto.ENUM,
number=7,
enum=PrivilegesRequired,
)
user_interaction: UserInteraction = proto.Field(
proto.ENUM,
number=8,
enum=UserInteraction,
)
scope: Scope = proto.Field(
proto.ENUM,
number=9,
enum=Scope,
)
confidentiality_impact: Impact = proto.Field(
proto.ENUM,
number=10,
enum=Impact,
)
integrity_impact: Impact = proto.Field(
proto.ENUM,
number=11,
enum=Impact,
)
availability_impact: Impact = proto.Field(
proto.ENUM,
number=12,
enum=Impact,
)
__all__ = tuple(sorted(__protobuf__.manifest))
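# Illustrative construction (values are made up, not taken from this module):
# proto-plus messages accept their generated fields as keyword arguments, e.g.
#   score = Cvssv3(
#       base_score=8.1,
#       attack_vector=Cvssv3.AttackVector.ATTACK_VECTOR_NETWORK,
#       privileges_required=Cvssv3.PrivilegesRequired.PRIVILEGES_REQUIRED_NONE,
#   )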
| [
"[email protected]"
] | |
7cec84d82d2dcb14c1cbdaf99b64ffc73e1ae94e | 11771f5dd90a74d5c76765f27f0d9a9cb044f57b | /route/bbs_make.py | 2185cef4c1bdcfd50b641b610aa030eb0d012695 | [
"BSD-3-Clause"
] | permissive | openNAMU/openNAMU | cc031ea848ac6d829ad243fcf59da26adf0f0814 | 868107e4ef53e4e78af15c590673b78ee385baa5 | refs/heads/beta | 2023-08-24T10:20:00.245680 | 2023-08-23T14:09:53 | 2023-08-23T14:09:53 | 78,184,261 | 86 | 75 | BSD-3-Clause | 2023-09-13T21:36:03 | 2017-01-06T07:22:10 | Python | UTF-8 | Python | false | false | 2,024 | py | from .tool.func import *
def bbs_make():
with get_db_connect() as conn:
curs = conn.cursor()
if admin_check() != 1:
return re_error('/error/3')
if flask.request.method == 'POST':
curs.execute(db_change('select set_id from bbs_set where set_name = "bbs_name" order by set_id + 0 desc'))
db_data = curs.fetchall()
bbs_num = str(int(db_data[0][0]) + 1) if db_data else '1'
bbs_name = flask.request.form.get('bbs_name', 'test')
bbs_type = flask.request.form.get('bbs_type', 'comment')
bbs_type = bbs_type if bbs_type in ['comment', 'thread'] else 'comment'
curs.execute(db_change("insert into bbs_set (set_name, set_code, set_id, set_data) values ('bbs_name', '', ?, ?)"), [bbs_num, bbs_name])
curs.execute(db_change("insert into bbs_set (set_name, set_code, set_id, set_data) values ('bbs_type', '', ?, ?)"), [bbs_num, bbs_type])
conn.commit()
return redirect('/bbs/main')
else:
return easy_minify(flask.render_template(skin_check(),
imp = [load_lang('bbs_make'), wiki_set(), wiki_custom(), wiki_css([0, 0])],
data = '''
<form method="post">
<input placeholder="''' + load_lang('bbs_name') + '''" name="bbs_name">
<hr class="main_hr">
<select name="bbs_type">
<option value="comment">''' + load_lang('comment_base') + '''</option>
<option value="thread">''' + load_lang('thread_base') + '''</option>
</select>
<hr class="main_hr">
<button type="submit">''' + load_lang('save') + '''</button>
</form>
''',
menu = [['bbs/main', load_lang('return')]]
)) | [
"[email protected]"
] | |
376183f1fd02abc26c81e2af35be1774eebe4052 | 1eab574606dffb14a63195de994ee7c2355989b1 | /ixnetwork_restpy/testplatform/sessions/ixnetwork/globals/topology/bfdrouter/bfdrouter.py | a4c8d026e8eb6e4b33d202bfd079514c11616f61 | [
"MIT"
] | permissive | steiler/ixnetwork_restpy | 56b3f08726301e9938aaea26f6dcd20ebf53c806 | dd7ec0d311b74cefb1fe310d57b5c8a65d6d4ff9 | refs/heads/master | 2020-09-04T12:10:18.387184 | 2019-11-05T11:29:43 | 2019-11-05T11:29:43 | 219,728,796 | 0 | 0 | null | 2019-11-05T11:28:29 | 2019-11-05T11:28:26 | null | UTF-8 | Python | false | false | 2,602 | py | # MIT LICENSE
#
# Copyright 1997 - 2019 by IXIA Keysight
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from ixnetwork_restpy.base import Base
from ixnetwork_restpy.files import Files
class BfdRouter(Base):
"""Bfd Port Specific Data
The BfdRouter class encapsulates a required bfdRouter resource which will be retrieved from the server every time the property is accessed.
"""
__slots__ = ()
_SDM_NAME = 'bfdRouter'
def __init__(self, parent):
super(BfdRouter, self).__init__(parent)
@property
def Count(self):
"""Number of elements inside associated multiplier-scaled container object, e.g. number of devices inside a Device Group.
Returns:
number
"""
return self._get_attribute('count')
@property
def DescriptiveName(self):
"""Longer, more descriptive name for element. It's not guaranteed to be unique like -name-, but may offers more context
Returns:
str
"""
return self._get_attribute('descriptiveName')
@property
def Name(self):
"""Name of NGPF element, guaranteed to be unique in Scenario
Returns:
str
"""
return self._get_attribute('name')
@Name.setter
def Name(self, value):
self._set_attribute('name', value)
@property
def RowNames(self):
"""Name of rows
Returns:
list(str)
"""
return self._get_attribute('rowNames')
def update(self, Name=None):
"""Updates a child instance of bfdRouter on the server.
Args:
Name (str): Name of NGPF element, guaranteed to be unique in Scenario
Raises:
ServerError: The server has encountered an uncategorized error condition
"""
self._update(locals())
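# Illustrative sketch (the handle below is a placeholder): a BfdRouter node taken
# from the session's topology globals can be renamed either through the generated
# Name property or the update() helper above, e.g.
#   bfd_router.Name = 'bfd-global-1'
#   bfd_router.update(Name='bfd-global-1')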
| [
"[email protected]"
] | |
6e5b281dfdbc8eb03b095b591ce654289d789360 | e0c8e66af3a72a1cc534d7a90fead48754d266b3 | /vb_suite/miscellaneous.py | 8295d275f2dd615f626d02981203b406f233a1ea | [
"BSD-3-Clause"
] | permissive | gwtaylor/pandas | e12b0682347b9f03a24d6bff3e14f563cb7a3758 | 7b0349f0545011a6cac2422b8d8d0f409ffd1e15 | refs/heads/master | 2021-01-15T17:51:47.147334 | 2012-01-13T17:53:56 | 2012-01-13T17:53:56 | 3,174,111 | 1 | 1 | null | null | null | null | UTF-8 | Python | false | false | 505 | py | from vbench.benchmark import Benchmark
from datetime import datetime
common_setup = """from pandas_vb_common import *
"""
#----------------------------------------------------------------------
# cache_readonly
setup = common_setup + """
from pandas.util.decorators import cache_readonly
class Foo:
@cache_readonly
def prop(self):
return 5
obj = Foo()
"""
misc_cache_readonly = Benchmark("obj.prop", setup, name="misc_cache_readonly",
ncalls=2000000)
| [
"[email protected]"
] | |
7bcff9fa804b622d48c7a6bb33873bdeede52060 | b61dedf12868e2bc511b6693af1985911a13f336 | /src/logpipe/formats/json.py | 5747c863e36910b285e51dc63598357f2e147fee | [
"ISC"
] | permissive | vitorcarmovieira/django-logpipe | f9eebb6674b9ba180a63448c9d71ce2e87929f7c | 89d0543e341518f9ae49124c354e6a6c2e3f4150 | refs/heads/main | 2023-03-03T13:18:22.456270 | 2021-02-13T17:29:32 | 2021-02-13T17:29:32 | 326,679,534 | 1 | 1 | ISC | 2021-02-13T17:29:32 | 2021-01-04T12:39:30 | Python | UTF-8 | Python | false | false | 139 | py | from rest_framework.renderers import JSONRenderer
from rest_framework.parsers import JSONParser
__all__ = ['JSONRenderer', 'JSONParser']
| [
"[email protected]"
] | |
53cd30d03207556424257a7a49ed432f13b6260a | 9aa7b52847a161507eae57c222f6f3b3473fbf67 | /Project/Main/bin/pyhtmlizer | cb636c852d7d27ecd58cab7c59fd4e7b1fc7541b | [] | no_license | azatnt/Project_Aza_Madi | 13a41bcc7bc822503136046dd5905a0884ffccb5 | d2804cd2b1e9b97d44e85d6a24c45d3f41458db3 | refs/heads/master | 2023-01-22T18:17:17.512344 | 2020-11-16T15:56:00 | 2020-11-16T15:56:00 | 261,734,097 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 276 | #!/Users/sulpak/Documents/GitHub/Project_Aza_Madi/Project/Main/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from twisted.scripts.htmlizer import run
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
sys.exit(run())
| [
"[email protected]"
] | ||
f012952b29876b396eeff208f656b11ad3d1d3d2 | 54f352a242a8ad6ff5516703e91da61e08d9a9e6 | /Source Codes/AtCoder/agc025/C/2658196.py | 38086edb4f9e8897ebee6bd4d4c545b41c0b5eb2 | [] | no_license | Kawser-nerd/CLCDSA | 5cbd8a4c3f65173e4e8e0d7ed845574c4770c3eb | aee32551795763b54acb26856ab239370cac4e75 | refs/heads/master | 2022-02-09T11:08:56.588303 | 2022-01-26T18:53:40 | 2022-01-26T18:53:40 | 211,783,197 | 23 | 9 | null | null | null | null | UTF-8 | Python | false | false | 333 | py | from itertools import accumulate
N = int(input())
L, R = [0], [0]
for i in range(N):
li, ri = map(int, input().split())
L.append(li)
R.append(ri)
L.sort(reverse=True)
R.sort()
L = list(accumulate(L))
R = list(accumulate(R))
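# With the 0 sentinels included, L[k] and R[k] are prefix sums of the left
# endpoints in decreasing order and of the right endpoints in increasing order;
# the loop below maximises 2*(L[k] - R[k]) over all k and prints that as the answer.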
ans = 0
for k in range(N+1):
ans = max(ans, 2*(L[k]-R[k]))
print(ans) | [
"[email protected]"
] | |
3e14c52e46b70e5aca1d0f5a055cabdee1774a89 | 372da4f11ff2996f4c7e023884f12455fdfbb045 | /RA4Analysis/plotsDaniel/makeSubPlot.py | 0888769bf88a5d67e18425f2471031eadb7038fd | [] | no_license | HephyAnalysisSW/Workspace | ec127f75da58c1bbf12f8999055a1ef513598e94 | dcdee36e344dec8cbfe6dd6986e2345d9c1af25f | refs/heads/94X-master | 2022-11-22T01:52:28.179081 | 2019-03-12T13:19:04 | 2019-03-12T13:19:04 | 13,293,437 | 1 | 1 | null | 2018-11-14T10:52:57 | 2013-10-03T08:07:27 | Python | UTF-8 | Python | false | false | 8,532 | py | import ROOT
import os, sys, copy
ROOT.gROOT.LoadMacro('../../HEPHYPythonTools/scripts/root/tdrstyle.C')
ROOT.setTDRStyle()
from math import *
from array import array
from Workspace.HEPHYPythonTools.helpers import getVarValue, getChain, deltaPhi
from Workspace.RA4Analysis.cmgTuplesPostProcessed_v6_Phys14V2_HT400ST150_withDF import *
#from Workspace.RA4Analysis.cmgTuplesPostProcessed_v6_Phys14V2 import *
from Workspace.RA4Analysis.helpers import *
varstring='deltaPhi_Wl'
binning=[16,0,3.2]
lepSel = 'hard'
nbtagCut=0
njetCut=2
nBtagReg=[0,1,2]
nJetReg=[2,3,4,5,6]
stReg=[(150,-1)]#250),(250,350),(350,450),(450,-1)]
htReg=[(500,-1)]#750),(750,1000),(1000,1250),(1250,-1)]
startpath = '/afs/hephy.at/user/d/dspitzbart/www/subBkgInclusive/'
#Load the Background Chain
c = getChain(ttJets[lepSel],histname='')
#Sub Background Definitions
ngNuEFromW = "Sum$(abs(genPart_pdgId)==12&&abs(genPart_motherId)==24)"
ngNuMuFromW = "Sum$(abs(genPart_pdgId)==14&&abs(genPart_motherId)==24)"
ngNuTauFromW = "Sum$(abs(genPart_pdgId)==16&&abs(genPart_motherId)==24)"
lTau_H = ngNuEFromW+"+"+ngNuMuFromW+"==0&&"+ngNuTauFromW+"==1"\
+"&&Sum$(genTau_nNuE+genTau_nNuMu==1&&genTau_nNuTau==1)==1"
hTau_l= "Sum$((abs(genPart_pdgId)==14||abs(genPart_pdgId)==12)&&abs(genPart_motherId)==24)==1"\
+"&&Sum$(abs(genPart_pdgId)==16&&abs(genPart_motherId)==24)==1"\
+"&&Sum$(genTau_nNuE+genTau_nNuMu==0&&genTau_nNuTau==1)==1"
diLepEff = ngNuEFromW+"+"+ngNuMuFromW+"==2&&"+ngNuTauFromW+"==0&&Sum$(genLep_pt>10&&(abs(genLep_eta)<2.1&&abs(genLep_pdgId)==13||abs(genLep_eta)<2.4&&abs(genLep_pdgId)==11))==2"
diLepAcc = ngNuEFromW+"+"+ngNuMuFromW+"==2&&"+ngNuTauFromW+"==0&&Sum$(genLep_pt>10&&(abs(genLep_eta)<2.1&&abs(genLep_pdgId)==13||abs(genLep_eta)<2.4&&abs(genLep_pdgId)==11))!=2"
lTau_l = ngNuEFromW+"+"+ngNuMuFromW+"==1&&"+ngNuTauFromW+"==1&&Sum$(genTau_nNuE+genTau_nNuMu==1&&genTau_nNuTau==1)==1"
diTau = ngNuEFromW+"+"+ngNuMuFromW+"==0&&"+ngNuTauFromW+"==2"
l_H = ngNuEFromW+"+"+ngNuMuFromW+"==1&&"+ngNuTauFromW+"==0"
diHad = ngNuEFromW+"+"+ngNuMuFromW+"==0&&"+ngNuTauFromW+"==0"
hTau_H = ngNuEFromW+"+"+ngNuMuFromW+"==0&&"+ngNuTauFromW+"==1&&Sum$(genTau_nNuE+genTau_nNuMu==0&&genTau_nNuTau==1)==1"
allHad = "(("+diHad+")||("+hTau_H+"))"
#tot_lumi = 4000
#nevents = c.GetEntries()
#weight = "("+str(tot_lumi)+"*xsec)/"+str(nevents)
#print weight
for i,bReg in enumerate(nBtagReg):
if i < len(nBtagReg)-1:
nbtagCutString='&&nBJetMediumCMVA30=='+str(bReg)
nbtagPath='nBtagEq'+str(bReg)+'/'
else:
nbtagCutString='&&nBJetMediumCMVA30>='+str(bReg)
nbtagPath='nBtagLEq'+str(bReg)+'/'
for j, hReg in enumerate(htReg):
if j < len(htReg)-1:
htCutString='&&htJet30j>='+str(hReg[0])+'&&htJet30j<'+str(hReg[1])
htPath=str(hReg[0])+'htJet30j'+str(hReg[1])+'/'
else:
htCutString='&&htJet30j>='+str(hReg[0])
htPath='_'+str(hReg[0])+'htJet30j/'
for k,sReg in enumerate(stReg):
if k < len(stReg)-1:
stCutString='&&st>='+str(sReg[0])+'&&st<'+str(sReg[1])
stPath=str(sReg[0])+'st'+str(sReg[1])+'/'
else:
stCutString='&&st>='+str(sReg[0])
stPath='_'+str(sReg[0])+'st/'
for l,jReg in enumerate(nJetReg):
if l < len(nJetReg)-1:
njCutString='&&nJet30=='+str(jReg)
njPath='nJet30Eq'+str(jReg)+'/'
else:
njCutString='&&nJet30>='+str(jReg)
njPath='nJet30LEq'+str(jReg)+'/'
path=startpath+nbtagPath+htPath+stPath+njPath
if not os.path.exists(path):
os.makedirs(path)
prepresel = "singleLeptonic&&nLooseHardLeptons==1&&nTightHardLeptons==1&&nLooseSoftPt10Leptons==0&&Jet_pt[1]>80"
presel = prepresel+nbtagCutString+htCutString+stCutString+njCutString#"htJet30j>=750&&htJet30j<=1000&&st>=450&&"+njetCutString+"&&"+nbtagCutString+'&&Jet_pt[1]>80'
prefix= ''.join(presel.split('&&')[5:]).replace("&&","_").replace(">=","le_").replace("==","eq_")
can1 = ROOT.TCanvas(varstring,varstring,1200,1000)
h_Stack = ROOT.THStack('h_Stack',varstring)
h_Stack_S = ROOT.THStack('h_Stack_S',varstring)
l = ROOT.TLegend(0.7,0.7,0.95,0.95)
l.SetFillColor(ROOT.kWhite)
l.SetShadowColor(ROOT.kWhite)
l.SetBorderSize(1)
nothing='(1)'
subBkg=[
##[allHad, 'all hadronic', ROOT.kRed-7, 'all hadronic'],
[diHad,'two had.', ROOT.kRed-9,'diHad'],
[hTau_H,'W#rightarrow#tau#nu#rightarrow had.+2#nu | W#rightarrow had.', ROOT.kRed-7, 'hadTau'],
[lTau_H,'W#rightarrow#tau#nu#rightarrow e/#mu+3#nu | W#rightarrow had.', ROOT.kBlue-2, 'lepTau_H'],
[diTau,'two #tau leptons', ROOT.kGreen+3,'diTau'],
[lTau_l,'W#rightarrow#tau#nu#rightarrow e/#mu+3#nu | W#rightarrow e/#mu+#nu', ROOT.kOrange+1,'lTau_l'],
[diLepAcc,'dileptonic (e/#mu) Acc.',ROOT.kRed-3,'diLepAcc'],
[diLepEff,'dileptonic (e/#mu) Eff.',ROOT.kRed-4,'diLepEff'],
[hTau_l,'W#rightarrow#tau#nu#rightarrow had.+2#nu | W#rightarrow e/#mu+#nu', ROOT.kAzure+6,'hTau_l'],
[l_H, 'single lep. (e/#mu)',ROOT.kCyan+3,'singleLep']
#[nothing,'tt Jets',ROOT.kBlue,'ttJets']
]
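        # each subBkg entry is [gen-level selection, legend label, ROOT fill colour,
        # tag used in the output file names]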
totalh=ROOT.TH1F('total','Total',*binning)
c.Draw(varstring+'>>total','weight*('+presel+')')
totalh.SetLineColor(ROOT.kBlue+3)
totalh.SetLineWidth(2)
totalh.SetMarkerSize(0)
totalh.SetMarkerStyle(0)
totalh.SetTitleSize(20)
totalh.SetFillColor(0)
l.AddEntry(totalh)
for i, [cut,name,col,subname] in enumerate(subBkg):
histo = 'h'+str(i)
histoname = histo
print histoname
histo = ROOT.TH1F(str(histo) ,str(histo),*binning)
print histo
print col
wholeCut=presel+'&&'+cut
print wholeCut
c.Draw(varstring+'>>'+str(histoname),'weight*('+wholeCut+')')
histo.SetLineColor(ROOT.kBlack)
histo.SetLineWidth(1)
histo.SetMarkerSize(0)
histo.SetMarkerStyle(0)
histo.SetTitleSize(20)
histo.GetXaxis().SetTitle(varstring)
histo.GetYaxis().SetTitle("Events / "+str( (binning[2] - binning[1])/binning[0]))
histo.GetXaxis().SetLabelSize(0.04)
histo.GetYaxis().SetLabelSize(0.04)
histo.GetYaxis().SetTitleOffset(0.8)
histo.GetYaxis().SetTitleSize(0.05)
histo.SetFillColor(col)
histo.SetFillStyle(1001)
histo.SetMinimum(.08)
h_Stack.Add(histo)
l.AddEntry(histo, name)
#RCS Backup calculation
twoBin=[0,1,pi]
rcsh=ROOT.TH1F('rcsh','rcsh',len(twoBin)-1, array('d', twoBin))
c.Draw(varstring+'>>rcsh','weight*('+wholeCut+')','goff')
rcsb=0
if rcsh.GetBinContent(1)>0 and rcsh.GetBinContent(2)>0:
rcsb=rcsh.GetBinContent(2)/rcsh.GetBinContent(1)
can2=ROOT.TCanvas('sub','sub',800,600)
histo.Draw()
latex2 = ROOT.TLatex()
latex2.SetNDC()
latex2.SetTextSize(0.035)
latex2.SetTextAlign(11) # align right
latex2.DrawLatex(0.7,0.96,str(rcsb))
latex2.DrawLatex(0.16,0.96,name)
can2.SetGrid()
can2.SetLogy()
can2.Print(path+varstring+subname+'_'+prefix+'notauRej.png')
can2.Print(path+varstring+subname+'_'+prefix+'notauRej.root')
can1.cd()
can1.SetGrid()
h_Stack.Draw()
totalh.Draw('same')
h_Stack.SetMinimum(0.08)
l.Draw()
#Calculation of RCS value, works only for cut at dPhi=1 atm
bins=1/(binning[2]/binning[0])
i=1+int(bins)
rcs=0
rcsn=0
while i <= binning[0]:
rcs=rcs+h_Stack.GetStack().Last().GetBinContent(i)
i=i+1
i=1
while i<= int(bins):
rcsn=rcsn+h_Stack.GetStack().Last().GetBinContent(i)
i=i+1
if rcsn>0:
rcs=rcs/rcsn
else:
rcs=float('nan')
print rcs
latex1 = ROOT.TLatex()
latex1.SetNDC()
latex1.SetTextSize(0.035)
latex1.SetTextAlign(11) # align right
latex1.DrawLatex(0.16,0.96,"Rcs="+str(rcs))
latex1.DrawLatex(0.72,0.96,"L=4 fb^{-1} (13TeV)")
can1.SetLogy()
can1.Print(path+varstring+'_'+prefix+'notauRej.png')
can1.Print(path+varstring+'_'+prefix+'notauRej.root')
| [
"[email protected]"
] | |
4265c5c776462df6a9e2ace5418932716eac8fc8 | acb8e84e3b9c987fcab341f799f41d5a5ec4d587 | /langs/0/a_l.py | 653cbd127ab6e82ce289684705f743c8316095a0 | [] | no_license | G4te-Keep3r/HowdyHackers | 46bfad63eafe5ac515da363e1c75fa6f4b9bca32 | fb6d391aaecb60ab5c4650d4ae2ddd599fd85db2 | refs/heads/master | 2020-08-01T12:08:10.782018 | 2016-11-13T20:45:50 | 2016-11-13T20:45:50 | 73,624,224 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 486 | py | import sys
def printFunction(lineRemaining):
if lineRemaining[0] == '"' and lineRemaining[-1] == '"':
if len(lineRemaining) > 2:
#data to print
lineRemaining = lineRemaining[1:-1]
print ' '.join(lineRemaining)
else:
print
def main(fileName):
with open(fileName) as f:
for line in f:
data = line.split()
if data[0] == 'a_L':
printFunction(data[1:])
else:
print 'ERROR'
return
if __name__ == '__main__':
main(sys.argv[1]) | [
"[email protected]"
] | |
26571dafb7f2105ef31a78259155c44e4a01ad01 | 22dcd52b6a07e82e8db9bf8b7ad38711d12f69a8 | /venv/Lib/site-packages/sklearn/neighbors/base.py | 9be7b1ffc90f2c7d40252bf586916429df32fafb | [] | no_license | MrGreenPepper/music_cluster | 9060d44db68ae5e085a4f2c78d36868645432d43 | af5383a7b9c68d04c16c1086cac6d2d54c3e580c | refs/heads/main | 2023-08-15T09:14:50.630105 | 2021-10-01T09:45:47 | 2021-10-01T09:45:47 | 412,407,002 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 489 | py |
# THIS FILE WAS AUTOMATICALLY GENERATED BY deprecated_modules.py
import sys
from . import _base
from ..externals._pep562 import Pep562
from ..utils.deprecation import _raise_dep_warning_if_not_pytest
deprecated_path = 'sklearn.neighbors.base'
correct_import_path = 'sklearn.neighbors'
_raise_dep_warning_if_not_pytest(deprecated_path, correct_import_path)
def __getattr__(name):
return getattr(_base, name)
if not sys.version_info >= (3, 7):
Pep562(__name__)
| [
"[email protected]"
] | |
eac8e55efa2b9ab7f320a562c98fa8c884b5e994 | 60ce73bf2f86940438e5b7fecaaccad086888dc5 | /working_scrapers/Virginia_amherst.py | dd21db5c4557762ea61e5ec1f6730d25b2bd6a00 | [] | no_license | matthewgomies/jailcrawl | 22baf5f0e6dc66fec1b1b362c26c8cd2469dcb0d | 9a9ca7e1328ae549860ebeea9b149a785f152f39 | refs/heads/master | 2023-02-16T06:39:42.107493 | 2021-01-15T16:37:57 | 2021-01-15T16:37:57 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,248 | py | #!/usr/bin/python
'''
This is an template script
'''
from urllib.request import urlopen, Request
import pandas as pd
import os
import time
import numpy as np
from datetime import datetime
import datetime as dt
import sys
from io import StringIO
from joblib import Parallel, delayed
import requests
from jailscrape.common import save_to_s3, get_browser, get_logger, record_error, save_pages_array
from jailscrape import crawlers
# jailscrape.common is a file that is part of the project which keeps
# most common boilerplate code out of this file
from selenium.webdriver.common.keys import Keys
import watchtower
from bs4 import BeautifulSoup
import re
import math
# NOTE: These are imports. They ideally don't change very often. It's OK
# to have a large, maximal set here and to bulk-edit files to add to
# these.
ROW_INDEX = 1015 # Change this for each scraper. This references the row
# of the main jailcrawl spreadsheet. This index will be used to look up
# the URL as well as state/county info
THIS_STATE = 'virginia' # Change the current state/county information.
THIS_COUNTY = 'amherst'
def main(roster_row):
try:
logger = get_logger(roster_row) # Get a standard logger
# Here are standard variable values/how to initialize them.
# These aren't initialized here since in the save_single_page
# case, they can be done in the called function
#browser = get_browser() # Get a standard browser
#urlAddress = roster_row['Working Link'] # Set the main URL from the spreadsheet
#page_index = 0 # Set an initial value of "page_index", which we will use to separate output pages
#logger.info('Set working link to _%s_', urlAddress) # Log the chosen URL
##########
# Begin core specific scraping code
if roster_row['State'].lower() != THIS_STATE or roster_row['County'].lower() != THIS_COUNTY:
raise Exception("Expected county definition info from _%s, %s_, but found info: _%s_" % (THIS_COUNTY, THIS_STATE, roster_row))
#crawlers.save_single_page(roster_row) # try to call a known crawler if possible
crawlers.basic_multipage(roster_row, next_type='text', next_string='>') # try to call a known crawler if possible
## Code to save a page and log appropriately
#save_to_s3(store_source, page_index, roster_row)
#logger.info('Saved page _%s_', page_index)
# End core specific scraping code
##########
#Close the browser
logger.info('complete!')
except Exception as errorMessage:
try:
browser.close()
record_error(message=str(errorMessage), roster_row=roster_row, browser=browser)
except:
record_error(message=str(errorMessage), roster_row=roster_row)
# Record error in S3 for a general error
logger.error('Error: %s', errorMessage)
# Log error
sys.exit(1)
if __name__ == "__main__":
#This will load in the current jail roster list
#Select the index of the roster this script is for:
#Write the name of the county and state
roster = pd.read_csv('/opt/jail_roster_final_rmDuplicates.csv',encoding = "utf-8")
main(roster[roster['index'] == ROW_INDEX].iloc[0])
| [
"[email protected]"
] | |
93431b4260ae9bcc50dc2babafb602fe5f3a56f8 | f3598888ce889075d006de9559aa67499ca0d708 | /Common/CenterToLeft.py | d1cf789f60c0bb67849262f0612c7c308bf8032d | [] | no_license | JinYanming/jym_cmot_semi_mask | 6f1ceafa344d2831cdc91e1af0515b417b3939d6 | be5fc9694f802ab0fb2eaeb11c7eca10ee0e72b3 | refs/heads/master | 2022-02-20T05:56:36.418283 | 2019-09-18T18:23:40 | 2019-09-18T18:23:40 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 722 | py | # Generated with SMOP 0.41
from libsmop import *
# /workspace/MOT/cmot-v1/Common/CenterToLeft.m
@function
def CenterToLeft(x=None,y=None,height=None,width=None,*args,**kwargs):
varargin = CenterToLeft.varargin
nargin = CenterToLeft.nargin
## Copyright (C) 2014 Seung-Hwan Bae
## All rights reserved.
# (x,y): Center position
h_height=height / 2
# /workspace/MOT/cmot-v1/Common/CenterToLeft.m:7
h_width=width / 2
# /workspace/MOT/cmot-v1/Common/CenterToLeft.m:8
L_x=x - round(h_width)
# /workspace/MOT/cmot-v1/Common/CenterToLeft.m:10
L_y=y - round(h_height)
# /workspace/MOT/cmot-v1/Common/CenterToLeft.m:11
return L_x,L_y
if __name__ == '__main__':
pass
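# Illustrative usage (example values, not from the original MATLAB source):
# a box centred at (x, y) = (50, 40) with height 20 and width 10 gives
# CenterToLeft(50, 40, 20, 10) -> (45, 30), i.e. L_x = 50 - 5, L_y = 40 - 10.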
| [
"[email protected]"
] | |
fa8d5992af22569fce3cc34d5d811404061b7321 | fa1002dd32e2332396d0b359094050825e42c343 | /emotion-program-pi-version.py | 0cea550ccba505b69dfebe8c3071874a0e5092a7 | [] | no_license | vanstorm9/Emotion-Recognition-DOF | ced912158e45636b53469b3dc0645bb4c5ab69f8 | 0d6e395bf950388a37065cb9ccf1bba44171c35f | refs/heads/master | 2021-01-10T02:14:11.917489 | 2018-05-26T04:17:40 | 2018-05-26T04:17:40 | 45,325,932 | 28 | 15 | null | null | null | null | UTF-8 | Python | false | false | 12,001 | py | # Program implemented in the Raspberry Pi (with camera module)
#from matplotlib import pyplot as plt
#from sklearn.naive_bayes import GaussianNB
import numpy as np
import math
import cv2
import os
import os.path
import io
from time import time
import picamera
import smtplib
#camera = picamera.PiCamera()
from time import sleep
#import pyttsx
# Libraries to preform machine learning
import sys
from sklearn.svm import SVC
from sklearn.metrics import accuracy_score,accuracy_score, confusion_matrix
from sklearn.decomposition import PCA, RandomizedPCA
# from mpl_toolkits.mplot3d import Axes3D
from sklearn.externals import joblib
from sklearn import cross_validation
from sklearn.linear_model import Ridge
from sklearn.learning_curve import validation_curve, learning_curve
from sklearn.externals import joblib
def emotion_to_text(pred):
smtpUser= "(ENTER YOUR EMAIL ADDRESS)"
smtpPass= "(ENTER YOUR EMAIL ACCOUNT'S PASSWORD)"
toAdd = "[email protected]"
fromAdd = smtpUser
if pred == "Neutral":
subject = "How are you doing?"
body = "Hey! Just checking in, I was just wondering how you are doing today. \n \n - Rapiro"
elif pred == "Angry":
subject = "Are you okay? You look mad"
body = "I noticed that you are a bit red. Did something annoy or aggrivate you? /n -Rapiro"
elif pred == "Shocked":
subject = "Did something scare or surprised you?"
body = "What's wrong, you look like you have seen a ghost. . . \n Rapiro"
else:
subject = "You seem happy today"
body = "Hey there! I am very happy that you are happy ^_^ \n \n -Rapiro"
header = "To: " + toAdd + "\n" + "From: " + fromAdd + "\n" + "Subject: " + subject
#print header + "\n" + body
s = smtplib.SMTP('smtp.gmail.com', 587)
s.ehlo()
s.starttls()
s.ehlo()
s.login(smtpUser, smtpPass)
s.sendmail(fromAdd, toAdd, header + "\n" + body)
s.quit()
# Cannot use due to memory error
def pca_calc(main):
n_components = 90000
print '----------------------'
print main.shape
pca = RandomizedPCA(n_components=n_components, whiten=True).fit(main)
main = pca.transform(main)
print main.shape
return main
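# Likely cause of the memory error noted above: with n_components=90000,
# RandomizedPCA has to build and store a 90000 x n_features components matrix,
# which is huge for flattened optical-flow features; a much smaller
# n_components (or IncrementalPCA) is the usual workaround.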
def motor_emotion_response(pred):
if pred == 'Smiling':
print 'Activating command. . .'
os.system("./rapirocommands 6")
sleep(5)
os.system("./rapirocommands 0")
print 'Command finished'
elif pred == 'Neutral':
print 'Activating neutral command. . .'
os.system("./hellobash")
sleep(5)
os.system("./rapirocommands 5")
sleep(5)
os.system("./rapirocommands 0")
print 'End command'
elif pred == 'Angry':
print 'Activating angry command. . .'
os.system("./rapirocommands 4")
sleep(2)
os.system("./rapirocommands 0")
print 'Command ended'
elif pred == 'Shocked':
print 'Activating shocked command'
os.system("./rapiro-commands 2")
sleep(2)
os.system("./rapiro-commands 0")
print 'Command ended'
def draw_flow(im,flow,step=16):
h,w = im.shape[:2]
y,x = np.mgrid[step/2:h:step,step/2:w:step].reshape(2,-1)
fx,fy = flow[y,x].T
# create line endpoints
lines = np.vstack([x,y,x+fx,y+fy]).T.reshape(-1,2,2)
lines = np.int32(lines)
# create image and draw
vis = cv2.cvtColor(im,cv2.COLOR_GRAY2BGR)
for (x1,y1),(x2,y2) in lines:
cv2.line(vis,(x1,y1),(x2,y2),(0,255,0),1)
cv2.circle(vis,(x1,y1),1,(0,255,0), -1)
return vis
def catch_first_frame():
ret, frame_f = capf.read()
prev_gray = cv2.cvtColor(frame_f,cv2.COLOR_BGR2GRAY)
prev_gray = cv2.resize(prev_gray, (0,0), fx=0.27, fy=0.27)
face = face_classifier.detectMultiScale(prev_gray, 1.2, 4)
if len(face) == 0:
print 'No face was detected'
print prev_gray.shape
exit()
else:
print 'Face detected'
for (x,y,w,h) in face:
prev_gray = prev_gray[y: y+h, x: x+w]
capf.release()
return (x,y,w,h, prev_gray)
def sensitive_override_check(prob_s, pred):
if pred == 'Neutral':
override_arr = [prob_s[0,3], prob_s[0,2], prob_s[0,0]]
max_comp = max(override_arr)
max_ind = [i for i, j in enumerate(override_arr) if j == max_comp][0]
qualified_override = False
if max_comp > 30:
qualified_override = True
if qualified_override:
if max_ind == 0:
pred = 'Smiling'
elif max_ind == 1:
pred = 'Shocked'
else:
pred = 'Angry'
#print 'Sensitive Override triggered. . .'
return pred
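# Worked example (probabilities are illustrative only): with pred='Neutral' and
# prob_s[0] = [40, 35, 15, 10] for (Angry, Neutral, Shocked, Smiling),
# override_arr = [10, 15, 40]; its maximum 40 > 30, so the prediction is
# overridden from 'Neutral' to 'Angry'.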
def emotion_to_speech(pred):
engine = pyttsx.init()
rate = engine.getProperty('rate')
engine.setProperty('rate', rate)
if pred == 'Neutral':
speech = 'Hello, you seem fine today'
elif pred == 'Smiling':
speech = 'You seem happy. I am very happy that you are happy!'
elif pred == 'Shocked':
speech = 'What is wrong? You look like you have seen a ghost.'
elif pred == 'Angry':
speech = 'Why are you angry? Did something annoy or frustrate you?'
print speech
engine.say(speech)
engine.runAndWait()
motor_emotion_response("Smiling")
slash = '/'
folder_trans = np.array([])
target = np.array([])
label_trans = np.array([])
folder = ''
choice = ''
face_classifier = cv2.CascadeClassifier('haarcascades/haarcascade_frontalface_default.xml')
#print 'Load datasets [l] from file or create a new one [n]'
loading = 'l'
if loading == 'l':
#print 'Press [p] to predict test dataset, or else press any key'
predict_start = 'n'
else:
predict_start = 'p'
if loading=='l':
# load dataset matrix from npy file
t0 = time()
t1 = time()
if predict_start == 'p':
print 'Loading the main matrix. . .'
main = np.load('optical-main-mini.npy')
diff = diff = time() - t1
print 'Loaded main matrix in ', diff, 's of size ', main.shape
t2 = time()
print 'Loading the target vector. . .'
target = np.load('optical-target-mini.npy')
diff = time() - t2
print 'Loaded target in ', diff, 's of size ', target.shape
print 'Finished'
total_time = time() - t0
print total_time, 's'
t0 = time()
if loading == 'l':
print 'Now loading trained model. . .'
model = joblib.load('Optical-Model-Mini/optical-model-mini.pkl')
t1 = time()
print 'Loading time: ', round(time()-t0, 3), 's'
else:
features_train, features_test, labels_train, labels_test = cross_validation.train_test_split(main, target, test_size = 0.2)
print 'Now training. . .'
model = SVC(probability=True)
'''
#model = SVC(kernel='poly')
#model = GaussianNB()
'''
model.fit(features_train, labels_train)
print 'training time: ', round(time()-t0, 3), 's'
print 'Saving model. . .'
t1 = time()
joblib.dump(model, 'Optical-Model-Mini/optical-model-mini.pkl')
t3 = time()
print 'model saving time: ', round(time()-t0, 3), 's'
print 'Now predicting. . .'
if predict_start == 'p':
if loading == 'l':
features_train, features_test, labels_train, labels_test = cross_validation.train_test_split(main, target, test_size = 0.2)
# Determine amount of time to train
t1 = time()
pred = model.predict(features_test)
print 'predicting time: ', round(time()-t1, 3), 's'
accuracy = accuracy_score(labels_test, pred)
print 'Confusion Matrix: '
print confusion_matrix(labels_test, pred)
# Accuracy in the 0.9333, 9.6667, 1.0 range
print accuracy
# ---------------------------------
while True:
# Test with another video
while True:
print 'Press [n] to go into normal mode or [s] to go into sensitive mode'
sensitive_out = raw_input()
if sensitive_out == 'n' or sensitive_out == 's':
break
# Manually setting x, y, w, h values in order make more consistant test
# and training videos
x = 63
y = 35
w = 64
h = 64
#prev_gray = frame_f.copy()
#prev_gray = cv2.cvtColor(prev_gray, cv2.COLOR_BGR2GRAY)
# Start video to record the user
#cap to record user for 15 frames
cap = cv2.VideoCapture(0)
# Name of the video file
path = 'test.h264'
# Starting video
fourcc = cv2.VideoWriter_fourcc(*'XVID')
out = cv2.VideoWriter(path,fourcc, 20.0, (640,480))
print 'Press any key to start recording'
go = raw_input()
# New recording feature for the Raspberry Pi
with picamera.PiCamera() as camera:
print 'Starting recording. . .'
camera.vflip = True
camera.start_recording(path)
print 'Before sleep'
sleep(5)
print 'After sleep'
print 'Stopping the camera from recording. . .'
camera.stop_recording()
print 'Finished recording'
# To get a
# Cap3
cap3 = cv2.VideoCapture(path)
ret, prev_gray = cap3.read()
prev_gray = cv2.cvtColor(prev_gray,cv2.COLOR_BGR2GRAY)
prev_gray = cv2.resize(prev_gray, (0,0), fx=0.27, fy=0.27)
prev_gray = prev_gray[y: y+h, x: x+w]
cap3.release()
#face = face_classifier.detectMultiScale(prev_gray, 1.2, 4)
j = 0
# To analyze the recording and make an emotion prediction
cap4 = cv2.VideoCapture(path)
max_frame = 36
while True:
print 'j: ', j
ret, frame = cap4.read()
if frame is None:
print 'Frame failure, trying again. . .'
cap4.release()
cap4 = cv2.VideoCapture(path)
continue
if j > max_frame + 1:
cap4.release()
break
frame = cv2.resize(frame, (0,0), fx=0.35, fy=0.35)
frame = frame[y: y+h, x: x+w]
#cv2.imshow('To test with', frame)
gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
flow = cv2.calcOpticalFlowFarneback(prev_gray,gray,None, 0.5, 3, 15, 3, 5, 1.2, 0)
# Working with the flow matrix
flow_mat = flow.flatten()
if j == 1:
sub_main = flow_mat
elif j != 0:
sub_main = np.concatenate((sub_main, flow_mat))
prev_gray = gray
# To show us visually each video
#cv2.imshow('Optical flow',draw_flow(gray,flow))
if cv2.waitKey(1) & 0xFF == ord('q'):
break
j = j + 1
cap4.release()
#cv2.destroyAllWindows()
print 'Now predicting. . .'
### Sliding window ###
k_start = 0
k_end = 15 * flow_mat.shape[0]
max_frame = 36 * flow_mat.shape[0]
while k_end < max_frame:
count = float(k_end)/max_frame
count = np.around(count, decimals=2)
print count, '%'
model.predict(sub_main[k_start:k_end])
prob = model.predict_proba(sub_main[k_start:k_end])
prob_s = np.around(prob, decimals=5)
prob_s = prob_s* 100
# Determine amount of time to predict
t1 = time()
pred = model.predict(sub_main[k_start:k_end])
if sensitive_out == 's':
pred = sensitive_override_check(prob_s, pred)
if pred != 'Neutral':
break
k_start = k_start + (7 * flow_mat.shape[0])
k_end = k_end + (7 * flow_mat.shape[0])
######################
print 'predicting time: ', round(time()-t1, 3), 's'
print ''
print 'Prediction: '
print pred
print 'Probability: '
print 'Neutral: ', prob_s[0,1]
print 'Smiling: ', prob_s[0,3]
print 'Shocked: ', prob_s[0,2]
print 'Angry: ', prob_s[0,0]
print 'Start hello 2'
os.system("./hellobash")
print 'End hello 2'
emotion_to_text(pred)
print 'Starting robot motion response'
motor_emotion_response(pred)
print 'Motion ended'
| [
"[email protected]"
] | |
a2f9e589693f4eda5cea8869d53759b116acfc76 | b0e299f6ab0139b831d0ed86cc6da0c3eb80b50d | /hello/public/deploy/chal.py | 3cdcfdcec0ecf9a88f3f75665084382c0d2855d2 | [] | no_license | kendricktan/paradigm-ctf | 96768eb6a3ee76867b873e96e2f623796803361c | 21ba8273f858d1af24d0abdb841bb019e8fa0965 | refs/heads/main | 2023-06-26T07:50:39.179665 | 2021-07-31T06:27:11 | 2021-07-31T06:27:11 | 387,947,845 | 5 | 0 | null | null | null | null | UTF-8 | Python | false | false | 191 | py | import eth_sandbox
from web3 import Web3
eth_sandbox.run_launcher([
eth_sandbox.new_launch_instance_action(deploy_value=Web3.toWei(0, 'ether')),
eth_sandbox.new_get_flag_action()
])
| [
"[email protected]"
] | |
781ffa6094e1e065a1662ff414e97c2d8e72f5f6 | 9743d5fd24822f79c156ad112229e25adb9ed6f6 | /xai/brain/wordbase/nouns/_clambake.py | a60e6c1d2e25f9b27e08fb830685e405c05f668b | [
"MIT"
] | permissive | cash2one/xai | de7adad1758f50dd6786bf0111e71a903f039b64 | e76f12c9f4dcf3ac1c7c08b0cc8844c0b0a104b6 | refs/heads/master | 2021-01-19T12:33:54.964379 | 2017-01-28T02:00:50 | 2017-01-28T02:00:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 359 | py |
#calss header
class _CLAMBAKE():
def __init__(self,):
self.name = "CLAMBAKE"
self.definitions = [u'an event in which seafood is cooked and eaten outside, usually near the sea']
self.parents = []
self.childen = []
self.properties = []
self.jsondata = {}
self.specie = 'nouns'
def run(self, obj1 = [], obj2 = []):
return self.jsondata
| [
"[email protected]"
] | |
c9a9d2dda80846c0c2c7b067316cfabaf6aed24b | 321b4ed83b6874eeb512027eaa0b17b0daf3c289 | /1266/1266.minimum-time-visiting-all-points.289659185.Accepted.leetcode.python3.py | 76eb95f002965918c8ee1bffff4858d8a5a97364 | [] | no_license | huangyingw/submissions | 7a610613bdb03f1223cdec5f6ccc4391149ca618 | bfac1238ecef8b03e54842b852f6fec111abedfa | refs/heads/master | 2023-07-25T09:56:46.814504 | 2023-07-16T07:38:36 | 2023-07-16T07:38:36 | 143,352,065 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 277 | py | class Solution(object):
def minTimeToVisitAllPoints(self, points):
x1, y1 = points[0]
time = 0
for x2, y2 in points[1:]:
dx, dy = abs(x1 - x2), abs(y1 - y2)
time += max(dx, dy)
x1, y1 = x2, y2
return time
| [
"[email protected]"
] | |
f03251f2e2c93487fb9538d28c53e60da6493523 | 772f8f0a197b736cba22627485ccbdb65ed45e4b | /day09/mygui3.py | 85903ce1a15afbb06fa75763d482edd7e38d2f79 | [] | no_license | zhpg/python1805 | ddc69cd1b3bda8bef1cb0c2913d456ea2c29a391 | 3d98c8ebc106fd0aab633a4c99ae6591013e4438 | refs/heads/master | 2020-03-26T11:26:59.378511 | 2018-08-05T09:25:21 | 2018-08-05T09:25:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 610 | py | #!/usr/bin/env python3
# -*- coding:utf-8 -*-
import tkinter
from functools import partial
def say_hi(world):
def welcome():
label.config(text='Hello %s' % world)
return welcome
root = tkinter.Tk()
label = tkinter.Label(root, text='Hello world', font="15px")
b1 = tkinter.Button(root, bg='red', fg='white', text='button1', command=say_hi('sss'))
MyButton = partial(tkinter.Button, root, bg='red', fg='white')  # partial function: pre-fills the common Button options
b2 = MyButton(text='button2', command=say_hi('chine'))
b3 = MyButton(text='quit', command=root.quit)  # pass the method itself; root.quit() here would run immediately
label.pack()  # pack() lays the widget out in the window
b1.pack()
b2.pack()
b3.pack()
root.mainloop()  # start the Tk event loop; blocks until the window is closed
| [
"[email protected]"
] | |
62c0360071a15ade3e6a6b3f38a577416759847b | 7160e632d88bf49492616f8152c91cb9f1d40d8d | /testcases/statistical_form2/test_case_166_statistical_form_alarm_detail.py | 53c29adc336cc3d9a149c60941a9e7a5f1d2954e | [] | no_license | huangqiming123/tuqiangol_test1 | ad5ddf22ce61b5b6daad55f684be5da160a64e59 | 75722812260590480320910c4ad6f6c1251a2def | refs/heads/master | 2021-03-30T23:29:08.478494 | 2018-03-12T03:45:11 | 2018-03-12T03:45:11 | 124,832,890 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,802 | py | import unittest
from time import sleep
from automate_driver.automate_driver import AutomateDriver
from pages.alarm_info.alarm_info_page import AlarmInfoPage
from pages.base.base_page import BasePage
from pages.base.lon_in_base import LogInBase
from pages.statistical_form.statistical_form_page import StatisticalFormPage
from pages.statistical_form.statistical_form_page_read_csv import StatisticalFormPageReadCsv
class TestCase166StatisticalFormAlarmDetail(unittest.TestCase):
def setUp(self):
# 前置条件
# 实例化对象
self.driver = AutomateDriver()
self.base_url = self.driver.base_url
self.base_page = BasePage(self.driver, self.base_url)
self.alarm_info_page = AlarmInfoPage(self.driver, self.base_url)
self.statistical_form_page_read_csv = StatisticalFormPageReadCsv()
self.log_in_base = LogInBase(self.driver, self.base_url)
self.statistical_form_page = StatisticalFormPage(self.driver, self.base_url)
# 打开页面,填写用户名、密码、点击登录
self.base_page.open_page()
self.driver.set_window_max()
self.driver.implicitly_wait(5)
self.log_in_base.log_in_jimitest()
# 登录之后点击控制台,然后点击指令管理
self.statistical_form_page.click_control_after_click_statistical_form_page()
sleep(3)
def tearDown(self):
self.driver.quit_browser()
def test_case_statistical_form_alarm_detail(self):
# 断言url
expect_url = self.base_url + '/deviceReport/statisticalReport'
self.assertEqual(expect_url, self.alarm_info_page.actual_url_click_alarm())
# 点击告警详情
self.alarm_info_page.click_alarm_detail_list()
for n in range(5):
self.statistical_form_page.click_customer_in_alarm_detail_form(n)
# 点击搜索设备按钮
self.statistical_form_page.click_search_dev_button_in_alarm_detail()
# 获取有多少组
number = self.statistical_form_page.get_group_number_in_alarm_detail_form()
if number == 0:
pass
else:
for m in range(number):
# 收起默认组
self.statistical_form_page.click_defalut_group_in_alarm_detail_form()
# 获取每个组设备的数量
dev_number = self.statistical_form_page.get_dev_number_in_alarm_detail_form(m)
# 点开每一个分组
self.statistical_form_page.click_per_group_in_alarm_detail_form(m)
dev_number_list = self.statistical_form_page.get_dev_number_list_in_alarm_detail_form(m)
self.assertEqual(str(dev_number_list), dev_number)
| [
"[email protected]"
] | |
d8c6eb7e638620f0db30fcee4607c3f27da7d23c | 501e9924cb19e95c32e2d168e73ea44e7c9c440c | /readfiles.py | 9c0300bb83848b8231570bcef6216b1d447617f6 | [] | no_license | Miguelmargar/file-io | cc2790b109187dbeec87788c662aaf52d8e96c02 | f1c6f6ccfefbc572cac83a6ddc21ba2e902ac0c1 | refs/heads/master | 2020-03-17T17:23:49.493484 | 2018-05-21T12:22:23 | 2018-05-21T12:22:23 | 133,786,461 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,607 | py | #to read a file's data
# ONE WAY -----------------------------------------
f = open("data.txt", "r") # opens file to read it with "r"
lines = f.readlines() # stores the file info in lines variable therefore in memory
f.close() # closes the file but it is still stored in memory
print(lines)
# OTHER WAY ---------------------------------------
f = open("data.txt", "r") #opens file
lines = f.read().split("\n") #stores file in a variable; .split("\n") gives a list with one entry per line - without it, everything would be one string
f.close() #closes file but it is still stored in memory
print(lines)
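# A THIRD WAY (not in the original notes; uses a context manager) ----
# the with-block closes the file automatically, even if an error occurs
with open("data.txt", "r") as f:
    lines = f.read().split("\n")
print(lines)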
# find most common word in a text file-------------
import re # imports regular expresions from a python library which is native to python - another library would be "random"
import collections
text = open("1155-0.txt").read().lower() # opens the file in question .read() reads it and .lower() makes it all lower case and converts it into a string in a variable called text
words = re.findall("\w+", text) # this line converts the string into a list of words - "\w+" matches every word in text
long_words = []
for word in words: # this loop takes the words that are bigger than 5 characters
if len(word) > 5:
long_words.append(word)
most_common = collections.Counter(long_words).most_common(10) # this prints out the top 10 words from the list created by the loop above
print(most_common) | [
"[email protected]"
] | |
73782f3ba66ecf7f99c21522cdbbf9118fadd0e6 | 32e2ba212d39e022bea40f12cdd6b3c138a62ac0 | /mizani/tests/test_breaks.py | b1e61d60f512ce503f985284c50ce6a24b8c473b | [
"BSD-3-Clause"
] | permissive | vals/mizani | 148dd985d25796c25346a3fac106c1c5c7f40d05 | 6b288fe6061e36add001cc5f8ffb147154e7ca62 | refs/heads/master | 2020-09-11T09:59:07.672839 | 2017-06-16T08:03:30 | 2017-06-16T08:03:30 | 94,454,967 | 1 | 0 | null | 2017-06-15T15:47:21 | 2017-06-15T15:47:21 | null | UTF-8 | Python | false | false | 5,432 | py | from __future__ import division
from datetime import datetime, timedelta
import pandas as pd
import numpy as np
import numpy.testing as npt
import pytest
from mizani.breaks import (mpl_breaks, log_breaks, minor_breaks,
trans_minor_breaks, date_breaks,
timedelta_breaks, extended_breaks)
from mizani.transforms import trans
def test_mpl_breaks():
x = np.arange(100)
limits = min(x), max(x)
for nbins in (5, 7, 10, 13, 31):
breaks = mpl_breaks(nbins=nbins)
assert len(breaks(limits)) <= nbins+1
limits = float('-inf'), float('inf')
breaks = mpl_breaks(n=5)
assert len(breaks(limits)) == 0
# Zero range discrete
limits = [1, 1]
assert len(breaks(limits)) == 1
assert breaks(limits)[0] == limits[1]
# Zero range continuous
limits = [np.pi, np.pi]
assert len(breaks(limits)) == 1
assert breaks(limits)[0] == limits[1]
def test_log_breaks():
x = [2, 20, 2000]
limits = min(x), max(x)
breaks = log_breaks()(limits)
npt.assert_array_equal(breaks, [1, 10, 100, 1000, 10000])
breaks = log_breaks(3)(limits)
npt.assert_array_equal(breaks, [1, 100, 10000])
breaks = log_breaks()((10000, 10000))
npt.assert_array_equal(breaks, [10000])
breaks = log_breaks()((float('-inf'), float('inf')))
assert len(breaks) == 0
def test_minor_breaks():
# equidistant breaks
major = [1, 2, 3, 4]
limits = [0, 5]
breaks = minor_breaks()(major, limits)
npt.assert_array_equal(breaks, [.5, 1.5, 2.5, 3.5, 4.5])
minor = minor_breaks(3)(major, [2, 3])
npt.assert_array_equal(minor, [2.25, 2.5, 2.75])
# non-equidistant breaks
major = [1, 2, 4, 8]
limits = [0, 10]
minor = minor_breaks()(major, limits)
npt.assert_array_equal(minor, [1.5, 3, 6])
# single major break
minor = minor_breaks()([2], limits)
assert len(minor) == 0
def test_trans_minor_breaks():
class identity_trans(trans):
minor_breaks = trans_minor_breaks()
class square_trans(trans):
transform = staticmethod(np.square)
inverse = staticmethod(np.sqrt)
minor_breaks = trans_minor_breaks()
class weird_trans(trans):
dataspace_is_numerical = False
minor_breaks = trans_minor_breaks()
major = [1, 2, 3, 4]
limits = [0, 5]
regular_minors = trans.minor_breaks(major, limits)
npt.assert_allclose(
regular_minors,
identity_trans.minor_breaks(major, limits))
# Transform the input major breaks and check against
# the inverse of the output minor breaks
squared_input_minors = square_trans.minor_breaks(
np.square(major), np.square(limits))
npt.assert_allclose(regular_minors,
np.sqrt(squared_input_minors))
t = weird_trans()
with pytest.raises(TypeError):
t.minor_breaks(major)
def test_date_breaks():
# cpython
x = [datetime(year, 1, 1) for year in [2010, 2026, 2015]]
limits = min(x), max(x)
breaks = date_breaks('5 Years')
years = [d.year for d in breaks(limits)]
npt.assert_array_equal(
years, [2010, 2015, 2020, 2025, 2030])
breaks = date_breaks('10 Years')
years = [d.year for d in breaks(limits)]
npt.assert_array_equal(years, [2010, 2020, 2030])
# numpy
x = [np.datetime64(i*10, 'D') for i in range(1, 10)]
breaks = date_breaks('10 Years')
limits = min(x), max(x)
with pytest.raises(AttributeError):
breaks(limits)
# NaT
limits = np.datetime64('NaT'), datetime(2017, 1, 1)
breaks = date_breaks('10 Years')
assert len(breaks(limits)) == 0
def test_timedelta_breaks():
breaks = timedelta_breaks()
# cpython
x = [timedelta(days=i*365) for i in range(25)]
limits = min(x), max(x)
major = breaks(limits)
years = [val.total_seconds()/(365*24*60*60)for val in major]
npt.assert_array_equal(
years, [0, 5, 10, 15, 20, 25])
x = [timedelta(microseconds=i) for i in range(25)]
limits = min(x), max(x)
major = breaks(limits)
mseconds = [val.total_seconds()*10**6 for val in major]
npt.assert_array_equal(
mseconds, [0, 5, 10, 15, 20, 25])
# pandas
x = [pd.Timedelta(seconds=i*60) for i in range(10)]
limits = min(x), max(x)
major = breaks(limits)
minutes = [val.total_seconds()/60 for val in major]
npt.assert_allclose(
minutes, [0, 2, 4, 6, 8])
# numpy
x = [np.timedelta64(i*10, unit='D') for i in range(1, 10)]
limits = min(x), max(x)
with pytest.raises(ValueError):
breaks(limits)
# NaT
limits = pd.NaT, pd.Timedelta(seconds=9*60)
assert len(breaks(limits)) == 0
def test_extended_breaks():
x = np.arange(100)
limits = min(x), max(x)
for n in (5, 7, 10, 13, 31):
breaks = extended_breaks(n=n)
assert len(breaks(limits)) <= n+1
# Reverse limits
breaks = extended_breaks(n=7)
npt.assert_array_equal(breaks((0, 6)), breaks((6, 0)))
# Infinite limits
limits = float('-inf'), float('inf')
breaks = extended_breaks(n=5)
assert len(breaks(limits)) == 0
# Zero range discrete
limits = [1, 1]
assert len(breaks(limits)) == 1
assert breaks(limits)[0] == limits[1]
# Zero range continuous
limits = [np.pi, np.pi]
assert len(breaks(limits)) == 1
assert breaks(limits)[0] == limits[1]
| [
"[email protected]"
] | |
9361aa700700b980f8e9441d56dee3d2730b8b05 | 8c09764a1258696c51ef9ef9c1c3bdc1b931b00d | /pylons__shootout/shootout/tests/test_views.py | a2226095f946dd0ef87a8ab37d4b07a6f4f23cf8 | [] | no_license | mindreframer/python-pyramid-stuff | 2178c4922adca15b7905bb0916c8f035ca495564 | 4b9034bdde63ac8dd799ae2050506edd164a96b7 | refs/heads/master | 2021-01-18T16:09:52.835796 | 2013-04-09T20:09:44 | 2013-04-09T20:09:44 | 9,325,528 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 14,887 | py | import unittest
from pyramid import testing
def init_db():
from shootout.models import DBSession
from shootout.models import Base
from sqlalchemy import create_engine
engine = create_engine('sqlite://')
DBSession.configure(bind=engine)
Base.metadata.bind = engine
Base.metadata.create_all(engine)
session = DBSession()
return session
def register_templates(config):
config.testing_add_renderer('templates/login.pt')
config.testing_add_renderer('templates/toolbar.pt')
config.testing_add_renderer('templates/cloud.pt')
config.testing_add_renderer('templates/latest.pt')
class ViewTests(unittest.TestCase):
def setUp(self):
self.session = init_db()
self.config = testing.setUp()
def tearDown(self):
import transaction
from shootout.models import DBSession
transaction.abort()
DBSession.remove()
testing.tearDown()
def _addUser(self, username=u'username'):
from shootout.models import User
user = User(username=username, password=u'password', name=u'name',
email=u'email')
self.session.add(user)
self.session.flush()
return user
def _addIdea(self, target=None, user=None):
from shootout.models import Idea
if not user:
user = self._addUser()
idea = Idea(target=target, author=user, title=u'title',
text=u'text')
self.session.add(idea)
self.session.flush()
return idea
def test_main_view(self):
from shootout.views import main_view
self.config.testing_securitypolicy(u'username')
self.config.include(register_templates)
request = testing.DummyRequest()
result = main_view(request)
self.assertEqual(result['username'], u'username')
self.assertEqual(len(result['toplists']), 4)
def test_idea_add_nosubmit_idea(self):
from shootout.views import idea_add
self.config.testing_securitypolicy(u'username')
self.config.include(register_templates)
request = testing.DummyRequest()
result = idea_add(request)
self.assertEqual(result['target'], None)
self.assertEqual(result['kind'], 'idea')
def test_idea_add_nosubmit_comment(self):
from shootout.views import idea_add
self.config.testing_securitypolicy(u'username')
self.config.include(register_templates)
idea = self._addIdea()
request = testing.DummyRequest(params={'target': idea.idea_id})
result = idea_add(request)
self.assertEqual(result['target'], idea)
self.assertEqual(result['kind'], 'comment')
def test_idea_add_not_existing_target(self):
from shootout.views import idea_add
self.config.testing_securitypolicy(u'username')
self.config.include(register_templates)
request = testing.DummyRequest(params={'target': 100})
result = idea_add(request)
self.assertEqual(result.code, 404)
def test_idea_add_submit_schema_fail_empty_params(self):
from shootout.views import idea_add
self.config.testing_securitypolicy(u'username')
self.config.include(register_templates)
self.config.include('shootout.addroutes')
request = testing.DummyRequest(post={'form.submitted': 'Shoot'})
result = idea_add(request)
self.assertEqual(
result['form'].form.errors,
{
'text': u'Missing value',
'tags': u'Missing value',
'title': u'Missing value'
}
)
def test_idea_add_submit_schema_succeed(self):
from shootout.views import idea_add
from shootout.models import Idea
self.config.testing_securitypolicy(u'username')
self.config.include('shootout.addroutes')
request = testing.DummyRequest(
post={
'form.submitted': u'Shoot',
'tags': u'abc def, bar',
'text': u'My idea is cool',
'title': u'My idea',
}
)
user = self._addUser(u'username')
result = idea_add(request)
self.assertEqual(result.location, 'http://example.com/ideas/1')
ideas = self.session.query(Idea).all()
self.assertEqual(len(ideas), 1)
idea = ideas[0]
self.assertEqual(idea.idea_id, 1)
self.assertEqual(idea.text, u'My idea is cool')
self.assertEqual(idea.title, u'My idea')
self.assertEqual(idea.author, user)
self.assertEqual(len(idea.tags), 3)
self.assertEqual(idea.tags[0].name, u'abc')
self.assertEqual(idea.tags[1].name, u'bar')
self.assertEqual(idea.tags[2].name, u'def')
def test_comment_add_submit_schema_succeed(self):
from shootout.views import idea_add
from shootout.models import Idea
idea = self._addIdea()
self.config.testing_securitypolicy(u'commentator')
self.config.include('shootout.addroutes')
request = testing.DummyRequest(
params={
'form.submitted': u'Shoot',
'tags': u'abc def, bar',
'text': u'My comment is cool',
'title': u'My comment',
'target': unicode(idea.idea_id),
}
)
request.method = 'POST'
user = self._addUser(u'commentator')
result = idea_add(request)
self.assertEqual(result.location, 'http://example.com/ideas/2')
ideas = self.session.query(Idea).all()
self.assertEqual(len(ideas), 2)
comment = ideas[1]
self.assertEqual(comment.idea_id, 2)
self.assertEqual(comment.target_id, 1)
self.assertEqual(comment.text, u'My comment is cool')
self.assertEqual(comment.title, u'My comment')
self.assertEqual(comment.author, user)
self.assertEqual(len(comment.tags), 3)
self.assertEqual(comment.tags[0].name, u'abc')
self.assertEqual(comment.tags[1].name, u'bar')
self.assertEqual(comment.tags[2].name, u'def')
def test_vote_on_own_idea(self):
from shootout.views import idea_vote
from shootout.models import User
self.config.include('shootout.addroutes')
idea = self._addIdea()
self.session.query(User).one()
self.assertEqual(idea.user_voted(u'username'), False)
self.config.testing_securitypolicy(u'username')
post_data = {
'form.vote_hit': u'Hit',
'target': 1,
}
request = testing.DummyRequest(post=post_data)
idea_vote(request)
self.assertEqual(idea.hits, 0)
self.assertEqual(idea.misses, 0)
self.assertEqual(idea.hit_percentage, 0)
self.assertEqual(idea.total_votes, 0)
self.assertEqual(idea.vote_differential, 0)
self.assertEqual(idea.author.hits, 0)
self.assertEqual(len(idea.voted_users.all()), 0)
self.assertEqual(idea.user_voted(u'username'), False)
def test_positive_idea_voting(self):
from shootout.views import idea_vote
self.config.include('shootout.addroutes')
user = self._addUser()
idea = self._addIdea(user=user)
voter = self._addUser(u'votername')
self.assertEqual(idea.user_voted(u'votername'), False)
self.config.testing_securitypolicy(u'votername')
post_data = {
'form.vote_hit': u'Hit',
'target': 1,
}
request = testing.DummyRequest(post=post_data)
idea_vote(request)
self.assertEqual(idea.hits, 1)
self.assertEqual(idea.misses, 0)
self.assertEqual(idea.hit_percentage, 100)
self.assertEqual(idea.total_votes, 1)
self.assertEqual(idea.vote_differential, 1)
self.assertEqual(idea.author.hits, 1)
self.assertEqual(len(idea.voted_users.all()), 1)
self.assertEqual(idea.voted_users.one(), voter)
self.assertTrue(idea.user_voted(u'votername'))
def test_negative_idea_voting(self):
from shootout.views import idea_vote
self.config.include('shootout.addroutes')
user = self._addUser()
idea = self._addIdea(user=user)
voter = self._addUser(u'votername')
self.assertEqual(idea.user_voted(u'votername'), False)
self.config.testing_securitypolicy(u'votername')
post_data = {
'form.vote_miss': u'Miss',
'target': 1,
}
request = testing.DummyRequest(post=post_data)
idea_vote(request)
self.assertEqual(idea.hits, 0)
self.assertEqual(idea.misses, 1)
self.assertEqual(idea.hit_percentage, 0)
self.assertEqual(idea.total_votes, 1)
self.assertEqual(idea.vote_differential, -1)
self.assertEqual(idea.author.hits, 0)
self.assertEqual(len(idea.voted_users.all()), 1)
self.assertEqual(idea.voted_users.one(), voter)
self.assertTrue(idea.user_voted(u'votername'))
def test_registration_nosubmit(self):
from shootout.views import user_add
self.config.include(register_templates)
request = testing.DummyRequest()
result = user_add(request)
self.assertTrue('form' in result)
def test_registration_submit_empty(self):
from shootout.views import user_add
self.config.include(register_templates)
request = testing.DummyRequest()
result = user_add(request)
self.assertTrue('form' in result)
request = testing.DummyRequest(post={'form.submitted': 'Shoot'})
result = user_add(request)
self.assertEqual(
result['form'].form.errors,
{
'username': u'Missing value',
'confirm_password': u'Missing value',
'password': u'Missing value',
'email': u'Missing value',
'name': u'Missing value'
}
)
def test_registration_submit_schema_succeed(self):
from shootout.views import user_add
from shootout.models import User
self.config.include('shootout.addroutes')
request = testing.DummyRequest(
post={
'form.submitted': u'Register',
'username': u'username',
'password': u'secret',
'confirm_password': u'secret',
'email': u'[email protected]',
'name': u'John Doe',
}
)
user_add(request)
users = self.session.query(User).all()
self.assertEqual(len(users), 1)
user = users[0]
self.assertEqual(user.username, u'username')
self.assertEqual(user.name, u'John Doe')
self.assertEqual(user.email, u'[email protected]')
self.assertEqual(user.hits, 0)
self.assertEqual(user.misses, 0)
self.assertEqual(user.delivered_hits, 0)
self.assertEqual(user.delivered_misses, 0)
self.assertEqual(user.ideas, [])
self.assertEqual(user.voted_ideas, [])
def test_user_view(self):
from shootout.views import user_view
self.config.testing_securitypolicy(u'username')
self.config.include('shootout.addroutes')
self.config.include(register_templates)
request = testing.DummyRequest()
request.matchdict = {'username': u'username'}
self._addUser()
result = user_view(request)
self.assertEqual(result['user'].username, u'username')
self.assertEqual(result['user'].user_id, 1)
def test_idea_view(self):
from shootout.views import idea_view
self.config.testing_securitypolicy(u'username')
self.config.include('shootout.addroutes')
self.config.include(register_templates)
self._addIdea()
request = testing.DummyRequest()
request.matchdict = {'idea_id': 1}
result = idea_view(request)
self.assertEqual(result['idea'].title, u'title')
self.assertEqual(result['idea'].idea_id, 1)
self.assertEqual(result['viewer_username'], u'username')
def test_tag_view(self):
from shootout.views import tag_view
from shootout.models import Tag
self.config.testing_securitypolicy(u'username')
self.config.include('shootout.addroutes')
self.config.include(register_templates)
user = self._addUser()
tag1 = Tag(u'bar')
tag2 = Tag(u'foo')
self.session.add_all([tag1, tag2])
idea1 = self._addIdea(user=user)
idea1.tags.append(tag1)
idea2 = self._addIdea(user=user)
idea2.tags.append(tag1)
idea3 = self._addIdea(user=user)
idea3.tags.append(tag2)
self.session.flush()
request = testing.DummyRequest()
request.matchdict = {'tag_name': u'bar'}
result = tag_view(request)
ideas = result['ideas'].all()
self.assertEqual(ideas[0].idea_id, idea1.idea_id)
self.assertEqual(ideas[1].idea_id, idea2.idea_id)
self.assertEqual(result['tag'], u'bar')
request = testing.DummyRequest()
request.matchdict = {'tag_name': u'foo'}
result = tag_view(request)
self.assertEqual(result['ideas'].one().idea_id, idea3.idea_id)
self.assertEqual(result['tag'], u'foo')
def test_about_view(self):
from shootout.views import about_view
self.config.include(register_templates)
request = testing.DummyRequest()
about_view(request)
def test_login_view_submit_fail(self):
from shootout.views import login_view
self.config.include('shootout.addroutes')
self._addUser()
request = testing.DummyRequest(
post={
'submit': u'Login',
'login': u'username',
'password': u'wrongpassword',
}
)
login_view(request)
messages = request.session.peek_flash()
self.assertEqual(messages, [u'Failed to login.'])
def test_login_view_submit_success(self):
from shootout.views import login_view
self.config.include('shootout.addroutes')
self._addUser()
request = testing.DummyRequest(
post={
'submit': u'Login',
'login': u'username',
'password': u'password',
}
)
login_view(request)
messages = request.session.peek_flash()
self.assertEqual(messages, [u'Logged in successfully.'])
def test_logout_view(self):
from shootout.views import logout_view
self.config.include('shootout.addroutes')
request = testing.DummyRequest()
logout_view(request)
messages = request.session.peek_flash()
self.assertEqual(messages, [u'Logged out successfully.'])
| [
"[email protected]"
] | |
09c5c0f500049f682537e17e758566cd5a346d59 | bc01e1d158e7d8f28451a7e108afb8ec4cb7d5d4 | /sage/src/sage/combinat/species/functorial_composition_species.py | 6c84368ba4dfa192538a5c7946a0850b4b801bd3 | [] | no_license | bopopescu/geosci | 28792bda1ec1f06e23ba8dcb313769b98f793dad | 0d9eacbf74e2acffefde93e39f8bcbec745cdaba | refs/heads/master | 2021-09-22T17:47:20.194233 | 2018-09-12T22:19:36 | 2018-09-12T22:19:36 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,158 | py | """
Functorial composition species
"""
from __future__ import absolute_import
#*****************************************************************************
# Copyright (C) 2008 Mike Hansen <[email protected]>,
#
# Distributed under the terms of the GNU General Public License (GPL)
#
# This code is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# The full text of the GPL is available at:
#
# http://www.gnu.org/licenses/
#*****************************************************************************
from .species import GenericCombinatorialSpecies
from .structure import GenericSpeciesStructure
from sage.misc.cachefunc import cached_function
from sage.structure.unique_representation import UniqueRepresentation
class FunctorialCompositionStructure(GenericSpeciesStructure):
pass
class FunctorialCompositionSpecies(GenericCombinatorialSpecies):
def __init__(self, F, G, min=None, max=None, weight=None):
"""
Returns the functorial composition of two species.
EXAMPLES::
sage: E = species.SetSpecies()
sage: E2 = species.SetSpecies(size=2)
sage: WP = species.SubsetSpecies()
sage: P2 = E2*E
sage: G = WP.functorial_composition(P2)
sage: G.isotype_generating_series().coefficients(5)
[1, 1, 2, 4, 11]
sage: G = species.SimpleGraphSpecies()
sage: c = G.generating_series().coefficients(2)
sage: type(G)
<class 'sage.combinat.species.functorial_composition_species.FunctorialCompositionSpecies'>
sage: G == loads(dumps(G))
True
sage: G._check() #False due to isomorphism types not being implemented
False
"""
self._F = F
self._G = G
self._state_info = [F, G]
self._name = "Functorial composition of (%s) and (%s)"%(F, G)
GenericCombinatorialSpecies.__init__(self, min=None, max=None, weight=None)
_default_structure_class = FunctorialCompositionStructure
def _structures(self, structure_class, s):
"""
EXAMPLES::
sage: G = species.SimpleGraphSpecies()
sage: G.structures([1,2,3]).list()
[{},
{{1, 2}*{3}},
{{1, 3}*{2}},
{{2, 3}*{1}},
{{1, 2}*{3}, {1, 3}*{2}},
{{1, 2}*{3}, {2, 3}*{1}},
{{1, 3}*{2}, {2, 3}*{1}},
{{1, 2}*{3}, {1, 3}*{2}, {2, 3}*{1}}]
"""
gs = self._G.structures(s).list()
for f in self._F.structures(gs):
yield f
def _isotypes(self, structure_class, s):
"""
There is no known algorithm for efficiently generating the
isomorphism types of the functorial composition of two species.
EXAMPLES::
sage: G = species.SimpleGraphSpecies()
sage: G.isotypes([1,2,3]).list()
Traceback (most recent call last):
...
NotImplementedError
"""
raise NotImplementedError
def _gs(self, series_ring, base_ring):
"""
EXAMPLES::
sage: G = species.SimpleGraphSpecies()
sage: G.generating_series().coefficients(5)
[1, 1, 1, 4/3, 8/3]
"""
return self._F.generating_series(base_ring).functorial_composition(self._G.generating_series(base_ring))
def _itgs(self, series_ring, base_ring):
"""
EXAMPLES::
sage: G = species.SimpleGraphSpecies()
sage: G.isotype_generating_series().coefficients(5)
[1, 1, 2, 4, 11]
"""
return self.cycle_index_series(base_ring).isotype_generating_series()
def _cis(self, series_ring, base_ring):
"""
EXAMPLES::
sage: G = species.SimpleGraphSpecies()
sage: G.cycle_index_series().coefficients(5)
[p[],
p[1],
p[1, 1] + p[2],
4/3*p[1, 1, 1] + 2*p[2, 1] + 2/3*p[3],
8/3*p[1, 1, 1, 1] + 4*p[2, 1, 1] + 2*p[2, 2] + 4/3*p[3, 1] + p[4]]
"""
return self._F.cycle_index_series(base_ring).functorial_composition(self._G.cycle_index_series(base_ring))
def weight_ring(self):
"""
Returns the weight ring for this species. This is determined by
asking Sage's coercion model what the result is when you multiply
(and add) elements of the weight rings for each of the operands.
EXAMPLES::
sage: G = species.SimpleGraphSpecies()
sage: G.weight_ring()
Rational Field
"""
from sage.structure.element import get_coercion_model
cm = get_coercion_model()
f_weights = self._F.weight_ring()
g_weights = self._G.weight_ring()
return cm.explain(f_weights, g_weights, verbosity=0)
#Backward compatibility
FunctorialCompositionSpecies_class = FunctorialCompositionSpecies
| [
"valber@HPC"
] | valber@HPC |
042cbf52143196b868afdd9abf034bc2a4ed1dd5 | a3a3183bc3ae9d3d4bad2f4923e8297bce0ff7d3 | /final/Python-3.10.0/Lib/ensurepip/__init__.py | f28ab11ed400828f39a629a46d2708f9de616bad | [] | no_license | Nuitka/Nuitka-references | 4b78831e6947f1c4b32ef034435a88ecfd27f701 | f20d1b5728ec00cf8a5b23d650101c288b2594e9 | refs/heads/main | 2023-08-06T19:12:11.795836 | 2023-08-03T14:54:16 | 2023-08-03T14:55:22 | 169,884,560 | 2 | 1 | null | null | null | null | UTF-8 | Python | false | false | 9,487 | py | import collections
import os
import os.path
import subprocess
import sys
import sysconfig
import tempfile
from importlib import resources
__all__ = ["version", "bootstrap"]
_PACKAGE_NAMES = ('setuptools', 'pip')
_SETUPTOOLS_VERSION = "57.4.0"
_PIP_VERSION = "21.2.3"
_PROJECTS = [
("setuptools", _SETUPTOOLS_VERSION, "py3"),
("pip", _PIP_VERSION, "py3"),
]
# Packages bundled in ensurepip._bundled have wheel_name set.
# Packages from WHEEL_PKG_DIR have wheel_path set.
_Package = collections.namedtuple('Package',
('version', 'wheel_name', 'wheel_path'))
# Directory of system wheel packages. Some Linux distribution packaging
# policies recommend against bundling dependencies. For example, Fedora
# installs wheel packages in the /usr/share/python-wheels/ directory and don't
# install the ensurepip._bundled package.
_WHEEL_PKG_DIR = sysconfig.get_config_var('WHEEL_PKG_DIR')
def _find_packages(path):
packages = {}
try:
filenames = os.listdir(path)
except OSError:
# Ignore: path doesn't exist or permission error
filenames = ()
# Make the code deterministic if a directory contains multiple wheel files
# of the same package, but don't attempt to implement correct version
# comparison since this case should not happen.
filenames = sorted(filenames)
for filename in filenames:
# filename is like 'pip-20.2.3-py2.py3-none-any.whl'
if not filename.endswith(".whl"):
continue
for name in _PACKAGE_NAMES:
prefix = name + '-'
if filename.startswith(prefix):
break
else:
continue
# Extract '20.2.2' from 'pip-20.2.2-py2.py3-none-any.whl'
version = filename.removeprefix(prefix).partition('-')[0]
wheel_path = os.path.join(path, filename)
packages[name] = _Package(version, None, wheel_path)
return packages
def _get_packages():
global _PACKAGES, _WHEEL_PKG_DIR
if _PACKAGES is not None:
return _PACKAGES
packages = {}
for name, version, py_tag in _PROJECTS:
wheel_name = f"{name}-{version}-{py_tag}-none-any.whl"
packages[name] = _Package(version, wheel_name, None)
if _WHEEL_PKG_DIR:
dir_packages = _find_packages(_WHEEL_PKG_DIR)
# only used the wheel package directory if all packages are found there
if all(name in dir_packages for name in _PACKAGE_NAMES):
packages = dir_packages
_PACKAGES = packages
return packages
_PACKAGES = None
def _run_pip(args, additional_paths=None):
# Run the bootstrapping in a subprocess to avoid leaking any state that happens
# after pip has executed. Particularly, this avoids the case when pip holds onto
# the files in *additional_paths*, preventing us to remove them at the end of the
# invocation.
code = f"""
import runpy
import sys
sys.path = {additional_paths or []} + sys.path
sys.argv[1:] = {args}
runpy.run_module("pip", run_name="__main__", alter_sys=True)
"""
return subprocess.run([sys.executable, '-W', 'ignore::DeprecationWarning',
"-c", code], check=True).returncode
def version():
"""
Returns a string specifying the bundled version of pip.
"""
return _get_packages()['pip'].version
def _disable_pip_configuration_settings():
# We deliberately ignore all pip environment variables
# when invoking pip
# See http://bugs.python.org/issue19734 for details
keys_to_remove = [k for k in os.environ if k.startswith("PIP_")]
for k in keys_to_remove:
del os.environ[k]
# We also ignore the settings in the default pip configuration file
# See http://bugs.python.org/issue20053 for details
os.environ['PIP_CONFIG_FILE'] = os.devnull
def bootstrap(*, root=None, upgrade=False, user=False,
altinstall=False, default_pip=False,
verbosity=0):
"""
Bootstrap pip into the current Python installation (or the given root
directory).
Note that calling this function will alter both sys.path and os.environ.
"""
# Discard the return value
_bootstrap(root=root, upgrade=upgrade, user=user,
altinstall=altinstall, default_pip=default_pip,
verbosity=verbosity)
def _bootstrap(*, root=None, upgrade=False, user=False,
altinstall=False, default_pip=False,
verbosity=0):
"""
Bootstrap pip into the current Python installation (or the given root
directory). Returns pip command status code.
Note that calling this function will alter both sys.path and os.environ.
"""
if altinstall and default_pip:
raise ValueError("Cannot use altinstall and default_pip together")
sys.audit("ensurepip.bootstrap", root)
_disable_pip_configuration_settings()
# By default, installing pip and setuptools installs all of the
# following scripts (X.Y == running Python version):
#
# pip, pipX, pipX.Y, easy_install, easy_install-X.Y
#
# pip 1.5+ allows ensurepip to request that some of those be left out
if altinstall:
# omit pip, pipX and easy_install
os.environ["ENSUREPIP_OPTIONS"] = "altinstall"
elif not default_pip:
# omit pip and easy_install
os.environ["ENSUREPIP_OPTIONS"] = "install"
with tempfile.TemporaryDirectory() as tmpdir:
# Put our bundled wheels into a temporary directory and construct the
# additional paths that need added to sys.path
additional_paths = []
for name, package in _get_packages().items():
if package.wheel_name:
# Use bundled wheel package
from ensurepip import _bundled
wheel_name = package.wheel_name
whl = resources.read_binary(_bundled, wheel_name)
else:
# Use the wheel package directory
with open(package.wheel_path, "rb") as fp:
whl = fp.read()
wheel_name = os.path.basename(package.wheel_path)
filename = os.path.join(tmpdir, wheel_name)
with open(filename, "wb") as fp:
fp.write(whl)
additional_paths.append(filename)
# Construct the arguments to be passed to the pip command
args = ["install", "--no-cache-dir", "--no-index", "--find-links", tmpdir]
if root:
args += ["--root", root]
if upgrade:
args += ["--upgrade"]
if user:
args += ["--user"]
if verbosity:
args += ["-" + "v" * verbosity]
return _run_pip([*args, *_PACKAGE_NAMES], additional_paths)
def _uninstall_helper(*, verbosity=0):
"""Helper to support a clean default uninstall process on Windows
Note that calling this function may alter os.environ.
"""
# Nothing to do if pip was never installed, or has been removed
try:
import pip
except ImportError:
return
# If the installed pip version doesn't match the available one,
# leave it alone
available_version = version()
if pip.__version__ != available_version:
print(f"ensurepip will only uninstall a matching version "
f"({pip.__version__!r} installed, "
f"{available_version!r} available)",
file=sys.stderr)
return
_disable_pip_configuration_settings()
# Construct the arguments to be passed to the pip command
args = ["uninstall", "-y", "--disable-pip-version-check"]
if verbosity:
args += ["-" + "v" * verbosity]
return _run_pip([*args, *reversed(_PACKAGE_NAMES)])
def _main(argv=None):
import argparse
parser = argparse.ArgumentParser(prog="python -m ensurepip")
parser.add_argument(
"--version",
action="version",
version="pip {}".format(version()),
help="Show the version of pip that is bundled with this Python.",
)
parser.add_argument(
"-v", "--verbose",
action="count",
default=0,
dest="verbosity",
help=("Give more output. Option is additive, and can be used up to 3 "
"times."),
)
parser.add_argument(
"-U", "--upgrade",
action="store_true",
default=False,
help="Upgrade pip and dependencies, even if already installed.",
)
parser.add_argument(
"--user",
action="store_true",
default=False,
help="Install using the user scheme.",
)
parser.add_argument(
"--root",
default=None,
help="Install everything relative to this alternate root directory.",
)
parser.add_argument(
"--altinstall",
action="store_true",
default=False,
help=("Make an alternate install, installing only the X.Y versioned "
"scripts (Default: pipX, pipX.Y, easy_install-X.Y)."),
)
parser.add_argument(
"--default-pip",
action="store_true",
default=False,
help=("Make a default pip install, installing the unqualified pip "
"and easy_install in addition to the versioned scripts."),
)
args = parser.parse_args(argv)
return _bootstrap(
root=args.root,
upgrade=args.upgrade,
user=args.user,
verbosity=args.verbosity,
altinstall=args.altinstall,
default_pip=args.default_pip,
)
| [
"[email protected]"
] | |
fae38ea09e1beb8207caeb28a4f7c4996b13a758 | af179f861c423a27ed4539882b1b17202c4833b5 | /algorithms/curious_a2c/agent_curious_a2c.py | 5c24f7f2685aa3ba6dedb53e35ac26524afdf941 | [
"MIT"
] | permissive | alex-petrenko/curious-rl | 1666251076859304b55969d590447fc6b5c3b2f6 | 6cd0eb78ab409c68f8dad1a8542d625f0dd39114 | refs/heads/master | 2020-04-08T01:32:28.271135 | 2019-06-13T20:54:22 | 2019-06-13T20:54:22 | 158,899,170 | 22 | 3 | null | null | null | null | UTF-8 | Python | false | false | 24,992 | py | """
Implementation of the curious variant of the Advantage Actor-Critic algorithm.
"""
import time
import numpy as np
import tensorflow as tf
from tensorflow.contrib import slim
from algorithms.algo_utils import RunningMeanStd, EPS, extract_keys
from algorithms.baselines.a2c.agent_a2c import AgentA2C
from algorithms.env_wrappers import has_image_observations
from algorithms.multi_env import MultiEnv
from algorithms.tf_utils import dense, count_total_parameters, conv
from algorithms.utils import summaries_dir
from utils.distributions import CategoricalProbabilityDistribution
from utils.utils import log, AttrDict, put_kernels_on_grid
class CuriousA2CPolicy:
"""A class that represents both the actor's policy and the value estimator."""
def __init__(self, env, observations, timer, params):
self.regularizer = tf.contrib.layers.l2_regularizer(scale=1e-10)
img_model_name = params.image_model_name
fc_layers = params.fc_layers
fc_size = params.fc_size
lowdim_model_name = params.lowdim_model_name
past_frames = params.stack_past_frames
image_obs = has_image_observations(env.observation_space.spaces['obs'])
num_actions = env.action_space.n
if image_obs:
# convolutions
if img_model_name == 'convnet_simple':
conv_filters = self._convnet_simple(observations, [(32, 3, 2)] * 4)
else:
raise Exception('Unknown model name')
encoded_input = tf.contrib.layers.flatten(conv_filters)
else:
# low-dimensional input
if lowdim_model_name == 'simple_fc':
frames = tf.split(observations, past_frames, axis=1)
fc_encoder = tf.make_template('fc_encoder', self._fc_frame_encoder, create_scope_now_=True)
encoded_frames = [fc_encoder(frame) for frame in frames]
encoded_input = tf.concat(encoded_frames, axis=1)
else:
raise Exception('Unknown lowdim model name')
if params.ignore_timer:
timer = tf.multiply(timer, 0.0)
encoded_input_with_timer = tf.concat([encoded_input, tf.expand_dims(timer, 1)], axis=1)
fc = encoded_input_with_timer
for _ in range(fc_layers - 1):
fc = dense(fc, fc_size, self.regularizer)
# fully-connected layers to generate actions
actions_fc = dense(fc, fc_size // 2, self.regularizer)
self.actions = tf.contrib.layers.fully_connected(actions_fc, num_actions, activation_fn=None)
self.best_action_deterministic = tf.argmax(self.actions, axis=1)
self.actions_prob_distribution = CategoricalProbabilityDistribution(self.actions)
self.act = self.actions_prob_distribution.sample()
value_fc = dense(fc, fc_size // 2, self.regularizer)
self.value = tf.squeeze(tf.contrib.layers.fully_connected(value_fc, 1, activation_fn=None), axis=[1])
if image_obs:
# summaries
with tf.variable_scope('conv1', reuse=True):
weights = tf.get_variable('weights')
with tf.name_scope('a2c_agent_summary_conv'):
if weights.shape[2].value in [1, 3, 4]:
tf.summary.image('conv1/kernels', put_kernels_on_grid(weights), max_outputs=1)
log.info('Total parameters in the model: %d', count_total_parameters())
def _fc_frame_encoder(self, x):
return dense(x, 128, self.regularizer)
def _conv(self, x, filters, kernel, stride, scope=None):
return conv(x, filters, kernel, stride=stride, regularizer=self.regularizer, scope=scope)
def _convnet_simple(self, x, convs):
"""Basic stacked convnet."""
layer = x
layer_idx = 1
for filters, kernel, stride in convs:
layer = self._conv(layer, filters, kernel, stride, 'conv' + str(layer_idx))
layer_idx += 1
return layer
class Model:
"""Single class for inverse and forward dynamics model."""
def __init__(self, env, obs, next_obs, actions, past_frames, forward_fc):
"""
:param obs - placeholder for observations
:param actions - placeholder for selected actions
"""
self.regularizer = tf.contrib.layers.l2_regularizer(scale=1e-10)
image_obs = has_image_observations(env.observation_space.spaces['obs'])
num_actions = env.action_space.n
if image_obs:
# convolutions
conv_encoder = tf.make_template(
'conv_encoder',
self._convnet_simple,
create_scope_now_=True,
convs=[(32, 3, 2)] * 4,
)
encoded_obs = conv_encoder(obs=obs)
encoded_obs = tf.contrib.layers.flatten(encoded_obs)
encoded_next_obs = conv_encoder(obs=next_obs)
self.encoded_next_obs = tf.contrib.layers.flatten(encoded_next_obs)
else:
# low-dimensional input
lowdim_encoder = tf.make_template(
'lowdim_encoder',
self._lowdim_encoder,
create_scope_now_=True,
past_frames=past_frames,
)
encoded_obs = lowdim_encoder(obs=obs)
self.encoded_next_obs = lowdim_encoder(obs=next_obs)
self.feature_vector_size = encoded_obs.get_shape().as_list()[-1]
log.info('Feature vector size in ICM: %d', self.feature_vector_size)
actions_one_hot = tf.one_hot(actions, num_actions)
# forward model
forward_model_input = tf.concat(
[encoded_obs, actions_one_hot],
axis=1,
)
forward_model_hidden = dense(forward_model_input, forward_fc, self.regularizer)
forward_model_hidden = dense(forward_model_hidden, forward_fc, self.regularizer)
forward_model_output = tf.contrib.layers.fully_connected(
forward_model_hidden, self.feature_vector_size, activation_fn=None,
)
self.predicted_obs = forward_model_output
# inverse model
inverse_model_input = tf.concat([encoded_obs, self.encoded_next_obs], axis=1)
inverse_model_hidden = dense(inverse_model_input, 256, self.regularizer)
inverse_model_output = tf.contrib.layers.fully_connected(
inverse_model_hidden, num_actions, activation_fn=None,
)
self.predicted_actions = inverse_model_output
log.info('Total parameters in the model: %d', count_total_parameters())
def _fc_frame_encoder(self, x):
return dense(x, 128, self.regularizer)
def _lowdim_encoder(self, obs, past_frames):
frames = tf.split(obs, past_frames, axis=1)
fc_encoder = tf.make_template('fc_encoder', self._fc_frame_encoder, create_scope_now_=True)
encoded_frames = [fc_encoder(frame) for frame in frames]
encoded_input = tf.concat(encoded_frames, axis=1)
return encoded_input
def _conv(self, x, filters, kernel, stride, scope=None):
return conv(x, filters, kernel, stride=stride, regularizer=self.regularizer, scope=scope)
def _convnet_simple(self, convs, obs):
"""Basic stacked convnet."""
layer = obs
layer_idx = 1
for filters, kernel, stride in convs:
layer = self._conv(layer, filters, kernel, stride, 'conv' + str(layer_idx))
layer_idx += 1
return layer
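# Added note: the Model above encodes obs and next_obs with the same template-shared encoder, so
# the forward head predicts the feature vector of the next observation (not raw pixels), and
# encoded_next_obs doubles as the regression target for the forward loss and as an input to the
# inverse-dynamics head.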
class AgentCuriousA2C(AgentA2C):
"""Agent based on A2C algorithm."""
class Params(AgentA2C.Params):
"""Hyperparams for the algorithm and the training process."""
def __init__(self, experiment_name):
"""Default parameter values set in ctor."""
super(AgentCuriousA2C.Params, self).__init__(experiment_name)
self.icm_beta = 0.5 # in ICM, importance of training forward model vs inverse model
self.model_lr_scale = 10.0 # in ICM, importance of model loss vs actor-critic loss
self.prediction_bonus_coeff = 0.05 # scaling factor for prediction bonus vs env rewards
self.clip_bonus = 0.1
self.clip_advantage = 10
self.ignore_timer = False # whether or not policy uses the remaining episode time
self.forward_fc = 512
self.train_for_env_steps = 10 * 1000 * 1000 * 1000
# noinspection PyMethodMayBeStatic
def filename_prefix(self):
return 'curious_a2c_'
def __init__(self, make_env_func, params):
"""Initialize A2C computation graph and some auxiliary tensors."""
super(AgentA2C, self).__init__(params) # calling grandparent ctor, skipping parent
global_step = tf.train.get_or_create_global_step()
self.make_env_func = make_env_func
self.selected_actions = tf.placeholder(tf.int32, [None]) # action selected by the policy
self.value_estimates = tf.placeholder(tf.float32, [None])
self.discounted_rewards = tf.placeholder(tf.float32, [None]) # estimate of total reward (rollout + value)
self.advantages = tf.placeholder(tf.float32, [None])
env = make_env_func() # we need it to query observation shape, number of actions, etc.
obs_shape = list(env.observation_space.spaces['obs'].shape)
input_shape = [None] + obs_shape # add batch dimension
self.observations = tf.placeholder(tf.float32, shape=input_shape)
self.next_obs = tf.placeholder(tf.float32, shape=input_shape)
self.timer = tf.placeholder(tf.float32, shape=[None])
self.policy = CuriousA2CPolicy(env, self.observations, self.timer, params)
self.model = Model(
env, self.observations, self.next_obs, self.selected_actions, params.stack_past_frames, params.forward_fc,
)
env.close()
# negative logarithm of the probabilities of actions
neglogp_actions = tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=self.policy.actions, labels=self.selected_actions,
)
# maximize probabilities of actions that give high advantage
action_losses = tf.clip_by_value(self.advantages * neglogp_actions, -20.0, 20.0)
action_loss = tf.reduce_mean(action_losses)
# penalize for inaccurate value estimation
value_losses = tf.square(self.discounted_rewards - self.policy.value)
value_losses = tf.clip_by_value(value_losses, -20.0, 20.0)
value_loss = self.params.value_loss_coeff * tf.reduce_mean(value_losses)
        # penalize the agent for being "too sure" about its actions (to prevent converging to a suboptimal local
        # minimum too soon)
entropy_loss = -tf.reduce_mean(self.policy.actions_prob_distribution.entropy())
entropy_loss_coeff = tf.train.exponential_decay(
self.params.initial_entropy_loss_coeff, tf.cast(global_step, tf.float32), 20.0, 0.95, staircase=True,
)
entropy_loss_coeff = tf.maximum(entropy_loss_coeff, self.params.min_entropy_loss_coeff)
entropy_loss = entropy_loss_coeff * entropy_loss
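        # For a categorical policy pi(a|s) this term maximizes the entropy
        #     H(pi) = -sum_a pi(a|s) * log pi(a|s)
        # with a coefficient that decays every 20 training steps (staircase exponential decay above)
        # but is floored at params.min_entropy_loss_coeff.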
# total actor-critic loss
a2c_loss = action_loss + entropy_loss + value_loss
# model losses
forward_loss_batch = 0.5 * tf.square(self.model.encoded_next_obs - self.model.predicted_obs)
forward_loss_batch = tf.reduce_mean(forward_loss_batch, axis=1) * self.model.feature_vector_size
forward_loss = tf.reduce_mean(forward_loss_batch)
inverse_loss = tf.reduce_mean(tf.nn.sparse_softmax_cross_entropy_with_logits(
logits=self.model.predicted_actions, labels=self.selected_actions,
))
icm_beta = self.params.icm_beta
model_loss = forward_loss * icm_beta + inverse_loss * (1.0 - icm_beta)
model_loss = self.params.model_lr_scale * model_loss
# regularization
regularization_loss = tf.reduce_sum(tf.get_collection(tf.GraphKeys.REGULARIZATION_LOSSES))
# total loss
loss = a2c_loss + model_loss + regularization_loss
# training
self.train = tf.contrib.layers.optimize_loss(
loss=loss,
global_step=global_step,
learning_rate=self.params.learning_rate,
optimizer=tf.train.AdamOptimizer,
clip_gradients=self.params.clip_gradients,
)
bonus = self.params.prediction_bonus_coeff * forward_loss_batch
self.prediction_curiosity_bonus = tf.clip_by_value(bonus, -self.params.clip_bonus, self.params.clip_bonus)
# summaries for the agent and the training process
with tf.name_scope('a2c_agent_summary'):
if len(self.observations.shape) >= 4:
tf.summary.image(
'observations',
self.observations[:, :, :, :3], # first three channels
max_outputs=8,
)
# output also last channel
if self.observations.shape[-1].value > 4:
tf.summary.image('observations_last_channel', self.observations[:, :, :, -1:])
tf.summary.scalar('disc_rewards_avg', tf.reduce_mean(self.discounted_rewards))
tf.summary.scalar('disc_rewards_max', tf.reduce_max(self.discounted_rewards))
tf.summary.scalar('disc_rewards_min', tf.reduce_min(self.discounted_rewards))
tf.summary.scalar('bonus_avg', tf.reduce_mean(self.prediction_curiosity_bonus))
tf.summary.scalar('bonus_max', tf.reduce_max(self.prediction_curiosity_bonus))
tf.summary.scalar('bonus_min', tf.reduce_min(self.prediction_curiosity_bonus))
tf.summary.scalar('value', tf.reduce_mean(self.policy.value))
tf.summary.scalar('adv_avg_abs', tf.reduce_mean(tf.abs(self.advantages)))
tf.summary.scalar('adv_max', tf.reduce_max(self.advantages))
tf.summary.scalar('adv_min', tf.reduce_min(self.advantages))
tf.summary.scalar('selected_action_avg', tf.reduce_mean(tf.to_float(self.selected_actions)))
tf.summary.scalar('policy_entropy', tf.reduce_mean(self.policy.actions_prob_distribution.entropy()))
tf.summary.scalar('entropy_coeff', entropy_loss_coeff)
with tf.name_scope('a2c_losses'):
tf.summary.scalar('action_loss', action_loss)
tf.summary.scalar('max_action_loss', tf.reduce_max(action_losses))
tf.summary.scalar('value_loss', value_loss)
tf.summary.scalar('max_value_loss', tf.reduce_max(value_losses))
tf.summary.scalar('entropy_loss', entropy_loss)
tf.summary.scalar('a2c_loss', a2c_loss)
tf.summary.scalar('forward_loss', forward_loss)
tf.summary.scalar('inverse_loss', inverse_loss)
tf.summary.scalar('model_loss', model_loss)
tf.summary.scalar('regularization_loss', regularization_loss)
tf.summary.scalar('loss', loss)
summary_dir = summaries_dir(self.params.experiment_dir())
self.summary_writer = tf.summary.FileWriter(summary_dir)
self.all_summaries = tf.summary.merge_all()
with tf.name_scope('a2c_aux_summary'):
tf.summary.scalar('training_steps', global_step, collections=['aux'])
# if it's not "initialized" yet, just report 0 to preserve tensorboard plot scale
best_reward_report = tf.cond(
tf.equal(self.best_avg_reward, self.initial_best_avg_reward),
true_fn=lambda: 0.0,
false_fn=lambda: self.best_avg_reward,
)
tf.summary.scalar('best_reward_ever', best_reward_report, collections=['aux'])
tf.summary.scalar('avg_reward', self.avg_reward_placeholder, collections=['aux'])
self.avg_length_placeholder = tf.placeholder(tf.float32, [])
            tf.summary.scalar('avg_length', self.avg_length_placeholder, collections=['aux'])
self.aux_summaries = tf.summary.merge_all(key='aux')
self.saver = tf.train.Saver(max_to_keep=3)
all_vars = tf.trainable_variables()
log.warn('curious a2c variables:')
slim.model_analyzer.analyze_vars(all_vars, print_info=True)
def best_action(self, observation, deterministic=False):
obs, timer = extract_keys([observation], 'obs', 'timer')
actions, _ = self._policy_step_timer(obs, timer, deterministic)
return actions[0]
def _policy_step_timer(self, observations, timer, deterministic=False):
"""
Select the best action by sampling from the distribution generated by the policy. Also estimate the
value for the currently observed environment state.
"""
ops = [
self.policy.best_action_deterministic if deterministic else self.policy.act,
self.policy.value,
]
actions, values = self.session.run(
ops,
feed_dict={
self.observations: observations,
self.timer: timer,
},
)
return actions, values
def _estimate_values_timer(self, observations, timer):
values = self.session.run(
self.policy.value,
feed_dict={
self.observations: observations,
self.timer: timer,
},
)
return values
def _prediction_curiosity_bonus(self, observations, actions, next_obs):
bonuses = self.session.run(
self.prediction_curiosity_bonus,
feed_dict={
self.selected_actions: actions,
self.observations: observations,
self.next_obs: next_obs,
}
)
return bonuses
def _curious_train_step(
self, step, env_steps, observations, timer, actions, values, discounted_rewards, advantages, next_obs
):
"""
Actually do a single iteration of training. See the computational graph in the ctor to figure out
the details.
"""
with_summaries = self._should_write_summaries(step)
summaries = [self.all_summaries] if with_summaries else []
result = self.session.run(
[self.train] + summaries,
feed_dict={
self.observations: observations,
self.timer: timer,
self.selected_actions: actions,
self.value_estimates: values,
self.discounted_rewards: discounted_rewards,
self.advantages: advantages,
self.next_obs: next_obs,
},
)
step = tf.train.global_step(self.session, tf.train.get_global_step())
if with_summaries:
summary = result[1]
self.summary_writer.add_summary(summary, global_step=env_steps)
return step
def _learn_loop(self, multi_env, step_callback=None):
"""
Main training loop.
:param step_callback: a hacky callback that takes a dictionary with all local variables as an argument.
        Allows you to look inside the training process.
"""
step = initial_step = tf.train.global_step(self.session, tf.train.get_global_step())
env_steps = self.total_env_steps.eval(session=self.session)
batch_size = self.params.rollout * self.params.num_envs
img_obs, timer_obs = extract_keys(multi_env.initial_obs(), 'obs', 'timer')
adv_running_mean_std = RunningMeanStd(max_past_samples=10000)
def end_of_training(s, es):
return s >= self.params.train_for_steps or es > self.params.train_for_env_steps
while not end_of_training(step, env_steps):
timing = AttrDict({'experience': time.time(), 'batch': time.time()})
experience_start = time.time()
env_steps_before_batch = env_steps
batch_obs, batch_timer = [img_obs], [timer_obs]
env_steps += len(img_obs)
batch_actions, batch_values, batch_rewards, batch_dones, batch_next_obs = [], [], [], [], []
for rollout_step in range(self.params.rollout):
actions, values = self._policy_step_timer(img_obs, timer_obs)
batch_actions.append(actions)
batch_values.append(values)
# wait for all the workers to complete an environment step
next_obs, rewards, dones, infos = multi_env.step(actions)
next_img_obs, next_timer = extract_keys(next_obs, 'obs', 'timer')
# calculate curiosity bonus
bonuses = self._prediction_curiosity_bonus(img_obs, actions, next_img_obs)
rewards += bonuses
batch_rewards.append(rewards)
batch_dones.append(dones)
batch_next_obs.append(next_img_obs)
img_obs = next_img_obs
timer_obs = next_timer
if infos is not None and 'num_frames' in infos[0]:
env_steps += sum((info['num_frames'] for info in infos))
else:
env_steps += multi_env.num_envs
if rollout_step != self.params.rollout - 1:
# we don't need the newest observation in the training batch, already have enough
batch_obs.append(img_obs)
batch_timer.append(timer_obs)
assert len(batch_obs) == len(batch_rewards)
assert len(batch_obs) == len(batch_next_obs)
batch_rewards = np.asarray(batch_rewards, np.float32).swapaxes(0, 1)
batch_dones = np.asarray(batch_dones, np.bool).swapaxes(0, 1)
batch_values = np.asarray(batch_values, np.float32).swapaxes(0, 1)
# Last value won't be valid for envs with done=True (because env automatically resets and shows 1st
            # observation of the next episode). But that's okay, because we should never use last_value in this case.
last_values = self._estimate_values_timer(img_obs, timer_obs)
gamma = self.params.gamma
disc_rewards = []
for i in range(len(batch_rewards)):
env_rewards = self._calc_discounted_rewards(gamma, batch_rewards[i], batch_dones[i], last_values[i])
disc_rewards.extend(env_rewards)
disc_rewards = np.asarray(disc_rewards, np.float32)
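            # The per-env returns computed above are assumed to follow the usual n-step recurrence
            #     R_t = r_t + gamma * R_{t+1},    bootstrapped with R_T = last_value for that env
            # and reset at episode boundaries where done=True (presumably handled inside _calc_discounted_rewards).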
# convert observations and estimations to meaningful n-step batches
batch_obs_shape = (self.params.rollout * multi_env.num_envs,) + img_obs[0].shape
batch_obs = np.asarray(batch_obs, np.float32).swapaxes(0, 1).reshape(batch_obs_shape)
batch_next_obs = np.asarray(batch_next_obs, np.float32).swapaxes(0, 1).reshape(batch_obs_shape)
batch_actions = np.asarray(batch_actions, np.int32).swapaxes(0, 1).flatten()
batch_timer = np.asarray(batch_timer, np.float32).swapaxes(0, 1).flatten()
batch_values = batch_values.flatten()
advantages = disc_rewards - batch_values
if self.params.normalize_adv:
adv_running_mean_std.update(advantages)
advantages = (advantages - adv_running_mean_std.mean) / (np.sqrt(adv_running_mean_std.var) + EPS)
advantages = np.clip(advantages, -self.params.clip_advantage, self.params.clip_advantage)
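            # Advantage estimate: A_t = R_t - V(s_t); when normalize_adv is set it is standardized with a
            # running mean/std (EPS guards against division by zero) and finally clipped to +/- clip_advantage.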
timing.experience = time.time() - timing.experience
timing.train = time.time()
step = self._curious_train_step(
step,
env_steps,
batch_obs,
batch_timer,
batch_actions,
batch_values,
disc_rewards,
advantages,
batch_next_obs,
)
self._maybe_save(step, env_steps)
timing.train = time.time() - timing.train
avg_reward = multi_env.calc_avg_rewards(n=self.params.stats_episodes)
avg_length = multi_env.calc_avg_episode_lengths(n=self.params.stats_episodes)
fps = (env_steps - env_steps_before_batch) / (time.time() - timing.batch)
self._maybe_print(step, avg_reward, avg_length, fps, timing)
self._maybe_aux_summaries(step, env_steps, avg_reward, avg_length)
self._maybe_update_avg_reward(avg_reward, multi_env.stats_num_episodes())
if step_callback is not None:
step_callback(locals(), globals())
def learn(self, step_callback=None):
try:
multi_env = MultiEnv(
self.params.num_envs,
self.params.num_workers,
make_env_func=self.make_env_func,
stats_episodes=self.params.stats_episodes,
)
self._learn_loop(multi_env, step_callback)
except Exception as exc:
log.exception(exc)
finally:
log.info('Closing env...')
multi_env.close()
/tests/test_pipeline/components/classification/test_lda.py | Shamoo100/AutoTabular | Apache-2.0 | Python
import sklearn.discriminant_analysis
from autotabular.pipeline.components.classification.lda import LDA
from .test_base import BaseClassificationComponentTest
class LDAComponentTest(BaseClassificationComponentTest):
__test__ = True
res = dict()
res['default_iris'] = 1.0
res['default_iris_iterative'] = -1
res['default_iris_proba'] = 0.5614481896257509
res['default_iris_sparse'] = -1
res['default_digits'] = 0.88585306618093507
res['default_digits_iterative'] = -1
res['default_digits_binary'] = 0.9811778992106861
res['default_digits_multilabel'] = 0.82204896441795205
res['default_digits_multilabel_proba'] = 0.9833070018235553
sk_mod = sklearn.discriminant_analysis.LinearDiscriminantAnalysis
module = LDA
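    # Added note: the `res` mapping above is read by BaseClassificationComponentTest, presumably
    # keying expected scores by dataset/setting (iris vs. digits, dense vs. sparse, probabilistic
    # vs. hard predictions); sk_mod and module point the shared harness at the scikit-learn
    # estimator and the autotabular wrapper under test.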
/Version2/U7.2_Threads_Alt.py | wunnox/python_grundlagen | no_license | Python
#!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
####################################################
#
# Exercise:
# Write a program that starts three threads.
# The first thread runs for 8 seconds, the second for 4 seconds and the third for 6 seconds.
# Use the preceding slides as a template.
#
####################################################
#### Solution: ####
import time
import _thread
t = []
def show(c, s):
t.append(c)
print("Starte Thread", c, "mit", s, "Sek.")
time.sleep(s)
t.remove(c)
_thread.start_new_thread(show, (1, 12,))
time.sleep(0.5)
_thread.start_new_thread(show, (2, 22,))
time.sleep(0.5)
_thread.start_new_thread(show, (3, 18,))
time.sleep(0.5)
_thread.start_new_thread(show, (4, 14,))
time.sleep(0.5)
_thread.start_new_thread(show, (5, 21,))
time.sleep(0.5)
_thread.start_new_thread(show, (6, 19,))
time.sleep(0.5)
_thread.start_new_thread(show, (7, 15,))
time.sleep(0.5)
_thread.start_new_thread(show, (8, 18,))
time.sleep(0.5)
_thread.start_new_thread(show, (9, 13,))
time.sleep(0.5)
_thread.start_new_thread(show, (10, 14,))
time.sleep(0.5)
while t:
print("Warte auf Ende der Threads", t)
time.sleep(1)
print("Ende der Threads")
/Multiple_Apps/djangoEnv/bin/django-admin.py | py1-10-2017/rgero215_PY1-10-2017 | MIT-0 | Python
#!/Users/RGero13/Desktop/rgero215_PY1-10-2017/Multiple_Apps/djangoEnv/bin/python
def downheap(self, p):
pass
def
/config/conf.py | wmm0165/PytestAuto | no_license | Python
from datetime import datetime
"MIT-0"
] | permissive | py1-10-2017/rgero215_PY1-10-2017 | e582cb12cc63f84b1c0c14d09a922cb6cb228016 | f455b335ec9c8c850571f3a75dcd95759b4cfdad | refs/heads/master | 2021-09-04T03:23:48.062326 | 2018-01-14T21:07:26 | 2018-01-14T21:07:26 | 105,612,652 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 187 | py | #!/Users/RGero13/Desktop/rgero215_PY1-10-2017/Multiple_Apps/djangoEnv/bin/python
from django.core import management
if __name__ == "__main__":
management.execute_from_command_line()
| [
"[email protected]"
] | |
30d76d5a0ff3e6f690abdabd2e750c027eb4391d | c05f9fb686ef49c093e618a4078ffe723231f346 | /config/conf.py | aa2247f1303787036d7ea5fca1e3fa1d81a42f4c | [] | no_license | wmm0165/PytestAuto | d1bb40dcc5760439658c15af653953646119af44 | 42846b12ed7aefaa4e5890529ec71a76d27f245d | refs/heads/master | 2020-07-16T16:35:19.962864 | 2019-09-16T10:08:37 | 2019-09-16T10:08:37 | 205,825,635 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 881 | py | from datetime import datetime
import os
# 项目根目录
ROOT_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# 报告目录
REPORT_DIR = os.path.join(ROOT_DIR, 'report')
# ui对象库config.ini文件所在目录
CONF_PATH = os.path.join(ROOT_DIR, 'config', 'config.ini')
# 测试数据所在目录
DATA_Path = os.path.join(ROOT_DIR, 'data', 'tcData.xlsx')
# 当前时间
CURRENT_TIME = datetime.now().strftime('%H_%M_%S')
# 邮件配置信息
# 邮件服务器
SMTP_SERVER = 'smtp.qq.com'
# 发送者
FROM_USER = '[email protected]'
# 发送者密码
FROM_PASSWORD = 'mhxvqpewblldbjhf'
# 接收者
TO_USER = ['账号@qq.com'] # 可以同时发送给多人,追加到列表中
# 邮件标题
SUBJECT = 'xx项目自动化测试报告'
# 邮件正文
CONTENTS = '测试报告正文'
# 报告名称
HTML_NAME = 'testReport{}.html'.format(CURRENT_TIME)
print(HTML_NAME) | [
"[email protected]"
] | |
592ac9a1613e8c2b0e733f3b1ebe6ebb4046e7ca | cb12e3eff7bbb5fe2f4d0e2be9ca165a5577dc93 | /plt-and-algo/webrtc-p2pframe/serve.py | a43133af73a4329d22e725d2ebc34e112a0c7968 | [] | no_license | overminder/kitchen-sink | 6b1227ff00f8804d4d0a632e613ee903d51ab753 | 2e61b9041ceed536d42b42b75a5c50dae080c0ba | refs/heads/master | 2023-06-09T05:48:47.291336 | 2023-05-29T17:38:34 | 2023-05-29T17:38:34 | 50,777,705 | 18 | 3 | null | 2020-02-09T19:22:18 | 2016-01-31T14:13:16 | Scala | UTF-8 | Python | false | false | 3,346 | py | #!/usr/bin/env python3.5
import logging
import os
import sys
import random
import json
HERE = os.path.abspath(os.path.dirname(os.path.realpath(__file__)))
STATIC_PATH = os.path.join(HERE, 'static')
from tornado import gen
from tornado.websocket import WebSocketHandler
from tornado.web import Application, RequestHandler, StaticFileHandler
from tornado.ioloop import IOLoop
class Offer:
resp_cb = None
def __init__(self, key=None, offer_payload=None):
self.key = key
self.offer_payload = offer_payload
self.candidates = []
def __repr__(self):
return '<Offer candidates=%d %r>' % (len(self.candidates), self.offer_payload)
def wait_resp(self, callback=None):
self.resp_cb = callback
class Offers:
def __init__(self):
self.offers = {}
@classmethod
def mkkey(cls):
return str(random.randint(10000, 99999)) # Just to be simple.
def add(self, offer):
self.offers[offer.key] = offer
return offer
def find(self, key):
return self.offers[key]
def pop(self, key):
return self.offers.pop(key)
offers = Offers()
class OfferListingHandler(RequestHandler):
def get(self):
self.write({
'offers': [{'key': key, 'resp_cb': repr(resp_cb)}
for (key, resp_cb) in offers.offers.items()],
})
class OfferHandler(WebSocketHandler):
offer = None
key = None
def open(self):
self.key = Offers.mkkey()
def _ensure_offer(self):
if self.offer is None:
self.offer = Offer(key=self.key)
return self.offer
@gen.coroutine
def on_message(self, s):
msg = json.loads(s)
print('msg', type(msg), repr(msg))
if msg['type'] == 'allocKey':
self.write_message({
'type': 'allocKeyResp',
'key': self.key,
})
elif msg['type'] == 'offer':
offer = offers.add(self._ensure_offer())
offer.offer_payload = msg
self.write_message(json.dumps({
'type': 'offer-created',
}))
resp = yield gen.Task(offer.wait_resp)
self.write_message(json.dumps({
'type': 'offer-accepted',
'resp': resp,
}))
elif msg['type'] == 'take-offer':
offer = offers.find(msg['key'])
self.write_message(offer.offer_payload)
for c in offer.candidates:
self.write_message(c)
elif msg['type'] == 'answer':
key = msg.pop('forKey')
offer = offers.pop(key)
offer.resp_cb(msg)
elif msg['type'] == 'candidate':
self._ensure_offer().candidates.append(msg)
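    # Added note: the JSON messages handled above form a small signalling protocol between an
    # offering peer and an answering peer, roughly:
    #
    #     offerer  -> {"type": "allocKey"}                 <- {"type": "allocKeyResp", "key": k}
    #     offerer  -> {"type": "offer", ...} plus {"type": "candidate", ...} messages, stored per key
    #     answerer -> {"type": "take-offer", "key": k}     <- the stored offer and its candidates
    #     answerer -> {"type": "answer", "forKey": k, ...}  which resolves wait_resp(), so the
    #     offerer receives {"type": "offer-accepted", "resp": {...}}.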
class NoCacheStaticFileHandler(StaticFileHandler):
def set_extra_headers(self, path):
self.set_header('Cache-Control', 'no-store, no-cache, must-revalidate, max-age=0')
def mkapp():
return Application([
(r'/offer', OfferHandler),
(r'/offers/list', OfferListingHandler),
(r'/(.*)', NoCacheStaticFileHandler, {
'path': STATIC_PATH,
}),
], gzip=True)
def main():
port = 17080
mkapp().listen(port)
print('Listening on :%d' % port)
IOLoop.current().start()
if __name__ == '__main__':
main()
/src/custom_admin/__init__.py | Bloodlettinger/meandre | no_license | Python
# -*- coding: utf-8 -*-
from datetime import date, datetime
def ddmmyy(value):
if isinstance(value, (date, datetime)):
return value.strftime('%d.%m.%y')
else:
return u'--'
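# Added example: ddmmyy(date(2012, 10, 11)) returns '11.10.12'; any non-date/datetime value falls
# back to the '--' placeholder.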
/test/test_bad_request_error_code.py | cascadiarc/cyclos-python-client | no_license | Python
# coding: utf-8
"""
Cyclos 4.11.5 API
The REST API for Cyclos 4.11.5 # noqa: E501
OpenAPI spec version: 4.11.5
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import swagger_client
from swagger_client.models.bad_request_error_code import BadRequestErrorCode # noqa: E501
from swagger_client.rest import ApiException
class TestBadRequestErrorCode(unittest.TestCase):
"""BadRequestErrorCode unit test stubs"""
def setUp(self):
pass
def tearDown(self):
pass
def testBadRequestErrorCode(self):
"""Test BadRequestErrorCode"""
# FIXME: construct object with mandatory attributes with example values
# model = swagger_client.models.bad_request_error_code.BadRequestErrorCode() # noqa: E501
pass
if __name__ == '__main__':
unittest.main()
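# Added note: this generated stub can be run directly (python test_bad_request_error_code.py) or
# through a runner such as `python -m unittest discover test`.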
/Easy/599_minimumIndexSumOfTwoLists.py | Yucheng7713/CodingPracticeByYuch | no_license | Python
class Solution:
def findRestaurant(self, list1: List[str], list2: List[str]) -> List[str]:
r_set = set(list1 + list2)
map_1 = {res : i for i, res in enumerate(list1)}
map_2 = {res : i for i, res in enumerate(list2)}
common_res = []
min_sum = float('inf')
for r in r_set:
if r in map_1 and r in map_2:
k = map_1[r] + map_2[r]
if min_sum > k:
common_res = [r]
min_sum = k
elif min_sum == k:
                    common_res += [r]
        return common_res
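# Added note: with the two index maps the scan above runs in O(len(list1) + len(list2)) time and
# extra space. Example from the problem statement: list1 = ["Shogun", "Tapioca Express",
# "Burger King", "KFC"], list2 = ["KFC", "Shogun", "Burger King"] -> ["Shogun"] (index sum 0 + 1 = 1).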