metadata (dict) | text (string, length 60 to 3.49M)
---|---
{
"source": "jimbob88/passmark_scraper",
"score": 3
} |
#### File: passmark_scraper/passmark_scraper/harddrivebenchmark.py
```python
import requests
import sys
from bs4 import BeautifulSoup
from .logger import Logger
from .cpubenchmark import cpubenchmark_scraper_single, cpubenchmark_scraper_mega
logger = Logger("harddrivebenchmark_scraper")
class single_hdd:
def __init__(
self,
id=None,
hdd_name=None,
hdd_size=None,
hdd_mark=None,
hdd_rank=None,
hdd_value=None,
hdd_price=None
):
self._url = None
self.id = id
self._hdd_name = hdd_name
self._hdd_size = hdd_size
self._hdd_mark = hdd_mark
self._hdd_rank = hdd_rank
self._hdd_value = hdd_value
self._hdd_price = hdd_price
@property
def id(self):
return self._id
@id.setter
def id(self, value):
if isinstance(value, str):
value = value.replace("hdd", "")
self._id = value
self._url = "https://www.harddrivebenchmark.net/hdd.php?id=%s" % str(value)
def __repr__(self):
return str(self.__dict__())
def __dict__(self):
return {
"id": self.id,
"url": self._url,
"hdd_name": self._hdd_name,
"hdd_size": self._hdd_size,
"hdd_mark": self._hdd_mark,
"hdd_rank": self._hdd_rank,
"hdd_value": self._hdd_value,
"hdd_price": self._hdd_price
}
class mega_hdd:
def __init__(
self,
id=None,
hdd_name=None,
hdd_size=None,
hdd_mark=None,
hdd_samples=None,
hdd_rank=None,
hdd_price=None,
hdd_value=None,
test_date=None,
hdd_type=None
):
self._url = None
self.id = id
self._hdd_name = hdd_name
self._hdd_size = hdd_size
self._hdd_mark = hdd_mark
self._hdd_samples = hdd_samples
self._hdd_rank = hdd_rank
self._hdd_price = hdd_price
self._hdd_value = hdd_value
self._test_date = test_date
self._hdd_type = hdd_type
@property
def id(self):
return self._id
@id.setter
def id(self, value):
if isinstance(value, str):
value = value.replace("hdd", "")
self._id = value
self._url = "https://www.harddrivebenchmark.net/hdd.php?&id=%s" % str(value)
def __repr__(self):
return str(self.__dict__())
def __dict__(self):
return {
"id": self.id,
"url": self._url,
"hdd_name": self._hdd_name,
"hdd_size": self._hdd_size,
"hdd_mark": self._hdd_mark,
"hdd_samples": self._hdd_samples,
"hdd_rank": self._hdd_rank,
"hdd_price": self._hdd_price,
"hdd_value": self._hdd_value,
"test_date": self._test_date,
"hdd_type": self._hdd_type
}
class harddrivebenchmark_scraper_single(cpubenchmark_scraper_single):
def __init__(self, url="https://www.harddrivebenchmark.net/hdd_list.php"):
super().__init__(url=url)
def refresh_hdds(self, raw_rows=None, headers=None):
if raw_rows is None:
raw_rows = self._raw_rows
if headers is None:
headers = self._headers
_hdds = []
for idx, row in enumerate(raw_rows):
_cols = row.find_all("td")
if len(_cols) < 6:
logger.warning("Not enough columns for HDD: %i" % idx)
raise Exception("Not enough columns in table for extraction")
else:
_id = _cols[0].find('a').attrs['href'].split('id=')[-1]
if not _id:
logger.warning(
"Row %i: unable to find HDD id, the url returned will most likely be wrong." % idx
)
_hdds.append(
single_hdd(
id=_id,
hdd_name=_cols[0].text,
hdd_size=_cols[1].text,
hdd_mark=_cols[2].text,
hdd_rank=_cols[3].text,
hdd_value=_cols[4].text,
hdd_price=_cols[5].text
)
)
return _hdds
def refresh_all(self):
self._source = self.refresh_request()
self._soup = self.refresh_soup()
self._table = self.refresh_table()
self._headers = self.refresh_headers()
self._raw_rows = self.refresh_raw_rows()
self._hdds = self.refresh_hdds()
class harddrivebenchmark_scraper_mega(cpubenchmark_scraper_mega):
def __init__(self, url="https://www.harddrivebenchmark.net/hdd-mega-page.html"):
super().__init__(url=url)
def refresh_hdds(self, raw_rows=None, headers=None):
if raw_rows is None:
raw_rows = self._raw_rows
if headers is None:
headers = self._headers
_hdds = []
for idx, row in enumerate(raw_rows):
_cols = row[0].find_all("td")
if len(_cols) < 9:
logger.warning("Not enough columns for GPU: %i" % idx)
raise Exception("Not enough columns in table for extraction")
else:
_id = _cols[0].find('a').attrs['href'].split('id=')[-1]
if not _id:
logger.warning(
"Row %i: unable to find HDD id, the url returned will most likely be wrong." % idx
)
_hdds.append(mega_hdd(
id=_id,
hdd_name=_cols[0].text,
hdd_size=_cols[1].text,
hdd_mark=_cols[2].text,
hdd_samples=_cols[3].text,
hdd_rank=_cols[4].text,
hdd_price=_cols[5].text,
hdd_value=_cols[6].text,
test_date=_cols[7].text,
hdd_type=_cols[8].text
))
return _hdds
def refresh_all(self):
self._source = self.refresh_request()
self._soup = self.refresh_soup()
self._table = self.refresh_table()
self._headers = self.refresh_headers()
self._raw_rows = self.refresh_raw_rows()
self._hdds = self.refresh_hdds()
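# --- Added usage sketch, not part of the original module ---
# Rough calling sequence, assuming the refresh_* helpers inherited from the
# cpubenchmark scrapers fetch and parse the PassMark tables as their names suggest:
#
#     scraper = harddrivebenchmark_scraper_single()
#     scraper.refresh_all()
#     for hdd in scraper._hdds[:5]:
#         print(hdd)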
``` |
{
"source": "jimbob88/QuickWall",
"score": 3
} |
#### File: QuickWall/QuickWall/wal.py
```python
from pywal import (
colors, export, sequences, theme, settings
)
import os
from QuickWall.logger import Logger
logger = Logger("wal")
class Wal:
"""Change the theme based on the passed wallpaper"""
def set(self, wallpaper):
logger.debug("{}".format(wallpaper))
self.colors_plain = colors.get(wallpaper)
sequences.send(self.colors_plain, to_send=True)
colors.palette()
def save(self):
export.every(self.colors_plain)
def restore(self):
self.colors_plain = theme.file(os.path.join(
settings.CACHE_DIR,
"colors.json"
))
sequences.send(self.colors_plain, to_send=True)
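# --- Added usage sketch, not part of the original module ---
# Rough usage, assuming pywal is installed and the wallpaper path exists
# (the path below is a placeholder):
#
#     wal = Wal()
#     wal.set("/path/to/wallpaper.jpg")
#     wal.save()
#     wal.restore()   # later, re-apply the cached theme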
``` |
{
"source": "jimbobbennett/Adafruit_CircuitPython_Base64",
"score": 2
} |
#### File: jimbobbennett/Adafruit_CircuitPython_Base64/circuitpython_base64.py
```python
__version__ = "0.0.0-auto.0"
__repo__ = "https://github.com/jimbobbennett/CircuitPython_base64.git"
import re
import struct
import adafruit_binascii as binascii
__all__ = [
# Legacy interface exports traditional RFC 1521 Base64 encodings
"encode",
"decode",
"encodebytes",
"decodebytes",
# Generalized interface for other encodings
"b64encode",
"b64decode",
"b32encode",
"b32decode",
"b16encode",
"b16decode",
# Standard Base64 encoding
"standard_b64encode",
"standard_b64decode",
]
BYTES_TYPES = (bytes, bytearray) # Types acceptable as binary data
def _bytes_from_decode_data(data):
if isinstance(data, str):
try:
return data.encode("ascii")
except:  # UnicodeEncodeError may not be available on CircuitPython, so catch everything
raise ValueError("string argument should contain only ASCII characters")
elif isinstance(data, BYTES_TYPES):
return data
else:
raise TypeError(
"argument should be bytes or ASCII string, not %s" % data.__class__.__name__
)
# Base64 encoding/decoding uses binascii
def b64encode(toencode, altchars=None):
"""Encode a byte string using Base64.
toencode is the byte string to encode. Optional altchars must be a byte
string of length 2 which specifies an alternative alphabet for the
'+' and '/' characters. This allows an application to
e.g. generate url or filesystem safe Base64 strings.
The encoded byte string is returned.
"""
if not isinstance(toencode, BYTES_TYPES):
raise TypeError("expected bytes, not %s" % toencode.__class__.__name__)
# Strip off the trailing newline
encoded = binascii.b2a_base64(toencode)[:-1]
if altchars is not None:
if not isinstance(altchars, BYTES_TYPES):
raise TypeError("expected bytes, not %s" % altchars.__class__.__name__)
assert len(altchars) == 2, repr(altchars)
return encoded.translate(bytes.maketrans(b"+/", altchars))
return encoded
def b64decode(todecode, altchars=None, validate=False):
"""Decode a Base64 encoded byte string.
todecode is the byte string to decode. Optional altchars must be a
string of length 2 which specifies the alternative alphabet used
instead of the '+' and '/' characters.
The decoded string is returned. A binascii.Error is raised if todecode is
incorrectly padded.
If validate is False (the default), non-base64-alphabet characters are
discarded prior to the padding check. If validate is True,
non-base64-alphabet characters in the input result in a binascii.Error.
"""
todecode = _bytes_from_decode_data(todecode)
if altchars is not None:
altchars = _bytes_from_decode_data(altchars)
assert len(altchars) == 2, repr(altchars)
todecode = todecode.translate(bytes.maketrans(altchars, b"+/"))
if validate and not re.match(b"^[A-Za-z0-9+/]*={0,2}$", todecode):
raise binascii.Error("Non-base64 digit found")
return binascii.a2b_base64(todecode)
def standard_b64encode(toencode):
"""Encode a byte string using the standard Base64 alphabet.
toencode is the byte string to encode. The encoded byte string is returned.
"""
return b64encode(toencode)
def standard_b64decode(todecode):
"""Decode a byte string encoded with the standard Base64 alphabet.
todecode is the byte string to decode. The decoded byte string is
returned. binascii.Error is raised if the input is incorrectly
padded or if there are non-alphabet characters present in the
input.
"""
return b64decode(todecode)
# Base32 encoding/decoding must be done in Python
BASE32_ALPHABET = {
0: b"A",
9: b"J",
18: b"S",
27: b"3",
1: b"B",
10: b"K",
19: b"T",
28: b"4",
2: b"C",
11: b"L",
20: b"U",
29: b"5",
3: b"D",
12: b"M",
21: b"V",
30: b"6",
4: b"E",
13: b"N",
22: b"W",
31: b"7",
5: b"F",
14: b"O",
23: b"X",
6: b"G",
15: b"P",
24: b"Y",
7: b"H",
16: b"Q",
25: b"Z",
8: b"I",
17: b"R",
26: b"2",
}
BASE32_TAB = [v[0] for k, v in sorted(BASE32_ALPHABET.items())]
BASE32_REV = dict([(v[0], k) for k, v in BASE32_ALPHABET.items()])
def b32encode(toencode):
"""Encode a byte string using Base32.
toencode is the byte string to encode. The encoded byte string is returned.
"""
if not isinstance(toencode, BYTES_TYPES):
raise TypeError("expected bytes, not %s" % toencode.__class__.__name__)
quanta, leftover = divmod(len(toencode), 5)
# Pad the last quantum with zero bits if necessary
if leftover:
toencode = toencode + bytes(5 - leftover) # Don't use += !
quanta += 1
encoded = bytearray()
for i in range(quanta):
# part1 and part2 are 16 bits wide, part3 is 8 bits wide. The intent of this
# code is to process the 40 bits in units of 5 bits. So we take the 1
# leftover bit of part1 and tack it onto part2. Then we take the 2 leftover
# bits of part2 and tack them onto part3. The shifts and masks are intended
# to give us values of exactly 5 bits in width.
part1, part2, part3 = struct.unpack("!HHB", toencode[i * 5 : (i + 1) * 5])
part2 += (part1 & 1) << 16 # 17 bits wide
part3 += (part2 & 3) << 8 # 10 bits wide
encoded += bytes(
[
BASE32_TAB[part1 >> 11], # bits 1 - 5
BASE32_TAB[(part1 >> 6) & 0x1F], # bits 6 - 10
BASE32_TAB[(part1 >> 1) & 0x1F], # bits 11 - 15
BASE32_TAB[part2 >> 12], # bits 16 - 20 (1 - 5)
BASE32_TAB[(part2 >> 7) & 0x1F], # bits 21 - 25 (6 - 10)
BASE32_TAB[(part2 >> 2) & 0x1F], # bits 26 - 30 (11 - 15)
BASE32_TAB[part3 >> 5], # bits 31 - 35 (1 - 5)
BASE32_TAB[part3 & 0x1F], # bits 36 - 40 (1 - 5)
]
)
# Adjust for any leftover partial quanta
if leftover == 1:
encoded = encoded[:-6] + b"======"
elif leftover == 2:
encoded = encoded[:-4] + b"===="
elif leftover == 3:
encoded = encoded[:-3] + b"==="
elif leftover == 4:
encoded = encoded[:-1] + b"="
return bytes(encoded)
def b32decode(todecode, casefold=False, map01=None):
"""Decode a Base32 encoded byte string.
todecode is the byte string to decode. Optional casefold is a flag
specifying whether a lowercase alphabet is acceptable as input.
For security purposes, the default is False.
RFC 3548 allows for optional mapping of the digit 0 (zero) to the
letter O (oh), and for optional mapping of the digit 1 (one) to
either the letter I (eye) or letter L (el). The optional argument
map01 when not None, specifies which letter the digit 1 should be
mapped to (when map01 is not None, the digit 0 is always mapped to
the letter O). For security purposes the default is None, so that
0 and 1 are not allowed in the input.
The decoded byte string is returned. binascii.Error is raised if
the input is incorrectly padded or if there are non-alphabet
characters present in the input.
"""
todecode = _bytes_from_decode_data(todecode)
_, leftover = divmod(len(todecode), 8)
if leftover:
raise binascii.Error("Incorrect padding")
# Handle section 2.4 zero and one mapping. The flag map01 will be either
# False, or the character to map the digit 1 (one) to. It should be
# either L (el) or I (eye).
if map01 is not None:
map01 = _bytes_from_decode_data(map01)
assert len(map01) == 1, repr(map01)
todecode = todecode.translate(bytes.maketrans(b"01", b"O" + map01))
if casefold:
todecode = todecode.upper()
# Strip off pad characters from the right. We need to count the pad
# characters because this will tell us how many null bytes to remove from
# the end of the decoded string.
padchars = todecode.find(b"=")
if padchars > 0:
padchars = len(todecode) - padchars
todecode = todecode[:-padchars]
else:
padchars = 0
# Now decode the full quanta
parts = []
acc = 0
shift = 35
for char in todecode:
val = BASE32_REV.get(char)
if val is None:
raise binascii.Error("Non-base32 digit found")
acc += val << shift
shift -= 5
if shift < 0:
parts.append(binascii.unhexlify(bytes("%010x" % acc, "ascii")))
acc = 0
shift = 35
# Process the last, partial quanta
last = binascii.unhexlify(bytes("%010x" % acc, "ascii"))
if padchars == 0:
last = b"" # No characters
elif padchars == 1:
last = last[:-1]
elif padchars == 3:
last = last[:-2]
elif padchars == 4:
last = last[:-3]
elif padchars == 6:
last = last[:-4]
else:
raise binascii.Error("Incorrect padding")
parts.append(last)
return b"".join(parts)
# RFC 3548, Base 16 Alphabet specifies uppercase, but hexlify() returns
# lowercase. The RFC also recommends against accepting input case
# insensitively.
def b16encode(toencode):
"""Encode a byte string using Base16.
toencode is the byte string to encode. The encoded byte string is returned.
"""
if not isinstance(toencode, BYTES_TYPES):
raise TypeError("expected bytes, not %s" % toencode.__class__.__name__)
return binascii.hexlify(toencode).upper()
def b16decode(todecode, casefold=False):
"""Decode a Base16 encoded byte string.
todecode is the byte string to decode. Optional casefold is a flag
specifying whether a lowercase alphabet is acceptable as input.
For security purposes, the default is False.
The decoded byte string is returned. binascii.Error is raised if
todecode were incorrectly padded or if there are non-alphabet characters
present in the string.
"""
todecode = _bytes_from_decode_data(todecode)
if casefold:
todecode = todecode.upper()
if re.search(b"[^0-9A-F]", todecode):
raise binascii.Error("Non-base16 digit found")
return binascii.unhexlify(todecode)
# Legacy interface. This code could be cleaned up since I don't believe
# binascii has any line length limitations. It just doesn't seem worth it
# though. The files should be opened in binary mode.
MAXLINESIZE = 76 # Excluding the CRLF
MAXBINSIZE = (MAXLINESIZE // 4) * 3
def encode(inval, outval):
"""Encode a file; input and output are binary files."""
while True:
read = inval.read(MAXBINSIZE)
if not read:
break
while len(read) < MAXBINSIZE:
next_read = inval.read(MAXBINSIZE - len(read))
if not next_read:
break
read += next_read
line = binascii.b2a_base64(read)
outval.write(line)
def decode(inval, outval):
"""Decode a file; input and output are binary files."""
while True:
line = inval.readline()
if not line:
break
outval.write(binascii.a2b_base64(line))
def encodebytes(toencode):
"""Encode a bytestring into a bytestring containing multiple lines
of base-64 data."""
if not isinstance(toencode, BYTES_TYPES):
raise TypeError("expected bytes, not %s" % toencode.__class__.__name__)
pieces = []
for i in range(0, len(toencode), MAXBINSIZE):
chunk = toencode[i : i + MAXBINSIZE]
pieces.append(binascii.b2a_base64(chunk))
return b"".join(pieces)
def encodestring(toencode):
"""Legacy alias of encodebytes()."""
import warnings
warnings.warn(
"encodestring() is a deprecated alias, use encodebytes()", DeprecationWarning, 2
)
return encodebytes(toencode)
def decodebytes(todecode):
"""Decode a bytestring of base-64 data into a bytestring."""
if not isinstance(todecode, BYTES_TYPES):
raise TypeError("expected bytes, not %s" % todecode.__class__.__name__)
return binascii.a2b_base64(todecode)
def decodestring(todecode):
"""Legacy alias of decodebytes()."""
import warnings
warnings.warn(
"decodestring() is a deprecated alias, use decodebytes()", DeprecationWarning, 2
)
return decodebytes(todecode)
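# --- Added usage sketch, not part of the original module ---
# A short round-trip demo, assuming adafruit_binascii is importable on the
# target (CircuitPython board or Blinka); the sample payload is illustrative.
if __name__ == "__main__":
    sample = b"hello circuitpython"
    encoded = b64encode(sample)
    print("base64:", encoded)
    print("decoded:", b64decode(encoded))
    print("base32:", b32encode(sample))
    print("base16:", b16encode(sample))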
``` |
{
"source": "jimbobbennett/Adafruit_CircuitPython_HMAC",
"score": 2
} |
#### File: jimbobbennett/Adafruit_CircuitPython_HMAC/circuitpython_hmac.py
```python
__version__ = "0.0.0-auto.0"
__repo__ = "https://github.com/jimbobbennett/CircuitPython_HMAC.git"
import adafruit_hashlib as _hashlib
def _translate(key, translation):
return bytes(translation[x] for x in key)
TRANS_5C = bytes((x ^ 0x5C) for x in range(256))
TRANS_36 = bytes((x ^ 0x36) for x in range(256))
class HMAC:
"""RFC 2104 HMAC class. Also complies with RFC 4231.
This supports the API for Cryptographic Hash Functions (PEP 247).
"""
blocksize = 64 # 512-bit HMAC; can be changed in subclasses.
def __init__(self, key, msg=None, digestmod=None):
"""Create a new HMAC object.
key: key for the keyed hash object.
msg: Initial input for the hash, if provided.
digestmod: A module supporting PEP 247. *OR*
A hashlib constructor returning a new hash object. *OR*
A hash name suitable for hashlib.new().
Defaults to adafruit_hashlib.sha256 when not provided.
Note: key and msg must be a bytes or bytearray objects.
"""
if not isinstance(key, (bytes, bytearray)):
raise TypeError(
"key: expected bytes or bytearray, but got %r" % type(key).__name__
)
if digestmod is None:
digestmod = _hashlib.sha256
if callable(digestmod):
self.digest_cons = digestmod
elif isinstance(digestmod, str):
self.digest_cons = lambda d=b"": _hashlib.new(digestmod, d)
else:
self.digest_cons = lambda d=b"": digestmod.new(d)
self.outer = self.digest_cons()
self.inner = self.digest_cons()
self.digest_size = self.inner.digest_size
if hasattr(self.inner, "block_size"):
blocksize = self.inner.block_size
if blocksize < 16:
blocksize = self.blocksize
else:
blocksize = self.blocksize
# self.blocksize is the default blocksize. self.block_size is
# effective block size as well as the public API attribute.
self.block_size = blocksize
if len(key) > blocksize:
key = self.digest_cons(key).digest()
key = key + bytes(blocksize - len(key))
self.outer.update(_translate(key, TRANS_5C))
self.inner.update(_translate(key, TRANS_36))
if msg is not None:
self.update(msg)
@property
def name(self):
"""Return the name of this object
"""
return "hmac-" + self.inner.name
def update(self, msg):
"""Update this hashing object with the string msg.
"""
self.inner.update(msg)
def copy(self):
"""Return a separate copy of this hashing object.
An update to this copy won't affect the original object.
"""
# Call __new__ directly to avoid the expensive __init__.
other = self.__class__.__new__(self.__class__)
other.digest_cons = self.digest_cons
other.digest_size = self.digest_size
other.inner = self.inner.copy()
other.outer = self.outer.copy()
return other
def _current(self):
"""Return a hash object for the current state.
To be used only internally with digest() and hexdigest().
"""
hmac = self.outer.copy()
hmac.update(self.inner.digest())
return hmac
def digest(self):
"""Return the hash value of this hashing object.
This returns a string containing 8-bit data. The object is
not altered in any way by this function; you can continue
updating the object after calling this function.
"""
hmac = self._current()
return hmac.digest()
def hexdigest(self):
"""Like digest(), but returns a string of hexadecimal digits instead.
"""
hmac = self._current()
return hmac.hexdigest()
def new(key, msg=None, digestmod=None):
"""Create a new hashing object and return it.
key: The starting key for the hash.
msg: if available, will immediately be hashed into the object's starting
state.
You can now feed arbitrary strings into the object using its update()
method, and can ask for the hash value at any time by calling its digest()
method.
"""
return HMAC(key, msg, digestmod)
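# --- Added usage sketch, not part of the original module ---
# A minimal example, assuming adafruit_hashlib is importable on the target
# (CircuitPython board or Blinka); the key and message values are illustrative.
if __name__ == "__main__":
    mac = new(b"my-secret-key", b"message to sign", digestmod=_hashlib.sha256)
    print("hmac-sha256:", mac.hexdigest())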
``` |
{
"source": "jimbobbennett/Circuitpython-AzureIoT",
"score": 3
} |
#### File: jimbobbennett/Circuitpython-AzureIoT/device_registration.py
```python
import gc
import json
import time
import circuitpython_base64 as base64
import circuitpython_hmac as hmac
import circuitpython_parse as parse
from adafruit_esp32spi.adafruit_esp32spi_wifimanager import ESPSPI_WiFiManager
import adafruit_logging as logging
from adafruit_logging import Logger
import adafruit_hashlib as hashlib
from constants import constants
AZURE_HTTP_ERROR_CODES = [400, 401, 404, 403, 412, 429, 500] # Azure HTTP Status Codes
class DeviceRegistrationError(Exception):
"""
An error from the device registration
"""
def __init__(self, message):
super(DeviceRegistrationError, self).__init__(message)
self.message = message
class DeviceRegistration:
"""
Handles registration of IoT Central devices, and gets the hostname to use when connecting
to IoT Central over MQTT
"""
_dps_endpoint = constants["dpsEndPoint"]
_dps_api_version = constants["dpsAPIVersion"]
_loop_interval = 2
@staticmethod
def _parse_http_status(status_code, status_reason):
"""Parses status code, throws error based on Azure IoT Common Error Codes.
:param int status_code: HTTP status code.
:param str status_reason: Description of HTTP status.
"""
for error in AZURE_HTTP_ERROR_CODES:
if error == status_code:
raise TypeError("Error {0}: {1}".format(status_code, status_reason))
def __init__(self, wifi_manager: ESPSPI_WiFiManager, id_scope: str, device_id: str, key: str, logger: Logger = None):
"""Creates an instance of the device registration
:param wifi_manager: WiFiManager object from ESPSPI_WiFiManager.
:param str id_scope: The ID scope of the device to register
:param str device_id: The device ID of the device to register
:param str key: The primary or secondary key of the device to register
:param adafruit_logging.Logger logger: The logger to use; a default logger named "log" is created if not provided
"""
wifi_type = str(type(wifi_manager))
if "ESPSPI_WiFiManager" not in wifi_type:
raise TypeError("This library requires a WiFiManager object.")
self._wifi_manager = wifi_manager
self._id_scope = id_scope
self._device_id = device_id
self._key = key
self._logger = logger if logger is not None else logging.getLogger("log")
@staticmethod
def compute_derived_symmetric_key(secret, reg_id):
"""Computes a derived symmetric key from a secret and a message
"""
secret = base64.b64decode(secret)
return base64.b64encode(hmac.new(secret, msg=reg_id.encode("utf8"), digestmod=hashlib.sha256).digest())
def _loop_assign(self, operation_id, headers, loop_try=0) -> str:
uri = "https://%s/%s/registrations/%s/operations/%s?api-version=%s" % (
self._dps_endpoint,
self._id_scope,
self._device_id,
operation_id,
self._dps_api_version,
)
self._logger.info("- iotc :: _loop_assign :: " + uri)
target = parse.urlparse(uri)
response = self.__run_get_request_with_retry(target.geturl(), headers)
try:
data = response.json()
except Exception as error:
err = "ERROR: " + str(error) + " => " + str(response)
self._logger.error(err)
raise DeviceRegistrationError(err)
if data is not None and "status" in data:
if data["status"] == "assigning":
time.sleep(self._loop_interval)
if loop_try < 20:
return self._loop_assign(operation_id, headers, loop_try + 1)
err = "ERROR: Unable to provision the device."
self._logger.error(err)
raise DeviceRegistrationError(err)
if data["status"] == "assigned":
state = data["registrationState"]
return state["assignedHub"]
else:
data = str(data)
err = "DPS L => " + str(data)
self._logger.error(err)
raise DeviceRegistrationError(err)
def __run_put_request_with_retry(self, url, body, headers):
retry = 0
response = None
while True:
gc.collect()
try:
self._logger.debug("Trying to send...")
response = self._wifi_manager.put(url, json=body, headers=headers)
self._logger.debug("Sent!")
break
except RuntimeError as runtime_error:
self._logger.info("Could not send data, retrying after 0.5 seconds: " + str(runtime_error))
retry = retry + 1
if retry >= 10:
self._logger.error("Failed to send data")
raise
time.sleep(0.5)
continue
gc.collect()
return response
def __run_get_request_with_retry(self, url, headers):
retry = 0
response = None
while True:
gc.collect()
try:
self._logger.debug("Trying to send...")
response = self._wifi_manager.get(url, headers=headers)
self._logger.debug("Sent!")
break
except RuntimeError as runtime_error:
self._logger.info("Could not send data, retrying after 0.5 seconds: " + str(runtime_error))
retry = retry + 1
if retry >= 10:
self._logger.error("Failed to send data")
raise
time.sleep(0.5)
continue
gc.collect()
return response
def register_device(self, expiry: int) -> str:
"""
Registers the device with the IoT Central device registration service.
Returns the hostname of the IoT hub to use over MQTT
:param str expiry: The expiry time
"""
# pylint: disable=c0103
sr = self._id_scope + "%2Fregistrations%2F" + self._device_id
sig_no_encode = DeviceRegistration.compute_derived_symmetric_key(self._key, sr + "\n" + str(expiry))
sig_encoded = parse.quote(sig_no_encode, "~()*!.'")
auth_string = "SharedAccessSignature sr=" + sr + "&sig=" + sig_encoded + "&se=" + str(expiry) + "&skn=registration"
headers = {
"content-type": "application/json; charset=utf-8",
"user-agent": "iot-central-client/1.0",
"Accept": "*/*",
}
if auth_string is not None:
headers["authorization"] = auth_string
body = {"registrationId": self._device_id}
uri = "https://%s/%s/registrations/%s/register?api-version=%s" % (
self._dps_endpoint,
self._id_scope,
self._device_id,
self._dps_api_version,
)
target = parse.urlparse(uri)
self._logger.info("Connecting...")
self._logger.info("URL: " + target.geturl())
self._logger.info("body: " + json.dumps(body))
print("headers: " + json.dumps(headers))
response = self.__run_put_request_with_retry(target.geturl(), body, headers)
data = None
try:
data = response.json()
except Exception as e:
err = "ERROR: non JSON is received from " + self._dps_endpoint + " => " + str(response) + " .. message : " + str(e)
self._logger.error(err)
raise DeviceRegistrationError(err)
if "errorCode" in data:
err = "DPS => " + str(data)
self._logger.error(err)
raise DeviceRegistrationError(err)
time.sleep(1)
return self._loop_assign(data["operationId"], headers)
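# --- Added usage sketch, not part of the original module ---
# Rough calling sequence, assuming an already-connected ESPSPI_WiFiManager;
# the id scope, device id, key and expiry values are placeholders:
#
#     registration = DeviceRegistration(wifi_manager, "0ne0000000", "my-device", "device-key")
#     hostname = registration.register_device(expiry=1610000000)
#     print("Connect over MQTT to", hostname)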
```
#### File: jimbobbennett/Circuitpython-AzureIoT/iot_error.py
```python
class IoTError(Exception):
"""
An error from the IoT service
"""
def __init__(self, message):
super(IoTError, self).__init__(message)
self.message = message
``` |
{
"source": "jimbobbennett/CircuitPython_CustomVision",
"score": 3
} |
#### File: jimbobbennett/CircuitPython_CustomVision/azurecustomvision_prediction.py
```python
import json
import time
import gc
import adafruit_requests as requests
import adafruit_logging as logging
VERSION = "3.0"
class CustomVisionError(Exception):
"""
An error from the custom vision service
"""
def __init__(self, message):
super().__init__(message)
self.message = message
class BoundingBox:
"""Bounding box that defines a region of an image.
All required parameters must be populated in order to send to Azure.
:param left: Required. Coordinate of the left boundary.
:type left: float
:param top: Required. Coordinate of the top boundary.
:type top: float
:param width: Required. Width.
:type width: float
:param height: Required. Height.
:type height: float
"""
def __init__(self, left: float, top: float, width: float, height: float) -> None:
self.left = left
self.top = top
self.width = width
self.height = height
def __str__(self):
return "Top: " + str(self.top) + ", Left: " + str(self.left) + ", Width: " + str(self.width) + ", Height: " + str(self.height)
class Prediction:
"""Prediction result.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar probability: Probability of the tag.
:vartype probability: float
:ivar tag_id: Id of the predicted tag.
:vartype tag_id: str
:ivar tag_name: Name of the predicted tag.
:vartype tag_name: str
:ivar bounding_box: Bounding box of the prediction. This is None for image classification
:vartype bounding_box:
~circuitpython_azurecustomvision_prediction.BoundingBox
"""
def __init__(self, probability: float, tag_id: str, tag_name: str, bounding_box) -> None:
self.probability = probability
self.tag_id = tag_id
self.tag_name = tag_name
self.bounding_box = bounding_box
class ImagePrediction:
"""Result of an image prediction request.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar id: Prediction Id.
:vartype id: str
:ivar project: Project Id.
:vartype project: str
:ivar iteration: Iteration Id.
:vartype iteration: str
:ivar created: Date this prediction was created.
:vartype created: datetime
:ivar predictions: List of predictions.
:vartype predictions:
list[~circuitpython_azurecustomvision_prediction.Prediction]
"""
def __init__(self, response) -> None:
if not isinstance(response, dict):
response = json.loads(response)
self.prediction_id = response["id"]
self.project = response["project"]
self.iteration = response["iteration"]
self.created = response["created"]
self.predictions = []
for pred in response["predictions"]:
if "boundingBox" in pred:
box = pred["boundingBox"]
bounding_box = BoundingBox(left=box["left"], top=box["top"], width=box["width"], height=box["height"])
else:
bounding_box = None
prediction = Prediction(
probability=pred["probability"], tag_id=pred["tagId"], tag_name=pred["tagName"], bounding_box=bounding_box
)
self.predictions.append(prediction)
self.predictions.sort(key=lambda x: x.probability, reverse=True)
def _run_request_with_retry(url, body, headers):
retry = 0
r = None
logger = logging.getLogger("log")
while retry < 10:
gc.collect()
try:
logger.debug("Trying to send...")
r = requests.post(url, data=body, headers=headers)
if r.status_code != 200:
raise CustomVisionError(r.text)
break
except RuntimeError as runtime_error:
logger.info("Could not send data, retrying after 5 seconds: " + str(runtime_error))
retry = retry + 1
if retry >= 10:
raise
time.sleep(0.5)
continue
gc.collect()
return r
class CustomVisionPredictionClient:
"""CustomVisionPredictionClient
:param prediction_key: Prediction key.
:type prediction_key: str
:param endpoint: Supported Cognitive Services endpoints.
:type endpoint: str
"""
_classify_image_url_route = "customvision/v" + VERSION + "/Prediction/{projectId}/classify/iterations/{publishedName}/url"
_classify_image_route = "customvision/v" + VERSION + "/Prediction/{projectId}/classify/iterations/{publishedName}/image"
_detect_image_url_route = "customvision/v" + VERSION + "/Prediction/{projectId}/detect/iterations/{publishedName}/url"
_detect_image_route = "customvision/v" + VERSION + "/Prediction/{projectId}/detect/iterations/{publishedName}/image"
def __init__(self, prediction_key, endpoint):
self._prediction_key = prediction_key
# build the root endpoint
if not endpoint.lower().startswith("https://"):
endpoint = "https://" + endpoint
if not endpoint.endswith("/"):
endpoint = endpoint + "/"
self._base_endpoint = endpoint
self.api_version = VERSION
def _format_endpoint(self, url_format: str, project_id: str, published_name: str, store: bool, application):
endpoint = self._base_endpoint + url_format.format(projectId=project_id, publishedName=published_name)
if not store:
endpoint = endpoint + "/nostore"
if application is not None:
application = "?" + application
endpoint = endpoint + application
return endpoint
def _process_image_url(self, route: str, project_id: str, published_name: str, url: str, store: bool, application):
endpoint = self._format_endpoint(route, project_id, published_name, store, application)
headers = {"Content-Type": "application/json", "Prediction-Key": self._prediction_key}
body = json.dumps({"url": url})
result = _run_request_with_retry(endpoint, body, headers)
return ImagePrediction(result.text)
def _process_image(self, route: str, project_id: str, published_name: str, image_data: bytearray, store: bool, application):
endpoint = self._format_endpoint(route, project_id, published_name, store, application)
headers = {"Content-Type": "application/octet-stream", "Prediction-Key": self._prediction_key}
result = _run_request_with_retry(endpoint, image_data, headers)
return ImagePrediction(result.text)
def _classify_image_url(self, project_id: str, published_name: str, url: str, store: bool, application):
return self._process_image_url(self._classify_image_url_route, project_id, published_name, url, store, application)
def _classify_image(self, project_id: str, published_name: str, image_data: bytearray, store: bool, application):
return self._process_image(self._classify_image_route, project_id, published_name, image_data, store, application)
def _detect_image_url(self, project_id: str, published_name: str, url: str, store: bool, application):
return self._process_image_url(self._detect_image_url_route, project_id, published_name, url, store, application)
def _detect_image(self, project_id: str, published_name: str, image_data: bytearray, store: bool, application):
return self._process_image(self._detect_image_route, project_id, published_name, image_data, store, application)
def classify_image_url(self, project_id: str, published_name: str, url: str, application=None) -> ImagePrediction:
"""Classify an image url and saves the result.
:param project_id: The project id.
:type project_id: str
:param published_name: Specifies the name of the model to evaluate
against.
:type published_name: str
:param url: Url of the image.
:type url: str
:param application: Optional. Specifies the name of application using
the endpoint.
:type application: str
:return: ImagePrediction
:rtype:
~circuitpython_azurecustomvision_prediction.ImagePrediction
:raises:
:class:`CustomVisionError<circuitpython_azurecustomvision_prediction.CustomVisionErrorException>`
"""
return self._classify_image_url(project_id, published_name, url, True, application)
def classify_image_url_with_no_store(self, project_id: str, published_name: str, url: str, application=None) -> ImagePrediction:
"""Classify an image url without saving the result.
:param project_id: The project id.
:type project_id: str
:param published_name: Specifies the name of the model to evaluate
against.
:type published_name: str
:param url: Url of the image.
:type url: str
:param application: Optional. Specifies the name of application using
the endpoint.
:type application: str
:return: ImagePrediction
:rtype:
~circuitpython_azurecustomvision_prediction.ImagePrediction
:raises:
:class:`CustomVisionError<circuitpython_azurecustomvision_prediction.CustomVisionErrorException>`
"""
return self._classify_image_url(project_id, published_name, url, False, application)
def classify_image(self, project_id: str, published_name: str, image_data: bytearray, application=None) -> ImagePrediction:
"""Classify an image and saves the result.
:param project_id: The project id.
:type project_id: str
:param published_name: Specifies the name of the model to evaluate
against.
:type published_name: str
:param image_data: Binary image data. Supported formats are JPEG, GIF,
PNG, and BMP. Supports images up to 4MB.
:type image_data: bytearray
:param application: Optional. Specifies the name of application using
the endpoint.
:type application: str
:return: ImagePrediction
:rtype:
~circuitpython_azurecustomvision_prediction.ImagePrediction
:raises:
:class:`CustomVisionError<circuitpython_azurecustomvision_prediction.CustomVisionErrorException>`
"""
return self._classify_image(project_id, published_name, image_data, True, application)
def classify_image_with_no_store(
self, project_id: str, published_name: str, image_data: bytearray, application=None
) -> ImagePrediction:
"""Classify an image without saving the result.
:param project_id: The project id.
:type project_id: str
:param published_name: Specifies the name of the model to evaluate
against.
:type published_name: str
:param image_data: Binary image data. Supported formats are JPEG, GIF,
PNG, and BMP. Supports images up to 4MB.
:type image_data: bytearray
:param application: Optional. Specifies the name of application using
the endpoint.
:type application: str
:return: ImagePrediction
:rtype:
~circuitpython_azurecustomvision_prediction.ImagePrediction
:raises:
:class:`CustomVisionError<circuitpython_azurecustomvision_prediction.CustomVisionErrorException>`
"""
return self._classify_image(project_id, published_name, image_data, False, application)
def detect_image_url(self, project_id: str, published_name: str, url: str, application=None) -> ImagePrediction:
"""Detect objects in an image url and saves the result.
:param project_id: The project id.
:type project_id: str
:param published_name: Specifies the name of the model to evaluate
against.
:type published_name: str
:param url: Url of the image.
:type url: str
:param application: Optional. Specifies the name of application using
the endpoint.
:type application: str
:return: ImagePrediction
:rtype:
~circuitpython_azurecustomvision_prediction.ImagePrediction
:raises:
:class:`CustomVisionError<circuitpython_azurecustomvision_prediction.CustomVisionErrorException>`
"""
return self._detect_image_url(project_id, published_name, url, True, application)
def detect_image_url_with_no_store(self, project_id: str, published_name: str, url: str, application=None) -> ImagePrediction:
"""Detect objects in an image url without saving the result.
:param project_id: The project id.
:type project_id: str
:param published_name: Specifies the name of the model to evaluate
against.
:type published_name: str
:param url: Url of the image.
:type url: str
:param application: Optional. Specifies the name of application using
the endpoint.
:type application: str
:return: ImagePrediction
:rtype:
~circuitpython_azurecustomvision_prediction.ImagePrediction
:raises:
:class:`CustomVisionError<circuitpython_azurecustomvision_prediction.CustomVisionErrorException>`
"""
return self._detect_image_url(project_id, published_name, url, False, application)
def detect_image(self, project_id: str, published_name: str, image_data: bytearray, application=None) -> ImagePrediction:
"""Detect objects in an image and saves the result.
:param project_id: The project id.
:type project_id: str
:param published_name: Specifies the name of the model to evaluate
against.
:type published_name: str
:param image_data: Binary image data. Supported formats are JPEG, GIF,
PNG, and BMP. Supports images up to 4MB.
:type image_data: bytearray
:param application: Optional. Specifies the name of application using
the endpoint.
:type application: str
:return: ImagePrediction
:rtype:
~circuitpython_azurecustomvision_prediction.ImagePrediction
:raises:
:class:`CustomVisionError<circuitpython_azurecustomvision_prediction.CustomVisionErrorException>`
"""
return self._detect_image(project_id, published_name, image_data, True, application)
def detect_image_with_no_store(self, project_id: str, published_name: str, image_data: bytearray, application=None) -> ImagePrediction:
"""Detect objects in an image without saving the result.
:param project_id: The project id.
:type project_id: str
:param published_name: Specifies the name of the model to evaluate
against.
:type published_name: str
:param image_data: Binary image data. Supported formats are JPEG, GIF,
PNG, and BMP. Supports images up to 4MB.
:type image_data: bytearray
:param application: Optional. Specifies the name of application using
the endpoint.
:type application: str
:return: ImagePrediction
:rtype:
~circuitpython_azurecustomvision_prediction.ImagePrediction
:raises:
:class:`CustomVisionError<circuitpython_azurecustomvision_prediction.CustomVisionErrorException>`
"""
return self._detect_image(project_id, published_name, image_data, False, application)
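# --- Added usage sketch, not part of the original module ---
# Rough usage, assuming an Azure Custom Vision prediction resource; the key,
# endpoint, project id, iteration name and image URL are placeholders:
#
#     client = CustomVisionPredictionClient("prediction-key", "westus2.api.cognitive.microsoft.com")
#     result = client.classify_image_url("project-id", "Iteration1", "https://example.com/cat.jpg")
#     for prediction in result.predictions:
#         print(prediction.tag_name, prediction.probability)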
``` |
{
"source": "jimbobbennett/farmbeats-vnext-experimental",
"score": 3
} |
#### File: farmbeats-vnext-experimental/farmbeats-server/button.py
```python
from grove.factory import Factory
class DualButton:
def __init__(self, pin: int):
self.__button1 = Factory.getButton("GPIO-LOW", pin)
self.__button2 = Factory.getButton("GPIO-LOW", pin + 1)
self.__button1_pressed = False
self.__button2_pressed = False
def capture_values(self) -> None:
self.__button1_pressed = self.__button1.is_pressed()
self.__button2_pressed = self.__button2.is_pressed()
@property
def button1(self) -> bool:
return self.__button1_pressed
@property
def button2(self) -> bool:
return self.__button2_pressed
```
#### File: farmbeats-vnext-experimental/farmbeats-server/sensor_db.py
```python
import sqlite3
import sys
import time
from soilmoisture import SoilMoistureSensor
from sunlight import SunLightSensor
from temperaturehumidity import TemperatureAndHumiditySensor
from relay import Relay
from button import DualButton
def row_to_dict(cursor: sqlite3.Cursor, row: sqlite3.Row) -> dict:
data = {}
for idx, col in enumerate(cursor.description):
data[col[0]] = row[idx]
return data
class SensorDB:
def __init__(self, db_file: str):
self.__db_file = db_file
with sqlite3.connect(self.__db_file) as connection:
table_cursor = connection.cursor()
table_cursor.execute('''CREATE TABLE IF NOT EXISTS sensor_values
(date INTEGER PRIMARY KEY,
soil_moisture INTEGER,
temperature INTEGER,
humidity INTEGER,
soil_temperature INTEGER,
visible INTEGER,
infra_red INTEGER,
ultra_violet REAL,
relay_state INTEGER,
button1_state INTEGER,
button2_state INTEGER)''')
connection.commit()
table_cursor.close()
def save_values(self, soil_moisture_sensor: SoilMoistureSensor, temperature_humidity_sensor: TemperatureAndHumiditySensor, sunlight_sensor: SunLightSensor,
relay: Relay, button: DualButton) -> None:
print('Saving sensor values...', file=sys.stdout)
utc_time = int(time.time())
soil_moisture = soil_moisture_sensor.moisture
temperature = temperature_humidity_sensor.temperature
humidity = temperature_humidity_sensor.humidity
soil_temperature = temperature_humidity_sensor.soil_temperature
visible_light = sunlight_sensor.visible
infra_red = sunlight_sensor.infra_red
ultra_violet = sunlight_sensor.ultra_violet
relay_state = relay.state
button1_state = button.button1
button2_state = button.button2
with sqlite3.connect(self.__db_file) as connection:
insert_cursor = connection.cursor()
insert_cursor.execute('''INSERT INTO sensor_values VALUES
(?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)''',
(utc_time, soil_moisture, temperature, humidity, soil_temperature, visible_light, infra_red, ultra_violet, relay_state, button1_state, button2_state))
connection.commit()
insert_cursor.close()
print('Sensor values saved!', file=sys.stdout)
def get_history(self, from_date: int) -> list:
with sqlite3.connect(self.__db_file) as connection:
connection.row_factory = row_to_dict
select_cursor = connection.cursor()
select_cursor.execute('SELECT * FROM sensor_values WHERE date > ? ORDER BY date', (from_date,))
rows = select_cursor.fetchall()
select_cursor.close()
return rows
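# --- Added usage sketch, not part of the original module ---
# Rough usage, assuming the Grove-based sensor, relay and button objects from
# this project are already constructed; the database path is a placeholder:
#
#     db = SensorDB('/home/pi/sensors.db')
#     db.save_values(soil_moisture_sensor, temperature_humidity_sensor, sunlight_sensor, relay, button)
#     last_hour = db.get_history(int(time.time()) - 3600)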
```
#### File: farmbeats-vnext-experimental/farmbeats-server/soilmoisture.py
```python
from grove import adc
class SoilMoistureSensor:
def __init__(self, pin:int):
self.__pin = pin
self.__adc = adc.ADC()
self.__moisture = -1
def capture_values(self) -> None:
self.__moisture = self.__adc.read(self.__pin)
@property
def moisture(self) -> int:
return self.__moisture
``` |
{
"source": "jimbobbennett/gps-animal-tracker",
"score": 4
} |
#### File: device/decode-gps-data/app.py
```python
import asyncio
from typing import NamedTuple
# The pynmea2 library is used to decode NMEA sentences
# These are the messages that are sent by GPS devices
# You can read more about NMEA sentences at
# https://github.com/microsoft/IoT-For-Beginners/tree/main/3-transport/lessons/1-location-tracking#nmea-gps-data
import pynmea2
# The pyserial library is used to read serial data over a UART connection to
# the GPS sensor
import serial
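# Added reference example, not part of the original app: what a GGA sentence
# looks like and how pynmea2 exposes its fields (the sentence below is a
# commonly cited illustrative value, not real data):
#
#     sentence = pynmea2.parse("$GPGGA,123519,4807.038,N,01131.000,E,1,08,0.9,545.4,M,46.9,M,,*47")
#     sentence.sentence_type           # 'GGA'
#     sentence.lat, sentence.lat_dir   # '4807.038', 'N'
#     sentence.num_sats                # '08'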
class LatLon(NamedTuple):
'''
A named tuple type to define a GPS coordinate as a latitude and longitude,
along with the number of satellites use to get the fix
'''
lat: float
lon: float
num_satellites: int
def flush_serial(serial_conn: serial.Serial) -> None:
'''
Drains the serial data from the UART connection.
This is done so we can read data every 10 seconds, and ignore the data in-between
'''
# Clear and flush the input buffer to remove all data
serial_conn.reset_input_buffer()
serial_conn.flush()
# Try to read and decode a line. This is needed as the data is utf-8, so can be variable width
# and we don't want to read a partial line that starts part-way through a variable width character.
# The code here will read until it gets a full line that can be decoded successfully
line = None
while not line:
try:
# Decode the line as utf-8
line = serial_conn.readline().decode('utf-8').strip()
except UnicodeDecodeError:
# If we are reading part way through a character, this exception will be thrown.
# Reset the line and read again
line = None
def get_next_location(serial_conn: serial.Serial) -> LatLon:
'''
Gets the next lat and lon pair from the GPS sensor.
This reads data from the serial port until a GGA sentence is read - this is the sentence
with the GPS coordinates read from satellites. Once this sentence is read, the lat and lon
are returned as a tuple.
If no GPS coordinates are found, -999, -999 is returned.
'''
# Drain the serial buffer as we want the latest GPS reading
flush_serial(serial_conn)
# Set up a retry count - this code will try 100 times to get a valid
# GGA sentence, that is a sentence that has GPS coordinates from multiple
# satellites
retry = 0
# Start looping looking for a GGA sentence
while retry < 100:
# Read the next line from the serial buffer
line = serial_conn.readline().decode('utf-8')
# Try reading a sentence from the line read from the GPS sensor.
# If the line read is incomplete, the sentence will fail to parse, so we can try again
sentence = None
while sentence is None:
try:
# Use PyNMEA to parse the NMEA sentence from the line of data
sentence = pynmea2.parse(line)
except pynmea2.nmea.ParseError:
# If we get a parse error, read the next line
line = serial_conn.readline().decode('utf-8')
# Each sentence has a type specifying the data it has. This code is after a GGA
# sentence which has the GPS position
if sentence.sentence_type == 'GGA':
# If we have a GGA, read the lat and lon. The values are in degrees and minutes, so
# convert to decimal degrees
lat = pynmea2.dm_to_sd(sentence.lat)
lon = pynmea2.dm_to_sd(sentence.lon)
# The positions are given as N/S or E/W. For decimal degrees, these should be converted
# to positive or negative values. S of the equator is negative, so is west of the
# prime meridian
if sentence.lat_dir == 'S':
lat = lat * -1
if sentence.lon_dir == 'W':
lon = lon * -1
# Return the lat and lon as a tuple
return LatLon(lat, lon, sentence.num_sats)
# Increment the retry
retry += 1
# If we don't successfully get a lat and lon, return -999,-999
return LatLon(-999, -999, 0)
async def main() -> None:
'''
The main loop of the application.
This connects to the serial port, then reads and decodes GPS coordinates.
'''
# Connect to the GPS sensor. This is always a serial connection at 9,600 baud
# on the /dev/ttyAMA0 port
serial_connection = serial.Serial('/dev/ttyAMA0', 9600, timeout=1)
# Clear out any serial data to ensure we are reading full sentences
flush_serial(serial_connection)
# The main loop of the application. Loop forever
while True:
# Get the latest GPS coordinates
lat_lon = get_next_location(serial_connection)
# If there isn't a valid set of coordinates available, the call to get_next_location will
# return -999, -999 as the location. Test for this and only proceed if the coordinates are valid
if lat_lon.lat > -999 and lat_lon.lon > -999:
# If the coordinates are valid, print them
print(f'Lat: {lat_lon.lat}, Lon: {lat_lon.lon}. Data from {lat_lon.num_satellites} satellites')
# Sleep for 10 seconds between coordinates
await asyncio.sleep(10)
# Start the main loop running.
asyncio.run(main())
```
#### File: device/print-gps-data/app.py
```python
import serial
def flush_serial(serial_conn: serial.Serial) -> None:
'''
Drains the serial data from the UART connection.
This is done so we can read data every 10 seconds, and ignore the data in-between
'''
# Clear and flush the input buffer to remove all data
serial_conn.reset_input_buffer()
serial_conn.flush()
# Try to read and decode a line. This is needed as the data is utf-8, so can be variable width
# and we don't want to read a partial line that starts part-way through a variable width character.
# The code here will read until it gets a full line that can be decoded successfully
read_line = None
while read_line is None:
try:
# Decode the line as utf-8
read_line = serial_conn.readline().decode('utf-8')
except UnicodeDecodeError:
# If we are reading part way through a character, this exception will be thrown.
# Reset the line and read again
read_line = None
# Connect to the GPS sensor. This is always a serial connection at 9,600 baud
# on the /dev/ttyAMA0 port
serial_connection = serial.Serial('/dev/ttyAMA0', 9600, timeout=1)
# Clear out any serial data to ensure we are reading full sentences
flush_serial(serial_connection)
# The main loop of the application. Loop forever
# There is no pause here - the application will block whilst waiting for a new line from the serial port
while True:
# Read the line of data from the serial connection
line = serial_connection.readline().decode('utf-8').strip()
if line:
print(line)
``` |
{
"source": "jimbobbennett/iotc-python-client",
"score": 2
} |
#### File: iotc-python-client/samples/async_x509.py
```python
import os
import asyncio
import configparser
import sys
from random import randint
config = configparser.ConfigParser()
config.read(os.path.join(os.path.dirname(__file__),'samples.ini'))
# Change config section name to reflect sample.ini
device_id = config['DEVICE_A']['DeviceId']
scope_id = config['DEVICE_A']['ScopeId']
hub_name = config["DEVICE_A"]["HubName"]
x509 = {'cert_file': config['DEVICE_A']['CertFilePath'],'key_file':config['DEVICE_A']['KeyFilePath'],'cert_phrase':config['DEVICE_A']['CertPassphrase']}
if config['DEFAULT'].getboolean('Local'):
sys.path.insert(0, 'src')
from iotc import IOTCConnectType, IOTCLogLevel, IOTCEvents, Command, CredentialsCache, Storage
from iotc.aio import IoTCClient
class MemStorage(Storage):
def retrieve(self):
return CredentialsCache(
hub_name,
device_id,
certificate=x509,
)
def persist(self, credentials):
# a further option would be updating config file with latest hub name
return None
# optional model Id for auto-provisioning
try:
model_id = config["DEVICE_M3"]["ModelId"]
except:
model_id = None
async def on_props(property_name, property_value, component_name):
print("Received {}:{}".format(property_name, property_value))
return True
async def on_commands(command: Command):
print("Received command {} with value {}".format(command.name, command.value))
await command.reply()
async def on_enqueued_commands(command:Command):
print("Received offline command {} with value {}".format(command.name, command.value))
# change connect type to reflect the used key (device or group)
client = IoTCClient(
device_id,
scope_id,
IOTCConnectType.IOTC_CONNECT_X509_CERT,
x509,
storage=MemStorage(),
)
if model_id is not None:
client.set_model_id(model_id)
client.set_log_level(IOTCLogLevel.IOTC_LOGGING_ALL)
client.on(IOTCEvents.IOTC_PROPERTIES, on_props)
client.on(IOTCEvents.IOTC_COMMAND, on_commands)
client.on(IOTCEvents.IOTC_ENQUEUED_COMMAND, on_enqueued_commands)
async def main():
await client.connect()
await client.send_property({"writeableProp": 50})
while not client.terminated():
if client.is_connected():
await client.send_telemetry(
{
"temperature": randint(20, 45)
},{
"$.sub": "firstcomponent"
}
)
await asyncio.sleep(3)
asyncio.run(main())
```
#### File: iotc/aio/__init__.py
```python
import sys
import signal
import asyncio
import pkg_resources
from .. import (
AbstractClient,
IOTCLogLevel,
IOTCEvents,
IOTCConnectType,
Command,
CredentialsCache,
Storage,
GracefulExit,
)
from contextlib import suppress
from azure.iot.device.common.transport_exceptions import ConnectionDroppedError
from azure.iot.device import X509, MethodResponse, Message
from azure.iot.device.aio import IoTHubDeviceClient, ProvisioningDeviceClient
try:
__version__ = pkg_resources.get_distribution("iotc").version
except:
pass
try:
import hmac
except ImportError:
print("ERROR: missing dependency `hmac`")
sys.exit(3)
try:
import hashlib
except ImportError:
print("ERROR: missing dependency `hashlib`")
sys.exit(3)
try:
import base64
except ImportError:
print("ERROR: missing dependency `base64`")
sys.exit(3)
try:
import json
except ImportError:
print("ERROR: missing dependency `json`")
sys.exit(3)
try:
import uuid
except ImportError:
print("ERROR: missing dependency `uuid`")
sys.exit(3)
class ConsoleLogger:
def __init__(self, log_level):
self._log_level = log_level
async def _log(self, message):
print(message)
async def info(self, message):
if self._log_level != IOTCLogLevel.IOTC_LOGGING_DISABLED:
await self._log(message)
async def debug(self, message):
if self._log_level == IOTCLogLevel.IOTC_LOGGING_ALL:
await self._log(message)
def set_log_level(self, log_level):
self._log_level = log_level
class IoTCClient(AbstractClient):
def __init__(
self, device_id, scope_id, cred_type, key_or_cert, logger=None, storage=None, max_connection_attempts=5
):
AbstractClient.__init__(
self, device_id, scope_id, cred_type, key_or_cert, storage, max_connection_attempts
)
if logger is None:
self._logger = ConsoleLogger(IOTCLogLevel.IOTC_LOGGING_API_ONLY)
else:
if (
hasattr(logger, "info")
and hasattr(logger, "debug")
and hasattr(logger, "set_log_level")
):
self._logger = logger
else:
print(
"ERROR: Logger object has unsupported format. It must implement the following functions\n\
info(message);\ndebug(message);\nset_log_level(message);"
)
sys.exit()
def raise_graceful_exit(self, *args):
async def handle_disconnection():
await self.disconnect()
try:
asyncio.run_coroutine_threadsafe(
handle_disconnection(), asyncio.get_event_loop()
)
except:
pass
async def _handle_property_ack(
self,
callback,
property_name,
property_value,
property_version,
component_name=None,
):
if callback is not None:
ret = await callback(property_name, property_value, component_name)
else:
ret = True
if ret:
if component_name is not None:
await self._logger.debug("Acknowledging {}".format(property_name))
await self.send_property(
{
"{}".format(component_name): {
"__t": "c",
"{}".format(property_name): {
"value": property_value,
"ac": 200,
"ad": "Completed",
"av": property_version
}
}
}
)
else:
await self._logger.debug("Acknowledging {}".format(property_name))
await self.send_property(
{
"{}".format(property_name): {
"ac": 200,
"ad": "Completed",
"av": property_version,
"value": property_value,
}
}
)
else:
await self._logger.debug(
'Property "{}" unsuccessfully processed'.format(property_name)
)
async def _update_properties(self, patch, prop_cb):
for prop in patch:
is_component = False
if prop == "$version":
continue
# check if component
try:
is_component = isinstance(patch[prop], dict) and patch[prop]["__t"]
except KeyError:
pass
if is_component:
for component_prop in patch[prop]:
if component_prop == "__t":
continue
await self._logger.debug(
'In component "{}" for property "{}"'.format(
prop, component_prop
)
)
await self._handle_property_ack(
prop_cb,
component_prop,
patch[prop][component_prop],
patch["$version"],
prop,
)
else:
await self._handle_property_ack(
prop_cb, prop, patch[prop], patch["$version"]
)
async def _on_properties(self, patch):
await self._logger.debug("Setup properties listener")
try:
prop_cb = self._events[IOTCEvents.IOTC_PROPERTIES]
except KeyError:
await self._logger.debug("Properties callback not found")
return
await self._update_properties(patch, prop_cb)
async def _on_commands(self, method_request):
await self._logger.debug("Setup commands listener")
try:
cmd_cb = self._events[IOTCEvents.IOTC_COMMAND]
except KeyError:
await self._logger.debug("Command callback not found")
return
command = Command(method_request.name, method_request.payload)
try:
command_name_with_components = method_request.name.split("*")
if len(command_name_with_components) > 1:
# In a component
await self._logger.debug("Command in a component")
command = Command(
command_name_with_components[1],
method_request.payload,
command_name_with_components[0],
)
except:
pass
async def reply_fn():
await self._device_client.send_method_response(
MethodResponse.create_from_method_request(
method_request,
200,
{"result": True, "data": "Command received"},
)
)
command.reply = reply_fn
await self._logger.debug("Received command {}".format(method_request.name))
await cmd_cb(command)
async def _on_enqueued_commands(self, c2d):
await self._logger.debug("Setup offline commands listener")
try:
c2d_cb = self._events[IOTCEvents.IOTC_ENQUEUED_COMMAND]
except KeyError:
await self._logger.debug("Command callback not found")
return
# Wait for unknown method calls
c2d_name = c2d.custom_properties["method-name"]
command = Command(c2d_name, c2d.data)
try:
command_name_with_components = c2d_name.split("*")
if len(command_name_with_components) > 1:
# In a component
await self._logger.debug("Command in a component")
command = Command(
command_name_with_components[1],
c2d.data,
command_name_with_components[0],
)
except:
pass
await self._logger.debug("Received offline command {}".format(command.name))
await c2d_cb(command)
async def _send_message(self, payload, properties):
msg = self._prepare_message(payload, properties)
await self._device_client.send_message(msg)
async def send_property(self, payload):
"""
Send a property message
:param dict payload: The properties payload. Can contain multiple properties in the form {'<propName>':{'value':'<propValue>'}}
"""
await self._logger.debug("Sending property {}".format(json.dumps(payload)))
await self._device_client.patch_twin_reported_properties(payload)
async def send_telemetry(self, payload, properties=None):
"""
Send a telemetry message
:param dict payload: The telemetry payload. Can contain multiple telemetry fields in the form {'<fieldName1>':<fieldValue1>,...,'<fieldNameN>':<fieldValueN>}
:param dict optional properties: An object with custom properties to add to the message.
"""
await self._logger.info("Sending telemetry message: {}".format(payload))
await self._send_message(json.dumps(payload), properties)
async def connect(self, force_dps=False):
"""
Connects the device.
:raises exception: If connection fails
"""
if self._connection_attempts_count > self._max_connection_attempts: # max number of retries. exit
self._terminate = True
self._connecting = False
return
self._terminate = False
self._connecting = True
_credentials = None
if self._storage is not None and force_dps is False:
_credentials = self._storage.retrieve()
await self._logger.debug("Found cached credentials")
if _credentials is None:
if self._cred_type in (
IOTCConnectType.IOTC_CONNECT_DEVICE_KEY,
IOTCConnectType.IOTC_CONNECT_SYMM_KEY,
):
if self._cred_type == IOTCConnectType.IOTC_CONNECT_SYMM_KEY:
self._key_or_cert = await self._compute_derived_symmetric_key(
self._key_or_cert, self._device_id
)
await self._logger.debug("Device key: {}".format(self._key_or_cert))
self._provisioning_client = (
ProvisioningDeviceClient.create_from_symmetric_key(
self._global_endpoint,
self._device_id,
self._scope_id,
self._key_or_cert,
)
)
else:
self._key_file = self._key_or_cert["key_file"]
self._cert_file = self._key_or_cert["cert_file"]
try:
self._cert_phrase = self._key_or_cert["cert_phrase"]
x509 = X509(self._cert_file, self._key_file,
self._cert_phrase)
except:
await self._logger.debug(
"No passphrase available for certificate. Trying without it"
)
x509 = X509(self._cert_file, self._key_file)
# Certificate provisioning
self._provisioning_client = (
ProvisioningDeviceClient.create_from_x509_certificate(
provisioning_host=self._global_endpoint,
registration_id=self._device_id,
id_scope=self._scope_id,
x509=x509,
)
)
if self._model_id:
print("Provision model Id")
self._provisioning_client.provisioning_payload = {
"iotcModelId": self._model_id
}
try:
registration_result = await self._provisioning_client.register()
assigned_hub = registration_result.registration_state.assigned_hub
_credentials = CredentialsCache(
assigned_hub,
self._device_id,
device_key=self._key_or_cert
if self._cred_type
in (
IOTCConnectType.IOTC_CONNECT_DEVICE_KEY,
IOTCConnectType.IOTC_CONNECT_SYMM_KEY,
)
else None,
certificate=self._key_or_cert
if self._cred_type == IOTCConnectType.IOTC_CONNECT_X509_CERT
else None,
)
except Exception as e:
await self._logger.info(
"ERROR: Failed to get device provisioning information. {}".format(
e)
)
sys.exit(1)
# Connect to iothub
try:
if self._cred_type in (
IOTCConnectType.IOTC_CONNECT_DEVICE_KEY,
IOTCConnectType.IOTC_CONNECT_SYMM_KEY,
):
self._device_client = IoTHubDeviceClient.create_from_connection_string(
_credentials.connection_string
)
else:
if 'cert_phrase' in _credentials.certificate:
x509 = X509(
_credentials.certificate['cert_file'], _credentials.certificate['key_file'], _credentials.certificate['cert_phrase'])
else:
x509 = X509(
_credentials.certificate['cert_file'], _credentials.certificate['key_file'])
self._device_client = IoTHubDeviceClient.create_from_x509_certificate(
x509=x509,
hostname=_credentials.hub_name,
device_id=_credentials.device_id,
)
await self._device_client.connect()
await self._logger.debug("Device connected to '{}'".format(_credentials.hub_name))
self._connecting = False
self._twin = await self._device_client.get_twin()
await self._logger.debug("Current twin: {}".format(self._twin))
twin_patch = self._sync_twin()
if twin_patch is not None:
await self._update_properties(twin_patch, None)
        except Exception as e:  # Connection to the hub failed: the hub may be down or the connection string expired; fall back to DPS
await self._logger.info("ERROR: Failed to connect to Hub. {}".format(e))
if force_dps is True:
sys.exit(1)
self._connection_attempts_count += 1
await self.connect(True)
# setup listeners
self._device_client.on_twin_desired_properties_patch_received = self._on_properties
self._device_client.on_method_request_received = self._on_commands
self._device_client.on_message_received = self._on_enqueued_commands
if hasattr(self,'_conn_thread') and self._conn_thread is not None:
try:
self._conn_thread.cancel()
await self._conn_thread
except asyncio.CancelledError:
print("Resetting conn_status thread")
self._conn_thread = asyncio.create_task(self._on_connection_state())
signal.signal(signal.SIGINT, self.raise_graceful_exit)
signal.signal(signal.SIGTERM, self.raise_graceful_exit)
async def _on_connection_state(self):
while not self._terminate:
if not self._connecting and not self.is_connected():
await self._device_client.shutdown()
self._device_client = None
self._connection_attempts_count = 0
await self.connect(True)
await asyncio.sleep(1.0)
async def disconnect(self):
await self._logger.info("Received shutdown signal")
self._terminate = True
if hasattr(self,'_conn_thread') and self._conn_thread is not None:
tasks = asyncio.gather(
self._conn_thread
)
try:
await tasks
except:
pass
await self._device_client.shutdown()
await self._logger.info("Disconnecting client...")
await self._logger.info("Client disconnected.")
await self._logger.info("See you!")
async def _compute_derived_symmetric_key(self, secret, reg_id):
# pylint: disable=no-member
try:
secret = base64.b64decode(secret)
except:
await self._logger.debug("ERROR: broken base64 secret => `" + secret + "`")
sys.exit(2)
return base64.b64encode(
hmac.new(
secret, msg=reg_id.encode("utf8"), digestmod=hashlib.sha256
).digest()
).decode("utf-8")
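
# Usage sketch (hypothetical client/property names, not part of the library): the
# payload shapes accepted by send_property() and send_telemetry() above.
#
#   await client.send_property({'fanSpeed': {'value': 10}})
#   await client.send_telemetry({'temperature': 21.5, 'humidity': 60},
#                               properties={'customProp': 'customValue'})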
```
#### File: test/async/test_keys.py
```python
import pytest
import asyncio
import configparser
import os
import sys
config = configparser.ConfigParser()
config.read(os.path.join(os.path.dirname(__file__), "../tests.ini"))
if config["TESTS"].getboolean("Local"):
sys.path.insert(0, "src")
from iotc import IOTCConnectType, IOTCLogLevel, IOTCEvents
from iotc.aio import IoTCClient
from iotc.test import dummy_storage
@pytest.mark.asyncio
async def init_compute_key_tests(mocker, key_type, key, device_id):
client = IoTCClient(
device_id,
"scope_id",
key_type,
key,
)
spy = mocker.spy(client, "_compute_derived_symmetric_key")
ProvisioningClient = mocker.patch("iotc.aio.ProvisioningDeviceClient")
DeviceClient = mocker.patch("iotc.aio.IoTHubDeviceClient")
provisioning_client_instance = mocker.AsyncMock()
ProvisioningClient.create_from_symmetric_key.return_value = (
provisioning_client_instance
)
DeviceClient.create_from_connection_string.return_value = mocker.AsyncMock()
await client.connect()
await client.disconnect()
return spy
@pytest.mark.asyncio
async def test_compute_device_key_success(mocker):
group_key = "<KEY>
device_id = "pytest"
device_key = "<KEY>
spy = await init_compute_key_tests(
mocker, IOTCConnectType.IOTC_CONNECT_SYMM_KEY, group_key, device_id
)
spy.assert_called_once_with(group_key, device_id)
assert spy.spy_return == device_key
@pytest.mark.asyncio
async def test_compute_device_key_skip(mocker):
group_key = "<KEY>
device_id = "pytest"
device_key = "<KEY>
spy = await init_compute_key_tests(
mocker, IOTCConnectType.IOTC_CONNECT_DEVICE_KEY, device_key, device_id
)
spy.assert_not_called()
@pytest.mark.asyncio
async def test_compute_device_key_failed(mocker):
group_key = "<KEY>
device_id = "pytest"
device_key = "<KEY>
spy = await init_compute_key_tests(
mocker, IOTCConnectType.IOTC_CONNECT_SYMM_KEY, group_key, device_id
)
spy.assert_called_once_with(group_key, device_id)
assert spy.spy_return != device_key
```
#### File: iotc/test/__init__.py
```python
class dummy_storage:
def retrieve(self):
return {}
def persist(self, credentials):
return None
```
#### File: test/sync/test_listeners.py
```python
from iotc import IOTCConnectType, IOTCLogLevel, IOTCEvents, IoTCClient, Command
from iotc.test import dummy_storage
from azure.iot.device import MethodRequest, Message
import pytest
import configparser
import os
import time
import sys
config = configparser.ConfigParser()
config.read(os.path.join(os.path.dirname(__file__), "../tests.ini"))
if config["TESTS"].getboolean("Local"):
sys.path.insert(0, "src")
DEFAULT_COMPONENT_PROP = {"prop1": {"value": "value1"}, "$version": 1}
COMPONENT_PROP = {
"component1": {"__t": "c", "prop1": {"value": "value1"}},
"$version": 1,
}
COMPLEX_COMPONENT_PROP = {
"component1": {"__t": "c", "prop1": {"item1": "value1"}},
"component2": {
"__t": "c",
"prop1": "value1",
"prop2": 2,
},
"prop2": {"item2": "value2"},
"$version": 1,
}
DEFAULT_COMMAND = MethodRequest(1, "cmd1", "sample")
COMPONENT_COMMAND = MethodRequest(1, "commandComponent*cmd1", "sample")
COMPONENT_ENQUEUED = Message("sample_data")
COMPONENT_ENQUEUED.custom_properties = {
"method-name": "component*command_name"}
DEFAULT_COMPONENT_ENQUEUED = Message("sample_data")
DEFAULT_COMPONENT_ENQUEUED.custom_properties = {"method-name": "command_name"}
def command_equals(self, other):
return (
self.name == other.name
and self.component_name == other.component_name
and self.value == other.value
)
Command.__eq__ = command_equals
@pytest.fixture()
def iotc_client(mocker):
ProvisioningClient = mocker.patch("iotc.ProvisioningDeviceClient")
DeviceClient = mocker.patch("iotc.IoTHubDeviceClient")
ProvisioningClient.create_from_symmetric_key.return_value = mocker.MagicMock()
device_client_instance = (
DeviceClient.create_from_connection_string.return_value
) = mocker.MagicMock()
mocked_client = IoTCClient(
"device_id",
"scope_id",
IOTCConnectType.IOTC_CONNECT_DEVICE_KEY,
"device_key_base64",
)
mocked_client._device_client = device_client_instance
yield mocked_client
try:
mocked_client.disconnect()
except:
pass
def test_on_properties_triggered(mocker, iotc_client):
prop_stub = mocker.MagicMock()
iotc_client.on(IOTCEvents.IOTC_PROPERTIES, prop_stub)
iotc_client.connect()
iotc_client._device_client.on_twin_desired_properties_patch_received(DEFAULT_COMPONENT_PROP)
prop_stub.assert_called_with("prop1", {"value":"value1"}, None)
def test_on_properties_triggered_with_component(mocker, iotc_client):
prop_stub = mocker.MagicMock()
    # set a return value, otherwise checking the callback result would invoke the mock again
prop_stub.return_value = True
iotc_client.on(IOTCEvents.IOTC_PROPERTIES, prop_stub)
iotc_client.connect()
iotc_client._device_client.on_twin_desired_properties_patch_received(COMPONENT_PROP)
prop_stub.assert_called_with("prop1", {"value": "value1"}, "component1")
def test_on_properties_triggered_with_complex_component(mocker, iotc_client):
prop_stub = mocker.MagicMock()
    # set a return value, otherwise checking the callback result would invoke the mock again
prop_stub.return_value = True
iotc_client.on(IOTCEvents.IOTC_PROPERTIES, prop_stub)
iotc_client.connect()
iotc_client._device_client.on_twin_desired_properties_patch_received(COMPLEX_COMPONENT_PROP)
prop_stub.assert_has_calls(
[
mocker.call("prop1", {"item1": "value1"}, "component1"),
mocker.call("prop1", "value1", "component2"),
mocker.call("prop2", 2, "component2"),
mocker.call("prop2", {"item2": "value2"}, None),
], any_order=True
)
def test_on_command_triggered(mocker, iotc_client):
cmd_stub = mocker.MagicMock()
iotc_client.on(IOTCEvents.IOTC_COMMAND, cmd_stub)
iotc_client.connect()
iotc_client._device_client.on_method_request_received(DEFAULT_COMMAND)
cmd_stub.assert_called_with(Command("cmd1", "sample", None))
def test_on_command_triggered_with_component(mocker, iotc_client):
cmd_stub = mocker.MagicMock()
iotc_client.on(IOTCEvents.IOTC_COMMAND, cmd_stub)
iotc_client.connect()
iotc_client._device_client.on_method_request_received(COMPONENT_COMMAND)
cmd_stub.assert_called_with(Command("cmd1", "sample", "commandComponent"))
def test_on_enqueued_command_triggered(mocker, iotc_client):
cmd_stub = mocker.MagicMock()
iotc_client.on(IOTCEvents.IOTC_ENQUEUED_COMMAND, cmd_stub)
iotc_client.connect()
iotc_client._device_client.on_message_received(DEFAULT_COMPONENT_ENQUEUED)
cmd_stub.assert_called_with(Command("command_name", "sample_data", None))
def test_on_enqueued_command_triggered_with_component(mocker, iotc_client):
cmd_stub = mocker.MagicMock()
iotc_client.on(IOTCEvents.IOTC_ENQUEUED_COMMAND, cmd_stub)
iotc_client.connect()
iotc_client._device_client.on_message_received(COMPONENT_ENQUEUED)
cmd_stub.assert_called_with(
Command("command_name", "sample_data", "component"))
``` |
{
"source": "jimbobbennett/iot-hub-gps-route-simulator",
"score": 3
} |
#### File: jimbobbennett/iot-hub-gps-route-simulator/app.py
```python
import argparse
import json
import os
import time
from azure.iot.device import IoTHubDeviceClient
from bs4 import BeautifulSoup
from dotenv import load_dotenv
load_dotenv()
try:
device_connection_string = os.environ['DEVICE_CONNECTION_STRING']
except KeyError:
device_connection_string = ''
parser = argparse.ArgumentParser()
parser.add_argument('file', metavar='file', type=str, help='The .gpx file to upload')
parser.add_argument('-cs', '--connection_string', metavar='connection_string', type=str, default=device_connection_string, help='The IoT Hub device connection string to use to connect. You can also set this in a .env file with the DEVICE_CONNECTION_STRING key')
parser.add_argument('-fq', '--frequency', metavar='frequency', type=int, default=5, help='The number of seconds to wait between sending each point')
parser.add_argument('-r', '--repeat', action='store_true', help='Set this to continuously send the file')
parser.add_argument('-rv', '--reverse', action='store_true', help='Set this to reverse the points in the file after they\'ve all been sent')
args = parser.parse_args()
file_name = args.file
device_connection_string = args.connection_string
frequency = args.frequency
repeat = args.repeat
reverse = args.reverse
if device_connection_string is None or device_connection_string == '':
print('Missing connection string - either add it to a .env file with a key of DEVICE_CONNECTION_STRING, or pass it as a parameter using --connection_string <connection string>')
exit()
device_client = IoTHubDeviceClient.create_from_connection_string(device_connection_string)
# Connect the client.
print('Connecting to Azure IoT Hub...')
device_client.connect()
print('Connected!')
def send_track_part(track_part):
telemetry = {
'lat' : track_part['lat'],
'lon' : track_part['lon']
}
print('Sending telemetry:', telemetry)
device_client.send_message(json.dumps(telemetry))
def send_file():
print('Processing route file:', file_name)
with open(file_name, 'r') as gpx_file:
soup = BeautifulSoup(gpx_file, 'lxml')
track_parts = soup.find_all('trkpt')
for track_part in track_parts:
send_track_part(track_part)
time.sleep(frequency)
if reverse:
print('Sending file in reverse')
track_parts.reverse()
for track_part in track_parts:
send_track_part(track_part)
time.sleep(frequency)
if repeat:
while True: send_file()
else:
send_file()
print('Done!')
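
# Example invocation (hypothetical file name), based on the argparse options above:
#   python app.py route.gpx --frequency 2 --repeat --reverse
# This sends each <trkpt> lat/lon pair every 2 seconds, replays the route in
# reverse after each pass, and loops forever because --repeat is set.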
``` |
{
"source": "jimbobbennett/LinkSharingBadge",
"score": 3
} |
#### File: LinkSharingBadge/WebApp/app.py
```python
import os, requests, qrcode, base64, io
from flask import Flask, render_template, request, jsonify
from PIL import Image
def image_to_byte_array(image:Image):
imgByteArr = io.BytesIO()
image.save(imgByteArr, format=image.format)
imgByteArr = imgByteArr.getvalue()
return imgByteArr
iot_central_url_root = 'https://' + \
os.environ['APP_NAME'] + \
'.azureiotcentral.com/api/preview/devices/' + \
os.environ['DEVICE_NAME'] + \
'/components/' + \
os.environ['COMPONENT_NAME'] + \
'/commands/'
api_key = os.environ['API_KEY']
app = Flask(__name__)
# The root route, returns the home.html page
@app.route('/')
def home():
# Add any required page data here
page_data = {}
return render_template('home.html', page_data = page_data)
def postCommand(command_name, request):
headers = {'Authorization': api_key}
requests.post(iot_central_url_root + command_name, json={'request': request}, headers=headers)
@app.route('/updateName', methods=['POST'])
def update_name():
body = request.get_json()
name = body['wearer_name']
postCommand('UpdateName', name)
return {}
@app.route('/updateLink', methods=['POST'])
def update_link():
body = request.get_json()
link = body['badge_link']
img = qrcode.make(link)
print(type(img))
print(img.size)
bytes = image_to_byte_array(img)
postCommand('UpdateImage', base64.b64encode(bytes).decode())
return {}
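
# Sketch of the request postCommand() issues, with placeholders standing in for the
# APP_NAME, DEVICE_NAME, COMPONENT_NAME and API_KEY environment variables above:
#   POST https://<APP_NAME>.azureiotcentral.com/api/preview/devices/<DEVICE_NAME>/components/<COMPONENT_NAME>/commands/UpdateName
#   Authorization: <API_KEY>
#   {"request": "<wearer name>"}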
``` |
{
"source": "jimbobbennett/youre-a-mean-on-mr-grinch",
"score": 3
} |
#### File: youre-a-mean-on-mr-grinch/code/app.py
```python
from model import Model
import io, base64
from flask import Flask, render_template, request, jsonify
from PIL import Image
app = Flask(__name__)
print("Loading model...")
model = Model()
model.load()
print("Model loaded!")
@app.route('/')
def home():
return render_template('home.html')
@app.route('/result', methods=['POST'])
def check_results():
body = request.get_json()
image_bytes = base64.b64decode(body['image_base64'].split(',')[1])
image = Image.open(io.BytesIO(image_bytes))
if image.mode != "RGB":
image = image.convert("RGB")
predictions = model.predict(image)
print(predictions)
message = predictions['Prediction']
values = "Jim = {:.0f}%, Grinch = {:.0f}%".format(predictions['Confidences'][1] * 100, predictions['Confidences'][0] * 100)
return jsonify({
'message': message,
'values': values
})
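
# Assumed shape of the dict returned by Model.predict(), inferred from the lookups
# above (the model code itself is not shown here):
#   {'Prediction': 'Jim', 'Confidences': [0.02, 0.98]}
# where index 0 is the Grinch confidence and index 1 is the Jim confidence.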
``` |
{
"source": "jimbobhickville/aumbry",
"score": 3
} |
#### File: aumbry/cli/app.py
```python
import argparse
from aumbry.cli import upload, edit, view
def parse_arguments(argv=None):
parser = argparse.ArgumentParser(
'aumbry',
description='CLI Tool for Aumbry'
)
subparsers = parser.add_subparsers()
upload.setup_arguments(subparsers)
edit.setup_arguments(subparsers)
view.setup_arguments(subparsers)
return parser.parse_args(argv)
def main(argv=None):
arguments = parse_arguments(argv)
commands = {
'upload': upload.command,
'edit': edit.command,
'view': view.command,
}
return commands[arguments.command](arguments)
```
#### File: aumbry/cli/view.py
```python
from aumbry.cli.utils import setup_up_config
def setup_arguments(subparsers):
view = subparsers.add_parser(
'view',
help='Displays a configuration file'
)
view.set_defaults(command='view')
view.add_argument('--fernet-key', type=str)
view.add_argument('path', type=str, help='Config file path')
@setup_up_config
def command(arguments):
with open(arguments.path, 'r') as fp:
print(fp.read())
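
# Example invocation of the 'view' sub-command defined above (hypothetical path):
#   aumbry view --fernet-key <fernet key> /etc/myapp/config.yml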
```
#### File: aumbry/formats/generic.py
```python
from alchemize.transmute import JsonTransmuter
from aumbry.contract import AbstractHandler, AumbryConfig
class GenericHandler(AbstractHandler):
extras_name = 'generic'
@property
def imports(self):
return []
def serialize(self, config):
return JsonTransmuter.transmute_to(config, to_string=False)
def deserialize(self, raw_config, config_cls):
return JsonTransmuter.transmute_from(raw_config, config_cls)
def parse(self, raw_config):
return raw_config
class GenericConfig(AumbryConfig):
""" A type of AumbryConfig for Generic Dict Configurations."""
__handler__ = GenericHandler
```
#### File: aumbry/spec/loader.py
```python
import base64
import json
import os
import tempfile
from textwrap import dedent
from cryptography.fernet import Fernet
import requests_mock
from specter import Spec, DataSpec, expect
from six.moves import urllib
from moto import mock_ssm
from pike.discovery import py
import aumbry
from aumbry.errors import LoadError, SaveError, UnknownSourceError
from aumbry.formats.generic import GenericHandler
raw_json = dedent("""
{
"nope": "testing"
}
""")
raw_yaml = dedent("""
nope: testing
""")
partial_yaml1 = dedent("""
nope: testing
sample_list:
- list1
""")
partial_yaml2 = dedent("""
sample_list:
- list2
sample_dict:
which: 2
a: b
""")
partial_yaml3 = dedent("""
sample_dict:
which: 3
c: d
""")
class SampleJsonConfig(aumbry.JsonConfig):
__mapping__ = {
'nope': ['nope', str]
}
class SampleYamlConfig(aumbry.YamlConfig):
__mapping__ = {
'nope': ['nope', str]
}
class SampleExtendedYamlConfig(aumbry.YamlConfig):
__mapping__ = {
'nope': ['nope', str],
'sample_list': ['sample_list', list],
'sample_dict': ['sample_dict', dict],
}
class SampleGenericConfig(aumbry.GenericConfig):
__mapping__ = {
'nope': ['nope', str],
'sample_list': ['sample_list', list],
'sample_dict': ['sample_dict', dict],
'sample_model': ['sample_model', SampleJsonConfig],
}
def write_temp_file(raw):
temp = tempfile.NamedTemporaryFile(delete=False)
options = {'CONFIG_FILE_PATH': temp.name}
with temp as fp:
fp.write(bytes(raw.encode('utf-8')))
return temp, options
class VerifyLoaderHandlingFileBased(DataSpec):
DATASET = {
'yaml': {'raw': raw_yaml, 'cls': SampleYamlConfig},
'json': {'raw': raw_json, 'cls': SampleJsonConfig},
}
def can_load(self, raw, cls):
temp, options = write_temp_file(raw)
cfg = aumbry.load(aumbry.FILE, cls, options)
os.remove(temp.name)
expect(cfg.nope).to.equal('testing')
def can_save(self, raw, cls):
cfg = cls()
cfg.nope = 'testing'
with tempfile.NamedTemporaryFile() as temp:
options = {'CONFIG_FILE_PATH': temp.name}
aumbry.save(aumbry.FILE, cfg, options)
# Load up the saved file
loaded_cfg = aumbry.load(aumbry.FILE, cls, options)
expect(loaded_cfg.nope).to.equal(cfg.nope)
def can_use_preprocessors(self, raw, cls):
cfg = cls()
cfg.nope = 'testing'
with tempfile.NamedTemporaryFile() as temp:
options = {'CONFIG_FILE_PATH': temp.name}
aumbry.save(
aumbry.FILE,
cfg,
options,
preprocessor=lambda data: base64.b64encode(data)
)
expect('testing').not_to.be_in(temp.file.read().decode('utf-8'))
# Load up the saved file
loaded_cfg = aumbry.load(
aumbry.FILE,
cls,
options,
preprocessor=lambda data: base64.b64decode(data)
)
expect(loaded_cfg.nope).to.equal(cfg.nope)
class VerifyLoaderHandlingFernetFile(Spec):
def can_save_and_load(self):
cfg = SampleYamlConfig()
cfg.nope = 'testing'
with tempfile.NamedTemporaryFile() as temp:
options = {
'CONFIG_FILE_PATH': temp.name,
'CONFIG_FILE_FERNET_KEY': Fernet.generate_key().decode('utf-8')
}
aumbry.save(aumbry.FERNET, cfg, options)
# Load up the saved file
loaded_cfg = aumbry.load(aumbry.FERNET, SampleYamlConfig, options)
expect(loaded_cfg.nope).to.equal(cfg.nope)
class VerifyLoaderHandlingConsul(Spec):
def can_successfully_load_from_consul(self):
with requests_mock.Mocker() as mock:
value = base64.b64encode(raw_yaml.encode('utf-8'))
resp = [{
'Value': value.decode('utf-8')
}]
mock.get('http://bam/v1/kv/test_key', text=json.dumps(resp))
options = {
'CONSUL_URI': 'http://bam',
'CONSUL_KEY': 'test_key',
}
cfg = aumbry.load(aumbry.CONSUL, SampleYamlConfig, options)
expect(cfg.nope).to.equal('testing')
def can_handle_404_from_consul(self):
with requests_mock.Mocker() as mock:
mock.get('http://bam/v1/kv/test_key', status_code=404)
options = {
'CONSUL_URI': 'http://bam',
'CONSUL_KEY': 'test_key',
}
expect(
aumbry.load,
['consul', SampleYamlConfig, options]
).to.raise_a(LoadError)
def will_retry_on_other_codes(self):
with requests_mock.Mocker() as mock:
mock.get('http://bam/v1/kv/test_key', status_code=503)
options = {
'CONSUL_URI': 'http://bam',
'CONSUL_KEY': 'test_key',
'CONSUL_TIMEOUT': 1,
'CONSUL_RETRY_INTERVAL': 1,
}
expect(
aumbry.load,
['consul', SampleYamlConfig, options]
).to.raise_a(LoadError)
expect(len(mock.request_history)).to.equal(2)
def save_raises_a_not_implemented_error(self):
cfg = SampleYamlConfig()
cfg.nope = 'testing'
expect(
aumbry.save,
[aumbry.CONSUL, cfg, {}]
).to.raise_a(NotImplementedError)
class VerifyLoaderHandlingEtcd2(Spec):
def can_successfully_load_yaml_from_etcd(self):
with requests_mock.Mocker() as mock:
value = base64.b64encode(raw_yaml.encode('utf-8'))
resp = {
'node': {
'value': value.decode('utf-8'),
},
}
mock.get('http://bam/v2/keys/test_key', text=json.dumps(resp))
options = {
'ETCD2_URI': 'http://bam',
'ETCD2_KEY': 'test_key',
}
cfg = aumbry.load(aumbry.ETCD2, SampleYamlConfig, options)
expect(cfg.nope).to.equal('testing')
def can_successfully_save_to_etcd(self):
with requests_mock.Mocker() as mock:
mock_save = mock.put(
'http://bam/v2/keys/test_key',
status_code=201,
text='{}'
)
cfg = SampleYamlConfig()
cfg.nope = 'testing'
aumbry.save(
aumbry.ETCD2,
cfg,
options={
'ETCD2_URI': 'http://bam',
'ETCD2_KEY': 'test_key',
}
)
body = urllib.parse.unquote(mock_save.last_request.text)
expect(body).to.equal('value=bm9wZTogdGVzdGluZwo=')
def can_successfully_update_existing_in_etcd(self):
with requests_mock.Mocker() as mock:
mock_save = mock.put(
'http://bam/v2/keys/test_key',
status_code=200,
text='{}'
)
cfg = SampleYamlConfig()
cfg.nope = 'testing'
aumbry.save(
aumbry.ETCD2,
cfg,
options={
'ETCD2_URI': 'http://bam',
'ETCD2_KEY': 'test_key',
}
)
body = urllib.parse.unquote(mock_save.last_request.text)
expect(body).to.equal('value=bm9wZTogdGVzdGluZwo=')
def handles_save_failure(self):
with requests_mock.Mocker() as mock:
mock.put(
'http://bam/v2/keys/test_key',
status_code=400,
text='{}'
)
args = [
aumbry.ETCD2,
SampleYamlConfig(),
{
'ETCD2_URI': 'http://bam',
'ETCD2_KEY': 'test_key',
}
]
expect(aumbry.save, args).to.raise_a(SaveError)
def can_handle_404_from_consul(self):
with requests_mock.Mocker() as mock:
mock.get('http://bam/v2/keys/test_key', status_code=404)
options = {
'ETCD2_URI': 'http://bam',
'ETCD2_KEY': 'test_key',
}
expect(
aumbry.load,
['etcd2', SampleYamlConfig, options]
).to.raise_a(LoadError)
def will_retry_on_other_codes(self):
with requests_mock.Mocker() as mock:
mock.get('http://bam/v2/keys/test_key', status_code=503)
options = {
'ETCD2_URI': 'http://bam',
'ETCD2_KEY': 'test_key',
'ETCD2_TIMEOUT': 1,
'ETCD2_RETRY_INTERVAL': 1,
}
expect(
aumbry.load,
['etcd2', SampleYamlConfig, options]
).to.raise_a(LoadError)
expect(len(mock.request_history)).to.equal(2)
class VerifyLoaderHandlingParameterStore(Spec):
def can_successfully_save_and_load(self):
with mock_ssm():
options = {
'PARAMETER_STORE_AWS_REGION': 'us-west-2',
'PARAMETER_STORE_PREFIX': '/aumbry-test',
}
expected_cfg = SampleGenericConfig()
expected_cfg.nope = 'testing'
expected_cfg.sample_list = ['trace']
expected_cfg.sample_dict = {'trace': 'boom'}
expected_cfg.sample_model = SampleJsonConfig()
expected_cfg.sample_model.nope = 'testing2'
# Save Sample Config
aumbry.save(
aumbry.PARAM_STORE,
expected_cfg,
options
)
# Retrieve back the config
cfg = aumbry.load(
aumbry.PARAM_STORE,
SampleGenericConfig,
options
)
expect(cfg.nope).to.equal(expected_cfg.nope)
expect(cfg.sample_dict).to.equal({'trace': 'boom'})
expect(cfg.sample_list).to.equal(expected_cfg.sample_list)
expect(cfg.sample_model.nope).to.equal(expected_cfg.sample_model.nope)
def can_use_yaml_cfg_with_handler_override(self):
with mock_ssm():
options = {
'PARAMETER_STORE_AWS_REGION': 'us-west-2',
'PARAMETER_STORE_PREFIX': '/aumbry-test',
}
expected_cfg = SampleYamlConfig()
expected_cfg.nope = 'testing'
handler = GenericHandler()
# Save Sample Config
aumbry.save(
aumbry.PARAM_STORE,
expected_cfg,
options,
handler=handler
)
# Retrieve back the config
cfg = aumbry.load(
aumbry.PARAM_STORE,
SampleGenericConfig,
options,
handler=handler
)
expect(cfg.nope).to.equal(expected_cfg.nope)
class CheckInvalidLoader(Spec):
def raises_an_error(self):
expect(aumbry.load, ['bam', None]).to.raise_a(UnknownSourceError)
expect(aumbry.save, ['bam', None]).to.raise_a(UnknownSourceError)
class CustomSourcePluginPaths(Spec):
def setting_a_valid_path(self):
search_paths = py.get_module_by_name('aumbry.sources').__path__
temp, options = write_temp_file(raw_yaml)
cfg = aumbry.load(
'file',
SampleYamlConfig,
options,
search_paths=search_paths
)
os.remove(temp.name)
expect(cfg.nope).to.equal('testing')
def empty_list_raises_unknown_source(self):
expect(
aumbry.load,
['bam', None, ['/tmp']]
).to.raise_a(UnknownSourceError)
class VerifyMergingMultipleConfigs(Spec):
def can_merge_multiple_yaml(self):
temp1, options1 = write_temp_file(partial_yaml1)
temp2, options2 = write_temp_file(partial_yaml2)
temp3, options3 = write_temp_file(partial_yaml3)
cfg = aumbry.merge(SampleExtendedYamlConfig, (
{
'source_name': 'file',
'options': options1,
},
{
'source_name': 'file',
'options': options2,
},
{
'source_name': 'file',
'options': options3,
},
))
os.remove(temp1.name)
os.remove(temp2.name)
os.remove(temp3.name)
expect(cfg.nope).to.equal('testing')
expect(cfg.sample_list).to.equal(['list2'])
expect(cfg.sample_dict).to.equal({'which': 3, 'a': 'b', 'c': 'd'})
```
#### File: spec/utils/file.py
```python
import os
import mock
import six
import tempfile
from specter import Spec, expect
from aumbry.errors import LoadError, SaveError
from aumbry.utils.file import load_file, save_file
class VerifyFileUtils(Spec):
def before_each(self):
self.cfg_file = tempfile.NamedTemporaryFile(delete=False)
with self.cfg_file as fp:
fp.write(b'bam')
def after_each(self):
os.remove(self.cfg_file.name)
def can_load_file(self):
data = load_file(self.cfg_file.name)
expect(data).to.equal(b'bam')
def bad_path_raises_error(self):
expect(load_file, ['nope']).to.raise_a(LoadError)
expect(save_file, [None, 'bam']).to.raise_a(SaveError)
def error_during_open_or_read_raises_error(self):
def magic_open(fn, mode):
raise LoadError()
open_mock = mock.MagicMock()
open_mock.side_effect = magic_open
raised_error = False
patch_name = '{}.open'.format(six.moves.builtins.__name__)
with mock.patch(patch_name, open_mock):
# Doing this manually as test suites use open()
try:
load_file(self.cfg_file.name)
except LoadError:
raised_error = True
expect(raised_error).to.be_true()
```
#### File: spec/utils/__init__.py
```python
import six
import sys
class OutputCapture(list):
def __enter__(self):
self._stdout = sys.stdout
self._stderr = sys.stderr
sys.stdout = self._stdoutio = six.StringIO()
sys.stderr = self._stderrio = six.StringIO()
return self
def __exit__(self, *args):
self.extend(self._stdoutio.getvalue().splitlines())
self.extend(self._stderrio.getvalue().splitlines())
del self._stdoutio
del self._stderrio
sys.stdout = self._stdout
sys.stderr = self._stderr
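
# Minimal usage sketch: captured stdout/stderr lines end up in the list itself.
#   with OutputCapture() as output:
#       print('hello')
#   assert 'hello' in output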
``` |
{
"source": "jimbobhickville/libcloud",
"score": 2
} |
#### File: test/loadbalancer/test_dimensiondata.py
```python
import sys
import unittest
from libcloud.utils.py3 import httplib
from libcloud.common.types import InvalidCredsError
from libcloud.common.dimensiondata import DimensionDataVIPNode, DimensionDataPool
from libcloud.common.dimensiondata import DimensionDataPoolMember
from libcloud.loadbalancer.base import LoadBalancer, Member, Algorithm
from libcloud.loadbalancer.drivers.dimensiondata \
import DimensionDataLBDriver as DimensionData
from libcloud.loadbalancer.types import State
from libcloud.test import MockHttp
from libcloud.test.file_fixtures import LoadBalancerFileFixtures
from libcloud.test.secrets import DIMENSIONDATA_PARAMS
class DimensionDataTests(unittest.TestCase):
def setUp(self):
DimensionData.connectionCls.conn_classes = (None, DimensionDataMockHttp)
DimensionDataMockHttp.type = None
self.driver = DimensionData(*DIMENSIONDATA_PARAMS)
def test_invalid_region(self):
try:
self.driver = DimensionData(*DIMENSIONDATA_PARAMS, region='blah')
except ValueError:
pass
def test_invalid_creds(self):
DimensionDataMockHttp.type = 'UNAUTHORIZED'
try:
self.driver.list_balancers()
self.assertTrue(False)
            # The call above should have raised an InvalidCredsError
except InvalidCredsError:
pass
def test_create_balancer(self):
self.driver.ex_set_current_network_domain('1234')
members = []
members.append(Member(
id=None,
ip='1.2.3.4',
port=80))
balancer = self.driver.create_balancer(
name='test',
port=80,
protocol='http',
algorithm=Algorithm.ROUND_ROBIN,
members=members)
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '172.16.58.3')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
def test_create_balancer_with_defaults(self):
self.driver.ex_set_current_network_domain('1234')
balancer = self.driver.create_balancer(
name='test',
port=None,
protocol=None,
algorithm=None,
members=None)
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '172.16.58.3')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
def test_create_balancer_no_members(self):
self.driver.ex_set_current_network_domain('1234')
members = None
balancer = self.driver.create_balancer(
name='test',
port=80,
protocol='http',
algorithm=Algorithm.ROUND_ROBIN,
members=members)
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '172.16.58.3')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
def test_create_balancer_empty_members(self):
self.driver.ex_set_current_network_domain('1234')
members = []
balancer = self.driver.create_balancer(
name='test',
port=80,
protocol='http',
algorithm=Algorithm.ROUND_ROBIN,
members=members)
self.assertEqual(balancer.name, 'test')
self.assertEqual(balancer.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(balancer.ip, '172.16.58.3')
self.assertEqual(balancer.port, 80)
self.assertEqual(balancer.extra['pool_id'], '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(balancer.extra['network_domain_id'], '1234')
def test_list_balancers(self):
bal = self.driver.list_balancers()
self.assertEqual(bal[0].name, 'myProduction.Virtual.Listener')
self.assertEqual(bal[0].id, '6115469d-a8bb-445b-bb23-d23b5283f2b9')
self.assertEqual(bal[0].port, '8899')
self.assertEqual(bal[0].ip, '172.16.58.3')
self.assertEqual(bal[0].state, State.RUNNING)
def test_balancer_list_members(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
members = self.driver.balancer_list_members(balancer)
self.assertEqual(2, len(members))
self.assertEqual(members[0].ip, '10.0.3.13')
self.assertEqual(members[0].id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(members[0].port, 9889)
def test_balancer_attach_member(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
member = Member(
id=None,
ip='192.168.3.11',
port=80,
balancer=balancer,
extra=None)
member = self.driver.balancer_attach_member(balancer, member)
self.assertEqual(member.id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
def test_balancer_detach_member(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
member = Member(
id='3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0',
ip='192.168.3.11',
port=80,
balancer=balancer,
extra=None)
result = self.driver.balancer_detach_member(balancer, member)
self.assertEqual(result, True)
def test_destroy_balancer(self):
extra = {'pool_id': '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
'network_domain_id': '1234'}
balancer = LoadBalancer(
id='234',
name='test',
state=State.RUNNING,
ip='1.2.3.4',
port=1234,
driver=self.driver,
extra=extra
)
response = self.driver.destroy_balancer(balancer)
self.assertEqual(response, True)
def test_set_get_network_domain_id(self):
self.driver.ex_set_current_network_domain('1234')
nwd = self.driver.ex_get_current_network_domain()
self.assertEqual(nwd, '1234')
def test_ex_create_pool_member(self):
pool = DimensionDataPool(
id='4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None
)
node = DimensionDataVIPNode(
id='2344',
name='test',
status=State.RUNNING,
ip='192.168.3.11'
)
member = self.driver.ex_create_pool_member(
pool=pool,
node=node,
port=80
)
self.assertEqual(member.id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(member.name, '10.0.3.13')
self.assertEqual(member.ip, '192.168.3.11')
def test_ex_create_node(self):
node = self.driver.ex_create_node(
network_domain_id='12345',
name='test',
ip='192.168.127.12',
ex_description='',
connection_limit=25000,
connection_rate_limit=2000)
self.assertEqual(node.name, 'myProductionNode.1')
self.assertEqual(node.id, '9e6b496d-5261-4542-91aa-b50c7f569c54')
    def test_ex_create_pool(self):
pool = self.driver.ex_create_pool(
network_domain_id='1234',
name='test',
balancer_method='ROUND_ROBIN',
ex_description='test',
service_down_action='NONE',
slow_ramp_time=30)
self.assertEqual(pool.id, '9e6b496d-5261-4542-91aa-b50c7f569c54')
self.assertEqual(pool.name, 'test')
self.assertEqual(pool.status, State.RUNNING)
def test_ex_create_virtual_listener(self):
listener = self.driver.ex_create_virtual_listener(
network_domain_id='12345',
name='test',
ex_description='test',
port=80,
pool=DimensionDataPool(
id='1234',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None
))
self.assertEqual(listener.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(listener.name, 'test')
def test_ex_create_virtual_listener_unusual_port(self):
listener = self.driver.ex_create_virtual_listener(
network_domain_id='12345',
name='test',
ex_description='test',
port=8900,
pool=DimensionDataPool(
id='1234',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None
))
self.assertEqual(listener.id, '8334f461-0df0-42d5-97eb-f4678eb26bea')
self.assertEqual(listener.name, 'test')
def test_get_balancer(self):
bal = self.driver.get_balancer('6115469d-a8bb-445b-bb23-d23b5283f2b9')
self.assertEqual(bal.name, 'myProduction.Virtual.Listener')
self.assertEqual(bal.id, '6115469d-a8bb-445b-bb23-d23b5283f2b9')
self.assertEqual(bal.port, '8899')
self.assertEqual(bal.ip, '172.16.58.3')
self.assertEqual(bal.state, State.RUNNING)
def test_list_protocols(self):
protocols = self.driver.list_protocols()
self.assertNotEqual(0, len(protocols))
def test_ex_get_nodes(self):
nodes = self.driver.ex_get_nodes()
self.assertEqual(2, len(nodes))
self.assertEqual(nodes[0].name, 'ProductionNode.1')
self.assertEqual(nodes[0].id, '34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertEqual(nodes[0].ip, '10.10.10.101')
def test_ex_get_node(self):
node = self.driver.ex_get_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertEqual(node.name, 'ProductionNode.2')
self.assertEqual(node.id, '34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertEqual(node.ip, '10.10.10.101')
def test_ex_update_node(self):
node = self.driver.ex_get_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
node.connection_limit = '100'
result = self.driver.ex_update_node(node)
self.assertEqual(result.connection_limit, '100')
def test_ex_destroy_node(self):
result = self.driver.ex_destroy_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
self.assertTrue(result)
def test_ex_set_node_state(self):
node = self.driver.ex_get_node('34de6ed6-46a4-4dae-a753-2f8d3840c6f9')
result = self.driver.ex_set_node_state(node, False)
self.assertEqual(result.connection_limit, '10000')
def test_ex_get_pools(self):
pools = self.driver.ex_get_pools()
self.assertNotEqual(0, len(pools))
self.assertEqual(pools[0].name, 'myDevelopmentPool.1')
self.assertEqual(pools[0].id, '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
def test_ex_get_pool(self):
pool = self.driver.ex_get_pool('4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
self.assertEqual(pool.name, 'myDevelopmentPool.1')
self.assertEqual(pool.id, '4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
def test_ex_update_pool(self):
pool = self.driver.ex_get_pool('4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
pool.slow_ramp_time = '120'
result = self.driver.ex_update_pool(pool)
self.assertTrue(result)
def test_ex_destroy_pool(self):
response = self.driver.ex_destroy_pool(
pool=DimensionDataPool(
id='4d360b1f-bc2c-4ab7-9884-1f03ba2768f7',
name='test',
description='test',
status=State.RUNNING,
health_monitor_id=None,
load_balance_method=None,
service_down_action=None,
slow_ramp_time=None))
self.assertTrue(response)
def test_get_pool_members(self):
members = self.driver.ex_get_pool_members('4d360b1f-bc2c-4ab7-9884-1f03ba2768f7')
self.assertEqual(2, len(members))
self.assertEqual(members[0].id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(members[0].name, '10.0.3.13')
self.assertEqual(members[0].status, 'NORMAL')
self.assertEqual(members[0].ip, '10.0.3.13')
self.assertEqual(members[0].port, 9889)
self.assertEqual(members[0].node_id, '3c207269-e75e-11e4-811f-005056806999')
def test_get_pool_member(self):
member = self.driver.ex_get_pool_member('3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(member.id, '3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
self.assertEqual(member.name, '10.0.3.13')
self.assertEqual(member.status, 'NORMAL')
self.assertEqual(member.ip, '10.0.3.13')
self.assertEqual(member.port, 9889)
def test_set_pool_member_state(self):
member = self.driver.ex_get_pool_member('3dd806a2-c2c8-4c0c-9a4f-5219ea9266c0')
result = self.driver.ex_set_pool_member_state(member, True)
self.assertTrue(result)
def test_ex_destroy_pool_member(self):
response = self.driver.ex_destroy_pool_member(
member=DimensionDataPoolMember(
id='',
name='test',
status=State.RUNNING,
ip='1.2.3.4',
port=80,
node_id='3c207269-e75e-11e4-811f-005056806999'),
destroy_node=False)
self.assertTrue(response)
def test_ex_destroy_pool_member_with_node(self):
response = self.driver.ex_destroy_pool_member(
member=DimensionDataPoolMember(
id='',
name='test',
status=State.RUNNING,
ip='1.2.3.4',
port=80,
node_id='34de6ed6-46a4-4dae-a753-2f8d3840c6f9'),
destroy_node=True)
self.assertTrue(response)
def test_ex_get_default_health_monitors(self):
monitors = self.driver.ex_get_default_health_monitors(
'4d360b1f-bc2c-4ab7-9884-1f03ba2768f7'
)
self.assertEqual(len(monitors), 6)
self.assertEqual(monitors[0].id, '01683574-d487-11e4-811f-005056806999')
self.assertEqual(monitors[0].name, 'CCDEFAULT.Http')
self.assertFalse(monitors[0].node_compatible)
self.assertTrue(monitors[0].pool_compatible)
def test_ex_get_default_persistence_profiles(self):
profiles = self.driver.ex_get_default_persistence_profiles(
'4d360b1f-bc2c-4ab7-9884-1f03ba2768f7'
)
self.assertEqual(len(profiles), 4)
self.assertEqual(profiles[0].id, 'a34ca024-f3db-11e4-b010-005056806999')
self.assertEqual(profiles[0].name, 'CCDEFAULT.Cookie')
self.assertEqual(profiles[0].fallback_compatible, False)
self.assertEqual(len(profiles[0].compatible_listeners), 1)
self.assertEqual(profiles[0].compatible_listeners[0].type, 'PERFORMANCE_LAYER_4')
def test_ex_get_default_irules(self):
irules = self.driver.ex_get_default_irules(
'4d360b1f-bc2c-4ab7-9884-1f03ba2768f7'
)
self.assertEqual(len(irules), 4)
self.assertEqual(irules[0].id, '2b20cb2c-ffdc-11e4-b010-005056806999')
self.assertEqual(irules[0].name, 'CCDEFAULT.HttpsRedirect')
self.assertEqual(len(irules[0].compatible_listeners), 1)
self.assertEqual(irules[0].compatible_listeners[0].type, 'PERFORMANCE_LAYER_4')
class DimensionDataMockHttp(MockHttp):
fixtures = LoadBalancerFileFixtures('dimensiondata')
def _oec_0_9_myaccount_UNAUTHORIZED(self, method, url, body, headers):
return (httplib.UNAUTHORIZED, "", {}, httplib.responses[httplib.UNAUTHORIZED])
def _oec_0_9_myaccount(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _oec_0_9_myaccount_INPROGRESS(self, method, url, body, headers):
body = self.fixtures.load('oec_0_9_myaccount.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_virtualListener(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_virtualListener.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_virtualListener_6115469d_a8bb_445b_bb23_d23b5283f2b9(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_virtualListener_6115469d_a8bb_445b_bb23_d23b5283f2b9.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_pool(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_pool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_pool_4d360b1f_bc2c_4ab7_9884_1f03ba2768f7(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_pool_4d360b1f_bc2c_4ab7_9884_1f03ba2768f7.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_poolMember(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_poolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_poolMember_3dd806a2_c2c8_4c0c_9a4f_5219ea9266c0(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_poolMember_3dd806a2_c2c8_4c0c_9a4f_5219ea9266c0.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createPool(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createPool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createNode(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createNode.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_addPoolMember(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_addPoolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createVirtualListener(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_createVirtualListener.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_removePoolMember(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_removePoolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deleteVirtualListener(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deleteVirtualListener.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deletePool(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deletePool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deleteNode(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_deleteNode.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_node(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_node.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_node_34de6ed6_46a4_4dae_a753_2f8d3840c6f9(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_node_34de6ed6_46a4_4dae_a753_2f8d3840c6f9.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editNode(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editNode.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editPool(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editPool.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editPoolMember(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_editPoolMember.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultHealthMonitor(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultHealthMonitor.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultPersistenceProfile(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultPersistenceProfile.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
def _caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultIrule(self, method, url, body, headers):
body = self.fixtures.load(
'caas_2_1_8a8f6abc_2745_4d8a_9cbc_8dabe5a7d0e4_networkDomainVip_defaultIrule.xml')
return (httplib.OK, body, {}, httplib.responses[httplib.OK])
if __name__ == '__main__':
sys.exit(unittest.main())
``` |
{
"source": "jimbobhickville/py-stellar-base",
"score": 3
} |
#### File: py-stellar-base/stellar_base/address.py
```python
import requests
from .horizon import Horizon
from .keypair import Keypair
from .exceptions import AccountNotExistError, NotValidParamError
from .horizon import HORIZON_LIVE, HORIZON_TEST
class Address(object):
"""The :class:`Address` object, which represents an address (public key) on
Stellar's network.
An :class:`Address` is initialized via a public key string, or derived via
a secret seed. The network on which the account exists is also specified,
as it is used to verify and set attributes via connecting to Horizon. It
mostly exists as a helper class for Horizon operations on a given account
ID.
:param str address: The address string that represents this
:class:`Address`.
:param str secret: The secret seed string that is used to derive the
address for this :class:`Address`.
:param str network: The network to connect to for verifying and retrieving
additional attributes from. Must be either 'PUBLIC' or 'TESTNET'.
    :param Horizon horizon: The :class:`Horizon` instance to use for
        retrieving additional information about the account to which this
        address corresponds.
"""
# TODO: Make network an enum
def __init__(
self, address=None, secret=None, network='TESTNET', horizon=None):
if address is None and secret is None:
# FIXME: Throw a better exception
            raise Exception('oops, need a stellar address or secret')
if address is None and secret is not None:
self.address = Keypair.from_seed(secret).address().decode()
else:
self.address = address
self.secret = secret
if network.upper() != 'PUBLIC':
self.network = 'TESTNET'
else:
self.network = 'PUBLIC'
if horizon:
if isinstance(horizon, Horizon):
self.horizon = horizon
else:
self.horizon = Horizon(horizon)
elif network.upper() == 'PUBLIC':
self.horizon = Horizon(HORIZON_LIVE)
else:
self.horizon = Horizon(HORIZON_TEST)
self.sequence = None
self.balances = None
self.paging_token = None
self.thresholds = None
self.flags = None
self.signers = None
self.data = None
def get(self):
"""Retrieve the account data that corresponds to this :class:`Address`.
Retrieve the account data from Horizon for the account that corresponds
to this :class:`Address`. Attempt to retrieve the following attributes
from Horizon:
* Sequence Number
* Balances
* Paging Token
* Thresholds
* Flags
* Signers
* Data
:raises AccountNotExistError: If the account does not exist, shown by a
404 response from a Horizon server.
:raises Exception: If any other problems come up, or if a network
            connection error occurs.
"""
try:
acc = self.horizon.account(self.address)
if acc.get('sequence'):
self.sequence = acc.get('sequence')
self.balances = acc.get('balances')
self.paging_token = acc.get('paging_token')
self.thresholds = acc.get('thresholds')
self.flags = acc.get('flags')
self.signers = acc.get('signers')
self.data = acc.get('data')
elif acc.get('status') == 404:
raise AccountNotExistError(acc.get('title'))
else:
# FIXME: Throw a more specific exception.
raise Exception(acc.get('detail'))
except requests.ConnectionError:
raise Exception('network problem')
def payments(self, sse=False, **kwargs):
"""Retrieve the payments JSON from this instance's Horizon server.
Retrieve the payments JSON response for the account associated with
this :class:`Address`.
:param bool sse: Use the SSE client for connecting to Horizon.
"""
check_params(kwargs)
return self.horizon.account_payments(
self.address, params=kwargs, sse=sse)
def offers(self, **kwargs):
"""Retrieve the offers JSON from this instance's Horizon server.
Retrieve the offers JSON response for the account associated with
this :class:`Address`.
"""
check_params(kwargs)
return self.horizon.account_offers(self.address, params=kwargs)
def transactions(self, sse=False, **kwargs):
"""Retrieve the transactions JSON from this instance's Horizon server.
Retrieve the transactions JSON response for the account associated with
this :class:`Address`.
:param bool sse: Use the SSE client for connecting to Horizon.
"""
check_params(kwargs)
return self.horizon.account_transactions(
self.address, params=kwargs, sse=sse)
def operations(self, sse=False, **kwargs):
"""Retrieve the operations JSON from this instance's Horizon server.
Retrieve the operations JSON response for the account associated with
this :class:`Address`.
:param bool sse: Use the SSE client for connecting to Horizon.
"""
check_params(kwargs)
return self.horizon.account_operations(
self.address, params=kwargs, sse=sse)
def effects(self, sse=False, **kwargs):
"""Retrieve the effects JSON from this instance's Horizon server.
Retrieve the effects JSON response for the account associated with
this :class:`Address`.
:param bool sse: Use the SSE client for connecting to Horizon.
"""
check_params(kwargs)
return self.horizon.account_effects(
self.address, params=kwargs, sse=sse)
# TODO: Make this a private method of the Address class.
def check_params(data):
"""Check for appropriate keywords for a Horizon request method.
Check a dict of arguments to make sure that they only contain allowable
params for requests to Horizon, such as 'cursor', 'limit', and 'order'.
"""
params_allowed = {'cursor', 'limit', 'order'}
params = set(data.keys())
if params - params_allowed:
raise NotValidParamError('not valid params')
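
# Usage sketch (hypothetical account ID, against TESTNET):
#   addr = Address(address='GXXXXXXXX...', network='TESTNET')
#   addr.get()                      # populates sequence, balances, signers, ...
#   payments = addr.payments(limit=10, order='desc')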
```
#### File: py-stellar-base/stellar_base/horizon.py
```python
import requests
try:
from sseclient import SSEClient
except ImportError:
SSEClient = None
try:
# Python 3
from urllib.parse import urlencode
except ImportError:
# Python 2
from urllib import urlencode
from .exceptions import HorizonError
HORIZON_LIVE = "https://horizon.stellar.org"
HORIZON_TEST = "https://horizon-testnet.stellar.org"
class Horizon(object):
def __init__(self, horizon=None, sse=False, timeout=20):
"""The :class:`Horizon` object, which represents the interface for
making requests to a Horizon server instance.
This class aims to be up to date with Horizon's API endpoints; however,
you can utilize the internal session via ``self.session`` (which is a
:class:`requests.Session` object) to make arbitrary requests to
a Horizon instance's API.
In general, on HTTP errors (non 2XX/3XX responses), no exception is
raised, and the return dictionary must be checked to see if it is an
        error or a valid response. Any other errors, however, are raised by
        this class.
:param str horizon: The horizon base URL
:param bool sse: Default to using server side events for streaming
responses when available.
:param int timeout: The timeout for all requests.
"""
if sse and SSEClient is None:
raise ValueError('SSE not supported, missing sseclient module')
if horizon is None:
self.horizon = HORIZON_TEST
else:
self.horizon = horizon
self.session = requests.Session()
self.sse = sse
self.timeout = timeout
def _request(self, verb, endpoint, **kwargs):
url = '{base}{endpoint}'.format(base=self.horizon, endpoint=endpoint)
        # Pop sse so it is not forwarded to requests.Session.request below.
        if kwargs.pop('sse', False):
if 'params' in kwargs and kwargs['params']:
url = '{}?{}'.format(url, urlencode(kwargs['params']))
messages = SSEClient(url)
return messages
else:
try:
# FIXME: We should really consider raising the HTTPError when
# it happens and wrapping its JSON response in a HorizonError
resp = self.session.request(
verb, url, timeout=self.timeout, **kwargs)
return resp.json()
except requests.RequestException:
raise HorizonError(
'Could not successfully make a request to Horizon.')
def _get(self, endpoint, **kwargs):
        # If sse has been passed in by an endpoint (meaning it supports SSE)
        # but hasn't been explicitly set by the request, default to this
        # instance's SSE setting.
if 'sse' in kwargs and kwargs['sse'] is None:
kwargs['sse'] = self.sse
return self._request('GET', endpoint, **kwargs)
def _post(self, endpoint, **kwargs):
return self._request('POST', endpoint, **kwargs)
def submit(self, te, **kwargs):
"""Submit a transaction to Horizon.
`POST /transactions
<https://www.stellar.org/developers/horizon/reference/endpoints/transactions-create.html>`_
Uses form-encoded data to send over to Horizon.
:param bytes te: The transaction envelope to submit
:return: The JSON response indicating the success/failure of the
submitted transaction.
:rtype: dict
"""
payload = {'tx': te}
return self._post('/transactions', data=payload, **kwargs)
def account(self, address, **kwargs):
"""Returns information and links relating to a single account.
`GET /accounts/{account}
<https://www.stellar.org/developers/horizon/reference/endpoints/accounts-single.html>`_
:param str address: The account ID to retrieve details about
:return: The account details in a JSON response
:rtype: dict
"""
endpoint = '/accounts/{account_id}'.format(account_id=address)
return self._get(endpoint, **kwargs)
def account_data(self, account_id, data_key, **kwargs):
"""This endpoint represents a single data associated with a given
account.
`GET /accounts/{account}/data/{key}
<https://www.stellar.org/developers/horizon/reference/endpoints/data-for-account.html>`_
:param str account_id: The account ID to look up a data item from
:param str data_key: The name of the key for the data item in question
:return: The value of the data field for the given account and data
key
:rtype: dict
"""
endpoint = '/accounts/{account_id}/data/{data_key}'.format(
account_id=account_id, data_key=data_key)
return self._get(endpoint, **kwargs)
def account_effects(self, address, params=None, sse=None, **kwargs):
"""This endpoint represents all effects that changed a given account.
`GET /accounts/{account}/effects{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/effects-for-account.html>`_
:param str address: The account ID to look up effects for.
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:param bool sse: Use server side events for streaming responses
:return: The list of effects in a JSON response.
:rtype: dict
"""
endpoint = '/accounts/{account_id}/effects'.format(account_id=address)
        return self._get(endpoint, params=params, sse=sse, **kwargs)
def account_offers(self, address, params=None, **kwargs):
"""This endpoint represents all the offers a particular account makes.
`GET /accounts/{account}/offers{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/offers-for-account.html>`_
:param str address: The account ID to retrieve offers from
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:return: The list of offers for an account in a JSON response.
:rtype: dict
"""
endpoint = '/accounts/{account_id}/offers'.format(account_id=address)
return self._get(endpoint, params=params, **kwargs)
def account_operations(self, address, params=None, sse=None, **kwargs):
"""This endpoint represents all operations that were included in valid
transactions that affected a particular account.
`GET /accounts/{account}/operations{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/operations-for-account.html>`_
:param str address: The account ID to list operations on
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:param bool sse: Use server side events for streaming responses
:return: The list of operations for an account in a JSON response.
:rtype: dict
"""
endpoint = '/accounts/{account_id}/operations'.format(
account_id=address)
return self._get(endpoint, params=params, sse=sse, **kwargs)
def account_transactions(self, address, params=None, sse=None, **kwargs):
"""This endpoint represents all transactions that affected a given
account.
`GET /accounts/{account_id}/transactions{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/transactions-for-account.html>`_
:param str address: The account ID to list transactions from
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:return: The list of transactions for an account in a JSON response.
:rtype: dict
"""
endpoint = '/accounts/{account_id}/transactions'.format(
account_id=address)
return self._get(endpoint, params=params, sse=sse, **kwargs)
def account_payments(self, address, params=None, sse=None, **kwargs):
"""This endpoint responds with a collection of Payment operations where
the given account was either the sender or receiver.
`GET /accounts/{id}/payments{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/payments-for-account.html>`_
:param str address: The account ID to list payments to/from
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:param bool sse: Use server side events for streaming responses
:return: The list of payments for an account in a JSON response.
:rtype: dict
"""
endpoint = '/accounts/{account_id}/payments'.format(
account_id=address)
return self._get(endpoint, params=params, sse=sse, **kwargs)
def assets(self, params=None, **kwargs):
"""This endpoint represents all assets. It will give you all the assets
in the system along with various statistics about each.
See the documentation below for details on query parameters that are
available.
`GET /assets{?asset_code,asset_issuer,cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/assets-all.html>`_
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
        :return: A list of all assets in the system
:rtype: dict
"""
endpoint = '/assets'
return self._get(endpoint, params=params, **kwargs)
def transactions(self, params=None, sse=None, **kwargs):
"""This endpoint represents all validated transactions.
`GET /transactions{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/transactions-all.html>`_
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:param bool sse: Use server side events for streaming responses
:return: The list of all transactions
:rtype: dict
"""
endpoint = '/transactions'
return self._get(endpoint, params=params, sse=sse, **kwargs)
def transaction(self, tx_hash, **kwargs):
"""The transaction details endpoint provides information on a single
transaction.
`GET /transactions/{hash}
<https://www.stellar.org/developers/horizon/reference/endpoints/transactions-single.html>`_
:param str tx_hash: The hex-encoded transaction hash
:return: A single transaction's details
:rtype: dict
"""
endpoint = '/transactions/{tx_hash}'.format(tx_hash=tx_hash)
return self._get(endpoint, **kwargs)
def transaction_operations(self, tx_hash, params=None, **kwargs):
"""This endpoint represents all operations that are part of a given
transaction.
`GET /transactions/{hash}/operations{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/operations-for-transaction.html>`_
:param str tx_hash: The hex-encoded transaction hash
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:return: A single transaction's operations
:rtype: dict
"""
endpoint = '/transactions/{tx_hash}/operations'.format(
tx_hash=tx_hash)
return self._get(endpoint, params=params, **kwargs)
def transaction_effects(self, tx_hash, params=None, **kwargs):
"""This endpoint represents all effects that occurred as a result of a
given transaction.
`GET /transactions/{hash}/effects{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/effects-for-transaction.html>`_
:param str tx_hash: The hex-encoded transaction hash
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:return: A single transaction's effects
:rtype: dict
"""
endpoint = '/transactions/{tx_hash}/effects'.format(
tx_hash=tx_hash)
return self._get(endpoint, params=params, **kwargs)
def transaction_payments(self, tx_hash, params=None, **kwargs):
"""This endpoint represents all payment operations that are part of a
given transaction.
`GET /transactions/{hash}/payments{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/payments-for-transaction.html>`_
:param str tx_hash: The hex-encoded transaction hash
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:return: A single transaction's payment operations
:rtype: dict
"""
endpoint = '/transactions/{tx_hash}/payments'.format(
tx_hash=tx_hash)
return self._get(endpoint, params=params, **kwargs)
def order_book(self, params=None, **kwargs):
"""Return, for each orderbook, a summary of the orderbook and the bids
and asks associated with that orderbook.
See the external docs below for information on the arguments required.
`GET /order_book
<https://www.stellar.org/developers/horizon/reference/endpoints/orderbook-details.html>`_
:param dict params: The query parameters to pass to this request.
:return: A list of orderbook summaries as a JSON object.
:rtype: dict
"""
endpoint = '/order_book'
return self._get(endpoint, params=params, **kwargs)
def ledgers(self, params=None, sse=None, **kwargs):
"""This endpoint represents all ledgers.
`GET /ledgers{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/ledgers-all.html>`_
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:return: All ledgers on the network.
:rtype: dict
"""
endpoint = '/ledgers'
return self._get(endpoint, params=params, sse=sse, **kwargs)
def ledger(self, ledger_id, **kwargs):
"""The ledger details endpoint provides information on a single ledger.
`GET /ledgers/{sequence}
<https://www.stellar.org/developers/horizon/reference/endpoints/ledgers-single.html>`_
:param int ledger_id: The id of the ledger to look up
:return: The details of a single ledger
:rtype: dict
"""
endpoint = '/ledgers/{ledger_id}'.format(ledger_id=ledger_id)
return self._get(endpoint, **kwargs)
def ledger_effects(self, ledger_id, params=None, **kwargs):
"""This endpoint represents all effects that occurred in the given
ledger.
`GET /ledgers/{id}/effects{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/effects-for-ledger.html>`_
:param int ledger_id: The id of the ledger to look up
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:return: The effects for a single ledger
:rtype: dict
"""
endpoint = '/ledgers/{ledger_id}/effects'.format(ledger_id=ledger_id)
return self._get(endpoint, params=params, **kwargs)
def ledger_operations(self, ledger_id, params=None, **kwargs):
"""This endpoint returns all operations that occurred in a given
ledger.
`GET /ledgers/{id}/operations{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/operations-for-ledger.html>`_
:param int ledger_id: The id of the ledger to look up
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:return: The operations contained in a single ledger
:rtype: dict
"""
endpoint = '/ledgers/{ledger_id}/operations'.format(
ledger_id=ledger_id)
return self._get(endpoint, params=params, **kwargs)
def ledger_payments(self, ledger_id, params=None, **kwargs):
"""This endpoint represents all payment operations that are part of a
valid transactions in a given ledger.
`GET /ledgers/{id}/payments{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/payments-for-ledger.html>`_
:param int ledger_id: The id of the ledger to look up
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:return: The payments contained in a single ledger
:rtype: dict
"""
endpoint = '/ledgers/{ledger_id}/payments'.format(ledger_id=ledger_id)
return self._get(endpoint, params=params, **kwargs)
def ledger_transactions(self, ledger_id, params=None, **kwargs):
"""This endpoint represents all transactions in a given ledger.
`GET /ledgers/{id}/transactions{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/transactions-for-ledger.html>`_
:param int ledger_id: The id of the ledger to look up.
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:return: The transactions contained in a single ledger
:rtype: dict
"""
endpoint = '/ledgers/{ledger_id}/transactions'.format(
ledger_id=ledger_id)
return self._get(endpoint, params=params, **kwargs)
def effects(self, params=None, sse=None, **kwargs):
"""This endpoint represents all effects.
`GET /effects{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/effects-all.html>`_
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:param bool sse: Use server side events for streaming responses
:return: A list of all effects
:rtype: dict
"""
endpoint = '/effects'
return self._get(endpoint, params=params, sse=sse, **kwargs)
def operations(self, params=None, sse=None, **kwargs):
"""This endpoint represents all operations that are part of validated
transactions.
`GET /operations{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/operations-all.html>`_
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:param bool sse: Use server side events for streaming responses
:return: A list of all operations
:rtype: dict
"""
endpoint = '/operations'
return self._get(endpoint, params=params, sse=sse, **kwargs)
def operation(self, op_id, **kwargs):
"""The operation details endpoint provides information on a single
operation.
`GET /operations/{id}
<https://www.stellar.org/developers/horizon/reference/endpoints/operations-single.html>`_
        :param int op_id: The operation ID to get details on.
:return: Details on a single operation
:rtype: dict
"""
endpoint = '/operations/{op_id}'.format(op_id=op_id)
return self._get(endpoint, **kwargs)
def operation_effects(self, op_id, params=None, **kwargs):
"""This endpoint represents all effects that occurred as a result of a
given operation.
`GET /operations/{id}/effects{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/effects-for-operation.html>`_
:param int op_id: The operation ID to get effects on.
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:return: A list of effects on the given operation
:rtype: dict
"""
endpoint = '/operations/{op_id}/effects'.format(op_id=op_id)
return self._get(endpoint, params=params, **kwargs)
def payments(self, params=None, sse=None, **kwargs):
"""This endpoint represents all payment operations that are part of
validated transactions.
`GET /payments{?cursor,limit,order}
<https://www.stellar.org/developers/horizon/reference/endpoints/payments-all.html>`_
:param dict params: The query parameters to pass to this request, such
as cursor, order, and limit.
:param bool sse: Use server side events for streaming responses
:return: A list of all valid payment operations
:rtype: dict
"""
endpoint = '/payments'
return self._get(endpoint, params=params, sse=sse, **kwargs)
def paths(self, params=None, **kwargs):
"""Load a list of assets available to the source account id and find
any payment paths from those source assets to the desired
destination asset.
See the below docs for more information on required and optional
parameters for further specifying your search.
`GET /paths
<https://www.stellar.org/developers/horizon/reference/endpoints/path-finding.html>`_
:param dict params: The query parameters to pass to this request, such
as source_account, destination_account, destination_asset_type,
etc.
:return: A list of paths that can be used to complete a payment based
on a given query.
:rtype: dict
"""
endpoint = '/paths'
return self._get(endpoint, params=params, **kwargs)
def trades(self, params=None, **kwargs):
"""Load a list of trades, optionally filtered by an orderbook.
See the below docs for more information on required and optional
parameters for further specifying your search.
`GET /trades
<https://www.stellar.org/developers/horizon/reference/endpoints/trades.html>`_
:param dict params: The query parameters to pass to this request, such
as base_asset_type, counter_asset_type, cursor, order, limit, etc.
:return: A list of trades filtered by a given query
:rtype: dict
"""
endpoint = '/trades'
return self._get(endpoint, params=params, **kwargs)
def trade_aggregations(self, params=None, **kwargs):
"""Load a list of aggregated historical trade data, optionally filtered
by an orderbook.
`GET /trade_aggregations
<https://www.stellar.org/developers/horizon/reference/endpoints/trade_aggregations.html>`_
:param dict params: The query parameters to pass to this request, such
as start_time, end_time, base_asset_type, counter_asset_type,
order, limit, etc.
:return: A list of collected trade aggregations
:rtype: dict
"""
endpoint = '/trade_aggregations'
return self._get(endpoint, params=params, **kwargs)
def horizon_testnet():
"""Create a Horizon instance utilizing SDF's Test Network."""
return Horizon(HORIZON_TEST)
def horizon_livenet():
"""Create a Horizon instance utilizing SDF's Live Network."""
return Horizon(HORIZON_LIVE)
```
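A brief usage sketch for the `Horizon` client above; the account ID is a placeholder and the streaming call assumes the optional `sseclient` dependency is installed.

```python
from stellar_base.horizon import HORIZON_LIVE, Horizon, horizon_testnet

horizon = horizon_testnet()                      # equivalent to Horizon(HORIZON_TEST)
live = Horizon(horizon=HORIZON_LIVE, timeout=30)

acc = horizon.account('GDEXAMPLE...')            # placeholder ID; returns a dict
if acc.get('status') == 404:                     # HTTP errors come back as JSON, not exceptions
    print(acc.get('title'))

ops = horizon.operations(params={'limit': 10, 'order': 'desc'})

# With sse=True, _request returns an SSEClient iterator instead of a dict.
for event in horizon.transactions(sse=True):
    print(event)
    break
```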
#### File: py-stellar-base/stellar_base/transaction_envelope.py
```python
import base64
from .keypair import Keypair
from .network import Network, NETWORKS
from .stellarxdr import Xdr
from .transaction import Transaction
from .utils import hashX_sign_decorated, xdr_hash
from .exceptions import SignatureExistError, PreimageLengthError
class TransactionEnvelope(object):
"""The :class:`TransactionEnvelope` object, which represents a transaction
envelope ready to sign and submit to send over the network.
When a transaction is ready to be prepared for sending over the network, it
must be put into a :class:`TransactionEnvelope`, which includes additional
metadata such as the signers for a given transaction. Ultimately, this
class handles signing and conversion to and from XDR for usage on Stellar's
network.
:param tx: The transaction that is encapsulated in this envelope.
:type tx: :class:`Transaction <stellar_base.transaction.Transaction>`
:param dict opts: Additional options, such as:
- opts.signatures, which contains a list of signatures that have
already been created.
- opts.network_id, which contains the network ID for which network this
transaction envelope is associated with.
"""
def __init__(self, tx, opts=None):
self.tx = tx
        try:
            self.signatures = opts.get('signatures') or []
        except AttributeError:
            self.signatures = []
        if opts and 'network_id' in opts:
            if opts['network_id'] in NETWORKS:
                passphrase = NETWORKS[opts['network_id']]
            else:
                passphrase = opts['network_id']
        else:
            passphrase = NETWORKS['TESTNET']
        self.network_id = Network(passphrase).network_id()
def sign(self, keypair):
"""Sign this transaction envelope with a given keypair.
Note that the signature must not already be in this instance's list of
signatures.
:param keypair: The keypair to use for signing this transaction
envelope.
:type keypair: :class:`Keypair <stellar_base.keypair.Keypair>`
:raises: :exc:`SignatureExistError
<stellar_base.utils.SignatureExistError>`
"""
assert isinstance(keypair, Keypair)
tx_hash = self.hash_meta()
sig = keypair.sign_decorated(tx_hash)
sig_dict = [signature.__dict__ for signature in self.signatures]
if sig.__dict__ in sig_dict:
raise SignatureExistError('already signed')
else:
self.signatures.append(sig)
def sign_hashX(self, preimage):
"""Sign this transaction envelope with a Hash(X) signature.
See Stellar's documentation on `Multi-Sig
<https://www.stellar.org/developers/guides/concepts/multi-sig.html>`_
for more details on Hash(x) signatures.
:param str preimage: The "x" value to be hashed and used as a
signature.
"""
if len(preimage) > 64:
raise PreimageLengthError('preimage must <= 64 bytes')
sig = hashX_sign_decorated(preimage)
sig_dict = [signature.__dict__ for signature in self.signatures]
if sig.__dict__ in sig_dict:
raise SignatureExistError('already signed')
else:
self.signatures.append(sig)
def hash_meta(self):
"""Get the XDR Hash of the signature base.
This hash is ultimately what is signed before transactions are sent
over the network. See :meth:`signature_base` for more details about
this process.
:return: The XDR Hash of this transaction envelope's signature base.
"""
return xdr_hash(self.signature_base())
def signature_base(self):
"""Get the signature base of this transaction envelope.
Return the "signature base" of this transaction, which is the value
that, when hashed, should be signed to create a signature that
validators on the Stellar Network will accept.
        It is composed of the network ID, a 4-byte envelope-type prefix, and
        the XDR-encoded form of this transaction.
:return: The signature base of this transaction envelope.
"""
network_id = self.network_id
tx_type = Xdr.StellarXDRPacker()
tx_type.pack_EnvelopeType(Xdr.const.ENVELOPE_TYPE_TX)
tx_type = tx_type.get_buffer()
tx = Xdr.StellarXDRPacker()
tx.pack_Transaction(self.tx.to_xdr_object())
tx = tx.get_buffer()
return network_id + tx_type + tx
def to_xdr_object(self):
"""Get an XDR object representation of this
:class:`TransactionEnvelope`.
"""
tx = self.tx.to_xdr_object()
return Xdr.types.TransactionEnvelope(tx, self.signatures)
def xdr(self):
"""Get the base64 encoded XDR string representing this
:class:`TransactionEnvelope`.
"""
te = Xdr.StellarXDRPacker()
te.pack_TransactionEnvelope(self.to_xdr_object())
te = base64.b64encode(te.get_buffer())
return te
    # FIXME: cannot get the network id from XDR; defaults to 'TESTNET'
@classmethod
def from_xdr(cls, xdr):
"""Create a new :class:`TransactionEnvelope` from an XDR string.
:param bytes xdr: The XDR string that represents a transaction
envelope.
"""
xdr_decoded = base64.b64decode(xdr)
te = Xdr.StellarXDRUnpacker(xdr_decoded)
te_xdr_object = te.unpack_TransactionEnvelope()
signatures = te_xdr_object.signatures
tx_xdr_object = te_xdr_object.tx
tx = Transaction.from_xdr_object(tx_xdr_object)
te = TransactionEnvelope(tx, {'signatures': signatures})
# te = TransactionEnvelope(
# tx, {'signatures': signatures, 'network_id': 'PUBLIC'})
return te
```
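A sketch of the sign-and-serialize flow for `TransactionEnvelope`, assuming `tx` is a `Transaction` built elsewhere (constructing one is outside this file). Note the FIXME above: `from_xdr` cannot recover the network ID, so round-tripped envelopes fall back to TESTNET.

```python
from stellar_base.keypair import Keypair
from stellar_base.transaction_envelope import TransactionEnvelope

kp = Keypair.random()
# tx = Transaction(...)  # assumed to be built elsewhere; see stellar_base.transaction

te = TransactionEnvelope(tx, {'network_id': 'TESTNET'})
te.sign(kp)              # signing twice with the same keypair raises SignatureExistError
envelope_xdr = te.xdr()  # base64-encoded XDR, suitable for Horizon.submit()

restored = TransactionEnvelope.from_xdr(envelope_xdr)
assert len(restored.signatures) == len(te.signatures)
```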
#### File: py-stellar-base/tests/test_builder.py
```python
import json
import requests
from stellar_base.builder import Builder, HORIZON_TEST
from stellar_base.keypair import Keypair
from stellar_base.exceptions import SignatureExistError
# TODO: These endpoints really need to be mocked out.
class TestBuilder(object):
def __init__(self):
self.cold = Keypair.random()
self.hot = Keypair.random()
# self.hot = Keypair.from_seed('SECRET')
# self.cold = Keypair.from_seed('SECRET')
def test_builder(self):
cold_account = self.cold.address().decode()
hot_account = self.hot.address().decode()
fund(cold_account)
cold = Builder(self.cold.seed().decode()) \
.append_create_account_op(hot_account, 200) \
.append_set_options_op(inflation_dest=cold_account, set_flags=1,
home_domain='256kw.com', master_weight=10,
low_threshold=5, ) \
.append_trust_op(cold_account, 'BEER', 1000, source=hot_account) \
.append_allow_trust_op(hot_account, 'BEER', True)
# append twice for test
cold.append_payment_op(hot_account, 50.123, 'BEER', cold_account) \
.append_payment_op(hot_account, 50.123, 'BEER', cold_account)
cold.sign(self.hot.seed().decode())
try: # sign twice
cold.sign(self.hot.seed().decode())
except SignatureExistError:
assert True
except:
assert False
cold.sign()
assert len(cold.te.signatures) == 2
assert len(cold.ops) == 5
try:
response = cold.submit()
print(response)
except:
assert False
def test_builder_xdr(self):
cold_account = self.cold.address().decode()
hot_account = self.hot.address().decode()
fund(cold_account)
fund(hot_account)
cold = Builder(self.cold.seed().decode()) \
.append_trust_op(cold_account, 'BEER', 1000, hot_account) \
.append_payment_op(hot_account, 100, 'BEER', cold_account) \
.append_payment_op(cold_account, 2.222,
'BEER', cold_account, hot_account)
xdr = cold.gen_xdr()
hot = Builder(self.hot.seed().decode())
hot.import_from_xdr(xdr)
# hot.sign()
try:
response = hot.submit()
except:
assert False
def fund(address):
for attempt in range(3):
r = requests.get('https://friendbot.stellar.org/?addr=' +
address) # Get 10000 lumens
t = r.text
try:
assert 'hash' in json.loads(
t) or 'op_already_exists' in json.loads(t)
return True
except AssertionError:
pass
raise Exception("fund failed")
```
#### File: py-stellar-base/tests/test_keypair.py
```python
import unittest
from stellar_base.keypair import Keypair
class TestKeypair(unittest.TestCase):
def test_sep0005(self):
# https://github.com/stellar/stellar-protocol/blob/master/ecosystem/sep-0005.md
mnemonic = 'illness spike retreat truth genius clock brain pass fit cave bargain toe'
seed = Keypair.deterministic(mnemonic).seed().decode()
seed_expect = 'SBGWSG6BTNCKCOB3DIFBGCVMUPQFYPA2G4O34RMTB343OYPXU5DJDVMN'
self.assertEqual(seed, seed_expect)
address = Keypair.deterministic(mnemonic, index=6).address().decode()
address_expect = 'GBY27SJVFEWR3DUACNBSMJB6T4ZPR4C7ZXSTHT6GMZUDL23LAM5S2PQX'
self.assertEqual(address, address_expect)
mnemonic = 'cable spray genius state float twenty onion head street palace net private method loan turn phrase state blanket interest dry amazing dress blast tube'
        seed = Keypair.deterministic(mnemonic, passphrase='p4ssphr4se').seed().decode()
seed_expect = 'SAFWTGXVS7ELMNCXELFWCFZOPMHUZ5LXNBGUVRCY3FHLFPXK4QPXYP2X'
self.assertEqual(seed, seed_expect)
        address = Keypair.deterministic(mnemonic, passphrase='p4ssphr4se', index=9).address().decode()
address_expect = 'GBOSMFQYKWFDHJWCMCZSMGUMWCZOM4KFMXXS64INDHVCJ2A2JAABCYRR'
self.assertEqual(address, address_expect)
mnemonic = 'abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon abandon about'
seed = Keypair.deterministic(mnemonic).seed().decode()
seed_expect = 'SBUV3MRWKNS6AYKZ6E6MOUVF2OYMON3MIUASWL3JLY5E3ISDJFELYBRZ'
self.assertEqual(seed, seed_expect)
address = Keypair.deterministic(mnemonic, index=8).address().decode()
address_expect = 'GABTYCZJMCP55SS6I46SR76IHETZDLG4L37MLZRZKQDGBLS5RMP65TSX'
self.assertEqual(address, address_expect)
``` |
{
"source": "jimbobhickville/sqlalchemy-jsonapi",
"score": 3
} |
#### File: sqlalchemy_jsonapi/declarative/serializer.py
```python
import datetime
from inflection import dasherize, underscore
class JSONAPISerializer(object):
"""A JSON API serializer that serializes SQLAlchemy models."""
model = None
primary_key = 'id'
fields = []
dasherize = True
def __init__(self):
"""Ensure required members are not defaults."""
if self.model is None:
raise TypeError("Model cannot be of type 'None'.")
if self.primary_key not in self.fields:
raise ValueError(
"Serializer fields must contain primary key '{}'".format(
self.primary_key))
def serialize(self, resources):
"""Serialize resource(s) according to json-api spec."""
serialized = {
'meta': {
'sqlalchemy_jsonapi_version': '4.0.9'
},
'jsonapi': {
'version': '1.0'
}
}
        # Treat anything with a count() method (e.g. a SQLAlchemy Query)
        # as a collection of resources.
if hasattr(resources, 'count'):
serialized['data'] = []
for resource in resources:
serialized['data'].append(
self._render_resource(resource))
else:
serialized['data'] = self._render_resource(resources)
return serialized
def _render_resource(self, resource):
"""Renders a resource's top level members based on json-api spec.
Top level members include:
'id', 'type', 'attributes', 'relationships'
"""
if not resource:
return None
        # Must not render a resource of a different model type, even if it
        # has identically named attributes.
if not isinstance(resource, self.model):
raise TypeError(
'Resource(s) type must be the same as the serializer model type.')
top_level_members = {}
try:
top_level_members['id'] = str(getattr(resource, self.primary_key))
except AttributeError:
raise
top_level_members['type'] = resource.__tablename__
top_level_members['attributes'] = self._render_attributes(resource)
top_level_members['relationships'] = self._render_relationships(
resource)
return top_level_members
def _render_attributes(self, resource):
"""Render the resources's attributes."""
attributes = {}
attrs_to_ignore = set()
for key, relationship in resource.__mapper__.relationships.items():
attrs_to_ignore.update(set(
[column.name for column in relationship.local_columns]).union(
{key}))
if self.dasherize:
mapped_fields = {x: dasherize(underscore(x)) for x in self.fields}
else:
mapped_fields = {x: x for x in self.fields}
for attribute in self.fields:
if attribute == self.primary_key:
continue
            # Per the json-api spec, we cannot render foreign keys
            # or relationships in attributes.
if attribute in attrs_to_ignore:
raise AttributeError
try:
value = getattr(resource, attribute)
if isinstance(value, datetime.datetime):
attributes[mapped_fields[attribute]] = value.isoformat()
else:
attributes[mapped_fields[attribute]] = value
except AttributeError:
raise
return attributes
def _render_relationships(self, resource):
"""Render the resource's relationships."""
relationships = {}
related_models = resource.__mapper__.relationships.keys()
primary_key_val = getattr(resource, self.primary_key)
if self.dasherize:
mapped_relationships = {
x: dasherize(underscore(x)) for x in related_models}
else:
mapped_relationships = {x: x for x in related_models}
for model in related_models:
relationships[mapped_relationships[model]] = {
'links': {
'self': '/{}/{}/relationships/{}'.format(
resource.__tablename__,
primary_key_val,
mapped_relationships[model]),
'related': '/{}/{}/{}'.format(
resource.__tablename__,
primary_key_val,
mapped_relationships[model])
}
}
return relationships
```
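A sketch of how the declarative `JSONAPISerializer` above is typically subclassed, using a hypothetical `Author` model; `fields` must include the primary key, and foreign-key or relationship names must be left out of `fields` (the attribute renderer raises for them). The import path follows the file header and is an assumption.

```python
from sqlalchemy import Column, Integer, String, create_engine
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import sessionmaker

from sqlalchemy_jsonapi.declarative.serializer import JSONAPISerializer  # assumed path

Base = declarative_base()


class Author(Base):
    """Hypothetical model used only for this sketch."""
    __tablename__ = 'authors'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))


class AuthorSerializer(JSONAPISerializer):
    model = Author
    primary_key = 'id'
    fields = ['id', 'name']  # must contain the primary key
    dasherize = True         # underscored attribute names become dashed keys


engine = create_engine('sqlite://')
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()
session.add(Author(name='Sally'))
session.commit()

document = AuthorSerializer().serialize(session.query(Author))
# A Query exposes .count(), so document['data'] is rendered as a list.
```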
#### File: sqlalchemy_jsonapi/tests/test_relationship_delete.py
```python
import json
from uuid import uuid4
from sqlalchemy_jsonapi.errors import (
PermissionDeniedError, RelationshipNotFoundError, ResourceNotFoundError,
MissingContentTypeError, ValidationError)
def test_200_on_deletion_from_to_many(comment, client):
payload = {'data': [{'type': 'blog-comments', 'id': str(comment.id)}]}
response = client.delete(
'/api/blog-posts/{}/relationships/comments/'.format(
comment.post.id),
data=json.dumps(payload),
content_type='application/vnd.api+json').validate(200)
for item in response.json_data['data']:
assert {'id', 'type'} == set(item.keys())
assert payload['data'][0]['id'] not in [str(x['id'])
for x in response.json_data['data']
]
def test_404_on_resource_not_found(client):
client.delete(
'/api/blog-posts/{}/relationships/comments/'.format(uuid4()),
data='{}', content_type='application/vnd.api+json').validate(
404, ResourceNotFoundError)
def test_404_on_relationship_not_found(post, client):
client.delete(
'/api/blog-posts/{}/relationships/comment/'.format(post.id),
data='{}', content_type='application/vnd.api+json').validate(
404, RelationshipNotFoundError)
def test_403_on_permission_denied(user, client):
client.delete(
'/api/users/{}/relationships/logs/'.format(user.id),
data='{"data": []}',
content_type='application/vnd.api+json').validate(
403, PermissionDeniedError)
def test_409_on_to_one_provided(post, client):
client.delete(
'/api/blog-posts/{}/relationships/author/'.format(post.id),
data='{"data": {}}',
content_type='application/vnd.api+json').validate(
409, ValidationError)
def test_409_missing_content_type_header(post, client):
client.delete(
'/api/blog-posts/{}/relationships/comment/'.format(post.id),
data='{}').validate(409, MissingContentTypeError)
```
#### File: sqlalchemy_jsonapi/tests/test_serializer.py
```python
from app import api
import uuid
def test_include_different_types_same_id(session, comment):
new_id = uuid.uuid4()
comment.post.id = new_id
comment.author.id = new_id
comment.post_id = new_id
comment.author_id = new_id
session.commit()
r = api.serializer.get_resource(
session, {'include': 'post,author'}, 'blog-comments', comment.id)
assert len(r.data['included']) == 2
```
#### File: sqlalchemy_jsonapi/unittests/models.py
```python
from sqlalchemy import Column, String, Integer, Text, ForeignKey
from sqlalchemy.ext.declarative import declarative_base
from sqlalchemy.orm import backref, relationship, validates
from sqlalchemy_jsonapi import (
Permissions, permission_test, ALL_PERMISSIONS,
JSONAPI, AttributeActions, attr_descriptor
)
Base = declarative_base()
class User(Base):
"""Simple user model."""
__tablename__ = 'users'
id = Column(Integer, primary_key=True)
first = Column(String(50), nullable=False)
last = Column(String(50), nullable=False)
username = Column(String(50), unique=True, nullable=False)
password = Column(String(50), nullable=False)
@permission_test(Permissions.VIEW, 'password')
def view_password(self):
"""Password shall never be seen in a view."""
return False
@validates('password', 'username', 'first', 'last')
def empty_attributes_not_allowed(self, key, value):
assert value, 'Empty value not allowed for {0}'.format(key)
return value
    # For demonstration purposes, we want to store
    # the first name as SET-ATTR:first in the database.
@attr_descriptor(AttributeActions.SET, 'first')
def set_first_to_start_with_set_attr(self, new_first):
self.first = 'SET-ATTR:{0}'.format(new_first)
    # For demonstration purposes, we don't want to expose
    # how first is stored internally in the database.
@attr_descriptor(AttributeActions.GET, 'first')
def get_first_starts_with_get_attr(self):
if 'SET-ATTR:' in self.first:
return self.first[9::]
return self.first
class Post(Base):
"""A blog post model."""
__tablename__ = 'posts'
id = Column(Integer, primary_key=True)
title = Column(String(100), nullable=False)
content = Column(Text, nullable=False)
author_id = Column(Integer, ForeignKey('users.id', ondelete='CASCADE'))
author = relationship('User',
lazy='joined',
backref=backref('posts',
lazy='dynamic',
cascade='all,delete'))
class Comment(Base):
"""Comment for each Post."""
__tablename__ = 'comments'
id = Column(Integer, primary_key=True)
post_id = Column(Integer, ForeignKey('posts.id', ondelete='CASCADE'))
author_id = Column(Integer, ForeignKey('users.id'), nullable=False)
content = Column(Text, nullable=False)
post = relationship('Post',
lazy='joined',
backref=backref('comments',
lazy='dynamic', cascade='all,delete'))
author = relationship('User',
lazy='joined',
backref=backref('comments',
lazy='dynamic'))
class Log(Base):
"""Log information model."""
__tablename__ = 'logs'
id = Column(Integer, primary_key=True)
user_id = Column(Integer, ForeignKey('users.id'))
user = relationship(
'User', lazy='joined', backref=backref('logs', lazy='dynamic'))
@permission_test(ALL_PERMISSIONS)
def block_interactive(cls):
"""Unable to Create, Edit, or Delete a log."""
return False
serializer = JSONAPI(Base)
```
#### File: sqlalchemy_jsonapi/unittests/test_errors_user_error.py
```python
import json
import unittest
from sqlalchemy_jsonapi import errors
from sqlalchemy_jsonapi import __version__
class TestUserError(unittest.TestCase):
"""Tests for errors.user_error."""
def test_user_error(self):
"""Create user error succesfully."""
status_code = 400
title = 'User Error Occured'
detail = 'Testing user error'
pointer = '/test'
actual = errors.user_error(
status_code, title, detail, pointer)
data = {
'errors': [{
'status': status_code,
'source': {'pointer': '{0}'.format(pointer)},
'title': title,
'detail': detail,
}],
'jsonapi': {
'version': '1.0'
},
'meta': {
'sqlalchemy_jsonapi_version': __version__
}
}
expected = json.dumps(data), status_code
self.assertEqual(expected, actual)
```
#### File: sqlalchemy_jsonapi/unittests/test_serializer_get_collection.py
```python
from sqlalchemy_jsonapi import errors
from sqlalchemy_jsonapi.unittests.utils import testcases
from sqlalchemy_jsonapi.unittests import models
from sqlalchemy_jsonapi import __version__
class GetCollection(testcases.SqlalchemyJsonapiTestCase):
"""Tests for serializer.get_collection."""
def test_get_collection_response_with_no_query_args(self):
"""Get collection with no query params returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
comment = models.Comment(
content='This is a comment', author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
response = models.serializer.get_collection(
self.session, {}, 'comments')
expected = {
'data': [{
'attributes': {
'content': 'This is a comment'
},
'type': 'comments',
'relationships': {
'author': {
'links': {
'related': '/comments/1/author',
'self': '/comments/1/relationships/author'
}
},
'post': {
'links': {
'related': '/comments/1/post',
'self': '/comments/1/relationships/post'
}
}
},
'id': 1
}],
'jsonapi': {
'version': '1.0'
},
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'included': []
}
actual = response.data
self.assertEqual(expected, actual)
self.assertEqual(200, response.status_code)
@testcases.fragile
def test_get_collection_response_with_single_include_model(self):
"""Get collection with single included model returns 200.
This test is fragile.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
comment = models.Comment(
content='This is a comment', author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
response = models.serializer.get_collection(
self.session, {'include': 'author'}, 'comments')
expected = {
'data': [{
'type': 'comments',
'id': 1,
'relationships': {
'author': {
'data': {
'type': 'users',
'id': 1
},
'links': {
'self': '/comments/1/relationships/author',
'related': '/comments/1/author'
}
},
'post': {
'links': {
'self': '/comments/1/relationships/post',
'related': '/comments/1/post'
}
}
},
'attributes': {
'content': u'This is a comment'
}
}],
'included': [{
'type': 'users',
'id': 1,
'relationships': {
'posts': {
'links': {
'self': '/users/1/relationships/posts',
'related': '/users/1/posts'
}
},
'comments': {
'links': {
'self': '/users/1/relationships/comments',
'related': '/users/1/comments'
}
},
'logs': {
'links': {
'self': '/users/1/relationships/logs',
'related': '/users/1/logs'
}
}
},
'attributes': {
'username': u'SallySmith1',
'last': u'Smith',
'first': u'Sally'
}
}],
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'jsonapi': {
'version': '1.0'
},
}
actual = response.data
self.assertEqual(expected, actual)
self.assertEqual(200, response.status_code)
def test_get_collection_asc_sorted_response(self):
"""Get collection with ascending sorted response returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
for x in reversed(range(2)):
comment = models.Comment(
content='This is comment {0}'.format(x+1), author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
response = models.serializer.get_collection(
self.session, {'sort': 'content'}, 'comments')
expected = {
'data': [{
'relationships': {
'author': {
'links': {
'related': '/comments/2/author',
'self': '/comments/2/relationships/author'
}
},
'post': {
'links': {
'related': '/comments/2/post',
'self': '/comments/2/relationships/post'
}
}
},
'type': 'comments',
'attributes': {
'content': u'This is comment 1'
},
'id': 2
}, {
'relationships': {
'author': {
'links': {
'related': '/comments/1/author',
'self': '/comments/1/relationships/author'
}
},
'post': {
'links': {
'related': '/comments/1/post',
'self': '/comments/1/relationships/post'
}
}
},
'type': 'comments',
'attributes': {
'content': u'This is comment 2'
},
'id': 1
}],
'jsonapi': {
'version': '1.0'
},
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'included': []
}
actual = response.data
self.assertEquals(expected, actual)
self.assertEquals(200, response.status_code)
def test_get_collection_desc_sorted_response(self):
"""Get collection with descending sorted response returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
for x in range(2):
comment = models.Comment(
content='This is comment {0}'.format(x+1), author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
response = models.serializer.get_collection(
self.session, {'sort': '-content'}, 'comments')
expected = {
'data': [{
'relationships': {
'author': {
'links': {
'related': '/comments/2/author',
'self': '/comments/2/relationships/author'
}
},
'post': {
'links': {
'related': '/comments/2/post',
'self': '/comments/2/relationships/post'
}
}
},
'type': 'comments',
'attributes': {
'content': u'This is comment 2'
},
'id': 2
}, {
'relationships': {
'author': {
'links': {
'related': '/comments/1/author',
'self': '/comments/1/relationships/author'
}
},
'post': {
'links': {
'related': '/comments/1/post',
'self': '/comments/1/relationships/post'
}
}
},
'type': 'comments',
'attributes': {
'content': u'This is comment 1'
},
'id': 1
}],
'jsonapi': {
'version': '1.0'
},
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'included': []
}
actual = response.data
self.assertEquals(expected, actual)
self.assertEquals(200, response.status_code)
def test_get_collection_response_with_relationship_for_sorting(self):
"""Get collection with relationship for sorting results in 409.
A NotSortableError is returned.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
            title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
self.session.commit()
response = models.serializer.get_collection(
self.session, {'sort': 'author'}, 'posts')
self.assertEquals(409, response.status_code)
def test_get_collection_response_given_invalid_sort_field(self):
"""Get collection given an invalid sort field results in 409.
A NotSortableError is returned.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
self.session.commit()
response = models.serializer.get_collection(
self.session, {'sort': 'invalid_field'}, 'posts')
expected = 'The requested field posts on type invalid_field is not a sortable field.'
self.assertEquals(expected, response.detail)
self.assertEquals(409, response.status_code)
def test_get_collection_access_denied(self):
"""Get collection with access denied results in 200.
The response data should be empty list.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
log = models.Log(user=user, user_id=user.id)
self.session.add(log)
self.session.commit()
response = models.serializer.get_collection(self.session, {}, 'logs')
expected = {
'data': [],
'included': [],
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'jsonapi': {
'version': '1.0'
}
}
actual = response.data
self.assertEquals(expected, actual)
self.assertEquals(200, response.status_code)
def test_get_collection_paginated_response_by_page(self):
"""Get collection with pagination by page returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
for x in range(20):
comment = models.Comment(
content='This is comment {0}'.format(x+1), author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
response = models.serializer.get_collection(
self.session,
{'page[number]': u'1', 'page[size]': u'2'}, 'comments')
expected = {
'data': [{
'id': 3,
'attributes': {
'content': u'This is comment 3'
},
'type': 'comments',
'relationships': {
'author': {
'links': {
'self': '/comments/3/relationships/author',
'related': '/comments/3/author'
}
},
'post': {
'links': {
'self': '/comments/3/relationships/post',
'related': '/comments/3/post'
}
}
}
}, {
'id': 4,
'attributes': {
'content': u'This is comment 4'
},
'type': 'comments',
'relationships': {
'author': {
'links': {
'self': '/comments/4/relationships/author',
'related': '/comments/4/author'
}
},
'post': {
'links': {
'self': '/comments/4/relationships/post',
'related': '/comments/4/post'
}
}
}
}],
'included': [],
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'jsonapi': {
'version': '1.0'
}
}
actual = response.data
self.assertEquals(expected, actual)
self.assertEquals(200, response.status_code)
def test_get_collection_with_single_field(self):
"""Get collection with specific field returns 200.
The response will only contain attributes specific in field dictionary.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
log = models.Log(user_id=user.id, user=user)
self.session.add(log)
self.session.commit()
response = models.serializer.get_collection(
self.session, {'fields[users]': 'first'}, 'users')
expected = {
'data': [{
'relationships': {},
'id': 1,
'type': 'users',
'attributes': {
'first': u'Sally'
}
}],
'included': [],
'jsonapi': {
'version': '1.0'
},
'meta': {
'sqlalchemy_jsonapi_version': __version__
}
}
actual = response.data
self.assertEqual(expected, actual)
self.assertEqual(200, response.status_code)
@testcases.fragile
def test_get_collection_when_including_model_and_its_attribute(self):
"""Get collection when including the model and its attribute returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
comment = models.Comment(
content='This is comment 1', author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
response = models.serializer.get_collection(
self.session, {'include': 'post.author'}, 'comments')
expected = {
'included': [{
'id': 1,
'type': 'users',
'relationships': {
'posts': {
'links': {
'self': '/users/1/relationships/posts',
'related': '/users/1/posts'
}
},
'comments': {
'links': {
'self': '/users/1/relationships/comments',
'related': '/users/1/comments'
}
},
'logs': {
'links': {
'self': '/users/1/relationships/logs',
'related': '/users/1/logs'
}
}
},
'attributes': {
'username': u'SallySmith1',
'first': u'Sally',
'last': u'Smith'
}
}, {
'id': 1,
'type': 'posts',
'relationships': {
'author': {
'data': {
'id': 1,
'type': 'users'
},
'links': {
'self': '/posts/1/relationships/author',
'related': '/posts/1/author'
}
},
'comments': {
'links': {
'self': '/posts/1/relationships/comments',
'related': '/posts/1/comments'
}
}
},
'attributes': {
'content': u'This is the content',
'title': u'This Is A Title'
}
}],
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'data': [{
'id': 1,
'type': 'comments',
'relationships': {
'post': {
'data': {
'id': 1,
'type': 'posts'
},
'links': {
'self': '/comments/1/relationships/post',
'related': '/comments/1/post'
}
},
'author': {
'links': {
'self': '/comments/1/relationships/author',
'related': '/comments/1/author'
}
}
},
'attributes': {
'content': u'This is comment 1'
}
}],
'jsonapi': {
'version': '1.0'
}
}
actual = response.data
self.assertEqual(expected, actual)
self.assertEqual(200, response.status_code)
@testcases.fragile
def test_get_collection_given_an_included_model_that_is_null(self):
"""Get collection when given a included model that is null returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content')
self.session.add(blog_post)
self.session.commit()
response = models.serializer.get_collection(
self.session, {'include': 'author'}, 'posts')
expected = {
'jsonapi': {
'version': '1.0'
},
'data': [{
'id': 1,
'type': 'posts',
'attributes': {
'title': u'This Is A Title',
'content': u'This is the content'
},
'relationships': {
'author': {
'links': {
'related': '/posts/1/author',
'self': '/posts/1/relationships/author'
},
'data': None
},
'comments': {
'links': {
'related': '/posts/1/comments',
'self': '/posts/1/relationships/comments'
}
}
}
}],
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'included': []
}
actual = response.data
self.assertEqual(expected, actual)
self.assertEqual(200, response.status_code)
@testcases.fragile
def test_get_collection_with_multiple_included_models(self):
"""Get collection with multiple included models returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
comment = models.Comment(
content='This is comment 1', author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
response = models.serializer.get_collection(
self.session, {'include': 'comments,author'}, 'posts')
expected = {
'data': [{
'type': 'posts',
'id': 1,
'relationships': {
'comments': {
'data': [{
'type': 'comments',
'id': 1
}],
'links': {
'related': '/posts/1/comments',
'self': '/posts/1/relationships/comments'
}
},
'author': {
'data': {
'type': 'users',
'id': 1
},
'links': {
'related': '/posts/1/author',
'self': '/posts/1/relationships/author'
}
}
},
'attributes': {
'title': u'This Is A Title',
'content': u'This is the content'
}
}],
'jsonapi': {
'version': '1.0'
},
'included': [{
'type': 'users',
'id': 1,
'relationships': {
'comments': {
'links': {
'related': '/users/1/comments',
'self': '/users/1/relationships/comments'
}
},
'logs': {
'links': {
'related': '/users/1/logs',
'self': '/users/1/relationships/logs'
}
},
'posts': {
'links': {
'related': '/users/1/posts',
'self': '/users/1/relationships/posts'
}
}
},
'attributes': {
'last': u'Smith',
'first': u'Sally',
'username': u'SallySmith1'
}
}, {
'type': 'comments',
'id': 1,
'relationships': {
'author': {
'links': {
'related': '/comments/1/author',
'self': '/comments/1/relationships/author'
}
},
'post': {
'links': {
'related': '/comments/1/post',
'self': '/comments/1/relationships/post'
}
}
},
'attributes': {
'content': u'This is comment 1'
}
}],
'meta': {
'sqlalchemy_jsonapi_version': __version__
}
}
actual = response.data
self.assertEqual(expected, actual)
self.assertEqual(200, response.status_code)
def test_get_collection_given_pagination_with_offset(self):
"""Get collection given pagination with offset 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
for x in range(10):
comment = models.Comment(
content='This is comment {0}'.format(x+1), author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
response = models.serializer.get_collection(
self.session,
{'page[offset]': u'5', 'page[limit]': u'2'}, 'comments')
expected = {
'jsonapi': {
'version': '1.0'
},
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'included': [],
'data': [{
'relationships': {
'author': {
'links': {
'related': '/comments/6/author',
'self': '/comments/6/relationships/author'
}
},
'post': {
'links': {
'related': '/comments/6/post',
'self': '/comments/6/relationships/post'
}
}
},
'attributes': {
'content': u'This is comment 6'
},
'id': 6,
'type': 'comments'
}, {
'relationships': {
'author': {
'links': {
'related': '/comments/7/author',
'self': '/comments/7/relationships/author'
}
},
'post': {
'links': {
'related': '/comments/7/post',
'self': '/comments/7/relationships/post'
}
}
},
'attributes': {
'content': u'This is comment 7'
},
'id': 7,
'type': 'comments'
}]
}
actual = response.data
self.assertEqual(expected, actual)
self.assertEqual(200, response.status_code)
    def test_get_collection_given_invalid_page_number_for_pagination(self):
        """Get collection given an invalid page number for pagination returns 400.
A BadRequestError is raised.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
for x in range(10):
comment = models.Comment(
content='This is comment {0}'.format(x+1), author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
with self.assertRaises(errors.BadRequestError) as error:
models.serializer.get_collection(
self.session,
{'page[number]': u'foo', 'page[size]': u'2'}, 'comments')
expected_detail = 'Page query parameters must be integers'
self.assertEqual(error.exception.detail, expected_detail)
self.assertEqual(error.exception.status_code, 400)
def test_get_collection_given_invalid_limit_for_pagination(self):
"""Get collection given invalid limit for pagination returns 400.
A BadRequestError is raised.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
for x in range(10):
comment = models.Comment(
content='This is comment {0}'.format(x+1), author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
with self.assertRaises(errors.BadRequestError) as error:
models.serializer.get_collection(
self.session,
{'page[offset]': u'5', 'page[limit]': u'foo'}, 'comments')
expected_detail = 'Page query parameters must be integers'
self.assertEqual(error.exception.detail, expected_detail)
self.assertEqual(error.exception.status_code, 400)
    def test_get_collection_when_pagination_is_out_of_range(self):
"""Get collection when pagination is out of range returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
for x in range(10):
comment = models.Comment(
content='This is comment {0}'.format(x+1), author_id=user.id,
post_id=blog_post.id, author=user, post=blog_post)
self.session.add(comment)
self.session.commit()
response = models.serializer.get_collection(
self.session,
{'page[offset]': u'999999', 'page[limit]': u'2'}, 'comments')
expected = {
'data': [],
'included': [],
'meta': {
'sqlalchemy_jsonapi_version': __version__
},
'jsonapi': {
'version': '1.0'
}
}
actual = response.data
self.assertEqual(expected, actual)
self.assertEqual(200, response.status_code)
```
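The pagination tests above drive the `page[offset]`/`page[limit]` query parameters. As a rough mental model (a minimal sketch, not sqlalchemy_jsonapi's actual implementation, which paginates the SQLAlchemy query rather than a Python list), the parameters reduce to a slice after integer validation:
```python
# Minimal sketch of the offset/limit pagination asserted on above.
def paginate(items, params):
    try:
        offset = int(params.get('page[offset]', 0))
        limit = int(params.get('page[limit]', len(items)))
    except ValueError:
        # Mirrors the BadRequestError detail checked by the tests above.
        raise ValueError('Page query parameters must be integers')
    return items[offset:offset + limit]
comments = ['This is comment {0}'.format(i + 1) for i in range(10)]
assert paginate(comments, {'page[offset]': '5', 'page[limit]': '2'}) == [
    'This is comment 6', 'This is comment 7']
# An out-of-range offset yields an empty page, matching the 200-with-empty-data test.
assert paginate(comments, {'page[offset]': '999999', 'page[limit]': '2'}) == []
```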
#### File: sqlalchemy_jsonapi/unittests/test_serializer_post_relationship.py
```python
from sqlalchemy_jsonapi import errors
from sqlalchemy_jsonapi.unittests.utils import testcases
from sqlalchemy_jsonapi.unittests import models
from sqlalchemy_jsonapi import __version__
class PostRelationship(testcases.SqlalchemyJsonapiTestCase):
"""Tests for serializer.post_relationship."""
def test_post_relationship_on_to_many_success(self):
"""Post relationship creates a relationship on many resources."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
comment_one = models.Comment(
content='This is the first comment',
author_id=user.id, author=user)
self.session.add(comment_one)
comment_two = models.Comment(
content='This is the second comment',
author_id=user.id, author=user)
self.session.add(comment_two)
self.session.commit()
payload = {
'data': [{
'type': 'comments',
'id': comment_one.id
}, {
'type': 'comments',
'id': comment_two.id
}]
}
models.serializer.post_relationship(
self.session, payload, 'posts', blog_post.id, 'comments')
self.assertEqual(comment_one.post.id, blog_post.id)
self.assertEqual(comment_one.post, blog_post)
self.assertEqual(comment_two.post.id, blog_post.id)
        self.assertEqual(comment_two.post, blog_post)
def test_post_relationship_on_to_many_response(self):
"""Post relationship creates a relationship on many resources returns 200."""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
comment_one = models.Comment(
content='This is the first comment',
author_id=user.id, author=user)
self.session.add(comment_one)
comment_two = models.Comment(
content='This is the second comment',
author_id=user.id, author=user)
self.session.add(comment_two)
self.session.commit()
payload = {
'data': [{
'type': 'comments',
'id': comment_one.id
}, {
'type': 'comments',
'id': comment_two.id
}]
}
response = models.serializer.post_relationship(
self.session, payload, 'posts', blog_post.id, 'comments')
expected = {
'data': [{
'type': 'comments',
'id': comment_one.id
}, {
'type': 'comments',
'id': comment_two.id
}],
'jsonapi': {
'version': '1.0'
},
'meta': {
'sqlalchemy_jsonapi_version': __version__
}
}
actual = response.data
self.assertEqual(expected, actual)
self.assertEqual(200, response.status_code)
def test_post_relationship_with_hash_instead_of_array(self):
"""Post relalationship with a hash instead of an array returns 409.
A ValidationError is raised.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
comment = models.Comment(
content='This is the first comment',
author_id=user.id, author=user)
self.session.add(comment)
self.session.commit()
payload = {
'data': {
'type': 'comments',
'id': comment.id
}
}
with self.assertRaises(errors.ValidationError) as error:
models.serializer.post_relationship(
self.session, payload, 'posts', blog_post.id, 'comments')
expected_detail = '/data must be an array'
self.assertEqual(error.exception.detail, expected_detail)
self.assertEqual(error.exception.status_code, 409)
def test_post_relationship_with_incompatible_data_model(self):
"""Post relationship with incompatible data model returns 409.
The model type in the payload must match the relationship type.
A ValidationError is raised.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
comment = models.Comment(
content='This is the first comment',
author_id=user.id, author=user)
self.session.add(comment)
self.session.commit()
payload = {
'data': [{
'type': 'users',
'id': user.id
}]
}
with self.assertRaises(errors.ValidationError) as error:
models.serializer.post_relationship(
self.session, payload, 'posts', blog_post.id, 'comments')
expected_detail = 'Incompatible type provided'
self.assertEqual(error.exception.detail, expected_detail)
self.assertEqual(error.exception.status_code, 409)
def test_post_relationship_with_to_one_relationship(self):
"""Post relationship with to one relationship returns 409.
Cannot post to a to-one relationship.
A ValidationError is raised.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
comment = models.Comment(
content='This is the first comment',
author_id=user.id, author=user)
self.session.add(comment)
self.session.commit()
with self.assertRaises(errors.ValidationError) as error:
models.serializer.post_relationship(
self.session, {}, 'comments', comment.id, 'author')
expected_detail = 'Cannot post to to-one relationship'
self.assertEqual(error.exception.detail, expected_detail)
self.assertEqual(error.exception.status_code, 409)
def test_post_relationship_with_unknown_relationship(self):
"""Post relationship with unknown relationship results in a 404.
A RelationshipNotFoundError is raised.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
comment = models.Comment(
content='This is the first comment',
author_id=user.id, author=user)
self.session.add(comment)
self.session.commit()
with self.assertRaises(errors.RelationshipNotFoundError) as error:
models.serializer.post_relationship(
self.session, {}, 'posts',
blog_post.id, 'unknown-relationship')
self.assertEqual(error.exception.status_code, 404)
def test_post_relationship_with_extra_data_keys(self):
"""Post relationship with data keys other than 'id' and 'type' results in 404.
A BadRequestError is raised.
"""
user = models.User(
first='Sally', last='Smith',
password='password', username='SallySmith1')
self.session.add(user)
blog_post = models.Post(
title='This Is A Title', content='This is the content',
author_id=user.id, author=user)
self.session.add(blog_post)
comment = models.Comment(
content='This is the first comment',
author_id=user.id, author=user)
self.session.add(comment)
self.session.commit()
payload = {
'data': [{
'type': 'comments',
'id': comment.id,
'extra-key': 'foo'
}]
}
with self.assertRaises(errors.BadRequestError) as error:
models.serializer.post_relationship(
self.session, payload, 'posts', blog_post.id, 'comments')
expected_detail = 'comments must have type and id keys'
self.assertEqual(error.exception.detail, expected_detail)
self.assertEqual(error.exception.status_code, 400)
```
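The tests above pin down the payload shape `post_relationship` accepts for a to-many relationship: `data` must be an array of resource identifier objects carrying exactly `type` and `id`, with `type` matching the relationship. A hypothetical client-side helper (not part of sqlalchemy_jsonapi) that builds and sanity-checks such a payload might look like this:
```python
# Hypothetical helper illustrating the payload rules asserted above.
def build_to_many_payload(resource_type, ids):
    return {'data': [{'type': resource_type, 'id': i} for i in ids]}
def check_payload(payload, expected_type):
    data = payload.get('data')
    if not isinstance(data, list):
        raise ValueError('/data must be an array')  # serializer answers 409
    for linkage in data:
        if set(linkage) != {'type', 'id'}:
            # serializer answers 400
            raise ValueError('%s must have type and id keys' % expected_type)
        if linkage['type'] != expected_type:
            raise ValueError('Incompatible type provided')  # serializer answers 409
payload = build_to_many_payload('comments', [1, 2])
check_payload(payload, 'comments')
```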
#### File: unittests/utils/testcases.py
```python
import unittest
import nose
from functools import wraps
from sqlalchemy.orm import sessionmaker
from sqlalchemy import create_engine
from sqlalchemy_jsonapi.unittests.models import Base
def fragile(func):
"""The fragile decorator raises SkipTest if test fails.
    Use @fragile for tests that intermittently fail.
"""
@wraps(func)
def wrapper(self):
try:
return func(self)
except AssertionError:
raise nose.SkipTest()
return wrapper
class SqlalchemyJsonapiTestCase(unittest.TestCase):
"""Base testcase for SQLAclehmy-related tests."""
def setUp(self, *args, **kwargs):
"""Configure sqlalchemy and session."""
super(SqlalchemyJsonapiTestCase, self).setUp(*args, **kwargs)
self.engine = create_engine('sqlite://')
Session = sessionmaker(bind=self.engine)
self.session = Session()
Base.metadata.create_all(self.engine)
def tearDown(self, *args, **kwargs):
"""Reset the sqlalchemy engine."""
super(SqlalchemyJsonapiTestCase, self).tearDown(*args, **kwargs)
Base.metadata.drop_all(self.engine)
``` |
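For reference, a brief sketch of how the `fragile` decorator is meant to be used; the test body is hypothetical and assumes the `models` module from the unittests above:
```python
from sqlalchemy_jsonapi.unittests import models
from sqlalchemy_jsonapi.unittests.utils import testcases
class ExampleFlakyTest(testcases.SqlalchemyJsonapiTestCase):
    """Illustrative only: shows the @fragile usage pattern."""
    @testcases.fragile
    def test_sometimes_flaky(self):
        user = models.User(
            first='Sally', last='Smith',
            password='password', username='SallySmith1')
        self.session.add(user)
        self.session.commit()
        # If this assertion fails intermittently, nose.SkipTest is raised
        # instead of a hard failure.
        self.assertEqual('SallySmith1', user.username)
```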
{
"source": "jimbobhickville/tooz",
"score": 2
} |
#### File: tooz/tooz/coordination.py
```python
import abc
import collections
from concurrent import futures
import enum
import logging
import threading
from oslo_utils import encodeutils
from oslo_utils import netutils
from oslo_utils import timeutils
import six
from stevedore import driver
import tenacity
import tooz
from tooz import _retry
from tooz import partitioner
from tooz import utils
LOG = logging.getLogger(__name__)
TOOZ_BACKENDS_NAMESPACE = "tooz.backends"
class Characteristics(enum.Enum):
"""Attempts to describe the characteristic that a driver supports."""
DISTRIBUTED_ACROSS_THREADS = 'DISTRIBUTED_ACROSS_THREADS'
"""Coordinator components when used by multiple **threads** work
the same as if those components were only used by a single thread."""
DISTRIBUTED_ACROSS_PROCESSES = 'DISTRIBUTED_ACROSS_PROCESSES'
"""Coordinator components when used by multiple **processes** work
the same as if those components were only used by a single thread."""
DISTRIBUTED_ACROSS_HOSTS = 'DISTRIBUTED_ACROSS_HOSTS'
"""Coordinator components when used by multiple **hosts** work
the same as if those components were only used by a single thread."""
NON_TIMEOUT_BASED = 'NON_TIMEOUT_BASED'
"""The driver has the following property:
* Its operations are not based on the timeout of other clients, but on some
other more robust mechanisms.
"""
LINEARIZABLE = 'LINEARIZABLE'
"""The driver has the following properties:
* Ensures each operation must take place before its
completion time.
* Any operation invoked subsequently must take place
after the invocation and by extension, after the original operation
itself.
"""
SEQUENTIAL = 'SEQUENTIAL'
"""The driver has the following properties:
* Operations can take effect before or after completion – but all
operations retain the constraint that operations from any given process
    must take place in that process's order.
"""
CAUSAL = 'CAUSAL'
"""The driver has the following properties:
* Does **not** have to enforce the order of every
operation from a process, perhaps, only causally related operations
must occur in order.
"""
SERIALIZABLE = 'SERIALIZABLE'
"""The driver has the following properties:
* The history of **all** operations is equivalent to
one that took place in some single atomic order but with unknown
invocation and completion times - it places no bounds on
time or order.
"""
SAME_VIEW_UNDER_PARTITIONS = 'SAME_VIEW_UNDER_PARTITIONS'
"""When a client is connected to a server and that server is partitioned
from a group of other servers it will (somehow) have the same view of
data as a client connected to a server on the other side of the
partition (typically this is accomplished by write availability being
lost and therefore nothing can change).
"""
SAME_VIEW_ACROSS_CLIENTS = 'SAME_VIEW_ACROSS_CLIENTS'
"""A client connected to one server will *always* have the same view
every other client will have (no matter what server those other
clients are connected to). Typically this is a sacrifice in
write availability because before a write can be acknowledged it must
be acknowledged by *all* servers in a cluster (so that all clients
that are connected to those servers read the exact *same* thing).
"""
class Hooks(list):
def run(self, *args, **kwargs):
return list(map(lambda cb: cb(*args, **kwargs), self))
class Event(object):
"""Base class for events."""
class MemberJoinedGroup(Event):
"""A member joined a group event."""
def __init__(self, group_id, member_id):
self.group_id = group_id
self.member_id = member_id
def __repr__(self):
return "<%s: group %s: +member %s>" % (self.__class__.__name__,
self.group_id,
self.member_id)
class MemberLeftGroup(Event):
"""A member left a group event."""
def __init__(self, group_id, member_id):
self.group_id = group_id
self.member_id = member_id
def __repr__(self):
return "<%s: group %s: -member %s>" % (self.__class__.__name__,
self.group_id,
self.member_id)
class LeaderElected(Event):
"""A leader as been elected."""
def __init__(self, group_id, member_id):
self.group_id = group_id
self.member_id = member_id
class Heart(object):
"""Coordination drivers main liveness pump (its heart)."""
def __init__(self, driver, thread_cls=threading.Thread,
event_cls=threading.Event):
self._thread_cls = thread_cls
self._dead = event_cls()
self._runner = None
self._driver = driver
self._beats = 0
@property
def beats(self):
"""How many times the heart has beaten."""
return self._beats
def is_alive(self):
"""Returns if the heart is beating."""
return not (self._runner is None
or not self._runner.is_alive())
def _beat_forever_until_stopped(self):
"""Inner beating loop."""
retry = tenacity.Retrying(
wait=tenacity.wait_fixed(1),
before_sleep=tenacity.before_sleep_log(LOG, logging.warning),
)
while not self._dead.is_set():
with timeutils.StopWatch() as w:
wait_until_next_beat = retry(self._driver.heartbeat)
ran_for = w.elapsed()
has_to_sleep_for = wait_until_next_beat - ran_for
if has_to_sleep_for < 0:
LOG.warning(
"Heartbeating took too long to execute (it ran for"
" %0.2f seconds which is %0.2f seconds longer than"
" the next heartbeat idle time). This may cause"
" timeouts (in locks, leadership, ...) to"
" happen (which will not end well).", ran_for,
ran_for - wait_until_next_beat)
self._beats += 1
# NOTE(harlowja): use the event object for waiting and
# not a sleep function since doing that will allow this code
# to terminate early if stopped via the stop() method vs
# having to wait until the sleep function returns.
# NOTE(jd): Wait for only the half time of what we should.
# This is a measure of safety, better be too soon than too late.
self._dead.wait(has_to_sleep_for / 2.0)
def start(self, thread_cls=None):
"""Starts the heart beating thread (noop if already started)."""
if not self.is_alive():
self._dead.clear()
self._beats = 0
if thread_cls is None:
thread_cls = self._thread_cls
self._runner = thread_cls(target=self._beat_forever_until_stopped)
self._runner.daemon = True
self._runner.start()
def stop(self):
"""Requests the heart beating thread to stop beating."""
self._dead.set()
def wait(self, timeout=None):
"""Wait up to given timeout for the heart beating thread to stop."""
self._runner.join(timeout)
return self._runner.is_alive()
class CoordinationDriver(object):
requires_beating = False
"""
Usage requirement that if true requires that the :py:meth:`~.heartbeat`
    be called periodically (at a given rate) to avoid locks, sessions and
    other resources from being automatically closed/discarded by the
    coordinator's backing store.
"""
CHARACTERISTICS = ()
"""
Tuple of :py:class:`~tooz.coordination.Characteristics` introspectable
    enum member(s) that can be used to interrogate how this driver works.
"""
def __init__(self, member_id, parsed_url, options):
super(CoordinationDriver, self).__init__()
self._member_id = member_id
self._started = False
self._hooks_join_group = collections.defaultdict(Hooks)
self._hooks_leave_group = collections.defaultdict(Hooks)
self._hooks_elected_leader = collections.defaultdict(Hooks)
self.requires_beating = (
CoordinationDriver.heartbeat != self.__class__.heartbeat
)
self.heart = Heart(self)
def _has_hooks_for_group(self, group_id):
return (group_id in self._hooks_join_group or
group_id in self._hooks_leave_group)
def join_partitioned_group(
self, group_id,
weight=1,
partitions=partitioner.Partitioner.DEFAULT_PARTITION_NUMBER):
"""Join a group and get a partitioner.
        A partitioner allows distributing a bunch of objects across several
        members using a consistent hash ring. Each object gets assigned (at
        least) one member responsible for it. It's then possible to check
        which object is owned by any member of the group.
        This method also creates the group if necessary, and joins it with
        the selected weight.
:param group_id: The group to create a partitioner for.
:param weight: The weight to use in the hashring for this node.
:param partitions: The number of partitions to create.
:return: A :py:class:`~tooz.partitioner.Partitioner` object.
"""
self.join_group_create(group_id, capabilities={'weight': weight})
return partitioner.Partitioner(self, group_id, partitions=partitions)
def leave_partitioned_group(self, partitioner):
"""Leave a partitioned group.
        This leaves the partitioned group and stops the partitioner.
        :param partitioner: The partitioner returned by
                            :py:meth:`join_partitioned_group`.
"""
leave = self.leave_group(partitioner.group_id)
partitioner.stop()
return leave.get()
@staticmethod
def run_watchers(timeout=None):
"""Run the watchers callback.
This may also activate :py:meth:`.run_elect_coordinator` (depending
on driver implementation).
"""
raise tooz.NotImplemented
@staticmethod
def run_elect_coordinator():
"""Try to leader elect this coordinator & activate hooks on success."""
raise tooz.NotImplemented
def watch_join_group(self, group_id, callback):
"""Call a function when group_id sees a new member joined.
The callback functions will be executed when `run_watchers` is
called.
:param group_id: The group id to watch
:param callback: The function to execute when a member joins this group
"""
self._hooks_join_group[group_id].append(callback)
def unwatch_join_group(self, group_id, callback):
"""Stop executing a function when a group_id sees a new member joined.
:param group_id: The group id to unwatch
:param callback: The function that was executed when a member joined
this group
"""
try:
# Check if group_id is in hooks to avoid creating a default empty
# entry in hooks list.
if group_id not in self._hooks_join_group:
raise ValueError
self._hooks_join_group[group_id].remove(callback)
except ValueError:
raise WatchCallbackNotFound(group_id, callback)
if not self._hooks_join_group[group_id]:
del self._hooks_join_group[group_id]
def watch_leave_group(self, group_id, callback):
"""Call a function when group_id sees a new member leaving.
The callback functions will be executed when `run_watchers` is
called.
:param group_id: The group id to watch
:param callback: The function to execute when a member leaves this
group
"""
self._hooks_leave_group[group_id].append(callback)
def unwatch_leave_group(self, group_id, callback):
"""Stop executing a function when a group_id sees a new member leaving.
:param group_id: The group id to unwatch
:param callback: The function that was executed when a member left
this group
"""
try:
# Check if group_id is in hooks to avoid creating a default empty
# entry in hooks list.
if group_id not in self._hooks_leave_group:
raise ValueError
self._hooks_leave_group[group_id].remove(callback)
except ValueError:
raise WatchCallbackNotFound(group_id, callback)
if not self._hooks_leave_group[group_id]:
del self._hooks_leave_group[group_id]
def watch_elected_as_leader(self, group_id, callback):
"""Call a function when member gets elected as leader.
The callback functions will be executed when `run_watchers` is
called.
:param group_id: The group id to watch
        :param callback: The function to execute when this member is
                         elected leader of this group
"""
self._hooks_elected_leader[group_id].append(callback)
def unwatch_elected_as_leader(self, group_id, callback):
"""Call a function when member gets elected as leader.
The callback functions will be executed when `run_watchers` is
called.
:param group_id: The group id to watch
:param callback: The function to execute when a member leaves this
group
"""
try:
self._hooks_elected_leader[group_id].remove(callback)
except ValueError:
raise WatchCallbackNotFound(group_id, callback)
if not self._hooks_elected_leader[group_id]:
del self._hooks_elected_leader[group_id]
@staticmethod
def stand_down_group_leader(group_id):
"""Stand down as the group leader if we are.
:param group_id: The group where we don't want to be a leader anymore
"""
raise tooz.NotImplemented
@property
def is_started(self):
return self._started
def start(self, start_heart=False):
"""Start the service engine.
If needed, the establishment of a connection to the servers
is initiated.
"""
if self._started:
raise tooz.ToozError(
"Can not start a driver which has not been stopped")
self._start()
if self.requires_beating and start_heart:
self.heart.start()
self._started = True
# Tracks which group are joined
self._joined_groups = set()
def _start(self):
pass
def stop(self):
"""Stop the service engine.
If needed, the connection to servers is closed and the client will
disappear from all joined groups.
"""
if not self._started:
raise tooz.ToozError(
"Can not stop a driver which has not been started")
if self.heart.is_alive():
self.heart.stop()
self.heart.wait()
# Some of the drivers modify joined_groups when being called to leave
# so clone it so that we aren't modifying something while iterating.
joined_groups = self._joined_groups.copy()
leaving = [self.leave_group(group) for group in joined_groups]
for fut in leaving:
try:
fut.get()
except tooz.ToozError:
# Whatever happens, ignore. Maybe we got booted out/never
# existed in the first place, or something is down, but we just
# want to call _stop after whatever happens to not leak any
# connection.
pass
self._stop()
self._started = False
def _stop(self):
pass
@staticmethod
def create_group(group_id):
"""Request the creation of a group asynchronously.
:param group_id: the id of the group to create
:type group_id: ascii bytes
:returns: None
:rtype: CoordAsyncResult
"""
raise tooz.NotImplemented
@staticmethod
def get_groups():
"""Return the list composed by all groups ids asynchronously.
:returns: the list of all created group ids
:rtype: CoordAsyncResult
"""
raise tooz.NotImplemented
@staticmethod
def join_group(group_id, capabilities=b""):
"""Join a group and establish group membership asynchronously.
:param group_id: the id of the group to join
:type group_id: ascii bytes
:param capabilities: the capabilities of the joined member
:type capabilities: object
:returns: None
:rtype: CoordAsyncResult
"""
raise tooz.NotImplemented
@_retry.retry()
def join_group_create(self, group_id, capabilities=b""):
"""Join a group and create it if necessary.
If the group cannot be joined because it does not exist, it is created
before being joined.
This function will keep retrying until it can create the group and join
it. Since nothing is transactional, it may have to retry several times
if another member is creating/deleting the group at the same time.
:param group_id: Identifier of the group to join and create
:param capabilities: the capabilities of the joined member
"""
req = self.join_group(group_id, capabilities)
try:
req.get()
except GroupNotCreated:
req = self.create_group(group_id)
try:
req.get()
except GroupAlreadyExist:
# The group might have been created in the meantime, ignore
pass
# Now retry to join the group
raise _retry.TryAgain
@staticmethod
def leave_group(group_id):
"""Leave a group asynchronously.
:param group_id: the id of the group to leave
:type group_id: ascii bytes
:returns: None
:rtype: CoordAsyncResult
"""
raise tooz.NotImplemented
@staticmethod
def delete_group(group_id):
"""Delete a group asynchronously.
:param group_id: the id of the group to leave
:type group_id: ascii bytes
:returns: Result
:rtype: CoordAsyncResult
"""
raise tooz.NotImplemented
@staticmethod
def get_members(group_id):
"""Return the set of all members ids of the specified group.
:returns: set of all created group ids
:rtype: CoordAsyncResult
"""
raise tooz.NotImplemented
@staticmethod
def get_member_capabilities(group_id, member_id):
"""Return the capabilities of a member asynchronously.
:param group_id: the id of the group of the member
:type group_id: ascii bytes
:param member_id: the id of the member
:type member_id: ascii bytes
:returns: capabilities of a member
:rtype: CoordAsyncResult
"""
raise tooz.NotImplemented
@staticmethod
def get_member_info(group_id, member_id):
"""Return the statistics and capabilities of a member asynchronously.
:param group_id: the id of the group of the member
:type group_id: ascii bytes
:param member_id: the id of the member
:type member_id: ascii bytes
:returns: capabilities and statistics of a member
:rtype: CoordAsyncResult
"""
raise tooz.NotImplemented
@staticmethod
def update_capabilities(group_id, capabilities):
"""Update member capabilities in the specified group.
:param group_id: the id of the group of the current member
:type group_id: ascii bytes
:param capabilities: the capabilities of the updated member
:type capabilities: object
:returns: None
:rtype: CoordAsyncResult
"""
raise tooz.NotImplemented
@staticmethod
def get_leader(group_id):
"""Return the leader for a group.
        :param group_id: the id of the group
:returns: the leader
:rtype: CoordAsyncResult
"""
raise tooz.NotImplemented
@staticmethod
def get_lock(name):
"""Return a distributed lock.
        This is an exclusive lock, a second call to acquire() will block or
return False.
:param name: The lock name that is used to identify it across all
nodes.
"""
raise tooz.NotImplemented
@staticmethod
def heartbeat():
"""Update member status to indicate it is still alive.
Method to run once in a while to be sure that the member is not dead
and is still an active member of a group.
:return: The number of seconds to wait before sending a new heartbeat.
"""
pass
@six.add_metaclass(abc.ABCMeta)
class CoordAsyncResult(object):
"""Representation of an asynchronous task.
    Every API call returns a CoordAsyncResult object on which the result or
the status of the task can be requested.
"""
@abc.abstractmethod
def get(self, timeout=None):
"""Retrieve the result of the corresponding asynchronous call.
        :param timeout: block until the timeout expires.
:type timeout: float
"""
@abc.abstractmethod
def done(self):
"""Returns True if the task is done, False otherwise."""
class CoordinatorResult(CoordAsyncResult):
"""Asynchronous result that references a future."""
def __init__(self, fut, failure_translator=None):
self._fut = fut
self._failure_translator = failure_translator
def get(self, timeout=None):
try:
if self._failure_translator:
with self._failure_translator():
return self._fut.result(timeout=timeout)
else:
return self._fut.result(timeout=timeout)
except futures.TimeoutError as e:
utils.raise_with_cause(OperationTimedOut,
encodeutils.exception_to_unicode(e),
cause=e)
def done(self):
return self._fut.done()
class CoordinationDriverWithExecutor(CoordinationDriver):
EXCLUDE_OPTIONS = None
def __init__(self, member_id, parsed_url, options):
self._options = utils.collapse(options, exclude=self.EXCLUDE_OPTIONS)
self._executor = utils.ProxyExecutor.build(
self.__class__.__name__, self._options)
super(CoordinationDriverWithExecutor, self).__init__(
member_id, parsed_url, options)
def start(self, start_heart=False):
self._executor.start()
super(CoordinationDriverWithExecutor, self).start(start_heart)
def stop(self):
super(CoordinationDriverWithExecutor, self).stop()
self._executor.stop()
class CoordinationDriverCachedRunWatchers(CoordinationDriver):
"""Coordination driver with a `run_watchers` implementation.
This implementation of `run_watchers` is based on a cache of the group
members between each run of `run_watchers` that is being updated between
each run.
"""
def __init__(self, member_id, parsed_url, options):
super(CoordinationDriverCachedRunWatchers, self).__init__(
member_id, parsed_url, options)
# A cache for group members
self._group_members = collections.defaultdict(set)
self._joined_groups = set()
def _init_watch_group(self, group_id):
if group_id not in self._group_members:
members = self.get_members(group_id)
self._group_members[group_id] = members.get()
def watch_join_group(self, group_id, callback):
self._init_watch_group(group_id)
super(CoordinationDriverCachedRunWatchers, self).watch_join_group(
group_id, callback)
def unwatch_join_group(self, group_id, callback):
super(CoordinationDriverCachedRunWatchers, self).unwatch_join_group(
group_id, callback)
if (not self._has_hooks_for_group(group_id) and
group_id in self._group_members):
del self._group_members[group_id]
def watch_leave_group(self, group_id, callback):
self._init_watch_group(group_id)
super(CoordinationDriverCachedRunWatchers, self).watch_leave_group(
group_id, callback)
def unwatch_leave_group(self, group_id, callback):
super(CoordinationDriverCachedRunWatchers, self).unwatch_leave_group(
group_id, callback)
if (not self._has_hooks_for_group(group_id) and
group_id in self._group_members):
del self._group_members[group_id]
def run_watchers(self, timeout=None):
with timeutils.StopWatch(duration=timeout) as w:
result = []
group_with_hooks = set(self._hooks_join_group.keys()).union(
set(self._hooks_leave_group.keys()))
for group_id in group_with_hooks:
try:
group_members = self.get_members(group_id).get(
timeout=w.leftover(return_none=True))
except GroupNotCreated:
group_members = set()
if (group_id in self._joined_groups and
self._member_id not in group_members):
self._joined_groups.discard(group_id)
old_group_members = self._group_members.get(group_id, set())
for member_id in (group_members - old_group_members):
result.extend(
self._hooks_join_group[group_id].run(
MemberJoinedGroup(group_id, member_id)))
for member_id in (old_group_members - group_members):
result.extend(
self._hooks_leave_group[group_id].run(
MemberLeftGroup(group_id, member_id)))
self._group_members[group_id] = group_members
return result
def get_coordinator(backend_url, member_id,
characteristics=frozenset(), **kwargs):
"""Initialize and load the backend.
:param backend_url: the backend URL to use
    :type backend_url: str
:param member_id: the id of the member
:type member_id: ascii bytes
    :param characteristics: set of :py:class:`.Characteristics` that will
                            be matched to the requested driver (this **will**
                            become a **required** parameter in a future tooz
                            version)
    :type characteristics: set
:param kwargs: additional coordinator options (these take precedence over
options of the **same** name found in the ``backend_url``
arguments query string)
"""
parsed_url = netutils.urlsplit(backend_url)
parsed_qs = six.moves.urllib.parse.parse_qs(parsed_url.query)
if kwargs:
options = {}
for (k, v) in six.iteritems(kwargs):
options[k] = [v]
for (k, v) in six.iteritems(parsed_qs):
if k not in options:
options[k] = v
else:
options = parsed_qs
d = driver.DriverManager(
namespace=TOOZ_BACKENDS_NAMESPACE,
name=parsed_url.scheme,
invoke_on_load=True,
invoke_args=(member_id, parsed_url, options)).driver
characteristics = set(characteristics)
driver_characteristics = set(getattr(d, 'CHARACTERISTICS', set()))
missing_characteristics = characteristics - driver_characteristics
if missing_characteristics:
raise ToozDriverChosenPoorly("Desired characteristics %s"
" is not a strict subset of driver"
" characteristics %s, %s"
" characteristics were not found"
% (characteristics,
driver_characteristics,
missing_characteristics))
return d
# TODO(harlowja): We'll have to figure out a way to remove this 'alias' at
# some point in the future (when we have a better way to tell people it has
# moved without messing up their exception catching hierarchy).
ToozError = tooz.ToozError
class ToozDriverChosenPoorly(tooz.ToozError):
"""Raised when a driver does not match desired characteristics."""
class ToozConnectionError(tooz.ToozError):
"""Exception raised when the client cannot connect to the server."""
class OperationTimedOut(tooz.ToozError):
"""Exception raised when an operation times out."""
class LockAcquireFailed(tooz.ToozError):
"""Exception raised when a lock acquire fails in a context manager."""
class GroupNotCreated(tooz.ToozError):
"""Exception raised when the caller request an nonexistent group."""
def __init__(self, group_id):
self.group_id = group_id
super(GroupNotCreated, self).__init__(
"Group %s does not exist" % group_id)
class GroupAlreadyExist(tooz.ToozError):
"""Exception raised trying to create an already existing group."""
def __init__(self, group_id):
self.group_id = group_id
super(GroupAlreadyExist, self).__init__(
"Group %s already exists" % group_id)
class MemberAlreadyExist(tooz.ToozError):
"""Exception raised trying to join a group already joined."""
def __init__(self, group_id, member_id):
self.group_id = group_id
self.member_id = member_id
super(MemberAlreadyExist, self).__init__(
"Member %s has already joined %s" %
(member_id, group_id))
class MemberNotJoined(tooz.ToozError):
"""Exception raised trying to access a member not in a group."""
def __init__(self, group_id, member_id):
self.group_id = group_id
self.member_id = member_id
super(MemberNotJoined, self).__init__("Member %s has not joined %s" %
(member_id, group_id))
class GroupNotEmpty(tooz.ToozError):
"Exception raised when the caller try to delete a group with members."
def __init__(self, group_id):
self.group_id = group_id
super(GroupNotEmpty, self).__init__("Group %s is not empty" % group_id)
class WatchCallbackNotFound(tooz.ToozError):
"""Exception raised when unwatching a group.
Raised when the caller tries to unwatch a group with a callback that
does not exist.
"""
def __init__(self, group_id, callback):
self.group_id = group_id
self.callback = callback
super(WatchCallbackNotFound, self).__init__(
'Callback %s is not registered on group %s' %
(callback.__name__, group_id))
# TODO(harlowja,jd): We'll have to figure out a way to remove this 'alias' at
# some point in the future (when we have a better way to tell people it has
# moved without messing up their exception catching hierarchy).
SerializationError = utils.SerializationError
``` |
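Tying the pieces above together, a hedged usage sketch of the coordinator API; the memcached URL, group name and member id are illustrative, and any installed tooz backend follows the same flow:
```python
import time
from tooz import coordination
coordinator = coordination.get_coordinator(
    'memcached://localhost:11211', b'worker-1')  # illustrative backend/member
coordinator.start(start_heart=True)  # Heart drives periodic heartbeat()
coordinator.join_group_create(b'workers', capabilities=b'')
def on_join(event):
    # event is a MemberJoinedGroup instance
    print('%s joined %s' % (event.member_id, event.group_id))
coordinator.watch_join_group(b'workers', on_join)
for _ in range(3):
    coordinator.run_watchers()  # fires the registered join/leave hooks
    time.sleep(1)
coordinator.stop()
```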
{
"source": "jimbobhickville/Zake",
"score": 2
} |
#### File: Zake/zake/fake_client.py
```python
import collections
import contextlib
import functools
import logging
import sys
import time
import uuid
import six
from kazoo import exceptions as k_exceptions
from kazoo.handlers import threading as k_threading
from kazoo.protocol import states as k_states
from kazoo import retry as k_retry
from kazoo.recipe.barrier import Barrier
from kazoo.recipe.barrier import DoubleBarrier
from kazoo.recipe.counter import Counter
from kazoo.recipe.election import Election
from kazoo.recipe.lock import Lock
from kazoo.recipe.lock import Semaphore
from kazoo.recipe.partitioner import SetPartitioner
from kazoo.recipe.party import Party
from kazoo.recipe.party import ShallowParty
from kazoo.recipe.queue import Queue
from kazoo.recipe.queue import LockingQueue
from kazoo.recipe.watchers import ChildrenWatch
from kazoo.recipe.watchers import DataWatch
from zake import fake_storage as fs
from zake import utils
from zake import version
LOG = logging.getLogger(__name__)
# We provide a basic txn support (not as functional as zookeeper) and this
# was added in 3.4.0 so we will say we are 3.4.0 compat (until proven
# differently).
SERVER_VERSION = (3, 4, 0)
_NO_ACL_MSG = "ACLs not currently supported"
class FakeClient(object):
"""A fake mostly functional/good enough kazoo compat. client
It can have its underlying storage mocked out (as well as exposes the
listeners that are currently active and the watches that are currently
active) so that said functionality can be examined & introspected by
testing frameworks (while in use and after the fact).
"""
def __init__(self, handler=None, storage=None, server_version=None):
self._listeners = set()
self._child_watchers = collections.defaultdict(list)
self._data_watchers = collections.defaultdict(list)
if handler is None:
self._handler = k_threading.SequentialThreadingHandler()
self._own_handler = True
else:
self._handler = handler
self._own_handler = False
if storage is not None:
self._storage = storage
self._own_storage = False
else:
self._storage = fs.FakeStorage(self._handler)
self._own_storage = True
self._partial_client = _PartialClient(self._storage)
self._open_close_lock = self._handler.rlock_object()
self._watches_lock = self._handler.rlock_object()
self._listeners_lock = self._handler.rlock_object()
self._connected = False
self._retry = k_retry.KazooRetry()
if server_version is None:
self._server_version = SERVER_VERSION
else:
self._server_version = tuple(server_version)
if not len(self._server_version):
raise ValueError("Non-empty server version expected")
self.expired = False
self.logger = LOG
# Helper objects that makes these easier to create.
self.Barrier = functools.partial(Barrier, self)
self.Counter = functools.partial(Counter, self)
self.DoubleBarrier = functools.partial(DoubleBarrier, self)
self.ChildrenWatch = functools.partial(ChildrenWatch, self)
self.DataWatch = functools.partial(DataWatch, self)
self.Election = functools.partial(Election, self)
self.Lock = functools.partial(Lock, self)
self.Party = functools.partial(Party, self)
self.Queue = functools.partial(Queue, self)
self.LockingQueue = functools.partial(LockingQueue, self)
self.SetPartitioner = functools.partial(SetPartitioner, self)
self.Semaphore = functools.partial(Semaphore, self)
self.ShallowParty = functools.partial(ShallowParty, self)
@property
def handler(self):
return self._handler
@property
def storage(self):
return self._storage
def command(self, cmd=b'ruok'):
self.verify()
if cmd == b'ruok':
return 'imok'
if cmd == b'stat':
server_version = ".".join([str(s) for s in self._server_version])
return "\n".join(['Zake the fake version: %s' % (version.VERSION),
'Mimicked version: %s' % (server_version),
'Mode: standalone'])
if cmd == b"kill":
self.stop()
if cmd == b'envi':
server_version = ".".join([str(s) for s in self._server_version])
lines = [
"Environment:",
"zookeeper.version=%s" % server_version,
]
return "\n".join(lines)
return ''
def verify(self):
if not self._connected:
raise k_exceptions.ConnectionClosedError("Connection has been"
" closed")
if self.expired:
raise k_exceptions.SessionExpiredError("Expired")
@property
def session_id(self):
return self._partial_client.session_id
@property
def timeout_exception(self):
return IOError
@property
def child_watches(self):
return self._child_watchers
@property
def data_watches(self):
return self._data_watchers
@property
def listeners(self):
return self._listeners
@property
def connected(self):
return self._connected
def sync(self, path):
self.verify()
if not isinstance(path, six.string_types):
raise TypeError("path must be a string")
def server_version(self):
self.verify()
return self._server_version
def flush(self):
self.verify()
# This puts an item into the callback queue, and waits until it gets
# called, this is a cheap way of knowing that the queue has been
# cycled over (as this item goes in on the bottom) and only when the
# items ahead of this callback are finished will this get called.
wait_for = self.handler.event_object()
fired = False
def flip():
wait_for.set()
while not wait_for.is_set():
if not fired:
self.handler.dispatch_callback(utils.make_cb(flip))
fired = True
time.sleep(0.001)
def create(self, path, value=b"", acl=None,
ephemeral=False, sequence=False, makepath=False):
self.verify()
result, data_watches, child_watches = self._partial_client.create(
path, value=value, acl=acl, ephemeral=ephemeral, sequence=sequence,
makepath=makepath)
self.storage.inform(self, child_watches, data_watches)
return result
def create_async(self, path, value=b"", acl=None,
ephemeral=False, sequence=False, makepath=False):
return utils.dispatch_async(self.handler, self.create, path,
value=value, acl=acl, ephemeral=ephemeral,
sequence=sequence, makepath=makepath)
def get(self, path, watch=None):
self.verify()
if not isinstance(path, six.string_types):
raise TypeError("path must be a string")
path = utils.normpath(path)
try:
(data, znode) = self.storage.get(path)
except KeyError:
raise k_exceptions.NoNodeError("Node %s does not exist" % (path))
if watch:
with self._watches_lock:
self._data_watchers[path].append(watch)
return (data, znode)
def set_acls(self, path, acls, version=-1):
raise NotImplementedError(_NO_ACL_MSG)
def set_acls_async(self, path, acls, version=-1):
raise NotImplementedError(_NO_ACL_MSG)
def get_acls_async(self, path):
raise NotImplementedError(_NO_ACL_MSG)
def get_acls(self, path):
raise NotImplementedError(_NO_ACL_MSG)
def get_async(self, path, watch=None):
return utils.dispatch_async(self.handler, self.get, path, watch=watch)
def start(self, timeout=None):
if not self._connected:
with self._open_close_lock:
if not self._connected:
self._connected = True
with self._watches_lock:
self._child_watchers.clear()
self._data_watchers.clear()
self.storage.attach(self)
self.handler.start()
self._partial_client.session_id = int(uuid.uuid4())
self._fire_state_change(k_states.KazooState.CONNECTED)
def restart(self):
with self._open_close_lock:
before = self.session_id
self.stop()
self.start()
return before
def _fire_state_change(self, state):
with self._listeners_lock:
listeners = list(self._listeners)
for func in listeners:
self.handler.dispatch_callback(utils.make_cb(func, [state]))
def exists(self, path, watch=None):
self.verify()
if not isinstance(path, six.string_types):
raise TypeError("path must be a string")
path = utils.normpath(path)
try:
(data, exists) = self.storage.get(path)
except KeyError:
exists = None
if watch:
with self._watches_lock:
self._data_watchers[path].append(watch)
return exists
def exists_async(self, path, watch=None):
return utils.dispatch_async(self.handler,
self.exists, path, watch=watch)
def set(self, path, value, version=-1):
self.verify()
result, data_watches, child_watches = self._partial_client.set(
path, value, version=version)
self.storage.inform(self, child_watches, data_watches)
return result
def set_async(self, path, value, version=-1):
return utils.dispatch_async(self.handler,
self.set, path, value, version=version)
def get_children(self, path, watch=None, include_data=False):
self.verify()
if not isinstance(path, six.string_types):
raise TypeError("path must be a string")
def clean_path(p):
return p.strip("/")
path = utils.normpath(path)
with self.storage.lock:
if path not in self.storage:
raise k_exceptions.NoNodeError("Node %s does not exist"
% (path))
paths = self.storage.get_children(path)
if watch:
with self._watches_lock:
self._child_watchers[path].append(watch)
if include_data:
children_with_data = []
for (child_path, data) in six.iteritems(paths):
child_path = clean_path(child_path[len(path):])
children_with_data.append((child_path, data))
return children_with_data
else:
children = []
for child_path in six.iterkeys(paths):
child_path = clean_path(child_path[len(path):])
children.append(child_path)
return children
def get_children_async(self, path, watch=None, include_data=False):
return utils.dispatch_async(self.handler, self.get_children, path,
watch=watch, include_data=include_data)
def stop(self):
self.close()
def delete(self, path, version=-1, recursive=False):
self.verify()
result, data_watches, child_watches = self._partial_client.delete(
path, version=version, recursive=recursive)
self.storage.inform(self, child_watches, data_watches)
return result
def delete_async(self, path, recursive=False):
return utils.dispatch_async(self.handler,
self.delete, path, recursive=recursive)
def add_listener(self, listener):
with self._listeners_lock:
self._listeners.add(listener)
def retry(self, func, *args, **kwargs):
self.verify()
r = self._retry.copy()
return r(func, *args, **kwargs)
def remove_listener(self, listener):
with self._listeners_lock:
self._listeners.discard(listener)
def fire_child_watches(self, child_watches):
for (paths, event) in child_watches:
self._fire_watches(paths, event, self._child_watchers)
def fire_data_watches(self, data_watches):
for (paths, event) in data_watches:
self._fire_watches(paths, event, self._data_watchers)
def _fire_watches(self, paths, event, watch_source):
for path in reversed(sorted(paths)):
with self._open_close_lock:
if self._connected:
with self._watches_lock:
watches = list(watch_source.pop(path, []))
for w in watches:
cb = utils.make_cb(w, [event])
self.handler.dispatch_callback(cb)
def transaction(self):
return FakeTransactionRequest(self)
def ensure_path(self, path):
self.verify()
if not isinstance(path, six.string_types):
raise TypeError("path must be a string")
path = utils.normpath(path)
for piece in utils.partition_path(path):
try:
self.create(piece)
except k_exceptions.NodeExistsError:
pass
def ensure_path_async(self, path):
return utils.dispatch_async(self.handler, self.ensure_path, path)
def close(self, close_handler=True):
if self._connected:
with self._open_close_lock:
if self._connected:
self._connected = False
with self._watches_lock:
self._child_watchers.clear()
self._data_watchers.clear()
self.storage.purge(self)
self._fire_state_change(k_states.KazooState.LOST)
if self._own_handler and close_handler:
self.handler.stop()
self._partial_client.session_id = None
class _PartialClient(object):
"""An internal *only* client that returns the watches to be triggered."""
def __init__(self, storage):
self.storage = storage
self.session_id = None
def delete(self, path, version=-1, recursive=False):
if not isinstance(path, six.string_types):
raise TypeError("path must be a string")
data_watches = []
child_watches = []
path = utils.normpath(path)
with self.storage.lock:
if path not in self.storage:
raise k_exceptions.NoNodeError("Node %s does not exist"
% (path))
path_version = self.storage[path]['version']
if version != -1 and path_version != version:
raise k_exceptions.BadVersionError("Version mismatch"
" (%s != %s)"
% (version, path_version))
if recursive:
paths = [path]
children = self.storage.get_children(path, only_direct=False)
for child_path in six.iterkeys(children):
paths.append(child_path)
else:
children = self.storage.get_children(path, only_direct=False)
if children:
raise k_exceptions.NotEmptyError("Path %s is not-empty"
" (%s children exist)"
% (path, len(children)))
paths = [path]
paths = list(reversed(sorted(set(paths))))
with self.storage.transaction():
for path in paths:
self.storage.pop(path)
parents = []
for path in paths:
parents.extend(self.storage.get_parents(path))
parents = list(reversed(sorted(set(parents))))
for path in parents:
event = k_states.WatchedEvent(
type=k_states.EventType.DELETED,
state=k_states.KeeperState.CONNECTED,
path=path)
child_watches.append(([path], event))
for path in paths:
event = k_states.WatchedEvent(
type=k_states.EventType.DELETED,
state=k_states.KeeperState.CONNECTED,
path=path)
data_watches.append(([path], event))
return (True, data_watches, child_watches)
def set(self, path, value, version=-1):
if not isinstance(path, six.string_types):
raise TypeError("path must be a string")
if not isinstance(value, six.binary_type):
raise TypeError("value must be a byte string")
if not isinstance(version, int):
raise TypeError("version must be an int")
path = utils.normpath(path)
try:
stat = self.storage.set(path, value, version=version)
except KeyError:
raise k_exceptions.NoNodeError("Node %s does not exist" % (path))
data_watches = []
child_watches = []
event = k_states.WatchedEvent(type=k_states.EventType.CHANGED,
state=k_states.KeeperState.CONNECTED,
path=path)
data_watches.append(([path], event))
return (stat, data_watches, child_watches)
def create(self, path, value=b"", acl=None,
ephemeral=False, sequence=False, makepath=False):
if not isinstance(path, six.string_types):
raise TypeError("path must be a string")
if not isinstance(value, six.binary_type):
raise TypeError("value must be a byte string")
if acl:
raise NotImplementedError(_NO_ACL_MSG)
data_watches = []
child_watches = []
with self.storage.lock:
if sequence:
path = utils.normpath(path, keep_trailing=True)
else:
path = utils.normpath(path, keep_trailing=False)
if makepath:
for parent_path in utils.partition_path(path)[0:-1]:
if parent_path not in self.storage:
result = self.create(parent_path)
data_watches.extend(result[1])
child_watches.extend(result[2])
created, parents, path = self.storage.create(
path, value=value, sequence=sequence,
ephemeral=ephemeral, session_id=self.session_id)
if parents:
event = k_states.WatchedEvent(type=k_states.EventType.CHILD,
state=k_states.KeeperState.CONNECTED,
path=path)
child_watches.append((parents, event))
if created:
event = k_states.WatchedEvent(type=k_states.EventType.CREATED,
state=k_states.KeeperState.CONNECTED,
path=path)
data_watches.append(([path], event))
return (path, data_watches, child_watches)
class StopTransaction(Exception):
pass
class StopTransactionNoExists(StopTransaction):
pass
class StopTransactionBadVersion(StopTransaction):
pass
@contextlib.contextmanager
def try_txn_lock(lock):
locked = lock.acquire(blocking=False)
if not locked:
raise RuntimeError("Transaction can not be concurrently modified")
try:
yield
finally:
lock.release()
class DelayedOperation(object):
def __init__(self, name, operation, path=None, version=-1):
self.path = path
self.name = name
self.version = version
self._operation = operation
def __call__(self):
return self._operation()
class FakeTransactionRequest(object):
def __init__(self, client):
self._lock = client.handler.rlock_object()
self._client = client
self._partial_client = client._partial_client
self._storage = client.storage
self.operations = []
self.committed = False
@property
def storage(self):
return self._storage
def delete(self, path, version=-1):
delayed_op = functools.partial(self._partial_client.delete,
path, version)
self._add(DelayedOperation('delete', delayed_op,
path=path, version=version))
def check(self, path, version):
def delayed_check(path, version):
if not isinstance(path, six.string_types):
raise TypeError("path must be a string")
if not isinstance(version, int):
raise TypeError("version must be an int")
try:
data = self._storage[path]
if data['version'] != version:
raise StopTransactionBadVersion()
else:
return (True, [], [])
except KeyError:
raise StopTransactionNoExists()
delayed_op = functools.partial(delayed_check, path, version)
self._add(DelayedOperation('check', delayed_op,
path=path, version=version))
def set_data(self, path, value, version=-1):
delayed_op = functools.partial(self._partial_client.set,
path, value, version)
self._add(DelayedOperation('set_data', delayed_op,
path=path, version=version))
def create(self, path, value=b"", acl=None, ephemeral=False,
sequence=False):
delayed_op = functools.partial(self._partial_client.create,
path, value, acl, ephemeral, sequence)
self._add(DelayedOperation('create', delayed_op, path=path))
def commit(self):
self._check_tx_state()
self._client.verify()
with try_txn_lock(self._lock):
self._check_tx_state()
# Delay all watch firing until we are sure that it succeeded.
results = []
child_watches = []
data_watches = []
try:
with self._storage.transaction():
for op in self.operations:
result = op()
results.append(result[0])
data_watches.extend(result[1])
child_watches.extend(result[2])
except StopTransaction as e:
for i in range(0, len(results)):
results[i] = k_exceptions.RolledBackError()
if isinstance(e, StopTransactionBadVersion):
results.append(k_exceptions.BadVersionError())
if isinstance(e, StopTransactionNoExists):
results.append(k_exceptions.NoNodeError())
while len(results) != len(self.operations):
results.append(k_exceptions.RuntimeInconsistency())
except (NotImplementedError, AttributeError,
RuntimeError, ValueError, TypeError,
k_exceptions.ConnectionClosedError,
k_exceptions.SessionExpiredError):
# Allow all these errors to bubble up.
six.reraise(*sys.exc_info())
except Exception as e:
for i in range(0, len(results)):
results[i] = k_exceptions.RolledBackError()
results.append(e)
while len(results) != len(self.operations):
results.append(k_exceptions.RuntimeInconsistency())
else:
self._storage.inform(self._client, child_watches, data_watches)
self.committed = True
return results
def __enter__(self):
return self
def _check_tx_state(self):
if self.committed:
raise ValueError('Transaction already committed')
def _add(self, request):
with try_txn_lock(self._lock):
self._check_tx_state()
self.operations.append(request)
def __exit__(self, type, value, tb):
if not any((type, value, tb)):
if not self.committed:
self.commit()
```
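A short usage sketch: `FakeClient` is intended as a drop-in stand-in for `kazoo.client.KazooClient` in tests, so ordinary kazoo calls run against the in-memory storage defined above:
```python
from zake import fake_client
client = fake_client.FakeClient()
client.start()
client.ensure_path('/a/b')
client.create('/a/b/c', b'hello')
data, znode = client.get('/a/b/c')
assert data == b'hello'
assert client.get_children('/a/b') == ['c']
# Transactions commit on a clean exit from the context manager.
with client.transaction() as txn:
    txn.set_data('/a/b/c', b'world')
assert client.get('/a/b/c')[0] == b'world'
client.stop()
```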
#### File: Zake/zake/utils.py
```python
import os
import time
from kazoo.protocol import paths as k_paths
from kazoo.protocol import states as k_states
def millitime():
"""Converts the current time to milliseconds."""
return int(round(time.time() * 1000.0))
def normpath(path, keep_trailing=False):
"""Really normalize the path by adding a missing leading slash."""
new_path = k_paths.normpath(path)
if keep_trailing and path.endswith("/") and not new_path.endswith("/"):
new_path = new_path + "/"
if not new_path.startswith('/'):
return '/' + new_path
return new_path
def make_cb(func, args=None, type=''):
if not args:
args = []
return k_states.Callback(type=type, func=func, args=args)
def dispatch_async(handler, func, *args, **kwargs):
async_result = handler.async_result()
def call(func, args, kwargs):
try:
result = func(*args, **kwargs)
async_result.set(result)
except Exception as exc:
async_result.set_exception(exc)
cb = make_cb(call, [func, args, kwargs], type='async')
handler.dispatch_callback(cb)
return async_result
def partition_path(path):
path_pieces = [path]
cur_path = path
while True:
(tmp_path, _ext) = os.path.split(cur_path)
if tmp_path == cur_path:
path_pieces.append(tmp_path)
break
else:
path_pieces.append(tmp_path)
cur_path = tmp_path
return sorted(set(path_pieces))
def is_child_path(parent_path, child_path, only_direct=True):
parent_pieces = [p for p in parent_path.split("/") if p]
child_pieces = [p for p in child_path.split("/") if p]
if len(child_pieces) <= len(parent_pieces):
return False
shared_pieces = child_pieces[0:len(parent_pieces)]
if tuple(parent_pieces) != tuple(shared_pieces):
return False
if only_direct:
return len(child_pieces) == len(parent_pieces) + 1
return True
``` |
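A quick illustration of the path helpers above (plain asserts, no external setup needed):
```python
from zake import utils
# normpath adds the missing leading slash and collapses duplicate slashes.
assert utils.normpath('a//b') == '/a/b'
assert utils.normpath('a/b/', keep_trailing=True) == '/a/b/'
# partition_path yields every ancestor, root included, in sorted order.
assert utils.partition_path('/a/b/c') == ['/', '/a', '/a/b', '/a/b/c']
# is_child_path defaults to direct children only.
assert utils.is_child_path('/a', '/a/b') is True
assert utils.is_child_path('/a', '/a/b/c') is False
assert utils.is_child_path('/a', '/a/b/c', only_direct=False) is True
```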
{
"source": "jimboca/ISYHelperHABridge",
"score": 2
} |
#### File: ISYHelperHABridge/ihab/Rest.py
```python
import sys
from flask import Flask
from flask import request
debug = False
# TODO: need better way than using this global...
# TODO: This all probably should not be an object...
CONFIG = False
app = Flask(__name__)
class Rest(object):
global CONFIG
app = Flask(__name__)
def __init__(self,config,logger):
self.app = app
self.config = config
global CONFIG
CONFIG = config
self.logger = logger
self.host = self.config['this_host']['host']
self.port = int(self.config['this_host']['port'])
info = "Configuring REST interface...";
logger.info(info)
print info
self.app.debug = debug
    def run(self,bridge,isy):
        info = "Starting REST interface %s:%s..." % (self.host,self.port)
        self.logger.info(info)
        # Store references before app.run(), which blocks until shutdown.
        self.bridge = bridge
        self.isy = isy
        self.app.run(host=self.host, port=self.port)
def get_ip(self):
return request.remote_addr
@app.route("/")
def top():
app.logger.info("REST:top")
info = "ISYHelper HABridge: %s<br>Requestor: %s<br>" % (CONFIG['version'],request.remote_addr);
#return "ISYHelper Web Interface version %s<br>Requestor: %s<br>%s" % (CONFIG['isyhelper_version'], request.remote_addr, isyhelperRESTObj.helpers.get_index())
#
# This translates a REST setvar command to pass to the appropriate Helper
# that accepts responses from specific IP addresses, like Foscam.
#
    #@app.route('/setvar/<path:path>')
#def setvar(path):
# # Get the helper for the incoming IP address
# rip = request.remote_addr;
# helper = isyhelperRESTObj.helpers.get_helper(rip)
# if helper is False:
# return "REST:setvar: No helper for IP %s" % (rip), 404
# if not path:
# return "path not defined from %s" % (rip), 404
# # TODO: Allow value param to be passed in?
# # TODO: Make sure split only returns 2 objects?
# #udata = web.input(value=None)
# li = path.split("/")
# varname = li[0]
# varvalue = li[1]
# info = 'REST:setvar:GET: ' + rip + ' varname='+ varname + ' value=' + str(varvalue)
# isyhelperRESTObj.helpers.logger.info(info)
# helper.setvar(varname,varvalue)
# return info
    #@app.route('/<helper_name>/<path:path>')
#def helper(helper_name,path):
# app.logger.debug("REST:helper: helper_name=%s path=%s" % (helper_name,path))
# helper = isyhelperRESTObj.helpers.get_helper_by_name(helper_name)
# if not helper:
# msg = "REST:default:GET: No helper '%s' exists for '%s' request by %s" % (helper_name, path, request.remote_addr)
# app.logger.error(msg)
# return msg, 404
# # Call the Helpers rest_get method.
# if request.method == 'GET':
# app.logger.debug("REST:helper: Calling %s.rest_get(%s)" % (helper_name,path))
# return helper.rest_get(app,request,path)
# app.logger.error("REST:helper:%s: No %s method available" % (helper_name,request.method))
# return msg, 404
if __name__ == "__main__":
import logging
import os
config = { 'this_host' : { 'host' : '192.168.1.77', 'port' : '8082' } }
log_file = "REST.log"
log_format = '%(asctime)-15s:%(name)s:%(levelname)s: %(message)s'
if os.path.exists(log_file):
os.remove(log_file)
logging.basicConfig(filename=log_file, format=log_format);
logger = logging.getLogger('IH')
logger.setLevel(logging.DEBUG)
    # Standalone test run; no bridge or ISY objects are available here.
    rest = Rest(config, logger)
    rest.run(None, None)
``` |
{
"source": "jimboca/isyhelper",
"score": 3
} |
#### File: ISYHelper/Helpers/DateAndTime.py
```python
from datetime import datetime
from functools import partial
from .Helper import Helper
class DateAndTime(Helper):
def __init__(self,parent,hconfig):
self.optional = { 'interval' : { 'default' : 'minute', 'valid' : ['day', 'hour', 'minute', 'second'] } }
super(DateAndTime, self).__init__(parent,hconfig)
self.parent.logger.info("Datetime: interval=" + self.interval)
# Index of the interval, 0=day, ... 3=second
self.interval_index = self.optional['interval']['valid'].index(self.interval)
def second_function(self):
dt = datetime.now()
        self.parent.logger.info('second_function: The second is: %s' % dt.second)
self.Second.val = dt.second
def minute_function(self):
dt = datetime.now()
self.parent.logger.info('minute_function: The minute is: %s' % dt.minute)
self.Minute.val = dt.minute
def hour_function(self):
dt = datetime.now()
self.parent.logger.info('hour_function: The hour is: %s' % dt.hour)
self.Hour.val = dt.hour
def day_function(self):
dt = datetime.now()
self.parent.logger.info('day_function: It is a new day! The time is: %s' % dt)
self.Day.val = dt.day
self.Month.val = dt.month
self.Year.val = dt.year
def do_pong(self,e,val):
self.parent.logger.info('Pong: %s' % val)
self.Pong.val = val
# Initialize all on startup
def start(self):
# Build our hash of variables
self.isy_variables = ['Day', 'Month', 'Year', 'Pong']
if self.interval_index > 0:
self.isy_variables.append('Hour')
if self.interval_index > 1:
self.isy_variables.append('Minute')
if self.interval_index > 2:
self.isy_variables.append('Second')
super(DateAndTime, self).start()
self.Ping = self.Day
if self.interval_index > 0:
self.hour_function()
self.Ping = self.Hour
if self.interval_index > 1:
self.minute_function()
self.Ping = self.Minute
if self.interval_index > 2:
self.second_function()
self.day_function()
self.parent.logger.info('Pong: will watch %s' % str(self.Ping))
self.ping_handler = self.Ping.val.subscribe(
'changed', partial(self.do_pong,val=self.Ping.val))
def sched(self):
super(DateAndTime, self).sched()
# Schedules second_function to be run at the change of each second.
if self.interval_index > 2:
self.parent.sched.add_job(self.second_function, 'cron', second='0-59')
# Schedules minute_function to be run at the change of each minute.
if self.interval_index > 1:
self.parent.sched.add_job(self.minute_function, 'cron', second='0')
# Schedules hour_function to be run at the change of each hour.
if self.interval_index > 0:
self.parent.sched.add_job(self.hour_function, 'cron', minute='0', second='0')
# Schedules day_function to be run at the start of each day.
self.parent.sched.add_job(self.day_function, 'cron', minute='0', second='0', hour='0')
```
#### File: ISYHelper/Helpers/__init__.py
```python
from .Tester import Tester
from .Foscam1 import Foscam1
from .DateAndTime import DateAndTime
from .Maker import Maker
from .FauxMo import FauxMo
from .PyHue import PyHue
from .PyHarmony import PyHarmony
class Helpers(object):
def __init__(self,logger,sched,config):
self.logger = logger
self.config = config
self.resturl = config['this_host']['url']
self.children = []
self.isy = None
self.sched = sched
# TODO: I don't like all this dict's, does Python have grep?
self.by_ip = {}
self.by_name = {}
errors = 0
for hconfig in config['helpers']:
try:
self.add_helper(hconfig)
except ValueError as e:
logger.error(str(e))
errors += 1
if errors > 0:
raise ValueError("See Log")
def add_helper(self,hconfig):
self.logger.info("add_helper: " + str(hconfig))
if 'type' not in hconfig:
self.logger.error("helper 'type' not defined for " + str(hconfig))
raise ValueError("See Log")
# TODO: There must be a good way to use a variable for a class name?
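        # One possible alternative (illustrative sketch only, not used here): keep a
        # name->class map and look the type up instead of the if/elif chain below, e.g.
        #   HELPER_CLASSES = {'Tester': Tester, 'Foscam1': Foscam1, 'DateAndTime': DateAndTime}
        #   helper = HELPER_CLASSES[hconfig['type']](self, hconfig)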
dtype = hconfig['type']
print("Initializing new helper: " + dtype)
if dtype == "Tester":
helper = Tester(self,hconfig)
elif dtype == "Foscam1":
helper = Foscam1(self,hconfig)
elif dtype == "DateAndTime":
helper = DateAndTime(self,hconfig)
elif dtype == "Maker":
helper = Maker(self,hconfig)
elif dtype == "FauxMo":
helper = FauxMo(self,hconfig)
elif dtype == "PyHue":
helper = PyHue(self,hconfig)
elif dtype == "PyHarmony":
helper = PyHarmony(self,hconfig)
else:
self.logger.error("Unknown helper type "+ dtype)
raise ValueError("See Log")
return
self.children.append(helper)
if 'name' in hconfig:
self.by_name[hconfig['name']] = helper
if 'ip' in hconfig:
self.by_ip[hconfig['ip']] = helper
# Run the scheduler
print(" Scheduling helper: " + helper.name)
helper.sched()
return helper
def start(self,isy):
self.isy = isy
errors = 0
for helper in self.children:
try:
msg = "Starting helper: %s" % (helper.name)
                print(msg)
                self.logger.info(msg)
helper.start()
except ValueError:
errors += 1
if errors:
raise ValueError(str(errors) + " startup errors, see log")
# TODO: Add args to allow getting by ip, name, type, ...
def get_helper(self,ip):
if ip in self.by_ip:
return self.by_ip[ip]
self.logger.error("Unable to get helper by ip '" + ip + "'");
return False
def get_helper_by_name(self,name):
if name in self.by_name:
return self.by_name[name]
self.logger.error("Unable to get helper by name '" + name + "'");
return False
def get_index(self):
msg = "<br>Configured Helpers:<ul>\n"
for helper in self.children:
msg += helper.get_index()
msg += "</ul>\n"
return msg
```
#### File: ISYHelper/Helpers/Maker.py
```python
from .Helper import Helper
import json
class Maker(Helper):
def __init__(self,parent,hconfig):
self.required = ['token']
super(Maker, self).__init__(parent,hconfig)
def maker(self,path,data):
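        # Expected payload (illustrative example; required keys taken from the checks below):
        #   {"token": "<secret>", "type": "variable", "name": "MyVar", "value": 1}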
#super(Tester, self).setvar(name,value)
lpfx = self.name + ".maker: "
self.parent.logger.info(lpfx + ' path=' + path + ' data=' + data)
jdata = json.loads(data)
self.parent.logger.info(lpfx + ' jdata=' + repr(jdata))
errors = 0
for key in ['token','type','name','value']:
if not key in jdata:
self.parent.logger.error(lpfx + 'Missing key: ' + key)
errors += 1
if errors > 0:
raise ValueError("Missing Keys")
return
if not jdata['token'] == self.token:
self.parent.logger.error(lpfx + 'Token mismatch ' + jdata['token'] + ' != ' + self.token)
return
if jdata['type'] == 'variable':
var = self.setvar(jdata['name'],jdata['value']);
else:
self.parent.logger.error(lpfx + 'Unknown type: ' + jdata['type'])
```
#### File: isyhelper/ISYHelper/__init__.py
```python
import logging
import logging.handlers
import time
import sys
import yaml
import socket
import os
# from http://commandline.org.uk/python/how-to-find-out-ip-address-in-python/
def get_network_ip(rhost):
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect((rhost, 0))
return s.getsockname()[0]
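# Example config.yaml (illustrative sketch; key names taken from the code below and
# from the Helpers class, actual files may differ):
#   isy:
#     host: 192.168.1.10
#   host: 192.168.1.77        # optional, overrides the auto-detected address
#   port: 8080                # optional REST port
#   log_file: isyhelper.log
#   log_level: debug
#   helpers:
#     - type: DateAndTime
#       name: Clock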
def load_config ():
config_file = open('config.yaml', 'r')
config = yaml.load(config_file)
    config_file.close()
# host config param overrides default.
if 'host' in config and config['host'] is not None:
# use what the user has defined.
this_host = config['host']
else:
# Only way to get the current host ip is connect to something, so use the ISY.
this_host = get_network_ip(config['isy']['host'])
# port config param overrides port
port = '8080'
if 'port' in config and config['port'] is not None:
port = str(config['port'])
print "isyhelper: host: " + this_host + ":" + port
config['this_host'] = {
'host' : this_host,
# TODO: This is the REST interface, should be configurable?
'port' : port,
}
config['this_host']['url'] = 'http://'+config['this_host']['host']+':'+config['this_host']['port']
config['log_format'] = '%(asctime)-15s:%(name)s:%(levelname)s: %(message)s'
return config
def get_logger(config):
if 'log_file' not in config:
config['log_file'] = False
print("PyISYLink: No log_file defined")
elif config['log_file'] == 'none':
print("PyISYLink: Not writing log because log_file is none")
config['log_file'] = False
if config['log_file'] != False:
print('isyhelper: Writing to log: ' + config['log_file'] + ' level=' + str(config['log_level']))
if os.path.exists(config['log_file']):
os.remove(config['log_file'])
# Create the logger
logger = logging.getLogger('IH')
# Set the log level Warning level by default, unless log_level is debug or info
if config['log_level'] == 'debug':
logger.setLevel(logging.DEBUG)
elif config['log_level'] == 'info':
logger.setLevel(logging.INFO)
else:
logger.setLevel(logging.WARNING)
# Make a handler that writes to a file,
# making a new file at midnight and keeping 30 backups
handler = logging.handlers.TimedRotatingFileHandler(config['log_file'], when="midnight", backupCount=7)
# Format each log message like this
formatter = logging.Formatter(config['log_format'])
# Attach the formatter to the handler
handler.setFormatter(formatter)
# Attach the handler to the logger
logger.addHandler(handler)
else:
logger = False
return logger
```
#### File: isyhelper/ISYHelper/REST.py
```python
import sys
from flask import Flask
from flask import request
from traceback import format_exception
debug = False
app = Flask(__name__)
# TODO: This is dumb, but I was lazy. It's a global variable referenced to
# TODO: find our top object in the lower classes, like setvar.GET.
# TODO: There must be a way to pass this in web.application?
isyhelperRESTObj = False
CONFIG = False
class REST(object):
global isyhelperRESTObj
global CONFIG
app = Flask(__name__)
def __init__(self,config,helpers,mylogger):
global isyhelperRESTObj
global CONFIG
CONFIG = config
self.app = app
self.config = config
self.helpers = helpers
isyhelperRESTObj = self
self.app.debug = debug
# TODO: Check that the files exist!
#if 'ssl' in self.config:
# if 'certificate' in self.config['ssl']:
# print('Using certificate: ' + self.config['ssl']['certificate'])
# CherryPyWSGIServer.ssl_certificate = self.config['ssl']['certificate']
# if 'private_key' in self.config['ssl']:
# print('Using private_key: ' + self.config['ssl']['private_key'])
# CherryPyWSGIServer.ssl_private_key = self.config['ssl']['private_key']
self.app.logger.addHandler(mylogger)
#self.app.logger.setLevel(logging.DEBUG)
def run(self):
arg = "%s:%s" % (self.config['this_host']['host'],self.config['this_host']['port'])
print "REST: %s" % (arg)
self.app.run(host=self.config['this_host']['host'], port=int(self.config['this_host']['port']), use_reloader=False)
def get_ip(self):
return request.remote_addr
@app.route("/")
def top():
app.logger.info("REST:top")
try:
return "ISYHelper Web Interface version %s<br>Requestor: %s<br>%s" % (CONFIG['isyhelper_version'], request.remote_addr, isyhelperRESTObj.helpers.get_index())
except:
exc_type, exc_value, exc_traceback = sys.exc_info()
return "<pre>Top Error: %s</pre>" % ''.join(format_exception(exc_type, exc_value, exc_traceback))
#
# This translates a REST setvar command to pass to the appropriate Helper
# that accepts responses from specific IP addresses, like Foscam.
#
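    # Example (illustrative): a camera configured with ip 192.168.1.50 requesting
    #   GET http://<this_host>:<port>/setvar/MotionDetected/1
    # is routed to the helper registered for 192.168.1.50, which then calls
    # helper.setvar('MotionDetected', '1').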
@app.route('/setvar/<path:path>')
def setvar(path):
# Get the helper for the incoming IP address
rip = request.remote_addr;
helper = isyhelperRESTObj.helpers.get_helper(rip)
if helper is False:
return "REST:setvar: No helper for IP %s" % (rip), 404
if not path:
return "path not defined from %s" % (rip), 404
# TODO: Allow value param to be passed in?
# TODO: Make sure split only returns 2 objects?
#udata = web.input(value=None)
li = path.split("/")
varname = li[0]
varvalue = li[1]
info = 'REST:setvar:GET: ' + rip + ' varname='+ varname + ' value=' + str(varvalue)
isyhelperRESTObj.helpers.logger.info(info)
helper.setvar(varname,varvalue)
return info
@app.route('/<helper_name>/<path:path>')
def helper(helper_name,path):
app.logger.debug("REST:helper: helper_name=%s path=%s" % (helper_name,path))
helper = isyhelperRESTObj.helpers.get_helper_by_name(helper_name)
if not helper:
msg = "REST:default:GET: No helper '%s' exists for '%s' request by %s" % (helper_name, path, request.remote_addr)
app.logger.error(msg)
return msg, 404
# Call the Helpers rest_get method.
if request.method == 'GET':
app.logger.debug("REST:helper: Calling %s.rest_get(%s)" % (helper_name,path))
return helper.rest_get(app,request,path)
app.logger.error("REST:helper:%s: No %s method available" % (helper_name,request.method))
return msg, 404
if __name__ == "__main__":
import logging
import os
config = { 'this_host' : { 'host' : '192.168.1.77', 'port' : '8082' } }
log_file = "REST.log"
log_format = '%(asctime)-15s:%(name)s:%(levelname)s: %(message)s'
if os.path.exists(log_file):
os.remove(log_file)
logging.basicConfig(filename=log_file, format=log_format);
logger = logging.getLogger('IH')
logger.setLevel(logging.DEBUG)
rest = REST(config,[],logger)
rest.run()
``` |
{
"source": "jimboca/udi-camera-poly",
"score": 3
} |
#### File: jimboca/udi-camera-poly/foscam_poll.py
```python
import os
import socket
import sys
import time
import select
from struct import unpack,pack
from camera_funcs import get_valid_node_name,get_network_bcast
TIMEOUT = 6 # Total seconds to spend searching.
PING_INTERVAL = 2 # Broadcast a search request every 2 seconds.
PING_PORT_NUMBER = 10000
PING_MSG_SIZE = 130
# ftp://192.168.3.11/Nadzor/FOSCAM/SDK%20CGI/MJPEG%20CGI%20SDK/MJPEG%20CGI%20SDK/Ipcamera%20device%20search%20protocol.pdf
SEARCH_REQUEST = pack('>4sH?8sll4s', b'MO_I', 0, 0, b'', 67108864, 0, b'')
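# Layout of the search packet above (big-endian, per the format string '>4sH?8sll4s'):
# 4-byte magic 'MO_I', a 16-bit value, a 1-byte flag, 8 reserved bytes, two 32-bit
# integers (the first is 0x04000000 here), and 4 trailing bytes; field meanings are
# described in the protocol PDF linked above.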
def foscam_poll(logger=None,verbose=False):
clients = []
clients_by_addr = {}
#myip = get_network_ip(logger=logger)
mybcast = get_network_bcast(logger=logger)
# Create UDP socket
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM, socket.IPPROTO_UDP)
# Ask operating system to let us do broadcasts from socket
sock.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
# Bind UDP socket to local port so we can receive pings
sock.bind(('',0)) # Was, PING_PORT_NUMBER, but sender can be any open port.
# Use timeout
sock.settimeout(PING_INTERVAL)
main_timeout = time.time() + TIMEOUT
responses = {}
while time.time() < main_timeout:
# Broadcast our beacon
if logger is not None:
logger.info("Pinging for Foscams {}:{}".format(mybcast,PING_PORT_NUMBER))
sock.sendto(SEARCH_REQUEST, 0, (mybcast, PING_PORT_NUMBER))
ping_timeout = time.time() + PING_INTERVAL
while time.time() < ping_timeout:
# Listen for a response with timeout
addr = None
try:
msg, (addr, uport) = sock.recvfrom(PING_MSG_SIZE)
# Someone answered our ping, store it.
if addr not in responses:
if logger is not None:
logger.info("Saving response from %s:%s" % (addr,uport))
responses[addr] = msg
except socket.timeout:
if logger is not None:
logger.debug("No more reponses")
sock.close()
if logger is not None:
logger.debug("All done looking")
for addr, msg in iter(responses.items()):
if logger is not None:
logger.debug("Response from: %s" % (addr))
if verbose:
logger.debug("msg=%s" % msg)
if msg == SEARCH_REQUEST:
if logger is not None:
logger.debug("ignore my echo")
elif len(msg) == 88 or len(msg) == 121 or len(msg) == 129:
if len(msg) == 88:
upk = unpack('>23s13s21s4I4b4b4bH?',msg)
(header, id, name, ip_i, mask_i, gateway_i, dns_i, r1, r2, r3, r4, s1, s2, s3, s4, a1, a2, a3, a4, port, dhcp) = upk
type = ""
mtype = "MJPEG"
elif len(msg) == 121:
# I can't find documentation for the last 19 and 14 bytes, but the 14 seems to
# be a string that indicates what type of camera A=HD and b=H.264
# I see this for my FI9828P V2
upk = unpack('>23s13s21s4I4b4b4bH?19s14s',msg)
(header, id, name, ip_i, mask_i, gateway_i, dns_i, r1, r2, r3, r4, s1, s2, s3, s4, a1, a2, a3, a4, port, dhcp, unknown, type) = upk
mtype = "HD2"
elif len(msg) == 129:
# And this has has another 8 bytes at the end? I see this on my FI9826P V2
upk = unpack('>23s13s21s4I4b4b4bH?19s14s8s',msg)
(header, id, name, ip_i, mask_i, gateway_i, dns_i, r1, r2, r3, r4, s1, s2, s3, s4, a1, a2, a3, a4, port, dhcp, unknown1, type, unknown2) = upk
mtype = "HD2"
if verbose and logger is not None:
logger.debug(upk)
#type = type.decode()
id = id.decode()
name = name.decode()
client = {
'type': type,#rstrip(b'\x00'),
'mtype': mtype,
'id': id.rstrip('\x00'),
'name': get_valid_node_name(name.rstrip('\x00')),
'ip': socket.inet_ntoa(pack('!I',ip_i)),
'port': port,
'mask': socket.inet_ntoa(pack('!I',mask_i)),
'gateway': socket.inet_ntoa(pack('!I',gateway_i)),
'dns': socket.inet_ntoa(pack('!I',dns_i)),
'reserve': "%d.%d.%d.%d" % (r1, r2, r3, r4),
'sys': "%d.%d.%d.%d" % (s1, s2, s3, s4),
'app': "%d.%d.%d.%d" % (a1, a2, a3, a4),
'dhcp': dhcp,
'reserve_a': (r1, r2, r3, r4),
'sys_a': (s1, s2, s3, s4),
'app_a': (a1, a2, a3, a4),
}
if logger is not None:
logger.info("Foscam Info: %s" % (client))
clients.append(client)
else:
if logger is not None:
logger.debug("Ignoring message of size " + str(len(msg)))
return clients
if __name__ == '__main__':
import logging
import sys
# Create our logger
logger = logging.getLogger('foscam_poll')
logger.setLevel(logging.DEBUG)
# create console handler
ch = logging.StreamHandler()
ch.setLevel(logging.DEBUG)
# create formatter and add it to the handlers
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
ch.setFormatter(formatter)
# add the handlers to the logger
logger.addHandler(ch)
verbose = False
if (len(sys.argv) > 1 and sys.argv[1] == "-v"):
verbose = True
foscam_poll(logger,verbose)
``` |
{
"source": "jimboca/udi-harmony-poly-V2",
"score": 3
} |
#### File: udi-harmony-poly-V2/harmony_hub_nodes/HarmonyController.py
```python
import sys
sys.path.insert(0,"pyharmony")
from polyinterface import Controller,LOG_HANDLER,LOGGER
import json,re,time,sys,os.path,yaml,logging,json,warnings,time
from traceback import format_exception
from copy import deepcopy
from threading import Thread
from harmony_hub_nodes import HarmonyHub
from harmony_hub_funcs import *
from write_profile import write_profile
class HarmonyController(Controller):
"""
    The Controller Class is the primary node from an ISY perspective. It is a subclass
of polyinterface.Node so all methods from polyinterface.Node are available to this
class as well.
Class Variables:
self.nodes: Dictionary of nodes. Includes the Controller node. Keys are the node addresses
self.name: String name of the node
self.address: String Address of Node, must be less than 14 characters (ISY limitation)
self.polyConfig: Full JSON config dictionary received from Polyglot.
self.added: Boolean Confirmed added to ISY as primary node
Class Methods (not including the Node methods):
start(): Once the NodeServer config is received from Polyglot this method is automatically called.
addNode(polyinterface.Node): Adds Node to self.nodes and polyglot/ISY. This is called for you
on the controller itself.
delNode(address): Deletes a Node from the self.nodes/polyglot and ISY. Address is the Node's Address
    longPoll(): Runs every longPoll seconds (set initially in the server.json or default 30 seconds)
    shortPoll(): Runs every shortPoll seconds (set initially in the server.json or default 10 seconds)
query(): Queries and reports ALL drivers for ALL nodes to the ISY.
runForever(): Easy way to run forever without maxing your CPU or doing some silly 'time.sleep' nonsense
this joins the underlying queue query thread and just waits for it to terminate
which never happens.
"""
def __init__(self, polyglot):
"""
Optional.
Super runs all the parent class necessities. You do NOT have
to override the __init__ method, but if you do, you MUST call super.
"""
LOGGER.info('HarmonyController: Initializing')
super(HarmonyController, self).__init__(polyglot)
self.name = 'HarmonyHub Controller'
self.address = 'harmonyctrl'
self.primary = self.address
# These start in threads cause they take a while
self.discover_thread = None
self.profile_thread = None
self.do_poll = False
self.lpfx = ""
self.hb = 0
def start(self):
"""
Optional.
Polyglot v2 Interface startup done. Here is where you start your integration.
        This will run, once the NodeServer connects to Polyglot and gets its config.
In this example I am calling a discovery method. While this is optional,
this is where you should start. No need to Super this method, the parent
version does nothing.
"""
self.removeNoticesAll()
serverdata = self.poly.get_server_data(check_profile=False)
LOGGER.info('Started HarmonyHub NodeServer {}'.format(serverdata['version']))
#polyinterface.LOGGER("start: This is {0} error {1}".format("an"))
self.l_info('start','Starting')
        # Some are getting unclosed socket warnings from sleekxmpp when the thread exits that I can't get rid of, so ignore them.
warnings.filterwarnings("ignore", category=ResourceWarning, message="unclosed.*<socket.socket.*>")
# Show these for now
self.l_debug("start","GV4={0} GV8={1}".format(self.getDriver('GV4'),self.getDriver('GV8')))
self.set_debug_level(self.getDriver('GV4'))
# Set Profile Status as Up To Date, if it's status 6=ISY Reboot Required
val = self.getDriver('GV7')
if val is None or int(val) == 6 or int(val) == 0:
self.setDriver('GV7', 1)
# Short Poll
val = self.getDriver('GV5')
self.l_debug("start","shortPoll={0} GV5={1}".format(self.polyConfig['shortPoll'],val))
if val is None:
self.setDriver('GV5',self.polyConfig['shortPoll'])
elif (int(val) != 0):
self.polyConfig['shortPoll'] = int(val)
# Long Poll
val = self.getDriver('GV6')
self.l_debug("start","longPoll={0} GV6={1}".format(self.polyConfig['longPoll'],val))
if val is None:
self.setDriver('GV6',self.polyConfig['longPoll'])
elif (int(val) != 0):
self.polyConfig['longPoll'] = int(val)
        # Activity method
val = self.getDriver('GV9')
if val is None:
self.activity_method = 2 # The default
self.setDriver('GV9',self.activity_method)
else:
self.activity_method = int(val)
self.l_debug("start","GV9={0} activity_method={1}".format(val,self.activity_method))
# Initialize hubs
self.clear_hubs()
# Load em if we have em
self.load_hubs()
# Watch Mode
self.set_watch_mode(self.getDriver('GV10'))
        # New versions need to force an update
if not 'cver' in self.polyConfig['customData']:
self.polyConfig['customData']['cver'] = 1
self.l_debug("start","cver={0}".format(self.polyConfig['customData']['cver']))
if int(self.polyConfig['customData']['cver']) < 2:
self.l_debug("start","updating myself since cver {0} < 2".format(self.polyConfig['customData']['cver']))
# Force an update.
self.addNode(self,update=True)
self.polyConfig['customData']['cver'] = 3
self.saveCustomData(self.polyConfig['customData'])
#
# Add Hubs from the config
#
self._set_num_hubs(0)
self.first_run = False
#self.l_debug("start","nodes={}".format(self.polyConfig['nodes']))
if self.polyConfig['nodes']:
self.l_info("start","Adding known hubs...")
# Load the config info about the hubs.
self.load_config()
# Load the hub info.
self.load_hubs()
if self.hubs is False:
self.l_error("start","No hubs loaded, need to discover?")
return
# Build/Update profile if necessary
serverdata = self.poly.check_profile(serverdata,build_profile=self._update_profile)
# Restore known hubs from the poly config nodes
self.add_hubs()
else:
# No nodes exist, that means this is the first time we have been run
# after install, so do a discover
self.l_info("start","First run, will start discover...")
self.first_run = True
self.discover()
self.l_info("start","done")
# TODO: Is it ok to reference nodesAdding?
def allNodesAdded(self):
LOGGER.debug('nodesAdding: %d', len(self.nodesAdding))
        return len(self.nodesAdding) == 0
def canPoll(self):
        if not self.allNodesAdded():
LOGGER.debug('Waiting for all nodes to be added...')
return False
if self.discover_thread is not None:
if self.discover_thread.is_alive():
LOGGER.debug('discover thread still running...')
return False
else:
LOGGER.debug('discover thread is done...')
self.discover_thread = None
if self.profile_thread is not None:
if self.profile_thread.is_alive():
LOGGER.debug('profile thread still running...')
return False
else:
LOGGER.debug('profile thread is done...')
self.profile_thread = None
return True
def shortPoll(self):
#self.l_debug('shortPoll','...')
if not self.canPoll():
return False
for node in self.nodes:
if self.nodes[node].do_poll:
self.nodes[node].shortPoll()
def longPoll(self):
#self.l_debug('longpoll','...')
if not self.canPoll():
return False
for node in self.nodes:
if self.nodes[node].do_poll:
self.nodes[node].longPoll()
self.heartbeat()
def query(self):
self.l_debug('query','...')
if not self.canPoll():
return False
self.reportDrivers()
for node in self.nodes:
if self.nodes[node].do_poll:
self.nodes[node].query()
def discover(self):
"""
Start the discover in a thread so we don't cause timeouts :(
"""
self.discover_thread = Thread(target=self._discover)
self.discover_thread.start()
def heartbeat(self):
self.l_info('heartbeat','hb={}'.format(self.hb))
if self.hb == 0:
self.reportCmd("DON",2)
self.hb = 1
else:
self.reportCmd("DOF",2)
self.hb = 0
def _discover(self):
# Clear the hubs now so we clear some that may have been improperly added.
self.clear_hubs()
# Set all hubs to not found
for hub in self.hubs:
hub['found'] = False
#
# Look for the hubs...
#
self.setDriver('GV7', 2)
auto_discover = self.getDriver('GV8')
discover_result = None
if auto_discover is None:
auto_discover = 1
else:
auto_discover = int(auto_discover)
if (auto_discover == 0):
self.l_info('discover','harmony_discover: skipping since auto discover={0}...'.format(auto_discover))
discover_result = list()
else:
self.l_info('discover','harmony_discover: starting...')
sys.path.insert(0,"pyharmony")
from pyharmony import discovery as harmony_discovery
harmony_discovery.logger = LOGGER
try:
discover_result = harmony_discovery.discover(scan_attempts=10,scan_interval=1)
except (OSError) as err:
self.setDriver('GV7', 9)
self.l_error('discover','pyharmony discover failed. May need to restart this nodeserver: {}'.format(err), exc_info=True)
self.l_info('discover','harmony_discover: {0}'.format(discover_result))
#
# Add the nodes
#
self.setDriver('GV7', 3)
#
# First from customParams.
#
for param in self.polyConfig['customParams']:
# Look for customParam starting with hub_
match = re.match( "hub_(.*)", param, re.I)
self.l_info('discover','param={} match={}'.format(param,match))
if match is not None:
# The hub address is everything following the hub_
address = match.group(1)
self.l_info('discover','process param={0} address={1}'.format(param,address))
# Get the customParam value which is json code
# { "name": "HarmonyHub FamilyRoom", "host": "192.168.1.86" }
cfg = self.polyConfig['customParams'][param]
cfgd = None
try:
cfgd = json.loads(cfg)
except:
err = sys.exc_info()[0]
self.l_error('discover','failed to parse cfg={0} Error: {1}'.format(cfg,err))
if cfgd is not None:
# Check that name and host are defined.
addit = True
if not 'name' in cfgd:
self.l_error('discover','No name in customParam {0} value={1}'.format(param,cfg))
addit = False
if not 'host' in cfgd:
self.l_error('discover','No host in customParam {0} value={1}'.format(param,cfg))
addit = False
if addit:
hub_name = get_valid_node_name(cfgd['name'])
hub_hash = {'address': address, 'name': hub_name, 'host': cfgd['host'], 'port': 5222, 'found': True, 'custom': True}
index = next((idx for (idx, hub) in enumerate(self.hubs) if hub['name'] == hub_name), None)
if index is None:
self.hubs.append(hub_hash)
else:
self.hubs[index] = hub_hash
#
# Next the discovered ones
#
tst = time.strftime("%m%d%Y-%H%M%S")
ust = 'uuid-save-%s' % (tst)
if discover_result is not None:
LOGGER.debug("hubs.list=%s",self.hubs)
for config in discover_result:
LOGGER.debug("hub config: %s",config)
addit = True
if 'current_fw_version' in config:
if config['current_fw_version'] == '4.15.206':
self.l_error('discover','current_fw_version={} which is not supported. See: {}'.
format(
config['current_fw_version'],
'https://community.logitech.com/s/question/0D55A00008D4bZ4SAJ/harmony-hub-firmware-update-fixes-vulnerabilities'
))
addit = False
else:
self.l_error('discover','current_fw_version not in config? Will try to use anyway {}'.format(config))
if addit:
# See if the hub is already in the list.
hub_address = 'h'+id_to_address(config['uuid'],13)
hub_name = get_valid_node_name(config['friendlyName'])
index = next((idx for (idx, hub) in enumerate(self.hubs) if hub['name'] == hub_name), None)
LOGGER.debug('found index=%s',index)
if index is None:
# Not seen, or is a different name
hub_hash = {
'address': hub_address,
'name': hub_name,
}
self.hubs.append(hub_hash)
else:
# Keep the same address for this hub name.
hub_hash = self.hubs[index]
if 'uuid' in hub_hash:
if hub_hash['uuid'] != config['uuid']:
LOGGER.warning("Seems that hub '%s' uuid changed from '%s' to '%s' will continue using old address %s",hub_name,hub_hash['uuid'],config['uuid'],hub_address)
hub_hash[ust] = hub_hash['uuid']
# These always use the latest data.
hub_hash['date_time'] = tst
hub_hash['host'] = config['ip']
hub_hash['port'] = config['port']
hub_hash['found'] = True
hub_hash['save'] = True
hub_hash['uuid'] = config['uuid']
#
# Write warnings about previously known Hubs
#
for hub in self.hubs:
if not 'found' in hub or not hub['found']:
LOGGER.warning("Previously known hub '%s' did not respond to discover",hub['name'])
self.save_hubs()
#
# Build the profile
# It needs the hub_list set, so we will reset it later.
if self._build_profile():
#
# Now really add them.
self.add_hubs()
# Check on the purge
self.purge(do_delete=False)
def add_hub(self,address,name,host,port,discover=False):
self.l_debug("add_hub","address={0} name='{1}' host={2} port={3}".format(address,name,host,port))
self.addNode(HarmonyHub(self, address, name, host, port, watch=self.watch_mode, discover=discover))
def add_hubs(self):
self._set_num_hubs(0)
for hub in self.hubs:
if not 'found' in hub or hub['found']:
self.add_hub(hub['address'], hub['name'], hub['host'], hub['port'])
self._set_num_hubs(self.num_hubs + 1)
"""
This pulls in the save hub data. Old versions stored this in the
customParams, but since we need it available from install.sh we
switched to using a local file.
"""
def load_hubs(self):
# Hack... if customParams has clear_hubs=1 then just clear them :(
# This is the only way to clear a bad IP address until my changes to pyharmony are accepted.
cdata = self.polyConfig['customParams']
param_name = 'clear_hubs'
if param_name in self.polyConfig['customParams'] and int(self.polyConfig['customParams'][param_name]) == 1:
self.l_info("load_hubs","Clearing known hubs, you will need to run discover again since customParam {0} = {1}".format(param_name,self.polyConfig['customParams'][param_name]))
self.clear_hubs()
self.hubs = list()
else:
# If hubs exists in the customData, convert to .hubs list and save the json
if 'hubs' in self.polyConfig['customData']:
# Turn customData hubs hash into a list...
self.l_info("load_hubs","Converting hubs from Polyglot DB to local file for {0}".format(self.polyConfig['customData']))
# From: self.polyConfig['customData']['hubs'][address] = {'name': name, 'host': host, 'port': port}
for address in self.polyConfig['customData']['hubs']:
hub_c = deepcopy(self.polyConfig['customData']['hubs'][address])
hub_c['address'] = address
self.hubs.append(hub_c)
# Save the new json
if self.save_hubs():
del self.polyConfig['customData']['hubs']
self.saveCustomData(self.polyConfig['customData'])
if 'hubs' in self.polyConfig['customData']:
# WTF, it wasn't deleted?
self.l_error("load_hubs","customData['hubs'] was not deleted? {0}".format(self.polyConfig))
else:
self.l_info("load_hubs","customData['hubs'] was deleted".format(self.polyConfig))
# Need to generate new profile
self.l_info("load_hubs","Building profile since data was migrated to external file.")
self.build_profile()
else:
self.hubs = load_hubs_file(LOGGER)
if not self.hubs:
self.hubs = list()
# Temp test to put them back...
#hdata = dict()
#for hub in self.hubs:
# hdata[hub['address']] = hub
#self.polyConfig['customData']['hubs'] = hdata
#self.saveCustomData(self.polyConfig['customData'])
#self.l_info("load_hubs","Force adding back customData['hubs'] {0}".format(self.polyConfig))
# Always clear it so the default value shows for the user.
self.addCustomParam({param_name: 0})
def save_hubs(self):
return save_hubs_file(LOGGER,self.hubs)
def clear_hubs(self):
# Clear how many hubs we manage
self._set_num_hubs(0)
def load_config(self):
self.harmony_config = load_config_file(LOGGER)
def delete(self):
"""
Example
This is sent by Polyglot upon deletion of the NodeServer. If the process is
        co-resident and controlled by Polyglot, it will be terminated within 5 seconds
of receiving this message.
"""
self.l_info('delete','Oh God I\'m being deleted. Nooooooooooooooooooooooooooooooooooooooooo.')
def _set_num_hubs(self, value):
self.num_hubs = value
self.l_info("_set_num_hubs","{}".format(self.num_hubs))
self.setDriver('GV3', self.num_hubs)
return True
def purge(self,do_delete=False):
LOGGER.info("%s starting do_delete=%s",self.lpfx,do_delete)
self.removeNoticesAll()
#LOGGER.debug("%s config=",self.lpfx,config)
#
# Check for removed activities or devices
#
# This can change while we are checking if another hub is being added...
#LOGGER.debug("%s",self.controller.poly.config)
# These are all the nodes from the config, not the real nodes we added...
nodes = self.controller.poly.config['nodes'].copy()
        # Pattern match hub addresses
        pch = re.compile('h([a-f0-9]+)$')
        # Pattern match activity and device addresses
        pcad = re.compile(r'(.)(\d+)$')
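        # Address forms these patterns are matching (as built elsewhere in this nodeserver):
        #   hub:      'h' + hex id        e.g. 'h0123456789abc'
        #   activity: 'a' + numeric id    e.g. 'a12345678'
        #   device:   'd' + numeric id    e.g. 'd87654321'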
activities = self.harmony_config['info']['activities']
devices = self.harmony_config['info']['devices']
msg_pfx = "Deleting" if do_delete else "Want to delete"
delete_cnt = 0
# Check if we still have them.
for node in nodes:
address = node['address']
if address != self.address:
#LOGGER.info("%s Checking Node: %s",self.lpfx,node)
LOGGER.info("%s Checking Node: %s",self.lpfx,address)
match = pch.match(address)
LOGGER.debug(" Match Hub: %s", match)
if match:
id = match.group(1)
#LOGGER.debug("Got: %s %s", type,match)
LOGGER.debug('%s Check if Hub %s "%s" id=%s still exists',self.lpfx,address,node['name'],id)
ret = next((d for d in self.hubs if d['address'] == address), None)
LOGGER.debug('%s Got: %s',self.lpfx,ret)
if ret is None:
delete_cnt += 1
msg = '%s Hub that is no longer found %s "%s"' % (msg_pfx,address,node['name'])
LOGGER.warning('%s %s',self.lpfx,msg)
self.addNotice(msg)
if do_delete:
self.controller.poly.delNode(address)
else:
match = pcad.match(address)
LOGGER.debug(" Match AD: %s", match)
if match:
type = match.group(1)
id = int(match.group(2))
LOGGER.debug(" np: %s", node['primary'])
if node['primary'] in self.nodes:
pname = self.nodes[node['primary']].name
else:
pname = node['primary']
#LOGGER.debug("Got: %s %s", type,match)
if type == 'a':
LOGGER.debug('%s Check if Activity %s "%s" id=%s still exists',self.lpfx,address,node['name'],id)
item = next((d for d in activities if int(d['id']) == id), None)
LOGGER.debug('%s Got: %s',self.lpfx,item)
if item is None or item['cnt'] == 0:
delete_cnt += 1
msg = '%s Activity for "%s" that is no longer used %s "%s"' % (msg_pfx,pname,address,node['name'])
LOGGER.warning('%s %s',self.lpfx,msg)
self.addNotice(msg)
if do_delete:
self.controller.poly.delNode(address)
elif type == 'd':
LOGGER.debug('%s Check if Device %s "%s" id=%s still exists',self.lpfx,address,node['name'],id)
item = next((d for d in devices if int(d['id']) == id), None)
LOGGER.debug('%s Got: %s',self.lpfx,item)
if item is None or item['cnt'] == 0:
delete_cnt += 1
msg = '%s Device for "%s" that is no longer used %s "%s"' % (msg_pfx,pname,address,node['name'])
LOGGER.warning('%s %s',self.lpfx,msg)
self.addNotice(msg)
if do_delete:
self.controller.poly.delNode(address)
else:
LOGGER.warning('%s Unknown type "%s" "%s" id=%s still exists',self.lpfx,type,address,node['name'])
if delete_cnt > 0 and not do_delete:
self.addNotice("Please run 'Purge Execute' on %s in Admin Console" % self.name)
LOGGER.info("%s done",self.lpfx)
self.purge_run = True
def l_info(self, name, string):
LOGGER.info("%s:%s: %s" % (self.id,name,string))
def l_error(self, name, string, exc_info=False):
LOGGER.error("%s:%s: %s" % (self.id,name,string), exc_info=exc_info)
def l_warning(self, name, string):
LOGGER.warning("%s:%s: %s" % (self.id,name,string))
def l_debug(self, name, string):
LOGGER.debug("%s:%s: %s" % (self.id,name,string))
# Just calls build_profile with poll_hubs=False
def update_profile(self):
self.build_profile(False)
def build_profile(self,poll_hubs=True):
"""
Start the build_profile in a thread so we don't cause timeouts :(
"""
if poll_hubs:
self.profile_thread = Thread(target=self._build_profile)
else:
self.profile_thread = Thread(target=self._update_profile)
self.profile_thread.start()
def _build_profile(self):
"""
Build the profile by polling the hubs
"""
self.setDriver('GV7', 4)
# This writes all the profile data files and returns our config info.
wrote_profile = False
try:
config_data = write_profile(LOGGER,self.hubs)
wrote_profile = True
except (Exception) as err:
self.l_error('build_profile','write_profile failed: {}'.format(err), exc_info=True)
self.setDriver('GV7', 7)
# Reload the config we just generated.
self.load_config()
#
# Upload the profile
#
st = self.install_profile()
#
# Restart the hubs since the config data files may have changed.
#
if not self.first_run:
self.restart_hubs()
return st
def restart_hubs(self):
self.l_debug('restart_hubs','restarting hubs')
for hub in self.hubs:
address = hub['address']
if address in self.nodes:
self.nodes[address].restart()
else:
self.l_debug('restart_hubs','hub {} does not seem to exist yet'.format(address))
def _update_profile(self):
"""
Build the profile from the previously saved info
"""
self.setDriver('GV7', 4)
# This writes all the profile data files and returns our config info.
try:
config_data = write_profile(LOGGER,self.hubs,False)
except (Exception) as err:
self.l_error('build_profile','write_profile failed: {}'.format(err), exc_info=True)
self.setDriver('GV7', 7)
# Reload the config we just generated, it shouldn't update, but it might.
self.load_config()
# Upload the profile
st = self.install_profile()
return st
def install_profile(self):
self.setDriver('GV7', 5)
try:
self.poly.installprofile()
except:
err = sys.exc_info()[0]
self.setDriver('GV7', 8)
self.l_error('discovery','Install Profile Error: {}'.format(err))
return False
# Now a reboot is required
# TODO: This doesn't really mean it was complete, a response is needed from polyglot,
# TODO: which is on the enhancement list.
self.setDriver('GV7', 6)
return True
def set_all_logs(self,level):
LOGGER.setLevel(level)
logging.getLogger('sleekxmpp').setLevel(logging.ERROR)
logging.getLogger('requests').setLevel(level)
logging.getLogger('urllib3').setLevel(level)
logging.getLogger('pyharmony').setLevel(level)
def set_debug_level(self,level):
# First run will be None, so default is all
if level is None:
level = 0
else:
level = int(level)
self.setDriver('GV4', level)
# 0=All 10=Debug are the same because 0 (NOTSET) doesn't show everything.
if level == 0 or level == 10:
self.set_all_logs(logging.DEBUG)
elif level == 20:
self.set_all_logs(logging.INFO)
elif level == 30:
self.set_all_logs(logging.WARNING)
elif level == 40:
self.set_all_logs(logging.ERROR)
elif level == 50:
self.set_all_logs(logging.CRITICAL)
else:
self.l_error("set_debug_level","Unknown level {0}".format(level))
def set_watch_mode(self,val):
if val is None:
self.l_debug("set_watch_mode","{0}".format(val))
val = 1
self.watch_mode = True if int(val) == 1 else False
self.l_debug("set_watch_mode","{0}={1}".format(val,self.watch_mode))
for hub in self.hubs:
address = hub['address']
if address in self.nodes:
self.nodes[address].set_watch(self.watch_mode)
self.setDriver('GV10',val)
def _cmd_discover(self, command):
self.discover()
def _cmd_purge_check(self,command):
self.l_info("_cmd_purge","building...")
self.purge(do_delete=False)
def _cmd_purge_execute(self,command):
self.l_info("_cmd_purge","building...")
self.purge(do_delete=True)
def _cmd_build_profile(self,command):
self.l_info("_cmd_build_profile","building...")
self.build_profile()
def _cmd_install_profile(self,command):
self.l_info("_cmd_install_profile","installing...")
self.poly.installprofile()
def _cmd_update_profile(self,command):
self.l_info("_cmd_update_profile","...")
self.update_profile()
def _cmd_set_debug_mode(self,command):
val = int(command.get('value'))
self.l_info("_cmd_set_debug_mode",val)
self.set_debug_level(val)
def _cmd_set_discover_mode(self,command):
val = int(command.get('value'))
self.l_info("_cmd_set_discover_mode",val)
self.setDriver('GV8', val)
def _cmd_set_activity_method(self,command):
val = int(command.get('value'))
self.l_info("_cmd_set_activity_method",val)
self.setDriver('GV9', val)
self.activity_method = val # The default
def _cmd_set_shortpoll(self,command):
val = int(command.get('value'))
self.l_info("_cmd_set_short_poll",val)
self.setDriver('GV5', val)
self.polyConfig['shortPoll'] = val
def _cmd_set_longpoll(self,command):
val = int(command.get('value'))
self.l_info("_cmd_set_log_poll",val)
self.setDriver('GV6', val)
self.polyConfig['longPoll'] = val
def _cmd_set_watch_mode(self,command):
val = int(command.get('value'))
self.set_watch_mode(val)
id = 'HarmonyController'
"""
Commands:
"""
commands = {
'QUERY': query,
'DISCOVER': _cmd_discover,
'BUILD_PROFILE': _cmd_build_profile,
'PURGE_CHECK': _cmd_purge_check,
'PURGE_EXECUTE': _cmd_purge_execute,
'INSTALL_PROFILE': _cmd_install_profile,
'UPDATE_PROFILE': _cmd_update_profile,
'SET_DEBUGMODE': _cmd_set_debug_mode,
'SET_SHORTPOLL': _cmd_set_shortpoll,
'SET_LONGPOLL': _cmd_set_longpoll,
'SET_DI_MODE': _cmd_set_discover_mode,
'SET_ACTIVITY_METHOD': _cmd_set_activity_method,
'SET_WATCH_MODE': _cmd_set_watch_mode
}
"""
Driver Details:
"""
drivers = [
{'driver': 'ST', 'value': 1, 'uom': 2}, # bool: Connection status (managed by polyglot)
# No longer used.
#{'driver': 'GV1', 'value': 0, 'uom': 56}, # float: Version of this code (Major)
#{'driver': 'GV2', 'value': 0, 'uom': 56}, # float: Version of this code (Minor)
{'driver': 'GV3', 'value': 0, 'uom': 25}, # integer: Number of the number of hubs we manage
{'driver': 'GV4', 'value': 0, 'uom': 25}, # integer: Log/Debug Mode
{'driver': 'GV5', 'value': 5, 'uom': 25}, # integer: shortpoll
{'driver': 'GV6', 'value': 60, 'uom': 25}, # integer: longpoll
{'driver': 'GV7', 'value': 0, 'uom': 25}, # bool: Profile status
{'driver': 'GV8', 'value': 1, 'uom': 25}, # bool: Auto Discover
        {'driver': 'GV9', 'value': 2, 'uom': 25}, # integer: Activity Method
        {'driver': 'GV10', 'value': 2, 'uom': 2} # bool: Watch Mode
]
``` |
{
"source": "jimboca/udi-poly-Camect-V2",
"score": 2
} |
#### File: udi-poly-Camect-V2/nodes/DetectedObject.py
```python
from nodes.BaseNode import BaseNode
from polyinterface import LOGGER
from nodes import BaseNode
from node_funcs import id_to_address,get_valid_node_name
from const import DETECTED_OBJECT_MAP
class DetectedObject(BaseNode):
id = 'objdet' # Placeholder, gets overwritten in __init__
drivers = [
{'driver': 'ST', 'value': 0, 'uom': 2}, # Enabled
]
def __init__(self, controller, primary, otype):
self.id = otype
self.map = DETECTED_OBJECT_MAP[otype]
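        # Assumed shape (illustrative): DETECTED_OBJECT_MAP[otype] maps each detected-object
        # name to its driver slot, e.g. {'person': {'num': 1}, 'vehicle': {'num': 2}},
        # which becomes drivers GV1, GV2, ... in the loop below.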
LOGGER.debug(f"Adding DetectedObject {otype} for {primary.address}:{primary.name}")
address = f'{primary.address}_{otype}'[:14]
name = f'{primary.name} {otype}'
super(DetectedObject, self).__init__(controller, primary.address, address, name)
self.dname_to_driver = {}
self.lpfx = '%s:%s' % (self.address,self.name)
for obj_name in self.map:
dv = 'GV' + str(self.map[obj_name]['num'])
self.drivers.append({'driver': dv, 'value': 0, 'uom': 2})
# Hash of my detected objects to the driver
self.dname_to_driver[obj_name] = dv
def start(self):
LOGGER.debug(f'{self.lpfx}')
self.set_driver('ST',0)
for dn in self.dname_to_driver:
self.set_driver(self.dname_to_driver[dn], 0)
def shortPoll(self):
pass
def longPoll(self):
pass
def clear(self):
if int(self.get_driver('ST')) == 1:
LOGGER.debug(f'{self.lpfx}')
self.reportCmd("DOF",2)
for obj in self.dname_to_driver:
self.set_driver(self.dname_to_driver[obj], 0)
# This is called by parent when object is detected
def turn_on(self,obj):
LOGGER.debug(f"{self.lpfx}")
self.reportCmd("DON",2)
self.set_driver(self.dname_to_driver[obj],1)
# This is called by parent when object is no longer detected
def turn_off(self,obj):
LOGGER.debug(f"{self.lpfx}")
self.reportCmd("DOF",2)
self.set_driver(self.dname_to_driver[obj],0)
def cmd_on(self, command=None):
LOGGER.debug(f"{self.lpfx} command={command} ST={self.get_driver('ST')}")
self.set_driver('ST', 1)
def cmd_off(self, command=None):
LOGGER.debug(f"{self.lpfx} command={command} ST={self.get_driver('ST')}")
        self.set_driver('ST', 0)
def query(self,command=None):
LOGGER.debug(f'{self.lpfx}')
self.reportDrivers()
hint = [1,2,3,4]
commands = {
'DON': cmd_on,
'DOF': cmd_off,
}
``` |
{
"source": "jimboca/udi-poly-ecobee",
"score": 2
} |
#### File: udi-poly-ecobee/nodes/Controller.py
```python
from asyncio import format_helpers
from udi_interface import Node,LOGGER,Custom,LOG_HANDLER
import sys
import json
import time
import http.client
import urllib.parse
from datetime import datetime
import os
import os.path
import re
import logging
from copy import deepcopy
from pgSession import pgSession
from nodes import Thermostat
from node_funcs import *
ECOBEE_API_URL = 'api.ecobee.com'
class Controller(Node):
def __init__(self, poly, primary, address, name):
super(Controller, self).__init__(poly, primary, address, name)
self.name = 'Ecobee Controller'
self.tokenData = {}
self.msgi = {}
self.in_discover = False
self.discover_st = False
self.refreshingTokens = False
self.pinRun = False
self._last_dtns = False
self.hb = 0
self.ready = False
self.waiting_on_tokens = False
self.use_oauth = False
self.api_key = None
self.api_key_param = None
self.n_queue = []
self.debug_level = 0
#
self.handler_config_st = None
self.handler_config_done_st = None
self.handler_params_st = None
self.handler_nsdata_st = None
self.handler_data_st = None
self.Notices = Custom(poly, 'notices')
self.Data = Custom(poly, 'customdata')
self.Params = Custom(poly, 'customparams')
self.Notices = Custom(poly, 'notices')
#self.TypedParameters = Custom(poly, 'customtypedparams')
#self.TypedData = Custom(poly, 'customtypeddata')
poly.subscribe(poly.START, self.handler_start, address)
poly.subscribe(poly.CONFIG, self.handler_config)
poly.subscribe(poly.POLL, self.handler_poll)
poly.subscribe(poly.DISCOVER, self.discover)
poly.subscribe(poly.STOP, self.handler_stop)
poly.subscribe(poly.CUSTOMDATA, self.handler_data)
poly.subscribe(poly.CUSTOMPARAMS, self.handler_params)
poly.subscribe(poly.CUSTOMNS, self.handler_nsdata)
#poly.subscribe(poly.CUSTOMTYPEDPARAMS, self.handler_typed_params)
#poly.subscribe(poly.CUSTOMTYPEDDATA, self.handler_typed_data)
poly.subscribe(poly.LOGLEVEL, self.handler_log_level)
poly.subscribe(poly.CONFIGDONE, self.handler_config_done)
poly.subscribe(poly.ADDNODEDONE, self.node_queue)
poly.ready()
poly.addNode(self, conn_status="ST")
'''
node_queue() and wait_for_node_event() create a simple way to wait
for a node to be created. The nodeAdd() API call is asynchronous and
will return before the node is fully created. Using this, we can wait
until it is fully created before we try to use it.
'''
def node_queue(self, data):
self.n_queue.append(data['address'])
def wait_for_node_done(self):
while len(self.n_queue) == 0:
time.sleep(0.1)
self.n_queue.pop()
def add_node(self,node):
anode = self.poly.addNode(node)
LOGGER.debug(f'got {anode}')
self.wait_for_node_done()
if anode is None:
LOGGER.error('Failed to add node address')
return anode
def handler_start(self):
self.Notices.clear()
#serverdata = self.poly.get_server_data(check_profile=False)
LOGGER.info(f"Started Ecobee NodeServer {self.poly.serverdata['version']}")
self.heartbeat()
#
# Wait for all handlers to finish
#
cnt = 10
        while ((self.handler_config_done_st is None or self.handler_params_st is None
                or self.handler_nsdata_st is None or self.handler_data_st is None
                or self.handler_config_st is None) and cnt > 0):
LOGGER.warning(f'Waiting for all to be loaded config={self.handler_config_st} config_done={self.handler_config_done_st} params={self.handler_params_st} data={self.handler_data_st} nsdata={self.handler_nsdata_st}... cnt={cnt}')
time.sleep(1)
cnt -= 1
if cnt == 0:
LOGGER.error("Timed out waiting for handlers to startup")
self.exit()
#
# Force to false, and successful communication will fix it
self.set_ecobee_st(False)
#
# Start the session
#
self.get_session()
#
# Cloud uses OAuth, local users PIN
#
self.pg_test = False
if self.use_oauth:
self.grant_type = 'authorization_code'
self.api_key = self.serverdata['api_key']
# TODO: Need a better way to tell if we are on pgtest!
# "logBucket": "pgc-test-logbucket-19y0vctj4zlk5",
if self.poly.stage == 'test':
self.pg_test = True
LOGGER.warning("Looks like we are running on to pgtest")
self.redirect_url = 'https://pgtest.isy.io/api/oauth/callback'
else:
LOGGER.warning("Looks like we are running on to pgc")
self.redirect_url = 'https://polyglot.isy.io/api/oauth/callback'
else:
self.grant_type = 'ecobeePin'
self.redirect_url = None
#
# Discover
#
self.ready = True
self.discover()
LOGGER.debug('done')
def handler_config(self, cfg_data):
LOGGER.info(f'cfg_data={cfg_data}')
self.cfg_longPoll = int(cfg_data['longPoll'])
self.handler_config_st = True
def handler_config_done(self):
LOGGER.info('enter')
self.poly.addLogLevel('DEBUG_SESSION',9,'Debug + Session')
self.poly.addLogLevel('DEBUG_SESSION_VERBOSE',8,'Debug + Session Verbose')
self.handler_config_done_st = True
LOGGER.info('exit')
def handler_poll(self, polltype):
if polltype == 'longPoll':
self.longPoll()
elif polltype == 'shortPoll':
self.shortPoll()
def shortPoll(self):
if not self.ready:
LOGGER.debug("{}:shortPoll: not run, not ready...".format(self.address))
return False
if self.in_discover:
LOGGER.debug("{}:shortPoll: Skipping since discover is still running".format(self.address))
return
if self.waiting_on_tokens is False:
LOGGER.debug("Nothing to do...")
return
elif self.waiting_on_tokens == "OAuth":
LOGGER.debug("{}:shortPoll: Waiting for user to authorize...".format(self.address))
else:
# Must be waiting on our PIN Authorization
LOGGER.debug("{}:shortPoll: Try to get tokens...".format(self.address))
if self._getTokens(self.waiting_on_tokens):
self.Notices.clear()
LOGGER.info("shortPoll: Calling discover now that we have authorization...")
self.discover()
def longPoll(self):
# Call discovery if it failed on startup
LOGGER.debug("{}:longPoll".format(self.address))
self.heartbeat()
if not self.ready:
LOGGER.debug("{}:longPoll: not run, not ready...".format(self.address))
return False
if self.waiting_on_tokens is not False:
LOGGER.debug("{}:longPoll: not run, waiting for user to authorize...".format(self.address))
return False
if self.in_discover:
LOGGER.debug("{}:longPoll: Skipping since discover is still running".format(self.address))
return
if self.discover_st is False:
LOGGER.info("longPoll: Calling discover...")
self.discover()
self.updateThermostats()
def heartbeat(self):
LOGGER.debug('heartbeat hb={}'.format(self.hb))
if self.hb == 0:
self.reportCmd("DON",2)
self.hb = 1
else:
self.reportCmd("DOF",2)
self.hb = 0
# sends a stop command for the nodeserver to Polyglot
def exit(self):
LOGGER.info('Asking Polyglot to stop me.')
self.poly.stop()
def delete(self):
LOGGER.warning("Nodeserver is being deleted...")
# Ecobee delete tokens not working, need info from Ecobee
#if self.ecobeeDelete():
# self.tokenData = {}
def handler_log_level(self,level):
LOGGER.info(f'enter: level={level}')
if level['level'] < 10:
LOGGER.info("Setting basic config to DEBUG...")
LOG_HANDLER.set_basic_config(True,logging.DEBUG)
            # 9 & 8 increase pgsession debug level
            if level['level'] == 9:
                self.debug_level = 1
            elif level['level'] == 8:
                self.debug_level = 2
else:
LOGGER.info("Setting basic config to WARNING...")
LOG_HANDLER.set_basic_config(True,logging.WARNING)
#logging.getLogger("elkm1_lib.elk").setLevel(slevel)
LOGGER.info(f'exit: level={level}')
def handler_nsdata(self, key, data):
LOGGER.debug(f"key={key} data={data}")
if key != "nsdata":
LOGGER.info(f"Ignoring key={key} data={data}")
return
if data is None:
LOGGER.warning(f"No NSDATA... Must be running locally key={key} data={data}")
self.handler_nsdata_st = False
return
if 'nsdata' in key:
LOGGER.info('Got nsdata update {}'.format(data))
# Temporary, should be fixed in next version of PG3
if data is None:
msg = "No NSDATA Returned by Polyglot"
LOGGER.error(msg)
self.Notices['nsdata'] = msg
self.handler_nsdata_st = False
return
self.Notices.delete('nsdata')
try:
#jdata = json.loads(data)
if self.use_oauth:
self.api_key = data['api_key_oauth']
else:
self.api_key = data['api_key_pin']
except:
LOGGER.error(f'failed to parse nsdata={data}',exc_info=True)
self.handler_nsdata_st = False
return
self.handler_nsdata_st = True
def handler_data(self,data):
LOGGER.debug(f'enter: Loading data {data}')
if data is None:
LOGGER.warning("No custom data, must be firt run or never authorized")
self.handler_data_st = False
return
self.Data.load(data)
if 'tokenData' in data:
self.tokenData = data['tokenData']
self.handler_data_st = True
def handler_params(self,params):
LOGGER.debug(f'enter: Loading params {params}')
self.Params.load(params)
"""
Check all user params are available and valid
"""
# Assume it's good unless it's not
st = True
#
# In local install must manually supply api_key_pin to test.
#
if 'api_key' in self.Params.keys():
if self.api_key_param != self.Params['api_key']:
self.api_key_param = self.Params['api_key']
self.api_key = self.api_key_param
LOGGER.info(f'Got api_key from user params {self.api_key_param}')
if self.handler_params_st is not None:
# User changed pin, do authorize
self.authorize("New user pin detected, will re-authorize...")
self.handler_params_st = st
LOGGER.debug(f'exit: st={st}')
def get_session(self):
self.session = pgSession(self,self.name,LOGGER,ECOBEE_API_URL,debug_level=self.debug_level)
def authorized(self):
if 'access_token' in self.tokenData:
st = True
else:
st = False
LOGGER.debug(f'exit: st={st}')
return st
def authorize(self,message):
if self.api_key is None:
msg = "api_key is not defined, must be running local version or there was an error retreiving it from PG3? Must fix or add custom param for local"
LOGGER.error(msg)
self.Notices['authorize'] = msg
return
self.Notices['authorize'] = message
if self.use_oauth is True:
self._getOAuth()
else:
self._getPin()
def _reAuth(self, reason):
# Need to re-auth!
        if self.tokenData is None or 'access_token' not in self.tokenData:
LOGGER.error(f'No existing tokenData in Data: {self.tokenData}')
# Save the old token for debug
self.Data['tokenData_old'] = self.tokenData
self.tokenData = {}
self.authorize(f"Must Re-Authorize because {reason}")
def _getPin(self):
# Ask Ecobee for our Pin and present it to the user in a notice
res = self.session_get('authorize',
{
'response_type': 'ecobeePin',
'client_id': self.api_key,
'scope': 'smartWrite'
})
if res is False:
self.refreshingTokens = False
return False
res_data = res['data']
res_code = res['code']
if 'ecobeePin' in res_data:
msg = 'Please <a target="_blank" href="https://www.ecobee.com/consumerportal/">Signin to your Ecobee account</a>. Click on Profile > My Apps > Add Application and enter PIN: <b>{}</b> You have 10 minutes to complete this. The NodeServer will check every 60 seconds.'.format(res_data['ecobeePin'])
LOGGER.info(f'_getPin: {msg}')
self.Notices[f'getPin'] = msg
# This will tell shortPoll to check for PIN
self.waiting_on_tokens = res_data
else:
msg = f'ecobeePin Failed code={res_code}: {res_data}'
self.Notices['getPin'] = msg
def _getOAuthInit(self):
"""
See if we have the oauth data stored already
"""
sdata = {}
if self.use_oauth:
error = False
if 'clientId' in self.poly.init['oauth']:
sdata['api_client'] = self.poly.init['oauth']['clientId']
else:
LOGGER.warning('Unable to find Client ID in the init oauth data: {}'.format(self.poly.init['oauth']))
error = True
if 'clientSecret' in self.poly.init['oauth']:
sdata['api_key'] = self.poly.init['oauth']['clientSecret']
else:
LOGGER.warning('Unable to find Client Secret in the init oauth data: {}'.format(self.poly.init['oauth']))
error = True
if error:
return False
return sdata
def _getOAuth(self):
# Do we have it?
sdata = self._getOAuthInit()
LOGGER.debug("_getOAuth: sdata={}".format(sdata))
if sdata is not False:
LOGGER.debug('Init={}'.format(sdata))
self.serverdata['api_key'] = sdata['api_key']
self.serverdata['api_client'] = sdata['api_client']
else:
url = 'https://{}/authorize?response_type=code&client_id={}&redirect_uri={}&state={}'.format(ECOBEE_API_URL,self.api_key,self.redirect_url,self.poly.init['worker'])
msg = 'No existing Authorization found, Please <a target="_blank" href="{}">Authorize access to your Ecobee Account</a>'.format(url)
self.Notices['oauth'] = msg
LOGGER.warning(msg)
self.waiting_on_tokens = "OAuth"
def oauth(self, oauth):
LOGGER.info('OAUTH Received: {}'.format(oauth))
if 'code' in oauth:
if self._getTokens(oauth):
self.Notices.clear()
self.discover()
def _expire_delta(self):
if not 'expires' in self.tokenData:
return False
ts_exp = datetime.strptime(self.tokenData['expires'], '%Y-%m-%dT%H:%M:%S')
return ts_exp - datetime.now()
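# _checkTokens: wait out any in-progress refresh, then verify the stored
# access_token is not within ten long-poll intervals of expiring before an
# API call proceeds; refresh it when it is.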
def _checkTokens(self):
if self.refreshingTokens:
LOGGER.error('Waiting for token refresh to complete...')
while self.refreshingTokens:
time.sleep(.1)
if 'access_token' in self.tokenData:
exp_d = self._expire_delta()
if exp_d is not False:
# We allow for 10 long polls to refresh the token...
if exp_d.total_seconds() < self.cfg_longPoll * 10:
LOGGER.info('Tokens {} expires {} will expire in {} seconds, so refreshing now...'.format(self.tokenData['refresh_token'],self.tokenData['expires'],exp_d.total_seconds()))
return self._getRefresh()
else:
# Only print this once, then at most once a minute...
sd = True
if 'ctdt' in self.msgi:
md = datetime.now() - self.msgi['ctdt']
if md.total_seconds() < 60:
sd = False
if sd:
LOGGER.debug('Tokens valid until: {} ({} seconds, longPoll={})'.format(self.tokenData['expires'],exp_d.seconds,self.cfg_longPoll))
self.msgi['ctdt'] = datetime.now()
self.set_auth_st(True)
return True
else:
LOGGER.error( 'No expires in tokenData:{}'.format(self.tokenData))
else:
self.set_auth_st(False)
LOGGER.error('tokenData or access_token not available')
return False
# This is only called when refresh fails; when it works saveTokens clears
# it, otherwise we get a race on whose customData is saved...
def _endRefresh(self,refresh_data=False):
LOGGER.debug('enter')
if refresh_data is not False:
if 'expires_in' in refresh_data:
ts = time.time() + refresh_data['expires_in']
refresh_data['expires'] = datetime.fromtimestamp(ts).strftime("%Y-%m-%dT%H:%M:%S")
self.token = deepcopy(refresh_data)
self.set_auth_st(True)
self.Notices.clear()
# Save new token data in customData
self.Data['tokenData'] = refresh_data
self.refreshingTokens = False
LOGGER.debug('exit')
def _getRefresh(self):
if 'refresh_token' in self.tokenData:
self.refreshingTokens = True
LOGGER.info('Attempting to refresh tokens...')
res = self.session.post('token',
params = {
'grant_type': 'refresh_token',
'client_id': self.api_key,
'refresh_token': self.tokenData['refresh_token']
})
if res is False:
self.set_ecobee_st(False)
self._endRefresh()
return False
self.set_ecobee_st(True)
res_data = res['data']
res_code = res['code']
if res_data is False:
LOGGER.error('No data returned.')
else:
# https://www.ecobee.com/home/developer/api/documentation/v1/auth/auth-req-resp.shtml
if 'error' in res_data:
self.set_ecobee_st(False)
self.Notices['grant_error'] = f"{res_data['error']}: {res_data['error_description']}"
#self.addNotice({'grant_info': "For access_token={} refresh_token={} expires={}".format(self.tokenData['access_token'],self.tokenData['refresh_token'],self.tokenData['expires'])})
LOGGER.error('Requesting Auth: {} :: {}'.format(res_data['error'], res_data['error_description']))
LOGGER.error('For access_token={} refresh_token={} expires={}'.format(self.tokenData['access_token'],self.tokenData['refresh_token'],self.tokenData['expires']))
# Set auth to false for now, so user sees the error, even if we correct it later...
# JimBo: This can only happen if our refresh_token is bad, so we need to force a re-auth
if res_data['error'] == 'invalid_grant':
exp_d = self._expire_delta()
if exp_d is False:
self._reAuth(f"{res_data['error']} No token expire data available")
else:
if exp_d.total_seconds() > 0:
msg = "But token still has {} seconds to expire, so assuming this is an Ecobee server issue and will try to refresh on next poll...".format(exp_d.total_seconds())
self.Notices['grant_info_2'] = msg
LOGGER.error(msg)
else:
msg = "Token expired {} seconds ago, so will have to re-auth...".format(exp_d.total_seconds())
self.Notices['grant_info_2'] = msg
LOGGER.error(msg)
# May need to remove the re-auth requirement because we get these and they don't seem to be real?
self._reAuth(f"{res_data['error']} and Token expired")
elif res_data['error'] == 'invalid_client':
# We will Ignore it because it may correct itself on the next poll?
LOGGER.error('Ignoring invalid_client error, will try again later for now, but may need to mark it invalid if we see more than once? See: https://github.com/Einstein42/udi-ecobee-poly/issues/60')
#elif res_data['error'] == 'authorization_expired':
# self._reAuth('{}'.format(res_data['error']))
else:
# Should all other errors require re-auth?
#self._reAuth('{}'.format(res_data['error']))
LOGGER.error('Unknown error, not sure what to do here. Please Generate Log Package and Notify Author with a github issue: https://github.com/Einstein42/udi-ecobee-poly/issues')
self._endRefresh()
return False
elif 'access_token' in res_data:
self._endRefresh(res_data)
return True
else:
self._reAuth('refresh_token not found in tokenData={}'.format(self.tokenData))
self._endRefresh()
return False
def _getTokens(self, pinData):
LOGGER.debug('Attempting to get tokens for {}'.format(pinData))
res = self.session.post('token',
params = {
'grant_type': self.grant_type,
'client_id': self.api_key,
'code': pinData['code'],
'redirect_uri': self.redirect_url
})
if res is False:
self.set_ecobee_st(False)
self.set_auth_st(False)
return False
res_data = res['data']
res_code = res['code']
if res_data is False:
LOGGER.error('_getTokens: No data returned.')
self.set_auth_st(False)
return False
if 'error' in res_data:
LOGGER.error('_getTokens: {} :: {}'.format(res_data['error'], res_data['error_description']))
self.set_auth_st(False)
if res_data['error'] == 'authorization_expired' or res_data['error'] == 'invalid_grant':
msg = 'Nodeserver exiting because {}, please restart when you are ready to authorize.'.format(res_data['error'])
LOGGER.error('_getTokens: {}'.format(msg))
self.waiting_on_tokens = False
self.Notices.clear()
self.Notices['getTokens'] = msg
self.exit()
return False
if 'access_token' in res_data:
self.waiting_on_tokens = False
LOGGER.debug('Got tokens successfully.')
self.Notices.clear()
self.Notices['getTokens'] = 'Tokens obtained!'
# Save pin_code
if self.Data.get('pin_code') != pinData['code']:
self.Data['pin_code'] = pinData['code']
self._endRefresh(res_data)
return True
self.set_auth_st(False)
def updateThermostats(self,force=False):
LOGGER.debug("{}:updateThermostats: start".format(self.address))
thermostats = self.getThermostats()
if not isinstance(thermostats, dict):
LOGGER.error('Thermostats instance wasn\'t a dictionary. Skipping...')
return
for thermostatId, thermostat in thermostats.items():
LOGGER.debug("{}:updateThermostats: {}".format(self.address,thermostatId))
if self.checkRev(thermostat):
address = self.thermostatIdToAddress(thermostatId)
tnode = self.poly.getNode(address)
if tnode is None:
LOGGER.error(f"Thermostat id '{thermostatId}' address '{address}' is not in our node list ({node}). thermostat: {{thermostat}}")
else:
LOGGER.debug('Update detected in thermostat {}({}) doing full update.'.format(thermostat['name'], address))
fullData = self.getThermostatFull(thermostatId)
if fullData is not False:
tnode.update(thermostat, fullData)
else:
LOGGER.error('Failed to get updated data for thermostat: {}({})'.format(thermostat['name'], thermostatId))
else:
LOGGER.info("No {} '{}' update detected".format(thermostatId,thermostat['name']))
LOGGER.debug("{}:updateThermostats: done".format(self.address))
def checkRev(self, tstat):
if tstat['thermostatId'] in self.revData:
curData = self.revData[tstat['thermostatId']]
if (tstat['thermostatRev'] != curData['thermostatRev']
or tstat['alertsRev'] != curData['alertsRev']
or tstat['runtimeRev'] != curData['runtimeRev']
or tstat['intervalRev'] != curData['intervalRev']):
return True
return False
def query(self):
self.reportDrivers()
for node in self.poly.nodes():
node.reportDrivers()
def handler_stop(self):
LOGGER.debug('NodeServer stopping...')
self.set_ecobee_st(False)
self.poly.stop()
def thermostatIdToAddress(self,tid):
return 't{}'.format(tid)
def discover(self, *args, **kwargs):
if not self.authorized():
self.authorize("Tried to discover but not authorized")
return False
# True means we are in discovery
if self.in_discover:
LOGGER.info('Discovering Ecobee Thermostats already running?')
return True
self.in_discover = True
self.discover_st = False
try:
self.discover_st = self._discover()
except Exception as e:
LOGGER.error('failed: {}'.format(e),True)
self.discover_st = False
self.in_discover = False
return self.discover_st
def _discover(self, *args, **kwargs):
LOGGER.info('Discovering Ecobee Thermostats')
if not 'access_token' in self.tokenData:
return False
self.revData = {} # Initialize in case we fail
thermostats = self.getThermostats()
if thermostats is False:
LOGGER.error("Discover Failed, No thermostats returned! Will try again on next long poll")
return False
self.revData = deepcopy(thermostats)
#
# Build or update the profile first.
#
self.check_profile(thermostats)
#
# Now add our thermostats
#
for thermostatId, thermostat in thermostats.items():
address = self.thermostatIdToAddress(thermostatId)
tnode = self.poly.getNode(address)
if tnode is None:
fullData = self.getThermostatFull(thermostatId)
if fullData is not False:
tstat = fullData['thermostatList'][0]
useCelsius = True if tstat['settings']['useCelsius'] else False
self.add_node(Thermostat(self, address, address, thermostatId,
'Ecobee - {}'.format(get_valid_node_name(thermostat['name'])),
thermostat, fullData, useCelsius))
return True
def check_profile(self,thermostats):
self.profile_info = get_profile_info(LOGGER)
#
# First get all the climate programs so we can build the profile if necessary
#
climates = dict()
for thermostatId, thermostat in thermostats.items():
# Only get program data if we have the node.
fullData = self.getThermostatSelection(thermostatId,includeProgram=True)
if fullData is not False:
programs = fullData['thermostatList'][0]['program']
climates[thermostatId] = list()
for climate in programs['climates']:
climates[thermostatId].append({'name': climate['name'], 'ref':climate['climateRef']})
LOGGER.debug("check_profile: climates={}".format(climates))
#
# Set Default profile version if not Found
#
LOGGER.info('check_profile: profile_info={}'.format(self.profile_info))
LOGGER.info('check_profile: customData={}'.format(self.Data))
if not 'profile_info' in self.Data:
update_profile = True
elif self.profile_info['version'] == self.Data['profile_info']['version']:
# Check if the climates are different
update_profile = False
LOGGER.info('check_profile: update_profile={} checking climates.'.format(update_profile))
if 'climates' in self.Data:
current = self.Data['climates']
if not update_profile:
# Check if the climates have changed.
for id in climates:
if id in current:
if len(climates[id]) == len(current[id]):
for i in range(len(climates[id])):
if climates[id][i] != current[id][i]:
update_profile = True
else:
update_profile = True
else:
update_profile = True
else:
update_profile = True
else:
update_profile = True
LOGGER.warning('check_profile: update_profile={}'.format(update_profile))
if update_profile:
self.write_profile(climates)
self.poly.updateProfile()
self.Data['profile_info'] = self.profile_info
self.Data['climates'] = climates
def write_profile(self,climates):
pfx = '{}:write_profile:'.format(self.address)
#
# Start the nls with the template data.
#
en_us_txt = "profile/nls/en_us.txt"
make_file_dir(en_us_txt)
LOGGER.info("{0} Writing {1}".format(pfx,en_us_txt))
nls_tmpl = open("template/en_us.txt", "r")
nls = open(en_us_txt, "w")
for line in nls_tmpl:
nls.write(line)
nls_tmpl.close()
# Open the nodedef custom for writing
nodedef_f = 'profile/nodedef/custom.xml'
LOGGER.info("{0} Writing {1}".format(pfx,nodedef_f))
nodedef_h = open(nodedef_f, "w")
nodedef_h.write('<nodedefs>\n')
# Open the editor custom for writing
editor_f = 'profile/editor/custom.xml'
LOGGER.info("{0} Writing {1}".format(pfx,editor_f))
editor_h = open(editor_f, "w")
editor_h.write('<editors>\n')
for id in climates:
# Read thermostat template to write the custom version.
in_h = open('template/thermostat.xml','r')
for line in in_h:
nodedef_h.write(re.sub(r'tstatid',r'{0}'.format(id),line))
in_h.close()
# Read the editor template to write the custom version
in_h = open('template/editors.xml','r')
for line in in_h:
line = re.sub(r'tstatid',r'{0}'.format(id),line)
line = re.sub(r'tstatcnta',r'{0}'.format(len(climateList)-1),line)
# This is minus 3 because we don't allow selecting vacation or smartAway, ...
# But not currently using this because we don't have different list for
# status and programs?
line = re.sub(r'tstatcnt',r'{0}'.format(len(climateList)-5),line)
editor_h.write(line)
in_h.close()
# Then the NLS lines.
nls.write("\n")
nls.write('ND-EcobeeC_{0}-NAME = Ecobee Thermostat {0} (C)\n'.format(id))
nls.write('ND-EcobeeC_{0}-ICON = Thermostat\n'.format(id))
nls.write('ND-EcobeeF_{0}-NAME = Ecobee Thermostat {0} (F)\n'.format(id))
nls.write('ND-EcobeeF_{0}-ICON = Thermostat\n'.format(id))
# ucfirst them all
customList = list()
for i in range(len(climateList)):
customList.append(climateList[i][0].upper() + climateList[i][1:])
# Now see if there are custom names
for i in range(len(climateList)):
name = climateList[i]
# Find this name in the map and replace with our name.
for cli in climates[id]:
if cli['ref'] == name:
customList[i] = cli['name']
LOGGER.debug("{} customList={}".format(pfx,customList))
for i in range(len(customList)):
nls.write("CT_{}-{} = {}\n".format(id,i,customList[i]))
nodedef_h.write('</nodedefs>\n')
nodedef_h.close()
editor_h.write('</editors>\n')
editor_h.close()
nls.close()
LOGGER.info("{} done".format(pfx))
# Calls session.get and converts params to weird ecobee formatting.
def session_get (self,path,data):
if path == 'authorize':
# The authorize call happens before we have an auth token, don't reformat with json
return self.session.get(path,data)
else:
res = self.session.get(path,{ 'json': json.dumps(data) },
auth='{} {}'.format(self.tokenData['token_type'], self.tokenData['access_token'])
)
if res is False:
return res
if res['data'] is False:
return False
LOGGER.debug('res={}'.format(res))
if not 'status' in res['data']:
return res
res_st_code = int(res['data']['status']['code'])
if res_st_code == 0:
return res
LOGGER.error('Checking Bad Status Code {} for {}'.format(res_st_code,res))
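# Ecobee status code 14 means the access token has expired, so refresh and retry;
# code 16 means the user deauthorized the app, which requires a full re-authorize.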
if res_st_code == 14:
LOGGER.error( 'Token has expired, will refresh')
# TODO: Should this be a loop instead ?
if self._getRefresh() is True:
return self.session.get(path,{ 'json': json.dumps(data) },
auth='{} {}'.format(self.tokenData['token_type'], self.tokenData['access_token']))
elif res_st_code == 16:
self._reAuth("session_get: Token deauthorized by user: {}".format(res))
return False
def getThermostats(self):
if not self._checkTokens():
LOGGER.debug('getThermostat failed. Couldn\'t get tokens.')
return False
LOGGER.debug('getThermostats: Getting Summary...')
res = self.session_get('1/thermostatSummary',
{
'selection': {
'selectionType': 'registered',
'selectionMatch': '',
'includesEquipmentStatus': True
},
})
if res is False:
self.set_ecobee_st(False)
return False
self.set_ecobee_st(True)
thermostats = {}
res_data = res['data']
res_code = res['code']
if res_data is False:
LOGGER.error('Ecobee returned code {} but no data? ({})'.format(res_code,res_data))
return thermostats
if 'revisionList' in res_data:
if res_data['revisionList'] is False:
LOGGER.error('Ecobee returned code {} but no revisionList? ({})'.format(res_code,res_data['revisionList']))
for thermostat in res_data['revisionList']:
revisionArray = thermostat.split(':')
thermostats['{}'.format(revisionArray[0])] = {
'name': revisionArray[1],
'thermostatId': revisionArray[0],
'connected': revisionArray[2],
'thermostatRev': revisionArray[3],
'alertsRev': revisionArray[4],
'runtimeRev': revisionArray[5],
'intervalRev': revisionArray[6]
}
return thermostats
def getThermostatFull(self, id):
return self.getThermostatSelection(id,True,True,True,True,True,True,True,True,True,True,True,True)
def getThermostatSelection(self,id,
includeEvents=False,
includeProgram=False,
includeSettings=False,
includeRuntime=False,
includeExtendedRuntime=False,
includeLocation=False,
includeEquipmentStatus=False,
includeVersion=False,
includeUtility=False,
includeAlerts=False,
includeWeather=False,
includeSensors=False
):
if not self._checkTokens():
LOGGER.error('getThermostat failed. Couldn\'t get tokens.')
return False
LOGGER.info('Getting Thermostat Data for {}'.format(id))
res = self.session_get('1/thermostat',
{
'selection': {
'selectionType': 'thermostats',
'selectionMatch': id,
'includeEvents': includeEvents,
'includeProgram': includeProgram,
'includeSettings': includeSettings,
'includeRuntime': includeRuntime,
'includeExtendedRuntime': includeExtendedRuntime,
'includeLocation': includeLocation,
'includeEquipmentStatus': includeEquipmentStatus,
'includeVersion': includeVersion,
'includeUtility': includeUtility,
'includeAlerts': includeAlerts,
'includeWeather': includeWeather,
'includeSensors': includeSensors
}
}
)
if self.debug_level >= 0:
LOGGER.debug(f'done {id}')
if self.debug_level >= 1:
LOGGER.debug(f'data={res}')
if res is False or res is None:
return False
return res['data']
def ecobeePost(self, thermostatId, postData = {}):
if not self._checkTokens():
LOGGER.error('ecobeePost failed. Tokens not available.')
return False
LOGGER.info('Posting Update Data for Thermostat {}'.format(thermostatId))
postData['selection'] = {
'selectionType': 'thermostats',
'selectionMatch': thermostatId
}
res = self.session.post('1/thermostat',params={'json': 'true'},payload=postData,
auth='{} {}'.format(self.tokenData['token_type'], self.tokenData['access_token']),dump=True)
if res is False:
self.refreshingTokens = False
self.set_ecobee_st(False)
return False
self.set_ecobee_st(True)
if 'error' in res:
LOGGER.error('ecobeePost: error="{}" {}'.format(res['error'], res['error_description']))
return False
res_data = res['data']
res_code = res['code']
if 'status' in res_data:
if 'code' in res_data['status']:
if res_data['status']['code'] == 0:
return True
else:
LOGGER.error('Bad return code {}:{}'.format(res_data['status']['code'],res_data['status']['message']))
return False
def ecobeeDelete(self):
if 'access_token' in self.tokenData:
res = self.session.delete("/oauth2/acess_tokens/"+self.tokenData['access_token'])
if res is False:
return False
if 'error' in res:
LOGGER.error('ecobeeDelete: error="{}" {}'.format(res['error'], res['error_description']))
return False
res_data = res['data']
res_code = res['code']
if 'status' in res_data:
if 'code' in res_data['status']:
if res_data['status']['code'] == 204:
LOGGER.info("Revoke successful")
return True
else:
LOGGER.error('Bad return code {}:{}'.format(res_data['status']['code'],res_data['status']['message']))
else:
LOGGER.warning("No access_token to revoke...")
return False
def cmd_poll(self, *args, **kwargs):
LOGGER.debug("{}:cmd_poll".format(self.address))
self.updateThermostats(force=True)
self.query()
def cmd_query(self, *args, **kwargs):
LOGGER.debug("{}:cmd_query".format(self.address))
self.query()
def set_ecobee_st(self,val):
ival = 1 if val else 0
LOGGER.debug("{}:set_ecobee_st: {}={}".format(self.address,val,ival))
self.setDriver('GV1',ival)
def set_auth_st(self,val):
ival = 1 if val else 0
LOGGER.debug("{}:set_auth_st: {}={}".format(self.address,val,ival))
self.setDriver('GV3',ival)
id = 'ECO_CTR'
commands = {
'DISCOVER': discover,
'QUERY': cmd_query,
'POLL': cmd_poll,
}
drivers = [
{'driver': 'ST', 'value': 1, 'uom': 25},
{'driver': 'GV1', 'value': 0, 'uom': 2},
{'driver': 'GV3', 'value': 0, 'uom': 2}
]
```
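The token refresh above keys off an absolute `expires` timestamp that `_endRefresh` derives from the relative `expires_in` returned by the Ecobee token endpoint, and `_checkTokens` refreshes when fewer than ten long-poll intervals remain. The following is a minimal standalone sketch of that bookkeeping; `LONG_POLL` and the helper names are illustrative assumptions, not part of the nodeserver.
```python
# A minimal standalone sketch of the expiry bookkeeping used by _endRefresh and
# _expire_delta above. LONG_POLL and the helper names are illustrative assumptions.
import time
from datetime import datetime

LONG_POLL = 60  # seconds; assumed long-poll interval

def stamp_expiry(token_data):
    """Add an absolute 'expires' timestamp derived from the relative 'expires_in'."""
    if 'expires_in' in token_data:
        ts = time.time() + token_data['expires_in']
        token_data['expires'] = datetime.fromtimestamp(ts).strftime("%Y-%m-%dT%H:%M:%S")
    return token_data

def seconds_until_expiry(token_data):
    """Return remaining seconds, or None when no 'expires' is stored."""
    if 'expires' not in token_data:
        return None
    ts_exp = datetime.strptime(token_data['expires'], '%Y-%m-%dT%H:%M:%S')
    return (ts_exp - datetime.now()).total_seconds()

def needs_refresh(token_data, long_poll=LONG_POLL):
    """Refresh when fewer than ten long-poll intervals remain, as _checkTokens does."""
    remaining = seconds_until_expiry(token_data)
    return remaining is not None and remaining < long_poll * 10

if __name__ == '__main__':
    tokens = stamp_expiry({'access_token': 'example', 'expires_in': 3600})
    print(tokens['expires'], needs_refresh(tokens))
```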
#### File: udi-poly-ecobee/nodes/Sensor.py
```python
from udi_interface import Node,LOGGER
from copy import deepcopy
from const import driversMap
from node_funcs import *
class Sensor(Node):
def __init__(self, controller, primary, address, name, id, parent):
super().__init__(controller.poly, primary, address, name)
self.type = 'sensor'
# self.code = code
self.parent = parent
self.id = id
self.drivers = deepcopy(driversMap[self.id])
controller.poly.subscribe(controller.poly.START, self.handler_start, address)
def handler_start(self):
self.query()
def update(self, sensor):
LOGGER.debug("{}:update:".format(self.address))
LOGGER.debug("{}:update: sensor={}".format(self.address,sensor))
updates = {
'GV1': 2 # Default is N/A
}
# Cross reference from sensor capability to driver
xref = {
'temperature': 'ST',
'humidity': 'CLIHUM',
'occupancy': 'GV1',
'responding': 'GV2',
'dryContact': 'GV3'
}
for item in sensor['capability']:
if item['type'] in xref:
val = item['value']
if val == "true":
val = 1
elif val == "false":
val = 0
if item['type'] == 'temperature':
# temperature unknown seems to mean the sensor is not responding.
if val == 'unknown':
updates[xref['responding']] = 0
else:
updates[xref['responding']] = 1
val = self.parent.tempToDriver(val,True,False)
if val is not False:
updates[xref[item['type']]] = val
else:
LOGGER.error("{}:update: Unknown capabilty: {}".format(self.address,item))
LOGGER.debug("{}:update: updates={}".format(self.address,updates))
for key, value in updates.items():
self.setDriver(key, value)
def query(self, command=None):
self.reportDrivers()
hint = '0x01030200'
commands = {'QUERY': query, 'STATUS': query}
``` |
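To make the capability-to-driver cross reference in `Sensor.update` concrete, here is a hedged, self-contained sketch. The payload shape mirrors Ecobee's remote-sensor capability list, the sample values are made up, and the temperature conversion done by `parent.tempToDriver` is omitted.
```python
# Self-contained sketch of the capability-to-driver mapping done in Sensor.update.
# The payload shape mirrors Ecobee's remote-sensor capability list; values are made up.
XREF = {
    'temperature': 'ST',
    'humidity': 'CLIHUM',
    'occupancy': 'GV1',
    'dryContact': 'GV3',
}

def capabilities_to_updates(sensor):
    updates = {'GV1': 2}  # occupancy defaults to N/A, as in the node above
    for item in sensor.get('capability', []):
        if item['type'] not in XREF:
            continue
        val = item['value']
        if val == 'true':
            val = 1
        elif val == 'false':
            val = 0
        updates[XREF[item['type']]] = val
    return updates

print(capabilities_to_updates({
    'capability': [
        {'type': 'temperature', 'value': '723'},
        {'type': 'occupancy', 'value': 'false'},
    ]
}))
# {'GV1': 0, 'ST': '723'}
```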
{
"source": "jimboca/udi-poly-ELK-v2",
"score": 3
} |
#### File: jimboca/udi-poly-ELK-v2/node_funcs.py
```python
import re,os
def myfloat(value, prec=4):
""" round and return float """
return round(float(value), prec)
# Removes invalid characters for ISY Node address
def get_valid_node_address(name):
# Only allow utf-8 characters
# https://stackoverflow.com/questions/26541968/delete-every-non-utf-8-symbols-froms-string
name = bytes(name, 'utf-8').decode('utf-8','ignore')
# Remove <>`~!@#$%^&*(){}[]?/\;:"'` characters from name
# make it lower case, and only 14 characters
return re.sub(r"[<>`~!@#$%^&*(){}[\]?/\\;:\"']+", "", name.lower()[:14])
# Removes invalid characters for ISY Node name
def get_valid_node_name(name):
# Only allow utf-8 characters
# https://stackoverflow.com/questions/26541968/delete-every-non-utf-8-symbols-froms-string
name = bytes(name, 'utf-8').decode('utf-8','ignore')
# Remove <>`~!@#$%^&*(){}[]?/\;:"'` characters from name
return re.sub(r"[<>`~!@#$%^&*(){}[\]?/\\;:\"']+", "", name)
def get_profile_info(logger):
pvf = 'profile/version.txt'
try:
with open(pvf) as f:
pv = f.read().replace('\n', '')
f.close()
except Exception as err:
logger.error('get_profile_info: failed to read file {0}: {1}'.format(pvf,err), exc_info=True)
pv = 0
return { 'version': pv }
def parse_range(range_in):
range_out = list()
for el in range_in.split(","):
rng = el.split("-",1)
if len(rng) > 1:
#try:
for i in range(int(rng[0]),int(rng[1])):
range_out.append(i)
range_out.append(int(rng[1]))
#except:
# print("range failed")
else:
#try:
range_out.append(int(el))
#except:
# print("el not an int")
return range_out
def is_in_list(el,list_in):
try:
return list_in.index(el)
except ValueError:
return False
def make_file_dir(file_path):
directory = os.path.dirname(file_path)
if not os.path.exists(directory):
# TODO: Trap this?
os.makedirs(directory)
return True
```
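A short usage sketch for the helpers above; it assumes the file is importable as `node_funcs`, and the outputs follow from the implementations shown (note that `is_in_list` returns the index, so a match at position 0 is falsy).
```python
# Usage sketch for the helpers above; assumes this module is importable as node_funcs.
from node_funcs import parse_range, get_valid_node_address, is_in_list

print(parse_range("1-3,5"))                          # [1, 2, 3, 5]
print(get_valid_node_address("Front Door Keypad!"))  # 'front door key' (lowercased, 14 chars, punctuation stripped)
print(is_in_list(5, [1, 2, 5]))                      # 2 (the index); False when absent
```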
#### File: udi-poly-ELK-v2/nodes/Area.py
```python
import time
from threading import Thread,Event
from polyinterface import LOGGER
from node_funcs import get_valid_node_name
from nodes import BaseNode,ZoneNode,KeypadNode
from elkm1_lib.const import (
Max,
ZoneLogicalStatus,
ZonePhysicalStatus,
)
# For faster lookups
BYPASSED = ZoneLogicalStatus['BYPASSED'].value
VIOLATED = ZoneLogicalStatus['VIOLATED'].value
class AreaNode(BaseNode):
def __init__(self, controller, elk):
self.elk = elk
self.init = False
self.status = None
self.state = None
self.zones_bypassed = 0
self.zones_violated = 0
self.zones_logical_status = [None] * Max.ZONES.value
self.zones_physical_status = [None] * Max.ZONES.value
self._zone_nodes = {}
self._keypad_nodes = {}
self.poll_voltages = False
self.ready = False
address = f'area_{self.elk.index + 1}'
name = get_valid_node_name(self.elk.name)
if name == "":
name = f'Area_{self.elk.index + 1}'
super(AreaNode, self).__init__(controller, address, address, name)
def start(self):
self.elk.add_callback(self.callback)
self.set_drivers()
self.reportDrivers()
# elkm1_lib uses zone numbers starting at zero.
for zn in range(Max.ZONES.value):
LOGGER.debug(f'{self.lpfx} index={zn} area={self.controller.elk.zones[zn].area} definition={self.controller.elk.zones[zn].definition}')
# Add zones that are in my area, and are defined.
if self.controller.elk.zones[zn].definition > 0 and self.controller.elk.zones[zn].area == self.elk.index:
LOGGER.info(f"{self.lpfx} area {self.elk.index} {self.elk.name} node={self.name} adding zone node {zn} '{self.controller.elk.zones[zn].name}'")
self._zone_nodes[zn] = self.controller.addNode(ZoneNode(self.controller, self, self, self.controller.elk.zones[zn]))
time.sleep(.1)
for n in range(Max.KEYPADS.value):
if self.controller.elk.keypads[n].area == self.elk.index:
LOGGER.info(f"{self.lpfx} area {self.elk.index} {self.elk.name} node={self.name} adding keypad node {n} '{self.controller.elk.keypads[n]}'")
self._keypad_nodes[n] = self.controller.addNode(KeypadNode(self.controller, self, self, self.controller.elk.keypads[n]))
else:
LOGGER.debug(f"{self.lpfx} area {self.elk.index} {self.elk.name} node={self.name} skipping keypad node {n} '{self.controller.elk.keypads[n]}'")
self.ready = True
def shortPoll(self):
# Only Poll Zones if we want voltages
LOGGER.debug(f'{self.lpfx} ready={self.ready} poll_voltages={self.poll_voltages}')
if not self.ready:
return False
if self.poll_voltages:
for zn in self._zone_nodes:
self._zone_nodes[zn].shortPoll()
def longPoll(self):
pass
# Area:callback: area_1:Home: cs={'last_log': {'event': 1174, 'number': 1, 'index': 0, 'timestamp': '2021-02-06T14:47:00+00:00', 'user_number': 1}}
# user_number=1 was me
def callback(self, element, changeset):
LOGGER.info(f'{self.lpfx} cs={changeset}')
if 'alarm_state' in changeset:
self.set_alarm_state(changeset['alarm_state'])
if 'armed_status' in changeset:
self.set_armed_status(changeset['armed_status'])
if 'arm_up_state' in changeset:
self.set_arm_up_state(changeset['arm_up_state'])
# Need to investigate this more, do we really need this if keypad callback is setting it?
if 'last_log' in changeset:
if 'user_number' in changeset['last_log']:
self.set_user(int(changeset['last_log']['user_number']))
# armed_status:0 arm_up_state:1 alarm_state:0 alarm_memory:None is_exit:False timer1:0 timer2:0 cs={'name': 'Home'}
# {'armed_status': '0', 'arm_up_state': '1', 'alarm_state': '0'}
def set_drivers(self):
LOGGER.info(f'{self.lpfx} Area:{self.elk.index} {self.elk.name}')
self.set_alarm_state()
self.set_armed_status()
self.set_arm_up_state()
self.set_poll_voltages()
self.set_entry_exit_trigger()
self.set_driver('GV3',self.zones_violated)
self.set_driver('GV4',self.zones_bypassed)
#self.setDriver('GV2', pyelk.chime_mode)
# This is called by Keypad or callback
def set_user(self, val, force=False, reportCmd=True):
LOGGER.info(f'{self.lpfx} val={val}')
self.set_driver('GV6',val)
# This is only called by Keypad's
def set_keypad(self, val, force=False, reportCmd=True):
LOGGER.info(f'{self.lpfx} val={val}')
self.set_driver('GV7',val)
# This is only called by Zones's when it goes violated
# This is passed the elkm1_lib zone number, so we add 1 for our zone numbers
def set_last_violated_zone(self, val, force=False, reportCmd=True):
LOGGER.info(f'{self.lpfx} val={val} EntryExitTrigger={self.entry_exit_trigger}')
val = int(val)
self.set_driver('GV8',val+1)
# ELK only sends a violated zone for entry/exit zones when it
# starts the timer, but this option sets it as triggered
# if entry_exit_trigger is enabled.
if self.entry_exit_trigger:
LOGGER.debug(f'{self.lpfx} alarm_state={self.elk.alarm_state} zone.definition={self.controller.elk.zones[val].definition} armed_status={self.elk.armed_status}')
# Say nothing for 'Non Alarm'
if not int(self.controller.elk.zones[val].definition) == 16:
LOGGER.debug("a")
# Mode Stay, Away, Night, or Vacation?
if int(self.elk.armed_status) == 1 or int(self.elk.armed_status) == 2 or int(self.elk.armed_status) == 4 or int(self.elk.armed_status) == 6:
LOGGER.debug("b")
# Send for Entry/Exit Delay
if int(self.controller.elk.zones[val].definition) == 1 or int(self.controller.elk.zones[val].definition) == 2:
LOGGER.debug("c")
self.set_last_triggered_zone(val)
# Night mode?
elif int(self.elk.armed_status) == 4:
LOGGER.debug("d")
# Send for Interior Night Delay
if int(self.controller.elk.zones[val].definition) == 7:
self.set_last_triggered_zone(val)
# This is only called by Zone's when it triggers an alarm
# This is passed the elkm1_lib zone number, so we add 1 for our zone numbers
def set_last_triggered_zone(self,val):
LOGGER.info(f'{self.lpfx} val={val}')
self.set_driver('GV9',val+1)
def set_zone_logical_status(self, zn, st):
LOGGER.info(f'{self.lpfx} zn={zn} st={st}')
self.zones_logical_status[zn] = st
self.zones_bypassed = 0
self.zones_violated = 0
for val in self.zones_logical_status:
if val is not None:
if val == BYPASSED:
self.zones_bypassed += 1
elif val == VIOLATED:
self.zones_violated += 1
self.set_driver('GV3',self.zones_violated)
self.set_driver('GV4',self.zones_bypassed)
def set_alarm_state(self,val=None,force=False):
LOGGER.info(f'{self.lpfx} {val}')
val = self.elk.alarm_state if val is None else int(val)
# Send DON for Violated?
#if val == 2:
# self.reportCmd("DON",2)
#else:
# self.reportCmd("DOF",2)
self.set_driver('ST', val, force=force)
def set_armed_status(self,val=None,force=False):
LOGGER.info(f'{self.lpfx} {val}')
val = self.elk.armed_status if val is None else int(val)
self.set_driver('GV0',val,force=force)
def set_arm_up_state(self,val=None):
LOGGER.info(f'{self.lpfx} {val}')
val = self.elk.arm_up_state if val is None else int(val)
self.set_driver('GV1', val)
def set_poll_voltages(self,val=None):
LOGGER.info(f'{self.lpfx} {val}')
self.set_driver('GV5', val, default=0)
self.poll_voltages = False if self.get_driver('GV5') == 0 else True
def set_entry_exit_trigger(self,val=None):
LOGGER.info(f'{self.lpfx} {val}')
val = self.set_driver('GV10', val, default=1)
self.entry_exit_trigger = False if val == 0 else True
def query(self):
LOGGER.info(f'{self.lpfx}')
self.set_drivers()
self.reportDrivers()
def cmd_set_armed_status(self,command):
val = command.get('value')
LOGGER.info(f'{self.lpfx} elk.arm({val},****')
# val is a string, not integer :(
self.elk.arm(val,self.controller.user_code)
def cmd_set_bypass(self,command):
val = command.get('value')
LOGGER.info(f'{self.lpfx} Calling bypass...')
self.elk.bypass(self.controller.user_code)
def cmd_clear_bypass(self,command):
val = command.get('value')
LOGGER.info(f'{self.lpfx} Calling clear bypass...')
self.elk.clear_bypass(self.controller.user_code)
def cmd_set_poll_voltages(self,command):
val = command.get('value')
LOGGER.info(f'{self.lpfx} {val}')
self.set_poll_voltages(val)
def cmd_set_entry_exit_trigger(self,command):
val = command.get('value')
LOGGER.info(f'{self.lpfx} {val}')
self.set_entry_exit_trigger(val)
"Hints See: https://github.com/UniversalDevicesInc/hints"
hint = [1,2,3,4]
drivers = [
# status
{'driver': 'ST', 'value': 0, 'uom': 25},
{'driver': 'GV0', 'value': 0, 'uom': 25},
{'driver': 'GV1', 'value': 0, 'uom': 25},
{'driver': 'GV2', 'value': 0, 'uom': 25},
{'driver': 'GV3', 'value': 0, 'uom': 25},
{'driver': 'GV4', 'value': 0, 'uom': 25},
{'driver': 'GV5', 'value': 0, 'uom': 2},
{'driver': 'GV6', 'value': 0, 'uom': 25},
{'driver': 'GV7', 'value': 0, 'uom': 25},
{'driver': 'GV8', 'value': 0, 'uom': 25},
{'driver': 'GV9', 'value': 0, 'uom': 25},
{'driver': 'GV10', 'value': 1, 'uom': 2},
]
id = 'area'
commands = {
'SET_ARMED_STATUS': cmd_set_armed_status,
'SET_POLL_VOLTAGES': cmd_set_poll_voltages,
'SET_BYPASS': cmd_set_bypass,
'CLEAR_BYPASS': cmd_clear_bypass,
'SET_ENTRY_EXIT_TRIGGER': cmd_set_entry_exit_trigger,
}
```
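The bypassed/violated counts maintained by `set_zone_logical_status` can be exercised in isolation. The sketch below uses illustrative stand-in values rather than elkm1_lib's `ZoneLogicalStatus` enum, and an arbitrary zone count.
```python
# Standalone sketch of the bypassed/violated tally kept by AreaNode.set_zone_logical_status.
# BYPASSED/VIOLATED are stand-ins for elkm1_lib's enum values; 208 zones is illustrative.
BYPASSED = 'bypassed'
VIOLATED = 'violated'

def tally(zone_statuses):
    bypassed = violated = 0
    for st in zone_statuses:
        if st is None:
            continue
        if st == BYPASSED:
            bypassed += 1
        elif st == VIOLATED:
            violated += 1
    return bypassed, violated

statuses = [None] * 208
statuses[4] = VIOLATED
statuses[10] = BYPASSED
print(tally(statuses))  # (1, 1)
```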
#### File: udi-poly-ELK-v2/nodes/Controller.py
```python
import sys
import time
import logging
import asyncio
import os
from threading import Thread,Event
from node_funcs import *
from nodes import AreaNode,OutputNode
from polyinterface import Controller, LOGGER, LOG_HANDLER
from threading import Thread,Event
# sys.path.insert(0, "../elkm1")
from elkm1_lib import Elk
from elkm1_lib.const import Max
# asyncio.set_event_loop_policy(AnyThreadEventLoopPolicy())
mainloop = asyncio.get_event_loop()
class Controller(Controller):
def __init__(self, polyglot):
self.ready = False
# We track our driver values because we need the value before it's been pushed.
super(Controller, self).__init__(polyglot)
self.name = "ELK Controller"
self.hb = 0
self.elk = None
self.elk_st = None
self.elk_thread = None
self.config_st = False
self.profile_done = False
self.driver = {}
self._area_nodes = {}
self._output_nodes = {}
self._keypad_nodes = {}
self.logger = LOGGER
self.lpfx = self.name + ":"
# For the short/long poll threads, we run them in threads so the main
# process is always available for controlling devices
self.short_event = False
self.long_event = False
# Not using because it's called too many times
# self.poly.onConfig(self.process_config)
def start(self):
LOGGER.info(f"{self.lpfx} start")
# Don't check profile because we always write it later
self.server_data = self.poly.get_server_data(check_profile=False)
LOGGER.info(f"{self.lpfx} Version {self.server_data['version']}")
self.set_debug_level()
self.setDriver("ST", 1)
self.heartbeat()
self.check_params()
if self.config_st:
LOGGER.info(f"{self.lpfx} Calling elk_start...")
self.elk_start()
else:
LOGGER.error(
f"{self.lpfx} Not starting ELK since configuration not ready, please fix and restart"
)
def heartbeat(self):
LOGGER.debug(f"{self.lpfx} hb={self.hb}")
if self.hb == 0:
self.reportCmd("DON", 2)
self.hb = 1
else:
self.reportCmd("DOF", 2)
self.hb = 0
def shortPoll(self):
if not self.ready:
LOGGER.info('waiting for sync to complete')
return
if self.short_event is False:
LOGGER.debug('Setting up Thread')
self.short_event = Event()
self.short_thread = Thread(name='shortPoll',target=self._shortPoll)
self.short_thread.daemon = True
LOGGER.debug('Starting Thread')
st = self.short_thread.start()
LOGGER.debug(f'Thread start st={st}')
# Tell the thread to run
LOGGER.debug(f'thread={self.short_thread} event={self.short_event}')
if self.short_event is not None:
LOGGER.debug('calling event.set')
self.short_event.set()
else:
LOGGER.error(f'event is gone? thread={self.short_thread} event={self.short_event}')
def _shortPoll(self):
while (True):
self.short_event.wait()
LOGGER.debug('start')
for an in self._area_nodes:
self._area_nodes[an].shortPoll()
self.short_event.clear()
LOGGER.debug('done')
def longPoll(self):
self.heartbeat()
if not self.ready:
LOGGER.info('waiting for sync to complete')
return
if self.long_event is False:
LOGGER.debug('Setting up Thread')
self.long_event = Event()
self.long_thread = Thread(name='longPoll',target=self._longPoll)
self.long_thread.daemon = True
LOGGER.debug('Starting Thread')
st = self.long_thread.start()
LOGGER.debug(f'Thread start st={st}')
# Tell the thread to run
LOGGER.debug(f'thread={self.long_thread} event={self.long_event}')
if self.long_event is not None:
LOGGER.debug('calling event.set')
self.long_event.set()
else:
LOGGER.error(f'event is gone? thread={self.long_thread} event={self.long_event}')
def _longPoll(self):
while (True):
self.long_event.wait()
LOGGER.debug('start')
self.heartbeat()
self.check_connection()
self.long_event.clear()
LOGGER.debug('done')
def setDriver(self, driver, value):
LOGGER.debug(f"{self.lpfx} {driver}={value}")
self.driver[driver] = value
super(Controller, self).setDriver(driver, value)
def getDriver(self, driver):
if driver in self.driver:
return self.driver[driver]
else:
return super(Controller, self).getDriver(driver)
# Should not be needed with new library?
def check_connection(self):
if self.elk is None:
st = False
elif self.elk.is_connected:
st = True
else:
st = False
LOGGER.debug(f"{self.lpfx} st={st} elk_st={self.elk_st}")
self.set_st(st)
def set_st(self, st):
# Did connection status change?
if self.elk_st != st:
# We have been connected, but lost it...
if self.elk_st is True:
LOGGER.error(f"{self.lpfx} Lost Connection! Will try to reconnect.")
self.elk_st = st
if st:
LOGGER.debug(f"{self.lpfx} Connected")
self.setDriver("GV1", 1)
else:
LOGGER.debug(f"{self.lpfx} NOT Connected")
self.setDriver("GV1", 0)
def query(self):
self.check_params()
self.reportDrivers()
for node in self.nodes:
self.nodes[node].reportDrivers()
def connected(self):
LOGGER.info(f"{self.lpfx} Connected!!!")
self.set_st(True)
def disconnected(self):
LOGGER.info(f"{self.lpfx} Disconnected!!!")
self.set_st(False)
def login(self, succeeded):
if succeeded:
LOGGER.info("Login succeeded")
else:
LOGGER.error(f"{self.lpfx} Login Failed!!!")
def sync_complete(self):
LOGGER.info(f"{self.lpfx} Sync of keypad is complete!!!")
# TODO: Add driver for sync complete status, or put in ST?
LOGGER.info(f"{self.lpfx} adding areas...")
for an in range(Max.AREAS.value):
if an in self._area_nodes:
LOGGER.info(
f"{self.lpfx} Skipping Area {an+1} because it already defined."
)
elif is_in_list(an+1, self.use_areas_list) is False:
LOGGER.info(
f"{self.lpfx} Skipping Area {an+1} because it is not in areas range {self.use_areas} in configuration"
)
else:
LOGGER.info(f"{self.lpfx} Adding Area {an}")
self._area_nodes[an] = self.addNode(AreaNode(self, self.elk.areas[an]))
LOGGER.info("adding areas done, adding outputs...")
# elkm1_lib uses zone numbers starting at zero.
for n in range(Max.OUTPUTS.value):
if n in self._output_nodes:
LOGGER.info(
f"{self.lpfx} Skipping Output {n+1} because it already defined."
)
elif is_in_list(n+1, self.use_outputs_list) is False:
LOGGER.info(
f"{self.lpfx} Skipping Output {n+1} because it is not in outputs range {self.use_outputs} in configuration"
)
else:
LOGGER.info(f"{self.lpfx} Adding Output {an}")
self._output_nodes[an] = self.addNode(OutputNode(self, self.elk.outputs[n]))
LOGGER.info("adding outputs done")
# Only update profile on restart
if not self.profile_done:
self.write_profile()
self.profile_done = True
self.ready = True
def timeout(self, msg_code):
LOGGER.error(f"{self.lpfx} Timeout sending message {msg_code}!!!")
if msg_code == 'AS':
LOGGER.error(f"{self.lpfx} The above Arm System timeout is usually caused by incorrect user code, please check the Polyglot Configuration page for this nodeserver and restart the nodeserver.")
def unknown(self, msg_code, data):
if msg_code == 'EM':
return
LOGGER.error(f"{self.lpfx} Unknown message {msg_code}: {data}!!!")
def elk_start(self):
self.elk_config = {
# TODO: Support secure which would use elks: and add 'keypadid': 'xxx', 'password': '<PASSWORD>'
"url": "elk://"
+ self.host,
}
# We have to create a loop since we are in a thread
# mainloop = asyncio.new_event_loop()
LOGGER.info(f"{self.lpfx} started")
logging.getLogger("elkm1_lib").setLevel(logging.DEBUG)
asyncio.set_event_loop(mainloop)
self.elk = Elk(self.elk_config, loop=mainloop)
LOGGER.info(f"{self.lpfx} Waiting for sync to complete...")
self.elk.add_handler("connected", self.connected)
self.elk.add_handler("disconnected", self.disconnected)
self.elk.add_handler("login", self.login)
self.elk.add_handler("sync_complete", self.sync_complete)
self.elk.add_handler("timeout", self.timeout)
self.elk.add_handler("unknown", self.unknown)
LOGGER.info(f"{self.lpfx} Connecting to Elk...")
self.elk.connect()
LOGGER.info(
f"{self.lpfx} Starting Elk Thread, will process data when sync completes..."
)
self.elk_thread = Thread(name="ELK-" + str(os.getpid()), target=self.elk.run)
self.elk_thread.daemon = True
self.elk_thread.start()
def discover(self):
# TODO: What to do here, kill and restart the thread?
LOGGER.error(f"{self.lpfx} discover currently does nothing")
pass
def elkm1_run(self):
self.elk.run()
def delete(self):
LOGGER.info(
f"{self.lpfx} Oh no I am being deleted. Nooooooooooooooooooooooooooooooooooooooooo."
)
def stop(self):
LOGGER.debug(f"{self.lpfx} NodeServer stopping...")
if self.elk is not None:
self.elk.disconnect()
if self.elk_thread is not None:
# Wait for actual termination (if needed)
self.elk_thread.join()
LOGGER.debug(f"{self.lpfx} NodeServer stopping complete...")
def process_config(self, config):
# this seems to get called twice for every change, why?
# What does config represent?
LOGGER.info(f"{self.lpfx} Enter config={config}")
LOGGER.info(f"{self.lpfx} process_config done")
def check_params(self):
"""
Check all user params are available and valid
"""
self.removeNoticesAll()
# Assume it's good unless it's not
self.config_st = True
# TODO: Only when necessary
self.update_profile()
# Temperature Units
default_temperature_unit = "F"
if "temperature_unit" in self.polyConfig["customParams"]:
self.temperature_unit = self.polyConfig["customParams"]["temperature_unit"]
else:
self.temperature_unit = default_temperature_unit
LOGGER.error(
f"{self.lpfx} temperature unit not defined in customParams, Using default {self.temperature_unit}"
)
self.temperature_uom = 4 if self.controller.temperature_unit == "C" else 17
LOGGER.info(f"temperature_unit={self.temperature_unit} temerature_uom={self.temperature_uom}")
# Host
default_host = "Your_ELK_IP_Or_Host:PortNum"
if "host" in self.polyConfig["customParams"]:
self.host = self.polyConfig["customParams"]["host"]
else:
self.host = default_host
LOGGER.error(
f"{self.lpfx} host not defined in customParams, please add it. Using {self.host}"
)
# Code
default_code = "Your_ELK_User_Code_for_Polyglot"
# Fix messed up code
if "keypad_code" in self.polyConfig["customParams"]:
self.user_code = int(self.polyConfig["customParams"]["user_code"])
elif "user_code" in self.polyConfig["customParams"]:
try:
self.user_code = int(self.polyConfig["customParams"]["user_code"])
except:
self.user_code = default_code
self.addNotice(
"ERROR user_code is not an integer, please fix, save and restart this nodeserver",
"host",
)
else:
self.user_code = default_code
LOGGER.error(
f"{self.lpfx} user_code not defined in customParams, please add it. Using {self.user_code}"
)
# Areas
self.use_areas = self.getCustomParam("areas")
self.use_areas_list = ()
if self.use_areas == "":
errs = "No areas defined in config so none will be added"
LOGGER.error(errs)
self.addNotice(errs, "areas")
else:
if self.use_areas is None:
self.use_areas = "1"
try:
self.use_areas_list = parse_range(self.use_areas)
except:
errs = f"ERROR: Failed to parse areas range '{self.use_areas}' will not add any areas: {sys.exc_info()[1]}"
LOGGER.error(errs)
self.addNotice(errs, "areas")
self.config_st = False
# Outputs
self.use_outputs = self.getCustomParam("outputs")
self.use_outputs_list = ()
if self.use_outputs == "" or self.use_outputs is None:
LOGGER.warning("No outputs defined in config so none will be added")
else:
try:
self.use_outputs_list = parse_range(self.use_outputs)
except:
errs = f"ERROR: Failed to parse outputs range '{self.use_outputs}' will not add any outputs: {sys.exc_info()[1]}"
LOGGER.error(errs)
self.addNotice(errs, "outputs")
self.config_st = False
#self.use_keypads = self.getCustomParam("keypads")
#self.use_keypads_list = ()
#if self.use_keypads == "" or self.use_keypads is None:
# LOGGER.warning("No keypads defined in config so none will be added")
#else:
# try:
# self.use_keypads_list = parse_range(self.use_keypads)
# except:
# errs = f"ERROR: Failed to parse keypads range '{self.use_keypads}' will not add any keypads: {sys.exc_info()[1]}"
# LOGGER.error(errs)
# self.addNotice(errs, "keypads")
# self.config_st = False
# Make sure they are in the params
self.addCustomParam(
{
"temperature_unit": self.temperature_unit,
"host": self.host,
"user_code": self.user_code,
"areas": self.use_areas,
"outputs": self.use_outputs,
#"keypads": self.use_keypads
}
)
# Add a notice if they need to change the keypad/password from the default.
if self.host == default_host:
# This doesn't pass a key to test the old way.
self.addNotice(
"Please set proper host in configuration page, and restart this nodeserver",
"host",
)
self.config_st = False
if self.user_code == default_code:
# This doesn't pass a key to test the old way.
self.addNotice(
"Please set proper user_code in configuration page, and restart this nodeserver",
"code",
)
self.config_st = False
# self.poly.add_custom_config_docs("<b>And this is some custom config data</b>")
def write_profile(self):
LOGGER.info(f"{self.lpfx} Starting...")
#
# Start the nls with the template data.
#
en_us_txt = "profile/nls/en_us.txt"
make_file_dir(en_us_txt)
LOGGER.info(f"{self.lpfx} Writing {en_us_txt}")
nls_tmpl = open("template/en_us.txt", "r")
nls = open(en_us_txt, "w")
for line in nls_tmpl:
nls.write(line)
nls_tmpl.close()
#
# Then write our custom NLS lines
nls.write("\nUSER-0 = Unknown\n")
for n in range(Max.USERS.value - 3):
LOGGER.debug(f"{self.lpfx} user={self.elk.users[n]}")
nls.write(f"USER-{n+1} = {self.elk.users[n].name}\n")
# Version 4.4.2 and later, user code 201 = Program Code, 202 = ELK RP Code, 203 = Quick Arm, no code.
nls.write(f"USER-{Max.USERS.value-2} = Program Code\n")
nls.write(f"USER-{Max.USERS.value-1} = ELK RP Code\n")
nls.write(f"USER-{Max.USERS.value} = Quick Arm, no code\n")
#
# Now the keypad names
nls.write("\n\nKEYPAD-0 = Unknown\n")
for n in range(Max.KEYPADS.value):
LOGGER.debug(f"{self.lpfx} keypad={self.elk.keypads[n]}")
nls.write(f"KEYPAD-{n+1} = {self.elk.keypads[n].name}\n")
#
# Now the zones names
nls.write("\n\nZONE-0 = Unknown\n")
for n in range(Max.ZONES.value):
LOGGER.debug(f"{self.lpfx} zone={self.elk.zones[n]}")
nls.write(f"ZONE-{n+1} = {self.elk.zones[n].name}\n")
#
# Close it and update the ISY
nls.close()
self.update_profile()
LOGGER.info(f"{self.lpfx} Done...")
def get_driver(self, mdrv, default=None):
# Restore from DB for existing nodes
try:
val = self.getDriver(mdrv)
LOGGER.info(f"{self.lpfx} {mdrv}={val}")
if val is None:
LOGGER.info(
f"{self.lpfx} getDriver({mdrv}) returned None which can happen on new nodes, using {default}"
)
val = default
except:
LOGGER.warning(
f"{self.lpfx} getDriver({mdrv}) failed which can happen on new nodes, using {default}"
)
val = default
return val
def set_all_logs(self, level, slevel=logging.WARNING):
LOGGER.info(
f"Setting level={level} sublevel={slevel} CRITICAL={logging.CRITICAL} ERROR={logging.ERROR} WARNING={logging.WARNING},INFO={logging.INFO} DEBUG={logging.DEBUG}"
)
LOGGER.setLevel(level)
#This sets for all modules
#LOG_HANDLER.set_basic_config(True, slevel)
#but we do each individually
logging.getLogger("elkm1_lib.elk").setLevel(slevel)
logging.getLogger("elkm1_lib.proto").setLevel(slevel)
def set_debug_level(self, level=None):
LOGGER.info(f"level={level}")
mdrv = "GV2"
if level is None:
# Restore from DB for existing nodes
level = self.get_driver(mdrv, 20)
level = int(level)
if level == 0:
level = 20
LOGGER.info(f"Seting {mdrv} to {level}")
self.setDriver(mdrv, level)
# 0=All 10=Debug are the same because 0 (NOTSET) doesn't show everything.
slevel = logging.WARNING
if level <= 10:
if level < 10:
slevel = logging.DEBUG
level = logging.DEBUG
elif level == 20:
level = logging.INFO
elif level == 30:
level = logging.WARNING
elif level == 40:
level = logging.ERROR
elif level == 50:
level = logging.CRITICAL
else:
LOGGER.error(f"Unknown level {level}")
#LOG_HANDLER.set_basic_config(True,logging.DEBUG)
self.set_all_logs(level, slevel)
def update_profile(self):
LOGGER.info(f"{self.lpfx}")
return self.poly.installprofile()
def cmd_update_profile(self, command):
LOGGER.info(f"{self.lpfx}")
return self.update_profile()
def cmd_discover(self, command):
LOGGER.info(f"{self.lpfx}")
return self.discover()
def cmd_set_debug_mode(self, command):
val = int(command.get("value"))
LOGGER.debug(f"val={val}")
self.set_debug_level(val)
id = "controller"
commands = {
"QUERY": query,
"DISCOVER": cmd_discover,
"UPDATE_PROFILE": cmd_update_profile,
"SET_DM": cmd_set_debug_mode,
}
drivers = [
{"driver": "ST", "value": 0, "uom": 2},
{"driver": "GV1", "value": 0, "uom": 2},
{"driver": "GV2", "value": logging.DEBUG, "uom": 25},
]
``` |
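The debug-mode driver in `set_debug_level` maps the GV2 value onto Python logging levels: anything at or below 10 forces DEBUG, values below 10 also turn on module-level debug for elkm1_lib, and 20 through 50 line up with the standard constants. A rough standalone sketch of that mapping:
```python
# Rough standalone sketch of the GV2-to-logging-level mapping in set_debug_level above.
import logging

def to_log_levels(level):
    """Return (main_level, module_level); 0 falls back to INFO as the node does."""
    level = int(level)
    if level == 0:
        level = 20
    module_level = logging.WARNING
    if level <= 10:
        if level < 10:
            module_level = logging.DEBUG  # "Debug + Modules"
        return logging.DEBUG, module_level
    return level, module_level  # 20/30/40/50 line up with logging.INFO..CRITICAL

print(to_log_levels(9))   # (10, 10)
print(to_log_levels(0))   # (20, 30)
```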
{
"source": "jimboca/udi-poly-FlumeWater",
"score": 2
} |
#### File: udi-poly-FlumeWater/nodes/Controller.py
```python
try:
from polyinterface import Controller,LOG_HANDLER,LOGGER
except ImportError:
from pgc_interface import Controller,LOGGER
import logging
from requests import Session
import pyflume
# My Template Node
from nodes import Flume1Node,Flume2Node
from node_funcs import id_to_address,get_valid_node_name,get_valid_node_address
KEY_DEVICE_TYPE = "type"
KEY_DEVICE_ID = "id"
TYPE_TO_NAME = {1: 'Hub', 2: 'Sensor'}
# IF you want a different log format than the current default
#LOG_HANDLER.set_log_format('%(asctime)s %(threadName)-10s %(name)-18s %(levelname)-8s %(module)s:%(funcName)s: %(message)s')
class Controller(Controller):
"""
The Controller Class is the primary node from an ISY perspective. It is a subclass
of polyinterface.Node so all methods from polyinterface.Node are available to this
class as well.
"""
def __init__(self, polyglot):
super(Controller, self).__init__(polyglot)
self.name = 'Flume Water Controller'
self.hb = 0
self._mydrivers = {}
# This can be used to call your function everytime the config changes
# But currently it is called many times, so not using.
#self.poly.onConfig(self.process_config)
def start(self):
self.removeNoticesAll()
# TODO: Currently fails on PGC
try:
serverdata = self.poly.get_server_data(check_profile=True)
except Exception as ex:
LOGGER.error('get_server_data failed, is this PGC?: {}'.format(ex))
serverdata = {}
serverdata['version'] = "FIXME_PGC"
self.poly.installprofile()
LOGGER.info('Started FlumeWater NodeServer {}'.format(serverdata['version']))
#LOGGER.debug('ST=%s',self.getDriver('ST'))
self.set_driver('ST', 1)
self.set_driver('GV2', 0)
self.heartbeat(0)
self.ready = self.check_params()
#self.set_debug_level(self.getDriver('GV1'))
if self.ready:
if self.connect():
self.discover()
def shortPoll(self):
for node in self.nodes:
if node != self.address:
self.nodes[node].shortPoll()
def longPoll(self):
LOGGER.debug('longPoll')
self.heartbeat()
for node in self.nodes:
if node != self.address:
self.nodes[node].longPoll()
def query(self,command=None):
self.check_params()
for node in self.nodes:
self.nodes[node].reportDrivers()
def delete(self):
LOGGER.info('Oh God I\'m being deleted. Nooooooooooooooooooooooooooooooooooooooooo.')
def stop(self):
LOGGER.debug('NodeServer stopped.')
def process_config(self, config):
# this seems to get called twice for every change, why?
# What does config represent?
LOGGER.info("process_config: Enter config={}".format(config))
LOGGER.info("process_config: Exit")
def heartbeat(self,init=False):
LOGGER.debug('heartbeat: init={}'.format(init))
if init is not False:
self.hb = init
LOGGER.debug('heartbeat: hb={}'.format(self.hb))
if self.hb == 0:
self.reportCmd("DON",2)
self.hb = 1
else:
self.reportCmd("DOF",2)
self.hb = 0
def set_debug_level(self,level):
LOGGER.debug('set_debug_level: {}'.format(level))
if level is None:
level = 10
level = int(level)
if level == 0:
level = 30
LOGGER.info('set_debug_level: Set GV1 to {}'.format(level))
self.set_driver('GV1', level)
# 0=All 10=Debug are the same because 0 (NOTSET) doesn't show everything.
if level <= 10:
LOGGER.setLevel(logging.DEBUG)
elif level == 20:
LOGGER.setLevel(logging.INFO)
elif level == 30:
LOGGER.setLevel(logging.WARNING)
elif level == 40:
LOGGER.setLevel(logging.ERROR)
elif level == 50:
LOGGER.setLevel(logging.CRITICAL)
else:
LOGGER.debug("set_debug_level: Unknown level {}".format(level))
if level < 10:
LOG_HANDLER.set_basic_config(True,logging.DEBUG)
else:
# This is the polyinterface default
LOG_HANDLER.set_basic_config(True,logging.WARNING)
def check_params(self):
"""
This is an example of using custom Params for user and password and an example with a Dictionary
"""
self.removeNoticesAll()
default_username = "YourUserName"
default_password = "<PASSWORD>"
default_client_id = "YourClientId"
default_client_secret = "YourClientSecret"
default_current_interval_minutes = 5
add_param = False
self.username = self.getCustomParam('username')
if self.username is None:
self.username = default_username
LOGGER.error('check_params: username not defined in customParams, please add it. Using {}'.format(self.username))
add_param = True
self.password = self.getCustomParam('password')
if self.password is None:
self.password = default_password
LOGGER.error('check_params: password not defined in customParams, please add it. Using {}'.format(self.password))
add_param = True
self.client_id = self.getCustomParam('client_id')
if self.client_id is None:
self.client_id = default_client_id
LOGGER.error('check_params: client_id not defined in customParams, please add it. Using {}'.format(self.client_id))
add_param = True
self.client_secret = self.getCustomParam('client_secret')
if self.client_secret is None:
self.client_secret = default_client_secret
LOGGER.error('check_params: client_secret not defined in customParams, please add it. Using {}'.format(self.client_secret))
add_param = True
self.current_interval_minutes = self.getCustomParam('current_interval_minutes')
if self.current_interval_minutes is None:
self.current_interval_minutes = default_current_interval_minutes
add_param = True
if (add_param):
self.addCustomParam({
'username': self.username,
'password': self.password,
'client_id': self.client_id,
'client_secret': self.client_secret,
'current_interval_minutes': self.current_interval_minutes
})
# Add a notice if they need to change the username/password from the default.
if self.username == default_username or self.password == default_password or self.client_id == default_client_id or self.client_secret == default_client_secret:
# This doesn't pass a key to test the old way.
msg = 'Please set your information in configuration page, and restart this nodeserver'
self.addNotice({'config': msg})
LOGGER.error(msg)
return False
else:
return True
def connect(self):
self.session = Session()
LOGGER.info("Connecting to Flume...")
self.set_driver('GV2',1)
try:
self.auth = pyflume.FlumeAuth(
self.username, self.password, self.client_id, self.client_secret, http_session=self.session
)
self.set_driver('GV2',2)
LOGGER.info("Flume Auth={}".format(self.auth))
except Exception as ex:
self.set_driver('GV2',3)
msg = 'Error from PyFlume: {}'.format(ex)
LOGGER.error(msg)
self.addNotice({'auth': msg})
return False
except:
self.set_driver('GV2',3)
msg = 'Unknown Error from PyFlume'
LOGGER.error(msg)
self.addNotice({'auth': msg})
LOGGER.error(msg,exc_info=True)
return False
self.flume_devices = pyflume.FlumeDeviceList(self.auth)
devices = self.flume_devices.get_devices()
LOGGER.info("Connecting complete...")
return True
def discover(self, *args, **kwargs):
cst = int(self.get_driver('GV2'))
if cst == 2:
for device in self.flume_devices.device_list:
if device[KEY_DEVICE_TYPE] <= 2:
ntype = 'Flume{}Node'.format(device[KEY_DEVICE_TYPE])
address = id_to_address(device[KEY_DEVICE_ID])
name = 'Flume {} {}'.format(TYPE_TO_NAME[device[KEY_DEVICE_TYPE]],device[KEY_DEVICE_ID])
# TODO: How to use ntype as the function to call?
if (device[KEY_DEVICE_TYPE] == 1):
self.addNode(Flume1Node(self, self.address, address, name, device))
else:
self.addNode(Flume2Node(self, self.address, address, name, device))
else:
if cst == 0:
LOGGER.error("Can not discover, Connection has not started")
elif cst == 1:
LOGGER.error("Can not discover, Connection is started but not complete")
elif cst == 3:
LOGGER.error("Can not discover, Connection Failed")
else:
LOGGER.error("Can not discover, Unknown error")
def set_driver(self,drv,val):
self._mydrivers[drv] = val
self.setDriver(drv,val)
def get_driver(self,drv):
if drv in self._mydrivers:
return self._mydrivers[drv]
return self.getDriver(drv)
def update_profile(self,command):
LOGGER.info('update_profile:')
st = self.poly.installprofile()
return st
def cmd_set_debug_mode(self,command):
val = int(command.get('value'))
LOGGER.debug("cmd_set_debug_mode: {}".format(val))
self.set_debug_level(val)
"""
"""
id = 'controller'
commands = {
'QUERY': query,
'DISCOVER': discover,
'SET_DM': cmd_set_debug_mode,
'UPDATE_PROFILE': update_profile,
}
drivers = [
{'driver': 'ST', 'value': 1, 'uom': 2},
{'driver': 'GV1', 'value': 10, 'uom': 25}, # Debug (Log) Mode, default=30=Warning
{'driver': 'GV2', 'value': 0, 'uom': 25}, # Authorization status
]
``` |
{
"source": "jimboca/udi-poly-IFTTT-Maker",
"score": 2
} |
#### File: jimboca/udi-poly-IFTTT-Maker/ifttt-maker.py
```python
import sys
from udi_interface import Interface,LOGGER
from nodes import Controller
def main():
if sys.version_info < (3, 6):
LOGGER.error("ERROR: Python 3.6 or greater is required not {}.{}".format(sys.version_info[0],sys.version_info[1]))
sys.exit(1)
try:
polyglot = Interface([Controller])
polyglot.start()
control = Controller(polyglot, 'iftttmkctl', 'iftttmkctl', 'IFTTT Webhooks Controller')
polyglot.runForever()
except (KeyboardInterrupt, SystemExit):
"""
Catch SIGTERM or Control-C and exit cleanly.
"""
sys.exit(0)
if __name__ == "__main__":
main()
```
#### File: udi-poly-IFTTT-Maker/nodes/Controller.py
```python
import logging,requests,markdown2,os
from udi_interface import Node,LOGGER,Custom,LOG_HANDLER
from node_funcs import get_valid_node_name
from nodes import Webhook,POST_STATUS
class Controller(Node):
def __init__(self, poly, primary, address, name):
super(Controller, self).__init__(poly, primary, address, name)
self.ready = False
self.first_run = True
self.hb = 0
self.handler_typed_data_st = None
self.Notices = Custom(self.poly, 'notices')
self.Params = Custom(self.poly, 'customparams')
LOGGER.debug(f'params={self.Params}')
self.poly.subscribe(self.poly.START, self.handler_start, address)
self.poly.subscribe(self.poly.POLL, self.handler_poll)
self.poly.subscribe(self.poly.LOGLEVEL, self.handler_log_level)
self.poly.subscribe(self.poly.CONFIGDONE, self.handler_config_done)
self.poly.subscribe(self.poly.CUSTOMPARAMS, self.handler_params)
self.poly.subscribe(self.poly.CUSTOMTYPEDDATA, self.handler_typed_data)
self.TypedData = Custom(self.poly, 'customtypeddata')
self.TypedParams = Custom(self.poly, 'customtypedparams')
self.TypedParams.load(
[
{
'name': 'maker_events',
'title': 'IFTTT Webhook Event',
'desc': 'IFTTT Webhook Events',
'isList': True,
'params': [
{
'name': 'node_address',
'title': 'Node Address',
'desc': 'Must be 8 Characters or less, and never change'
},
{
'name': 'node_name',
'title': 'Node Name',
'desc': 'Must be 8 Characters or less, and never change'
},
{
'name': 'on_event',
'title': 'On Event Name',
'isRequired': False,
},
{
'name': 'off_event',
'title': 'Off Event Name',
'isRequired': False,
},
]
},
],
True
)
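# Illustrative only (hypothetical values): once the user fills this form in,
# handler_typed_data receives something like
#   {'maker_events': [{'node_address': 'frontdoor', 'node_name': 'Front Door',
#                      'on_event': 'front_door_on', 'off_event': 'front_door_off'}]}
# which add_maker_events() then turns into one Webhook node per entry.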
self.poly.ready()
self.Notices.clear()
self.session = requests.Session()
self.poly.addNode(self, conn_status='ST')
def handler_start(self):
LOGGER.info(f"Started IFTTT Webhooks NodeServer {self.poly.serverdata['version']}")
self.update_profile()
self.heartbeat()
#self.check_params()
configurationHelp = './CONFIGURATION.md'
if os.path.isfile(configurationHelp):
cfgdoc = markdown2.markdown_path(configurationHelp)
self.poly.setCustomParamsDoc(cfgdoc)
self.ready = True
LOGGER.info(f'exit {self.name}')
# For things we only do after the configuration is loaded...
def handler_config_done(self):
LOGGER.debug(f'enter')
self.poly.addLogLevel('DEBUG_MODULES',9,'Debug + Modules')
LOGGER.debug(f'exit')
def handler_poll(self, polltype):
pass
def query(self):
self.setDriver('ST', 1)
self.reportDrivers()
def heartbeat(self):
LOGGER.debug(f'hb={self.hb}')
if self.hb == 0:
self.reportCmd("DON",2)
self.hb = 1
else:
self.reportCmd("DOF",2)
self.hb = 0
def handler_log_level(self,level):
LOGGER.info(f'enter: level={level}')
if level['level'] < 10:
LOGGER.info("Setting basic config to DEBUG...")
LOG_HANDLER.set_basic_config(True,logging.DEBUG)
else:
LOGGER.info("Setting basic config to WARNING...")
LOG_HANDLER.set_basic_config(True,logging.WARNING)
LOGGER.info(f'exit: level={level}')
'''
Read the user entered custom parameters. For this nodeserver that is the
IFTTT Webhooks API Key.
'''
def handler_params(self, params):
LOGGER.debug(f'loading params: {params}')
self.Params.load(params)
LOGGER.debug(f'params={self.Params}')
self.params_valid = False
if params is None or "API Key" not in params:
self.api_key = ""
self.Params['API Key'] = self.api_key
# Must exist because adding the key calls this method again...
return
self.api_key = params['API Key']
if self.api_key == "":
self.poly.Notices['API Key'] = "Please add your IFTT Key https://ifttt.com/maker_webhooks/settings"
else:
# Assume it's good
self.poly.Notices.delete('API Key')
self.params_valid = True
def handler_typed_data(self, typed_data):
LOGGER.debug("Enter config={}".format(typed_data))
self.TypedData.load(typed_data)
self.maker_events = self.TypedData['maker_events']
#if self.handler_typed_data_st = True:
# Not the first run, create any new events
if self.maker_events is not None:
self.add_maker_events()
self.handler_typed_data_st = True
def add_maker_events(self):
LOGGER.debug('enter')
if len(self.maker_events) == 0:
LOGGER.warning("No Webhook Events defined in configuration")
else:
for event in self.maker_events:
if 'node_name' in event:
LOGGER.info(f"Adding node for {event}")
self.add_maker_node(event)
else:
LOGGER.warning(f"No Event Name in {self.maker_events[i]}")
LOGGER.debug('exit')
def add_maker_node(self,event):
return self.poly.addNode(Webhook(self,self.address,event))
def delete(self):
LOGGER.info('Oh No I\'m being deleted. Nooooooooooooooooooooooooooooooooooooooooo.')
def stop(self):
LOGGER.debug('NodeServer stopped.')
def check_params(self):
pass
def post(self,event_name,value1=None,value2=None,value3=None):
LOGGER.debug(f'event_name={event_name} value1={value1} value2={value2} value3={value3}')
url = f'https://maker.ifttt.com/trigger/{event_name}/with/key/{self.api_key}/'
#url = f'https://maker.ifttt.com/trigger/bad_event_name/with/key/bad_api_key/'
#payload = {'value1': value1, 'value2': value2, 'value3': value3}
payload = {}
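# Illustrative only (hypothetical event name and key): with event_name='front_door_open'
# and api_key='abc123', the request below becomes
#   POST https://maker.ifttt.com/trigger/front_door_open/with/key/abc123/
# IFTTT Webhooks also accept an optional JSON body with value1/value2/value3,
# which is what the commented-out payload above would supply.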
LOGGER.info(f"Sending: {url} payload={payload}")
try:
response = self.session.post(url,payload)
# This is supposed to catch all request exceptions.
except requests.exceptions.RequestException as e:
LOGGER.error(f"Connection error for {url}: {e}")
return {'st': False, 'post_status': POST_STATUS['Connection']}
LOGGER.info(f' Got: code={response.status_code}')
if response.status_code == 200:
LOGGER.info(f"Got: text={response.text}")
return { 'st': True, 'post_status': POST_STATUS['Success'], 'data':response.text }
elif response.status_code == 400:
LOGGER.error(f"Bad request: {url}")
return { 'st': False, 'post_status': POST_STATUS['Request Error'] }
elif response.status_code == 404:
LOGGER.error(f"Not Found: {url}")
return { 'st': False, 'post_status': POST_STATUS['Invalid Error'] }
elif response.status_code == 401:
# Authentication error
LOGGER.error("Failed to authenticate, please your API Key")
return { 'st': False, 'post_status': POST_STATUS['Authentication Error'] }
else:
LOGGER.error(f"Unknown response {response.status_code}: {url} {response.text}")
return { 'st': False, 'post_status': POST_STATUS['Unknow Error'] }
def update_profile(self):
LOGGER.info('start')
st = self.poly.updateProfile()
return st
id = 'IFTTTCntl'
commands = {
'QUERY': query,
}
drivers = [
{'driver': 'ST', 'value': 1, 'uom': 25} ,
]
``` |
{
"source": "jimboca/udi-poly-notification-V2",
"score": 3
} |
#### File: udi-poly-notification-V2/nodes/AssistantRelay.py
```python
import polyinterface
from ntSession import ntSession
LOGGER = polyinterface.LOGGER
class AssistantRelay(polyinterface.Node):
"""
"""
def __init__(self, controller, primary, address, name):
"""
"""
super(AssistantRelay, self).__init__(controller, primary, address, name)
self.l_name = name
def start(self):
"""
"""
self.setDriver('ST', 1)
self.set_user(self.getDriver('GV1'))
# Start the session for talking to wirelesstag server
self.session = ntSession(self,LOGGER,self.controller.ar_host,self.controller.ar_port)
pass
def query(self):
"""
Called by ISY to report all drivers for this node. This is done in
the parent class, so you don't need to override this method unless
there is a need.
"""
self.reportDrivers()
def l_info(self, name, string):
LOGGER.info("%s:%s: %s" % (self.id,name,string))
def l_error(self, name, string):
LOGGER.error("%s:%s: %s" % (self.id,name,string))
def l_warning(self, name, string):
LOGGER.warning("%s:%s: %s" % (self.id,name,string))
def l_debug(self, name, string):
LOGGER.debug("%s:%s: %s" % (self.id,name,string))
def set_user(self,val):
self.l_info('set_user',val)
if val is None:
val = 0
val = int(val)
self.l_info('set_user','Set GV1 to {}'.format(val))
self.setDriver('GV1', val)
def cmd_set_user(self,command):
val = int(command.get('value'))
self.l_info("cmd_set_user",val)
self.set_user(val)
def cmd_send(self,command):
self.l_info("cmd_send",'')
# curl -d '{"command":"This is Izzy, can you hear me?", "user":"jna", "broadcast":"true"}' -H "Content-Type: application/json" -X POST http://192.168.86.79:3001/assistant
self.session.post('assistant',{"command":"Izzy is alive", "user":"jna", "broadcast":"true"})
drivers = [
{'driver': 'ST', 'value': 0, 'uom': 2},
{'driver': 'GV1', 'value': 0, 'uom': 25},
]
id = 'assistantrelay'
commands = {
#'DON': setOn, 'DOF': setOff
'SET_USER': cmd_set_user,
'SEND': cmd_send
}
``` |
{
"source": "jimboca/udi-poly-stresstest",
"score": 2
} |
#### File: udi-poly-stresstest/nodes/Controller.py
```python
try:
import polyinterface
except ImportError:
import pgc_interface as polyinterface
from nodes import STNode1
LOGGER = polyinterface.LOGGER
class Controller(polyinterface.Controller):
def __init__(self, polyglot):
super(Controller, self).__init__(polyglot)
self.name = 'Stress Test Controller'
self.driver = {}
self._inShortPoll = None # None until shortpoll actually runs
#self.poly.onConfig(self.process_config)
def start(self):
# This grabs the server.json data and checks profile_version is up to date
#serverdata = self.poly.get_server_data()
#LOGGER.info('Started Stress Test NodeServer {}'.format(serverdata['version']))
self.update_profile(None)
self.heartbeat(0)
self.check_params()
if self.getDriver('GV0') is None:
self.setDriver('GV0',30)
else:
self.setDriver('GV0',self.getDriver('GV0'))
# Always set back to zero on restarts.
self.setDriver('GV1',0)
self.discover()
#self.poly.add_custom_config_docs("<b>And this is some custom config data</b>")
def setDriver(self,driver,value):
self.driver[driver] = value
super(Controller, self).setDriver(driver,value)
def getDriver(self,driver):
if driver in self.driver:
return self.driver[driver]
else:
return None
# WARNING: This only works on local, will not work on PGC
#return next((dv["value"] for dv in self.drivers if dv["driver"] == driver), None)
def shortPoll(self):
if int(self.getDriver('GV1')) == 0:
return
LOGGER.debug('Controller:shortPoll')
if self._inShortPoll is True:
LOGGER.error('Controller:shortPoll: previous run still in progress (inShortPoll={}), skipping'.format(self._inShortPoll))
return
self._inShortPoll = True
for node in self.nodes:
if self.nodes[node].address != self.address:
self.nodes[node].shortPoll()
self._inShortPoll = False
def longPoll(self):
LOGGER.debug('Controller:longPoll')
self.heartbeat()
def query(self,command=None):
LOGGER.debug('Controller:query')
self.check_params()
for node in self.nodes:
if self.nodes[node].address != self.address:
self.nodes[node].query()
self.reportDrivers()
def discover(self, *args, **kwargs):
cnt = int(self.getDriver('GV0'))
for i in range(cnt):
fi = '%04d' % (i + 1)
self.addNode(STNode1(self, self.address, 'st_{}'.format(fi), 'ST Node {}'.format(fi)))
def delete(self):
LOGGER.info('Oh God I\'m being deleted. Nooooooooooooooooooooooooooooooooooooooooo.')
def stop(self):
LOGGER.debug('NodeServer stopped.')
def process_config(self, config):
# this seems to get called twice for every change, why?
# What does config represent?
LOGGER.info("process_config: Enter config={}".format(config));
LOGGER.info("process_config: Exit");
def heartbeat(self,init=False):
LOGGER.debug('heartbeat: init={}'.format(init))
if init is not False:
self.hb = init
LOGGER.debug('heartbeat: hb={}'.format(self.hb))
if self.hb == 0:
self.reportCmd("DON",2)
self.hb = 1
else:
self.reportCmd("DOF",2)
self.hb = 0
def check_params(self):
"""
This is an example of using custom Params for user and password and an example with a Dictionary
"""
self.removeNoticesAll()
def update_profile(self,command):
LOGGER.info('update_profile:')
st = self.poly.installprofile()
return st
def cmd_set_cnt(self,command):
val = int(command.get('value'))
LOGGER.info('cmd_set_cnt: {}'.format(val))
self.setDriver('GV0',val)
self.discover()
def cmd_set_sp(self,command):
val = int(command.get('value'))
LOGGER.info('cmd_set_sp: {}'.format(val))
self.setDriver('GV1',val)
id = 'controller'
commands = {
'QUERY': query,
'DISCOVER': discover,
'UPDATE_PROFILE': update_profile,
'SET_CNT': cmd_set_cnt,
'SET_SP': cmd_set_sp,
}
drivers = [
{'driver': 'ST', 'value': 1, 'uom': 2},
{'driver': 'GV0', 'value': 25, 'uom': 107},
{'driver': 'GV1', 'value': 0, 'uom': 2}
]
```
#### File: udi-poly-stresstest/nodes/STNode1.py
```python
import time
try:
import polyinterface
except ImportError:
import pgc_interface as polyinterface
LOGGER = polyinterface.LOGGER
class STNode1(polyinterface.Node):
def __init__(self, controller, primary, address, name):
super(STNode1, self).__init__(controller, primary, address, name)
self.driver = {}
def start(self):
self.setDriver('ST', 0)
pass
def setDriver(self,driver,value):
self.driver[driver] = value
super(STNode1, self).setDriver(driver,value)
def getDriver(self,driver):
if driver in self.driver:
return self.driver[driver]
else:
return super(STNode1, self).getDriver(driver)
def shortPoll(self):
LOGGER.debug('%s:shortPoll: ',self.address)
self.update_time()
stv = self.getDriver('ST')
if stv is None:
# New node not completely started yet
return
if int(stv) == 0:
self.setOn(None)
ckval = 1
else:
self.setOff(None)
ckval = 0
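# ckval is the value we just asked setDriver to store; the checks below verify
# that both ST and GV1 read back the same value through getDriver, which is the
# point of this stress-test node.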
dv = 'ST'
val = self.getDriver(dv)
if val is None:
LOGGER.error('%s:shortPoll: %s expected %d, got %s',self.address,dv,ckval,val)
else:
val=int(val)
if val != ckval:
LOGGER.error('%s:shortPoll: %s expected %d, got %d',self.address,dv,ckval,val)
dv = 'GV1'
val = self.getDriver(dv)
if val is None:
LOGGER.error('%s:shortPoll: %s expected %d, got %s',self.address,dv,ckval,val)
else:
val=int(val)
if val != ckval:
LOGGER.error('%s:shortPoll: %s expected %d, got %d',self.address,dv,ckval,val)
def update_time(self):
self.setDriver('GV0',int(time.time()))
def longPoll(self):
LOGGER.debug('{}:longPoll'.format(self.address))
def setOn(self, command):
LOGGER.debug('%s:setOn: ',self.address)
self.setDriver('ST', 1)
self.setDriver('GV1', 1)
def setOff(self, command):
LOGGER.debug('%s:setOff: ',self.address)
self.setDriver('ST', 0)
self.setDriver('GV1', 0)
def query(self,command=None):
LOGGER.debug('{}:query'.format(self.address))
self.update_time()
self.reportDrivers()
"Hints See: https://github.com/UniversalDevicesInc/hints"
hint = [1,2,3,4]
drivers = [
{'driver': 'ST', 'value': 0, 'uom': 2},
{'driver': 'GV0', 'value': 0, 'uom': 110},
{'driver': 'GV1', 'value': 0, 'uom': 2},
]
id = 'stnode1'
commands = {
'DON': setOn, 'DOF': setOff
}
``` |
{
"source": "jimboca/udi-wirelesstag-poly-V2",
"score": 2
} |
#### File: udi-wirelesstag-poly-V2/wt_nodes/wTagManager.py
```python
import polyinterface
import sys
import time
import requests, json
from threading import Thread
from wtServer import wtSession
from wt_funcs import get_valid_node_name
from wt_nodes import wTag
LOGGER = polyinterface.LOGGER
# For even more debug... should make a setting?
DEBUG_LEVEL=1
class wTagManager(polyinterface.Node):
"""
This is the class that all the Nodes will be represented by. You will add this to
Polyglot/ISY with the controller.addNode method.
Class Variables:
self.primary: String address of the Controller node.
self.parent: Easy access to the Controller Class from the node itself.
self.address: String address of this Node 14 character limit. (ISY limitation)
self.added: Boolean Confirmed added to ISY
Class Methods:
start(): This method is called once polyglot confirms the node is added to ISY.
setDriver('ST', 1, report = True, force = False):
This sets the driver 'ST' to 1. If report is False we do not report it to
Polyglot/ISY. If force is True, we send a report even if the value hasn't changed.
reportDrivers(): Forces a full update of all drivers to Polyglot/ISY.
query(): Called when ISY sends a query request to Polyglot for this specific node
"""
def __init__(self, controller, address, name, mac, node_data=False, do_discover=False):
"""
Optional.
Super runs all the parent class necessities. You do NOT have
to override the __init__ method, but if you do, you MUST call super.
:param controller: Reference to the Controller class
:param primary: Controller address
:param address: This nodes address
:param name: This nodes name
"""
# For our logger lines
self.l_name = "{}:{}:{}".format(self.id,address,name)
self.l_debug('__init__','start')
# Save the real mac before we legalize it.
self.ready = False
self.do_discover = do_discover
self.node_data = node_data
self.mac = mac
super(wTagManager, self).__init__(controller, address, address, name)
# These start in threads cause they take a while
self.discover_thread = None
self.set_url_thread = None
self.set_url_config_st = None
self.l_debug('__init__','done')
def start(self):
"""
Optional.
This method is run once the Node is successfully added to the ISY
and we get a return result from Polyglot. Only happens once.
"""
self.l_info('start','...')
self.set_st(True)
self.set_use_tags(self.get_use_tags())
self.start_session()
self.l_info("start",'{0} {1}'.format(self._drivers,self.use_tags))
self.degFC = 1 # I like F.
# When we are added by the controller discover, then run our discover.
self.ready = True
if self.do_discover:
self.discover(thread=False)
else:
if self.use_tags == 1:
self.l_info("start",'Call add_existing_tags because use_tags={0}'.format(self.use_tags))
self.add_existing_tags()
#self.discover() # Needed to fix tag_id's
self.query() # To get latest tag info.
self.reportDrivers()
self.l_info('start','done')
def query(self):
"""
Called by ISY to report all drivers for this node. This is done in
the parent class, so you don't need to override this method unless
there is a need.
"""
if self.use_tags == 0:
self.l_debug('query','use_tags={}'.format(self.use_tags))
return
mgd = self.GetTagList()
if mgd['st']:
self.set_st(True)
for tag in mgd['result']:
tag_o = self.get_tag_by_id(tag['slaveId'])
if tag_o is None:
self.l_error('query','No tag with id={0}'.format(tag['slaveId']))
else:
tag_o.set_from_tag_data(tag)
tag_o.reportDrivers()
else:
self.set_st(False)
self.reportDrivers()
def shortPoll(self):
"""
Optional.
This runs every 10 seconds. You would probably update your nodes either here
or longPoll. No need to Super this method the parent version does nothing.
The timer can be overridden in the server.json.
"""
if not self.ready: return False
if self.discover_thread is not None:
if self.discover_thread.is_alive():
self.l_debug('shortPoll','discover thread still running...')
else:
self.l_debug('shortPoll','discover thread is done...')
self.discover_thread = None
if self.set_url_thread is not None:
if self.set_url_thread.is_alive():
self.l_debug('shortPoll','set_url thread still running...')
else:
self.l_debug('shortPoll','set_url thread is done...')
self.set_url_thread = None
if self.discover_thread is None and self.set_url_thread is None:
if self.set_url_config_st == False:
# Try again...
self.l_error('shortPoll',"Calling set_url_config since previous st={}".format(self.set_url_config_st))
self.set_url_config()
for tag in self.get_tags():
tag.shortPoll()
def longPoll(self):
"""
Optional.
This runs every 30 seconds. You would probably update your nodes either here
or shortPoll. No need to Super this method the parent version does nothing.
The timer can be overridden in the server.json.
"""
if not self.ready: return False
self.l_debug('longPoll','...')
if self.st is False:
ret = self.controller.wtServer.select_tag_manager(self.mac)
self.set_st(ret)
def discover(self, thread=False):
"""
Start the discover in a thread so we don't cause timeouts :(
"""
if getattr(self,'discover_running',None) is None:
self.discover_running = False
if self.discover_running:
self.l_debug('discover','Already running...')
return False
self.discover_running = True
cnt = 30
while ((not self.ready) and cnt > 0):
self.l_debug('discover','waiting for node to be ready ({})..'.format(cnt))
cnt -= 1
time.sleep(1)
if not self.ready:
self.l_error('discover','timed out waiting for node to be ready, did it crash?')
return
if thread:
self.discover_thread = Thread(target=self._discover)
self.discover_thread.start()
else:
self._discover()
self.discover_running = False
def _discover(self):
self.l_debug('discover','use_tags={}'.format(self.use_tags))
if self.use_tags == 0:
return False
ret = self.GetTagList()
if ret['st'] is False:
return
index = 0
for tag in ret['result']:
self.l_debug('discover','Got Tag: {}'.format(tag))
self.add_tag(tdata=tag, uom=self.get_tag_temp_unit(tag))
self.reportDrivers() # Report now so they show up while set_url runs.
self.set_url_config(thread=False)
self.discover_running = False
def add_existing_tags(self):
"""
Called on startup to add the tags from the config.
This has to loop thru the _nodes list to figure out if it's one of the
tags for this tag manager.
"""
self.l_debug("add_existing_tags","Looking for my tags")
if DEBUG_LEVEL > 0: self.l_debug("add_existing_tags"," in _nodes={}".format(self.controller._nodes))
for address in self.controller._nodes:
if address != self.address:
node = self.controller._nodes[address]
# One of my tags?
self.l_debug("add_existing_tags","check node primary={}".format(node['primary']))
if node['primary'] == self.address:
self.l_info("add_existing_tags","node={0} = {1}".format(address,node))
self.add_tag(address=node['address'], name=node['name'], is_new=False)
self.set_url_config()
def add_tag(self, address=None, name=None, tag_type=None, uom=None, tdata=None, is_new=True):
return self.controller.addNode(wTag(self.controller, self.address, address,
name=name, tag_type=tag_type, uom=uom, tdata=tdata, is_new=is_new))
"""
Misc functions
"""
def get_tags(self):
"""
Get all the active tags for this tag manager.
"""
nodes = list()
for address in self.controller.nodes:
node = self.controller.nodes[address]
#self.l_debug('get_tags','node={}'.format(node))
if hasattr(node,'tag_id') and node.primary_n.mac == self.mac:
nodes.append(node)
#self.l_debug('get_tags','nodes={0}'.format(nodes))
return nodes
def get_tag_by_id(self,tid):
tid = int(tid)
for tag in self.get_tags():
self.l_debug('get_tag_by_id','{0} address={1} id={2}'.format(tid,tag.address,tag.tag_id))
if int(tag.tag_id) == tid:
return tag
return None
"""
Wireless Tags API Communication functions
"""
def start_session(self):
self.session = wtSession(self,LOGGER,self.controller.wtServer,self.mac)
# http://wirelesstag.net/ethClient.asmx?op=GetTagList
def GetTagList(self):
ret = self.session.api_post_d('ethClient.asmx/GetTagList',{})
self.set_st(ret)
if ret: return ret
self.l_error('GetTagList',"Failed: st={}".format(ret))
return ret
# http://wirelesstag.net/ethClient.asmx?op=LoadEventURLConfig
def LoadEventURLConfig(self,params):
return self.session.api_post_d('ethClient.asmx/LoadEventURLConfig',params)
# http://wirelesstag.net/ethClient.asmx?op=SaveEventURLConfig
def SaveEventURLConfig(self,params):
return self.session.api_post_d('ethClient.asmx/SaveEventURLConfig',params)
# http://wirelesstag.net/ethClient.asmx?op=LoadTempSensorConfig
def LoadTempSensorConfig(self,params):
return self.session.api_post_d('ethClient.asmx/LoadTempSensorConfig',params)
# http://wirelesstag.net/ethClient.asmx?op=RequestImmediatePostback
def RequestImmediatePostback(self,params):
return self.session.api_post_d('ethClient.asmx/RequestImmediatePostback',params)
def RebootTagManager(self,tmgr_mac):
return self.session.api_post_d('ethClient.asmx/RebootTagManager',{})
def PingAllTags(self,tmgr_mac):
return self.session.api_post_d('ethClient.asmx/PingAllTags',{'autoRetry':True})
def LightOn(self,tmgr_mac,id,flash):
return self.session.api_post_d('ethClient.asmx/LightOn',{'id': id, 'flash':flash})
def LightOff(self,tmgr_mac,id):
return self.session.api_post_d('ethClient.asmx/LightOff',{'id': id})
# TODO: Cache the temp sensor config's?
def get_tag_temp_unit(self,tag_data):
"""
Returns the LoadTempSensorConfig temp_unit. 0 = Celsius, 1 = Fahrenheit
"""
mgd = self.LoadTempSensorConfig({'id':tag_data['slaveId']})
if mgd['st']:
return mgd['result']['temp_unit']
else:
return -1
"""
Call set_url_config tags so updates are pushed back to me.
# TODO: This needs to run in a separate thread because it can take too long.
"""
def set_url_config(self, thread=True, force=False):
"""
Start the set_url_config in a thread so we don't cause timeouts :(
"""
if thread:
if force:
self.set_url_thread = Thread(target=self._set_url_config_true)
else:
self.set_url_thread = Thread(target=self._set_url_config_false)
self.set_url_thread.start()
else:
self._set_url_config()
def _set_url_config_true(self):
self._set_url_config(force=True)
def _set_url_config_false(self):
self._set_url_config(force=False)
def _set_url_config(self,force=False):
# None means there are no tags.
self.set_url_config_st = None
if self.use_tags == 0:
return False
# Tracks our status so shortPoll can auto-rerun it when necessary
tags = self.get_tags()
if len(tags) == 0:
self.l_error("_set_url_config","No tags in Polyglot DB, you need to discover?")
return False
self.set_url_config_st = False
for tag in tags:
tag.set_url_config(force=force)
self.set_url_config_st = True
def l_info(self, name, string):
LOGGER.info("%s:%s: %s" % (self.l_name,name,string))
def l_error(self, name, string):
LOGGER.error("%s:%s: %s" % (self.l_name,name,string))
def l_warning(self, name, string):
LOGGER.warning("%s:%s: %s" % (self.l_name,name,string))
def l_debug(self, name, string):
LOGGER.debug("%s:%s: %s" % (self.l_name,name,string))
"""
Set Functions
"""
def set_params(self,params):
"""
Set params from the getTagManager data
"""
self.set_st(params['online'])
def set_st(self,value,force=False):
self.l_debug('set_st',"{},{}".format(value,force))
if not force and hasattr(self,"st") and self.st == value:
return True
self.st = value
if value:
self.setDriver('ST', 1)
else:
self.setDriver('ST', 0)
def get_use_tags(self):
self.use_tags = self.getDriver('GV1')
if self.use_tags is None: return None
self.use_tags = int(self.use_tags)
return self.use_tags
def set_use_tags(self,value,force=False):
if value is None: value = 0
value = int(value)
if not force and hasattr(self,"use_tags") and self.use_tags == value:
return True
self.use_tags = value
self.setDriver('GV1', value)
if self.ready and value == 1:
self.discover()
"""
"""
def cmd_set_use_tags(self,command):
self.set_use_tags(command.get("value"))
def cmd_set_url_config(self,command):
self.set_url_config(thread=True,force=True)
def cmd_ping_all_tags(self,command):
self.PingAllTags()
def cmd_reboot(self,command):
self.RebootTagManager(self.mac)
def cmd_set_on(self, command):
"""
Example command received from ISY.
Set DON on MyNode.
Sets the ST (status) driver to 1 or 'True'
"""
self.setDriver('ST', 1)
def cmd_set_off(self, command):
"""
Example command received from ISY.
Set DOF on MyNode
Sets the ST (status) driver to 0 or 'False'
"""
self.setDriver('ST', 0)
def cmd_test(self, command):
self.l_debug('cmd_test','just a test')
self.l_debug('cmd_test',str(self.controller.nodes['foo']))
id = 'wTagManager'
drivers = [
{'driver': 'ST', 'value': 0, 'uom': 2},
{'driver': 'GV1', 'value': 0, 'uom': 2}, # Use Tags
]
commands = {
'SET_USE_TAGS': cmd_set_use_tags,
'QUERY': query,
'SET_URL_CONFIG': cmd_set_url_config,
'PING_ALL_TAGS': cmd_ping_all_tags,
'DISCOVER': discover,
'REBOOT': cmd_reboot,
'TEST': cmd_test,
}
```
#### File: udi-wirelesstag-poly-V2/wt_nodes/wTag.py
```python
import polyinterface
import sys
import time
import re
from copy import deepcopy
from wt_funcs import id_to_address,myfloat,CtoF
from wt_params import wt_params
LOGGER = polyinterface.LOGGER
DLEV = 0
class wTag(polyinterface.Node):
"""
This is the class that all the Nodes will be represented by. You will add this to
Polyglot/ISY with the controller.addNode method.
Class Variables:
self.primary: String address of the Controller node.
self.parent: Easy access to the Controller Class from the node itself.
self.address: String address of this Node 14 character limit. (ISY limitation)
self.added: Boolean Confirmed added to ISY
Class Methods:
start(): This method is called once polyglot confirms the node is added to ISY.
setDriver('ST', 1, report = True, force = False):
This sets the driver 'ST' to 1. If report is False we do not report it to
Polyglot/ISY. If force is True, we send a report even if the value hasn't changed.
reportDrivers(): Forces a full update of all drivers to Polyglot/ISY.
query(): Called when ISY sends a query request to Polyglot for this specific node
"""
def __init__(self, controller, primary, address=None, name=None,
tag_type=None, uom=None, tdata=None, is_new=True):
"""
Optional.
Super runs all the parent class necessities. You do NOT have
to override the __init__ method, but if you do, you MUST call super.
:param controller: Reference to the Controller class
:param primary: Controller address
:param address: This nodes address
:param name: This nodes name
"""
LOGGER.debug('wTag:__init__: start: address={0} name={1} type={2} uom={3}'.format(address,name,tag_type,uom))
tag_id = None
# So logger calls won't crash
self.address = address
self.id = 'wTag' # Until we figure out the uom
self.name = name
self.is_new = is_new
self.node_set_url = False
# Have to set this to call getDriver
self.controller = controller
self.primary_n = controller.nodes[primary]
if is_new:
# It's a new tag.
self.address = address
if tdata is None:
self.l_error('__init__',"New node address ({0}), name ({1}), and type ({2}) must be specified when tdata is None".format(address,name,tag_type))
return False
if uom is None:
self.l_error('__init__',"uom ({0}) must be specified for new tags.".format(uom))
self.l_debug('__init__','New node {}'.format(tdata))
tag_type = tdata['tagType']
self.tag_uom = uom
tag_id = tdata['slaveId']
self.uuid = tdata['uuid']
address = id_to_address(self.uuid)
name = tdata['name']
else:
#
# An existing node,
self.l_debug('__init__','Existing node...')
# We need to pull info from existing tags to know what they are.
#
# tag_uom = UOM
# Should never happen, just need for old data added before it existed.
self.tag_uom = self.getDriver('UOM')
if self.tag_uom is None:
self.l_error('__init__','No tag_uom (UOM)')
self.tag_uom = -1
# tag_id = GPV
tag_id = self.getDriver('GPV')
if tag_id is None:
self.l_error('__init__','No tag_id (GPV) for existing node address={0}'.format(address))
return False
# tag_type = GV1
tag_type = self.getDriver('GV1')
if tag_type is None:
self.l_error('__init__','No tag_type (GV1) for existing node address={0}'.format(address))
return False
tag_id = int(tag_id)
tag_type = int(tag_type)
self.name = name
self.tdata = tdata
self.tag_id = tag_id
self.tag_type = tag_type
self.l_info('__init__','type={} uom={} id={} address={} name={}'.format(self.tag_type,self.tag_uom,self.tag_id,address,name))
#
# C or F?
# Fix our temp_uom in drivers
# This won't change an existing tag, only new ones.
#
# TODO: test changing it by forcing update?
temp_uom = 4 if self.tag_uom == 0 else 17
dv = [
{'driver': 'ST', 'value': 0, 'uom': 2},
# tag_id
{'driver': 'GPV', 'value': self.tag_id, 'uom': 56},
# UOM 0=C 1=F
{'driver': 'UOM', 'value': 0, 'uom': 56},
# tag_type:
{'driver': 'GV1', 'value': self.tag_type, 'uom': 56},
# temp: Current temperature (17=F 4=C)
{'driver': 'CLITEMP', 'value': 0, 'uom': temp_uom},
# batv: Battery Voltage 72=Volt
{'driver': 'CV', 'value': 0, 'uom': 72},
# lit: Light
# fan: Honeywell Fan State
{'driver': 'GV7', 'value': 0, 'uom': 25},
# tempState:
{'driver': 'GV9', 'value': 0, 'uom': 25},
# time:
{'driver': 'GV13', 'value': 0, 'uom': 25},
# seconds since update
{'driver': 'GV14', 'value': 0, 'uom': 25},
]
if (not (tag_type == 102 or tag_type == 107)):
# batp: Battery percent (51=percent)
dv.append({'driver': 'BATLVL', 'value': 0, 'uom': 51})
if (tag_type == 12 or tag_type == 13 or tag_type == 21 or tag_type == 26
or tag_type == 32 or tag_type == 52 or tag_type == 62 or
tag_type == 72):
# evst: Event State
dv.append({'driver': 'ALARM', 'value': 0, 'uom': 25})
if (tag_type == 26 or tag_type == 107):
# lux: Lux (36=lux)
dv.append({'driver': 'LUMIN', 'value': 0, 'uom': 36})
if (tag_type == 13 or tag_type == 21 or tag_type == 26 or tag_type == 32
or tag_type == 52 or tag_type == 62 or tag_type == 72
or tag_type == 102 or tag_type == 107):
# hum: Humidity (22 = relative humidity)
dv.append({'driver': 'CLIHUM', 'value': 0, 'uom': 22})
if (tag_type == 12 or tag_type == 13 or tag_type == 21 or tag_type == 26):
# motion:
dv.append({'driver': 'GV2', 'value': 0, 'uom': 25})
if (tag_type == 12 or tag_type == 13 or tag_type == 21):
# orien: Orientation
dv.append({'driver': 'GV3', 'value': 0, 'uom': 56})
# xaxis: X-Axis
dv.append({'driver': 'GV4', 'value': 0, 'uom': 56})
# yaxis: Y-Axis
dv.append({'driver': 'GV5', 'value': 0, 'uom': 56})
# zaxis: Z-Axis
dv.append({'driver': 'GV6', 'value': 0, 'uom': 56})
if (tag_type == 12 or tag_type == 13 or tag_type == 21 or tag_type == 26
or tag_type == 32 or tag_type == 52 or tag_type == 72
or tag_type == 102 or tag_type == 107):
# oor: OutOfRange
dv.append({'driver': 'GV8', 'value': 0, 'uom': 2})
# signaldBm:
dv.append({'driver': 'CC', 'value': 0, 'uom': 56})
if (tag_type == 13 or tag_type == 21 or tag_type == 26
or tag_type == 32 or tag_type == 52 or tag_type == 62
or tag_type == 72 or tag_type == 107):
# moisture(cap)State:
dv.append({'driver': 'GV10', 'value': 0, 'uom': 25})
if (tag_type == 26 or tag_type == 107):
# lightState:
dv.append({'driver': 'GV11', 'value': 0, 'uom': 25})
if (tag_type == 32):
# TODO: Only 32 has water sensor?
dv.append({'driver': 'GV12', 'value': 1, 'uom': 25})
if (tag_type == 42):
# TODO: Only 42 has chip temperature
dv.append({'driver': 'GV15', 'value': 0, 'uom': temp_uom})
self.drivers = dv
uomS = "C" if self.tag_uom == 0 else "F"
self.id = 'wTag' + str(self.tag_type) + uomS
self.address = address
self.l_info('__init__','super id={} controller={} primary={} address={} name={} type={} id={} uom={}'.format(wTag,controller,primary,address,name,self.tag_type,self.tag_id,self.tag_uom))
super(wTag, self).__init__(controller, primary, address, name)
def start(self):
"""
Optional.
This method is run once the Node is successfully added to the ISY
and we get a return result from Polyglot. Only happens once.
"""
# Always set driver from tag type
self.set_tag_type(self.tag_type)
self.set_tag_id(self.tag_id)
self.set_tag_uom(self.tag_uom)
if self.tdata is not None:
self.set_from_tag_data(self.tdata)
self.set_time_now()
if self.controller.update_profile:
# Drivers were updated, need to query
self.query()
else:
# Otherwise just report previous values
self.reportDrivers()
def shortPoll(self):
self.set_seconds()
def query(self):
"""
Called by ISY to report all drivers for this node. This is done in
the parent class, so you don't need to override this method unless
there is a need.
"""
# This asks the sensor to report
mgd = self.primary_n.RequestImmediatePostback({'id':self.tag_id})
if mgd['st']:
self.set_from_tag_data(mgd['result'])
self.reportDrivers()
def l_info(self, name, string):
LOGGER.info("%s:%s:%s:%s:%s: %s" % (self.primary_n.name,self.name,self.address,self.id,name,string))
def l_error(self, name, string):
LOGGER.error("%s:%s:%s:%s:%s: %s" % (self.primary_n.name,self.name,self.address,self.id,name,string))
def l_warning(self, name, string):
LOGGER.warning("%s:%s:%s:%s:%s: %s" % (self.primary_n.name,self.name,self.address,self.id,name,string))
def l_debug(self, name, string):
LOGGER.debug("%s:%s:%s:%s:%s: %s" % (self.primary_n.name,self.name,self.address,self.id,name,string))
def set_url_config(self,force=False):
# If we haven't tried to set this node's URLs, or it failed, then reset it.
url = self.controller.wtServer.listen_url
if not self.node_set_url or force:
mgd = self.primary_n.LoadEventURLConfig({'id':self.tag_id})
self.l_debug('set_url_config','{0}'.format(mgd))
if mgd['st'] is False:
self.node_set_url = False
else:
#{'in_free_fall': {'disabled': True, 'nat': False, 'verb': None, 'url': 'http://', 'content': None}
newconfig = dict()
for key, value in mgd['result'].items():
if key != '__type':
if key in wt_params:
param = wt_params[key]
else:
self.l_error('set_url_config',"Unknown tag param '{0}' it will be ignored".format(key))
param = False
# Just skip for now
if param is not False:
# for PIR and ALS {1}: timestamp, {2}: tag ID)
if key == 'motion_detected' and (self.tag_type == 72 or self.tag_type == 26):
param = 'name={0}&tagid={2}&ts={1}'
self.l_debug('set_url_config',"key={0} value={1}".format(key,value))
value['disabled'] = False
value['url'] = '{0}/{1}?tmgr_mac={2}&{3}'.format(url,key,self.primary_n.mac,param)
value['nat'] = True
newconfig[key] = value
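# Illustrative only (hypothetical listen_url and MAC): for the 'motion_detected'
# event this produces something like
#   http://192.168.1.10:8080/motion_detected?tmgr_mac=0E12AB34CD56&name={0}&tagid={2}&ts={1}
# where the {n} placeholders are the Wireless Tag service's own substitution
# tokens, filled in when it posts the event back to us.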
res = self.primary_n.SaveEventURLConfig({'id':self.tag_id, 'config': newconfig, 'applyAll': False})
self.node_set_url = res['st']
def get_handler(self,command,params):
"""
This is called by the controller get_handler after parsing the node_data
"""
self.l_debug('get_handler','command={} params={}'.format(command,params))
if command == '/update':
#tagname=Garage Freezer&tagid=0&temp=-21.4213935329179&hum=0&lux=0&ts=2018-02-15T11:18:02+00:00 HTTP/1.1" 400 -
pass
elif command == '/motion_detected':
self.set_motion(1)
elif command == '/motion_timedout':
self.set_motion(0)
elif command == '/door_opened':
self.set_motion(2)
elif command == '/door_closed':
self.set_motion(4)
elif command == '/door_open_toolong':
self.set_motion(2)
elif command == '/oor':
self.set_oor(1)
elif command == '/back_in_range':
self.set_oor(0)
elif command == '/temp_normal':
self.set_tmst(1)
elif command == '/temp_toohigh':
self.set_tmst(2)
elif command == '/temp_toolow':
self.set_tmst(3)
elif command == '/too_humid':
self.set_cpst(4)
elif command == '/too_dry':
self.set_cpst(3)
elif command == '/cap_normal':
self.set_cpst(2)
elif command == '/water_detected':
self.set_wtst(2)
elif command == '/water_dried':
self.set_wtst(1)
elif command == '/low_battery':
self.set_batl(1)
elif command == '/too_bright':
self.set_list(4)
elif command == '/too_dark':
self.set_list(3)
elif command == '/light_normal':
self.set_list(2)
else:
self.l_error('get_handler',"Unknown command '{0}'".format(command))
if 'temp' in params:
# This is always C ?
if self.tag_uom == 0:
self.set_temp(params['temp'])
else:
self.set_temp(CtoF(params['temp']))
elif self.tag_uom == 0:
if 'tempc' in params:
self.set_temp(params['tempc'])
elif self.tag_uom == 1:
if 'tempf' in params:
self.set_temp(params['tempf'])
if 'hum' in params:
# hum is used for chip_temp on tag type 42, all others is humidity
if self.tag_type == 42:
# This is always C
if self.tag_uom == 0:
self.set_chip_temp(params['hum'])
else:
self.set_chip_temp(CtoF(params['hum']))
else:
self.set_hum(params['hum'])
if 'lux' in params:
self.set_lux(params['lux'])
if 'orien' in params:
self.set_orien(params['orien'])
if 'xaxis' in params:
self.set_xaxis(params['xaxis'])
if 'yaxis' in params:
self.set_yaxis(params['yaxis'])
if 'zaxis' in params:
self.set_zaxis(params['zaxis'])
if 'batv' in params:
self.set_batv(params['batv'])
self.set_time_now()
return True
"""
Set Functions
"""
def set_from_tag_data(self,tdata):
self.l_debug('set_from_tag_data','{}'.format(tdata))
if 'alive' in tdata:
self.set_alive(tdata['alive'])
if 'temperature' in tdata:
# This is always C ?
if self.tag_uom == 0:
self.set_temp(tdata['temperature'])
else:
self.set_temp(CtoF(tdata['temperature']))
if 'batteryVolt' in tdata:
self.set_batv(tdata['batteryVolt'])
if 'batteryRemaining' in tdata:
self.set_batp(float(tdata['batteryRemaining']) * 100)
if 'lux' in tdata:
self.set_lux(tdata['lux'])
if 'cap' in tdata:
# cap is used for chip_temp on tag type 42, all others is humidity
if self.tag_type == 42:
# This is always C
if self.tag_uom == 0:
self.set_chip_temp(tdata['cap'])
else:
self.set_chip_temp(CtoF(tdata['cap']))
else:
self.set_hum(tdata['cap'])
if self.tag_type == 62:
if 'thermostat' in tdata and tdata['thermostat'] is not None and 'fanOn' in tdata['thermostat']:
self.set_fan(tdata['thermostat']['fanOn'])
else:
if 'lit' in tdata:
self.set_lit(tdata['lit'])
if 'eventState' in tdata:
self.set_evst(tdata['eventState'])
# Used to be oor, now it's OutOfRange?
if 'OutOfRange' in tdata:
self.set_oor(tdata['OutOfRange'])
if 'oor' in tdata:
self.set_oor(tdata['oor'])
if 'signaldBm' in tdata:
self.set_signaldbm(tdata['signaldBm'])
if 'tempEventState' in tdata:
self.set_tmst(tdata['tempEventState'])
if 'capEventState' in tdata:
self.set_cpst(tdata['capEventState'])
if 'lightEventState' in tdata:
self.set_list(tdata['lightEventState'])
# This is the last time the tag manager has heard from the tag?
if 'lastComm' in tdata:
self.set_time(tdata['lastComm'],wincrap=True)
self.set_seconds()
# This is the tag_type number, we don't really need to show it, but
# we need the info when recreating the tags from the config.
def set_tag_type(self,value):
self.l_debug('set_tag_type','GV1 to {0}'.format(value))
self.tag_type = value
self.setDriver('GV1', value)
def set_tag_id(self,value):
self.l_debug('set_tag_id','GPV to {0}'.format(value))
self.tag_id = value
self.setDriver('GPV', value)
def set_tag_uom(self,value):
self.l_debug('set_tag_uom','UOM to {0}'.format(value))
self.tag_uom = value
self.setDriver('UOM', value)
def set_alive(self,value):
self.l_debug('set_alive','{0}'.format(value))
self.setDriver('ST', int(value))
def set_temp(self,value):
self.l_debug('set_temp','{0}'.format(value))
self.setDriver('CLITEMP', myfloat(value,1))
def set_chip_temp(self,value):
self.l_debug('set_chip_temp','{0}'.format(value))
self.setDriver('GV15', myfloat(value,1))
def set_hum(self,value):
self.l_debug('set_hum','{0}'.format(value))
self.setDriver('CLIHUM', myfloat(value,1))
def set_lit(self,value):
self.l_debug('set_lit','{0}'.format(value))
self.setDriver('GV7', int(value))
def get_lit(self):
self.l_debug('get_lit','')
return self.getDriver('GV7')
def set_fan(self,value):
self.l_debug('set_fan','{0}'.format(value))
self.setDriver('GV7', int(value))
def set_lux(self,value):
self.l_debug('set_lux','{0}'.format(value))
self.setDriver('LUMIN', myfloat(value,2))
def set_batp(self,value,force=False):
self.l_debug('set_batp','{0}'.format(value))
self.setDriver('BATLVL', myfloat(value,2))
def set_batv(self,value):
self.l_debug('set_batv','{0}'.format(myfloat(value,3)))
self.setDriver('CV', myfloat(value,3))
def set_batl(self,value,force=False):
# TODO: Implement battery low!
return
self.setDriver('CV', value)
def set_motion(self,value=None):
self.l_debug('set_motion','{0}'.format(value))
value = int(value)
# Not all have motion, but that's ok, just send it.
self.setDriver('GV2', value)
if value == 0: # False
self.set_evst(1,andMotion=False) # Armed
elif value == 1: # True
self.set_evst(5,andMotion=False) # Detected Movement
if value == 2: # Door Open
self.set_evst(3,andMotion=False) # Opened
elif value == 3: # Open too long
self.set_evst(3,andMotion=False) # Opened
elif value == 4: # Closed
self.set_evst(4,andMotion=False) # Closed
def set_orien(self,value):
self.l_debug('set_orien','{0}'.format(value))
self.setDriver('GV3', myfloat(value,1))
def set_xaxis(self,value):
self.l_debug('set_xaxis','{0}'.format(value))
self.setDriver('GV4', int(value))
def set_yaxis(self,value):
self.l_debug('set_yaxis','{0}'.format(value))
self.setDriver('GV5', int(value))
def set_zaxis(self,value):
self.l_debug('set_zaxis','{0}'.format(value))
self.setDriver('GV6', int(value))
def set_evst(self,value,andMotion=True):
self.l_debug('set_evst','{0}'.format(value))
self.setDriver('ALARM', int(value))
# eventState 1=Armed, so no more motion
if andMotion and int(value) == 1:
self.set_motion(0)
def set_oor(self,value):
self.l_debug('set_oor','{0}'.format(value))
self.setDriver('GV8', int(value))
def set_signaldbm(self,value):
self.l_debug('set_signaldbm','{0}'.format(value))
self.setDriver('CC', int(value))
def set_tmst(self,value):
self.l_debug('set_tmst','{0}'.format(value))
self.setDriver('GV9', int(value))
def set_cpst(self,value):
self.l_debug('set_cpst','{0}'.format(value))
self.setDriver('GV10', int(value))
def set_list(self,value):
self.l_debug('set_list','{0}'.format(value))
self.setDriver('GV11', int(value))
def set_wtst(self,value):
self.l_debug('set_wtst','{0}'.format(value))
# Force to 1, Dry state on initialization since polyglot ignores the init value
value = int(value)
if value == 0: value = 1
self.setDriver('GV12', int(value))
def set_time_now(self):
self.set_time(int(time.time()))
self.set_seconds()
def set_time(self,value,wincrap=False):
self.l_debug('set_time','{0},{1}'.format(value,wincrap))
value = int(value)
if wincrap:
# Convert windows timestamp to unix :(
# https://stackoverflow.com/questions/10411954/convert-windows-timestamp-to-date-using-php-on-a-linux-box
value = int(value / 10000000 - 11644477200)
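# Worked example (hypothetical value): 131000000000000000 / 10**7 = 13100000000
# seconds since 1601; subtracting the 1601->1970 offset used here (11644477200)
# gives 1455522800, i.e. mid-February 2016 as a Unix timestamp.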
self.l_debug('set_time','{0}'.format(value))
self.time = value
self.setDriver('GV13', self.time)
def set_seconds(self,force=True):
if not hasattr(self,"time"): return False
time_now = int(time.time())
if DLEV > 0: self.l_debug('set_seconds','time_now {}'.format(time_now))
if DLEV > 0: self.l_debug('set_seconds','last_time - {}'.format(self.time))
if self.time == 0:
value = -1
else:
value = time_now - self.time
if DLEV > 0:
self.l_debug('set_seconds',' = {}'.format(value))
else:
self.l_debug('set_seconds','{}'.format(value))
self.setDriver('GV14', value)
"""
"""
def cmd_set_light(self,command):
value = int(command.get("value"))
# Save current value, and change it.
slit = self.get_lit()
self.set_lit(value)
if value == 0:
ret = self.primary_n.LightOff(self.primary_n.mac,self.tag_id)
elif value == 1:
ret = self.primary_n.LightOn(self.primary_n.mac,self.tag_id,False)
elif value == 2:
ret = self.primary_n.LightOn(self.primary_n.mac,self.tag_id,True)
if ret['st']:
self.set_from_tag_data(ret['result'])
else:
# Command failed, restore status
self.set_lit(slit)
def cmd_set_url_config(self,command):
self.set_url_config(force=True)
commands = {
'QUERY': query,
'SET_LIGHT': cmd_set_light,
'SET_URL_CONFIG': cmd_set_url_config,
}
``` |
{
"source": "jimbofreedman/django-rest-framework-json-api",
"score": 2
} |
#### File: django-rest-framework-json-api/tests/test_serializers.py
```python
import pytest
from django.db import models
from rest_framework_json_api import serializers
from tests.models import DJAModel, ManyToManyTarget
from tests.serializers import ManyToManyTargetSerializer
def test_get_included_serializers():
class IncludedSerializersModel(DJAModel):
self = models.ForeignKey("self", on_delete=models.CASCADE)
target = models.ForeignKey(ManyToManyTarget, on_delete=models.CASCADE)
other_target = models.ForeignKey(ManyToManyTarget, on_delete=models.CASCADE)
class Meta:
app_label = "tests"
class IncludedSerializersSerializer(serializers.ModelSerializer):
included_serializers = {
"self": "self",
"target": ManyToManyTargetSerializer,
"other_target": "tests.serializers.ManyToManyTargetSerializer",
}
class Meta:
model = IncludedSerializersModel
fields = ("self", "other_target", "target")
included_serializers = IncludedSerializersSerializer.included_serializers
expected_included_serializers = {
"self": IncludedSerializersSerializer,
"target": ManyToManyTargetSerializer,
"other_target": ManyToManyTargetSerializer,
}
assert included_serializers == expected_included_serializers
def test_reserved_field_names():
with pytest.raises(AssertionError) as e:
class ReservedFieldNamesSerializer(serializers.Serializer):
meta = serializers.CharField()
results = serializers.CharField()
ReservedFieldNamesSerializer().fields
assert str(e.value) == (
"Serializer class tests.test_serializers.test_reserved_field_names.<locals>."
"ReservedFieldNamesSerializer uses following reserved field name(s) which is "
"not allowed: meta, results"
)
``` |
{
"source": "jimbofreedman/mopidy-spotify",
"score": 2
} |
#### File: mopidy-spotify/tests/test_extension.py
```python
from __future__ import unicode_literals
import mock
from mopidy_spotify import Extension, backend as backend_lib
def test_get_default_config():
ext = Extension()
config = ext.get_default_config()
assert '[spotify]' in config
assert 'enabled = true' in config
def test_get_config_schema():
ext = Extension()
schema = ext.get_config_schema()
assert 'username' in schema
assert 'password' in schema
assert 'bitrate' in schema
assert 'volume_normalization' in schema
assert 'private_session' in schema
assert 'timeout' in schema
assert 'cache_dir' in schema
assert 'settings_dir' in schema
assert 'allow_cache' in schema
assert 'allow_network' in schema
assert 'allow_playlists' in schema
assert 'search_album_count' in schema
assert 'search_artist_count' in schema
assert 'search_track_count' in schema
assert 'toplist_countries' in schema
def test_setup():
registry = mock.Mock()
ext = Extension()
ext.setup(registry)
registry.add.assert_called_with('backend', backend_lib.SpotifyBackend)
```
#### File: mopidy-spotify/tests/test_playlists.py
```python
from __future__ import unicode_literals
import mock
from mopidy import backend as backend_api
from mopidy.models import Ref
import pytest
import spotify
from mopidy_spotify import backend, playlists
@pytest.fixture
def session_mock(
sp_playlist_mock,
sp_playlist_folder_start_mock, sp_playlist_folder_end_mock,
sp_user_mock):
sp_playlist2_mock = mock.Mock(spec=spotify.Playlist)
sp_playlist2_mock.is_loaded = True
sp_playlist2_mock.owner = mock.Mock(spec=spotify.User)
sp_playlist2_mock.owner.canonical_name = 'bob'
sp_playlist2_mock.link.uri = 'spotify:playlist:bob:baz'
sp_playlist2_mock.name = 'Baz'
sp_playlist2_mock.tracks = []
sp_playlist3_mock = mock.Mock(spec=spotify.Playlist)
sp_playlist3_mock.is_loaded = False
sp_session_mock = mock.Mock(spec=spotify.Session)
sp_session_mock.user = sp_user_mock
sp_session_mock.user_name = 'alice'
sp_session_mock.playlist_container = [
sp_playlist_mock,
sp_playlist_folder_start_mock,
sp_playlist2_mock,
sp_playlist_folder_end_mock,
sp_playlist3_mock,
]
return sp_session_mock
@pytest.fixture
def backend_mock(session_mock, config):
backend_mock = mock.Mock(spec=backend.SpotifyBackend)
backend_mock._config = config
backend_mock._session = session_mock
backend_mock._bitrate = 160
return backend_mock
@pytest.fixture
def provider(backend_mock):
return playlists.SpotifyPlaylistsProvider(backend_mock)
def test_is_a_playlists_provider(provider):
assert isinstance(provider, backend_api.PlaylistsProvider)
def test_as_list_when_not_logged_in(
session_mock, provider):
session_mock.playlist_container = None
result = provider.as_list()
assert len(result) == 0
def test_as_list_when_offline(session_mock, provider):
session_mock.connection.state = spotify.ConnectionState.OFFLINE
result = provider.as_list()
assert len(result) == 2
def test_as_list_when_playlist_container_isnt_loaded(session_mock, provider):
session_mock.playlist_container = None
result = provider.as_list()
assert len(result) == 0
def test_as_list_with_folders_and_ignored_unloaded_playlist(provider):
result = provider.as_list()
assert len(result) == 2
assert result[0] == Ref.playlist(
uri='spotify:user:alice:playlist:foo', name='Foo')
assert result[1] == Ref.playlist(
uri='spotify:playlist:bob:baz', name='Bar/Baz (by bob)')
def test_get_items_when_playlist_exists(
session_mock, sp_playlist_mock, provider):
session_mock.get_playlist.return_value = sp_playlist_mock
result = provider.get_items('spotify:user:alice:playlist:foo')
assert len(result) == 1
assert result[0] == Ref.track(uri='spotify:track:abc', name='ABC 123')
def test_get_items_when_playlist_is_unknown(provider):
result = provider.get_items('spotify:user:alice:playlist:unknown')
assert result is None
def test_lookup(session_mock, sp_playlist_mock, provider):
session_mock.get_playlist.return_value = sp_playlist_mock
playlist = provider.lookup('spotify:user:alice:playlist:foo')
assert playlist.uri == 'spotify:user:alice:playlist:foo'
assert playlist.name == 'Foo'
assert playlist.tracks[0].bitrate == 160
def test_lookup_loads_playlist_when_a_playlist_isnt_loaded(
sp_playlist_mock, session_mock, provider):
is_loaded_mock = mock.PropertyMock()
type(sp_playlist_mock).is_loaded = is_loaded_mock
is_loaded_mock.side_effect = [False, True]
session_mock.get_playlist.return_value = sp_playlist_mock
playlist = provider.lookup('spotify:user:alice:playlist:foo')
sp_playlist_mock.load.assert_called_once_with(10)
assert playlist.uri == 'spotify:user:alice:playlist:foo'
assert playlist.name == 'Foo'
def test_lookup_when_playlist_is_unknown(session_mock, provider):
session_mock.get_playlist.side_effect = spotify.Error
assert provider.lookup('foo') is None
def test_lookup_of_playlist_with_other_owner(
session_mock, sp_user_mock, sp_playlist_mock, provider):
sp_user_mock.canonical_name = 'bob'
sp_playlist_mock.owner = sp_user_mock
session_mock.get_playlist.return_value = sp_playlist_mock
playlist = provider.lookup('spotify:user:alice:playlist:foo')
assert playlist.uri == 'spotify:user:alice:playlist:foo'
assert playlist.name == 'Foo (by bob)'
def test_create(session_mock, sp_playlist_mock, provider):
session_mock.playlist_container = mock.Mock(
spec=spotify.PlaylistContainer)
session_mock.playlist_container.add_new_playlist.return_value = (
sp_playlist_mock)
playlist = provider.create('Foo')
session_mock.playlist_container.add_new_playlist.assert_called_once_with(
'Foo')
assert playlist.uri == 'spotify:user:alice:playlist:foo'
assert playlist.name == 'Foo'
def test_create_with_invalid_name(session_mock, provider, caplog):
session_mock.playlist_container = mock.Mock(
spec=spotify.PlaylistContainer)
session_mock.playlist_container.add_new_playlist.side_effect = ValueError(
'Too long name')
playlist = provider.create('Foo')
assert playlist is None
assert (
'Failed creating new Spotify playlist "Foo": Too long name'
in caplog.text())
def test_create_fails_in_libspotify(session_mock, provider, caplog):
session_mock.playlist_container = mock.Mock(
spec=spotify.PlaylistContainer)
session_mock.playlist_container.add_new_playlist.side_effect = (
spotify.Error)
playlist = provider.create('Foo')
assert playlist is None
assert 'Failed creating new Spotify playlist "Foo"' in caplog.text()
def test_on_container_loaded_triggers_playlists_loaded_event(
sp_playlist_container_mock, caplog, backend_listener_mock):
playlists.on_container_loaded(sp_playlist_container_mock)
assert 'Spotify playlist container loaded' in caplog.text()
backend_listener_mock.send.assert_called_once_with('playlists_loaded')
def test_on_playlist_added_does_nothing_yet(
sp_playlist_container_mock, sp_playlist_mock,
caplog, backend_listener_mock):
playlists.on_playlist_added(
sp_playlist_container_mock, sp_playlist_mock, 0)
assert 'Spotify playlist "Foo" added to index 0' in caplog.text()
assert backend_listener_mock.send.call_count == 0
def test_on_playlist_removed_does_nothing_yet(
sp_playlist_container_mock, sp_playlist_mock,
caplog, backend_listener_mock):
playlists.on_playlist_removed(
sp_playlist_container_mock, sp_playlist_mock, 0)
assert 'Spotify playlist "Foo" removed from index 0' in caplog.text()
assert backend_listener_mock.send.call_count == 0
def test_on_playlist_moved_does_nothing_yet(
sp_playlist_container_mock, sp_playlist_mock,
caplog, backend_listener_mock):
playlists.on_playlist_moved(
sp_playlist_container_mock, sp_playlist_mock, 0, 1)
assert 'Spotify playlist "Foo" moved from index 0 to 1' in caplog.text()
assert backend_listener_mock.send.call_count == 0
``` |
{
"source": "jimbofreedman/naggingnelly-api",
"score": 2
} |
#### File: naggingnelly-api/gtd/models.py
```python
from datetime import datetime
from django.db import models
from django.db.models.signals import post_save
from django.utils import timezone
from recurrence.fields import RecurrenceField
from silk.profiling.profiler import silk_profile
from api.users.models import User
class GtdUser(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
user = models.OneToOneField(User, related_name="gtd_user")
bin = models.OneToOneField('Folder', related_name="bin_owner")
collectbox = models.OneToOneField('Folder', related_name="collectbox_owner")
actions = models.OneToOneField('Folder', related_name="actions_owner")
waiting_for = models.OneToOneField(
'Folder', related_name="waitingfor_owner")
tickler = models.OneToOneField('Folder', related_name="tickler_owner")
someday = models.OneToOneField('Folder', related_name="someday_owner")
def __str__(self):
return self.user.username
class Folder(models.Model):
BIN = 0
COLLECTBOX = 1
ACTIONS = 2
WAITING_FOR = 3
TICKLER = 4
SOMEDAY = 5
SPECIAL_FOLDER_CHOICES = (
(BIN, "Bin"),
(COLLECTBOX, "Collectbox"),
(ACTIONS, "Actions"),
(WAITING_FOR, "Waiting For"),
(TICKLER, "Tickler"),
(SOMEDAY, "Someday")
)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
owner = models.ForeignKey(User)
name = models.CharField(max_length=100)
special_type = models.PositiveSmallIntegerField(choices=SPECIAL_FOLDER_CHOICES, null=True, blank=True)
def __str__(self):
return self.name
class Context(models.Model):
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
owner = models.ForeignKey(User)
name = models.CharField(max_length=100)
glyph = models.CharField(max_length=100)
color = models.CharField(max_length=6, default="ffffff")
def __str__(self):
return self.name
# Create your models here.
class Action(models.Model):
STATUS_OPEN = 0
STATUS_FAILED = 1
STATUS_CANCELLED = 2
STATUS_COMPLETED = 3
STATUS_CHOICES = (
(STATUS_OPEN, 'Open'),
(STATUS_FAILED, 'Failed'),
(STATUS_CANCELLED, 'Cancelled'),
(STATUS_COMPLETED, 'Completed'),
)
created_at = models.DateTimeField(auto_now_add=True)
updated_at = models.DateTimeField(auto_now=True)
completed_at = models.DateTimeField(null=True, blank=True)
start_at = models.DateTimeField(null=True, blank=True)
due_at = models.DateTimeField(null=True, blank=True)
owner = models.ForeignKey(User)
short_description = models.CharField(max_length=100)
notes = models.TextField(default="", blank=True)
status = models.IntegerField(choices=STATUS_CHOICES, default=STATUS_OPEN)
folder = models.ForeignKey(Folder)
context = models.ForeignKey(Context)
priority = models.IntegerField(default=0)
recurrence = RecurrenceField(null=True, blank=True)
dependencies = models.ManyToManyField('self',
symmetrical=False,
related_name='depends_on',
blank=True)
def __str__(self):
return self.short_description
@silk_profile(name='Save Action')
def save(self, *args, **kwargs):
# If we have just been completed
if (self.status > self.STATUS_OPEN and self.completed_at is None):
# Make an ActionRecurrence object for this Action,
# and reset it with new start_at/due_at
if (self.recurrence is not None and len(self.recurrence.rrules) > 0 and self.start_at):
# Possible to create recurrence without dtstart, then it recurs to same date
if self.recurrence.dtstart is None:
self.recurrence.dtstart = datetime.combine(timezone.make_naive(self.due_at).date(),
datetime.min.time())
action_recurrence = ActionRecurrence.objects.create(
action=self,
status=self.status,
start_at=self.start_at,
due_at=self.due_at
)
action_recurrence.save()
recur_date = self.recurrence.after(timezone.make_naive(self.start_at), inc=False)
if recur_date is not None:
self.start_at = timezone.make_aware(datetime.combine(recur_date, self.start_at.time()))
self.due_at = timezone.make_aware(
datetime.combine(recur_date, self.due_at.time())) if self.due_at else None
self.status = self.STATUS_OPEN
else:
self.completed_at = timezone.now()
else:
self.completed_at = timezone.now()
is_new = self.id is None
super(Action, self).save(*args, **kwargs)
if is_new and self.priority == 0:
self.priority = self.id * 10000
super(Action, self).save()
class ActionRecurrence(models.Model):
action = models.ForeignKey(Action)
status = models.IntegerField(choices=Action.STATUS_CHOICES)
start_at = models.DateTimeField(null=True, blank=True)
due_at = models.DateTimeField(null=True, blank=True)
completed_at = models.DateTimeField(auto_now_add=True)
def create_user(sender, instance, created, **kwargs):
if not created:
return
Context.objects.create(name="Agenda", owner=instance)
Context.objects.create(name="Calls", owner=instance)
Context.objects.create(name="Computer", owner=instance)
Context.objects.create(name="Errand", owner=instance)
Context.objects.create(name="Home", owner=instance)
Context.objects.create(name="Office", owner=instance)
Context.objects.create(name="Read/Review", owner=instance)
Context.objects.create(name="Shopping", owner=instance)
bin1 = Folder.objects.create(
name="Bin", special_type=Folder.BIN, owner=instance)
collectbox = Folder.objects.create(
name="Collectbox", special_type=Folder.COLLECTBOX, owner=instance)
actions = Folder.objects.create(
name="Actions", special_type=Folder.ACTIONS, owner=instance)
waiting_for = Folder.objects.create(
name="Waiting For", special_type=Folder.WAITING_FOR, owner=instance)
tickler = Folder.objects.create(
name="Tickler", special_type=Folder.TICKLER, owner=instance)
someday = Folder.objects.create(
name="Someday", special_type=Folder.SOMEDAY, owner=instance)
GtdUser.objects.create(
user=instance, bin=bin1, collectbox=collectbox, actions=actions,
waiting_for=waiting_for, tickler=tickler, someday=someday)
post_save.connect(create_user, sender=User,
dispatch_uid="gtd_create_user")
```
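The `Action.save` override above is the interesting part of this model: completing an action that carries an rrule does not close it, but logs an `ActionRecurrence` and advances `start_at`/`due_at` to the next occurrence. A minimal sketch of that behaviour with hypothetical data — it assumes the app is migrated, timezone-aware settings, and uses django-recurrence's documented `Rule`/`Recurrence` objects (creating the `User` fires the `post_save` hook above, which builds the default folders and contexts):
```python
from datetime import timedelta
import recurrence
from django.utils import timezone
from api.users.models import User
from gtd.models import Action, GtdUser

user = User.objects.create(username="demo")      # post_save hook builds folders/contexts
gtd_user = GtdUser.objects.get(user=user)
action = Action.objects.create(
    owner=user,
    short_description="Water the plants",
    folder=gtd_user.actions,
    context=user.context_set.first(),
    start_at=timezone.now(),
    due_at=timezone.now() + timedelta(hours=1),
    recurrence=recurrence.Recurrence(rrules=[recurrence.Rule(recurrence.WEEKLY)]),
)
action.status = Action.STATUS_COMPLETED
action.save()
assert action.status == Action.STATUS_OPEN        # rolled forward to next week, not closed
assert action.actionrecurrence_set.count() == 1   # the completed occurrence was logged
```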
#### File: tests/models/test_gtduser.py
```python
from django.test import TestCase
from faker import Faker
from api.users.models import User
from ...models import GtdUser
class FolderModelTests(TestCase):
def setUp(self):
self.faker = Faker()
def test_str(self):
name = self.faker.name()
user = User.objects.create(username=name)
gtd_user = GtdUser.objects.get(user=user)
self.assertEqual(str(gtd_user), name)
```
#### File: naggingnelly-api/smelltest/views.py
```python
import json
from django.db.models import Count
from django.http import HttpResponse
from django.template import loader
from .models import Scent, TestResult
def index(request):
template = loader.get_template('smelltest/index.html')
context = {
}
return HttpResponse(template.render(context, request))
def data(request):
scents = Scent.objects.order_by('id')
test_results = TestResult.objects.values('scent', 'guess').annotate(Count('scent'))
ret = {
'nodes': [{
'name': s.name,
'group': 1,
'testCount': s.tests.count()
} for s in scents],
'links': [{
'source': r['scent'] - 1, # 0-index array vs 1-index table PK
'target': r['guess'] - 1,
'value': r['scent__count']
} for r in test_results]
}
return HttpResponse(json.dumps(ret), content_type="application/json")
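# The payload above is the familiar nodes/links shape used by d3 force-directed
# graphs: one node per scent, one weighted link per (scent, guess) pair.
# Illustrative (hypothetical) response:
#   {"nodes": [{"name": "Lemon", "group": 1, "testCount": 4}, ...],
#    "links": [{"source": 0, "target": 2, "value": 3}, ...]}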
``` |
{
"source": "jimbofreedman/naggingnelly-backend",
"score": 2
} |
#### File: backend/todo/views.py
```python
from __future__ import unicode_literals
from django.shortcuts import render
from django.utils import timezone
from .models import TodoItem
def index(request):
context = {
"items": TodoItem.objects.filter(status=TodoItem.STATUS.open,start__lte=timezone.now())
}
return render(request, "todo/index.html", context)
def complete(request, item_id):
item = TodoItem.objects.get(pk=item_id)
item.status = TodoItem.STATUS.complete
item.save()
return index(request)
def cancel(request, item_id):
item = TodoItem.objects.get(pk=item_id)
item.status = TodoItem.STATUS.cancelled
item.save()
return index(request)
def fail(request, item_id):
item = TodoItem.objects.get(pk=item_id)
item.status = TodoItem.STATUS.failed
item.save()
return index(request)
``` |
{
"source": "jimboH/reducer",
"score": 2
} |
#### File: reducer/reducer/image_browser.py
```python
from __future__ import (division, print_function, absolute_import,
unicode_literals)
from collections import OrderedDict
import os
from io import BytesIO
import numpy as np
import matplotlib.image as mimg
from astropy.io import fits
from astropy.extern import six
from astropy.visualization import simple_norm
from astropy.nddata.utils import block_reduce
import ipywidgets as widgets
from ipywidgets import Accordion
import msumastro
from .notebook_dir import get_data_path
__all__ = [
'ImageTree',
'FitsViewer',
'ImageBrowser',
'ndarray_to_png',
]
class ImageTree(object):
"""
Create a tree view of a collection of images.
Parameters
----------
tree : `msumastro.TableTree`
Tree of images, arranged by metadata.
"""
def __init__(self, tree):
if not isinstance(tree, msumastro.TableTree):
raise ValueError("argument must be a TableTree")
self._tree = tree
self._id_string = lambda l: os.path.join(*[str(s) for s in l]) if l else ''
self._gui_objects = OrderedDict()
self._top = None
self._create_gui()
self._set_titles()
# Generate an array to improve initial display time
ndarray_to_png(np.random.rand(1200, 1200))
@property
def top(self):
"""
Widget at the top of the tree.
"""
return self._top
def _get_index_in_children(self, widget):
parent = widget.parent
for idx, wid in enumerate(parent.children):
if widget is wid:
return idx
def _replace_child(self, parent, old=None, new=None):
"""
Replace old child with new.
Parameters
----------
parent : IPython widget
String that identifies parent in gui
old : IPython widget
Child to be replaced
new : IPython widget or None
Replacement child (or None)
Notes
-----
Children are stored as a tuple so they are immutable.
"""
current_children = list(parent.children)
for idx, child in enumerate(current_children):
if child is old:
current_children[idx] = new
parent.children = current_children
def _create_gui(self):
"""
Create the tree gui elements.
Notes
-----
Each node of the tree is either an
`IPython.html.widgets.Accordion`, if the node has child nodes,
or a `IPython.html.widgets.Select`, if the node has a list.
Note well this does **not** allow for the case of child nodes and
a list, so this does not really suffice as a file browser.
List nodes monkey with their parents by editing the description to
include the number of list items in the node.
"""
for parents, children, index in self._tree.walk():
if children and index:
# This should be impossible...
raise RuntimeError("What the ???")
parent_string = self._id_string(parents)
depth = len(parents)
try:
key = self._tree.tree_keys[depth]
except IndexError:
key = ''
if depth == 0:
self._top = Accordion()
self._top.description = key
# self._top.selected_index = -1
self._gui_objects[parent_string] = self._top
parent = self._gui_objects[parent_string]
# Do I have children? If so, add them as sub-accordions
if children:
child_objects = []
for child in children:
desc = ": ".join([key, str(child)])
child_container = Accordion()
child_container.description = desc
# Make sure all panels start out closed.
# child_container.selected_index = -1
child_container.parent = self._gui_objects[parent_string]
child_string = os.path.join(parent_string, str(child))
self._gui_objects[child_string] = child_container
child_objects.append(child_container)
parent.children = child_objects
# Do I have only a list? Populate a select box with those...
if index:
new_text = widgets.Select(options=index)
new_text.layout.width = '100%'
index_string = self._id_string([parent_string, 'files'])
self._gui_objects[index_string] = new_text
# On the last pass an Accordion will have been created for
# this item. We need to replace that Accordion with a Select.
# The Select should be inside a box so that we can set a
# description on the box that won't be displayed on the
# Select. When titles are built for the image viewer tree
# later on they are based on the description of the Accordions
# and their immediate children.
old_parent = parent
grandparent = old_parent.parent
desc = old_parent.description
s_or_not = ['', 's']
n_files = len(index)
desc += " ({0} image{1})".format(n_files,
s_or_not[n_files > 1])
# Place the box between the Select and the parent Accordion
parent = widgets.Box()
parent.description = desc
parent.children = [new_text]
parent.parent = grandparent
self._replace_child(grandparent, old=old_parent, new=parent)
def display(self):
"""
Display and format this widget.
"""
from IPython.display import display
display(self._top)
def _set_titles(self):
"""
Set titles for accordions.
This should apparently be done *before* the widget is displayed.
"""
for name, obj in six.iteritems(self._gui_objects):
if isinstance(obj, Accordion):
for idx, child in enumerate(obj.children):
if not isinstance(child, widgets.Select):
obj.set_title(idx, child.description)
def format(self):
"""
This gets called by the ImageBrowser so don't delete it.
For now it also closes all of the tabs after the browser is created
because doing it before (at least ipywidgets 5.1.5 and lower) causes
a javascript error which prevents properly setting the titles.
"""
for name, obj in six.iteritems(self._gui_objects):
if isinstance(obj, Accordion):
obj.selected_index = None
for idx, child in enumerate(obj.children):
if isinstance(child, Accordion):
child.selected_index = None
elif isinstance(child, widgets.Box):
child.children[0].width = "15em"
def ndarray_to_png(x, min_percent=20, max_percent=99.5):
shape = np.array(x.shape)
# Reverse order for reasons I do not understand...
shape = shape[::-1]
if len(shape) != 2:
return
width = 600 # pixels
downsample = (shape[0] // width) + 1
if downsample > 1:
x = block_reduce(x,
block_size=(downsample, downsample))
norm = simple_norm(x,
min_percent=min_percent,
max_percent=max_percent)
img_buffer = BytesIO()
mimg.imsave(img_buffer, norm(x), format='png', cmap='gray')
return img_buffer.getvalue()
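# Sizing note for the helper above: after the reversal, shape[0] is the image width,
# so e.g. a 2400-px-wide frame gives downsample = 2400 // 600 + 1 = 5 and the array is
# block-reduced to about 480 px across before being normalised and PNG-encoded.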
class FitsViewer(object):
"""
Display the image and header from a single FITS file.
"""
def __init__(self):
self._top = widgets.Tab(visible=False)
self._data = None # hdu.data
self._png_image = None # ndarray_to_png(self._data)
self._header = ''
self._image_box = widgets.VBox()
self._image = widgets.Image()
# Do this so the initial display looks ok.
self._image.layout.min_width = '400px'
self._image_title = widgets.Label()
self._image_box.children = [self._image, self._image_title]
self._header_box = widgets.VBox()
self._header_display = widgets.Textarea(disabled=True)
self._header_display.layout.width = '50rem'
self._header_display.layout.height = '20rem'
self._header_box.children = [self._header_display]
self._top.children = [self._image_box, self._header_box]
@property
def top(self):
return self._top
def display(self):
"""
Display and format this widget.
"""
from IPython.display import display
display(self._top)
self.format()
def format(self):
"""
Format widget.
Must be called after the widget is displayed, and is automatically
called by the `display` method.
"""
self._top.set_title(0, 'Image')
self._top.set_title(1, 'Header')
self._header_display.height = '400px'
self._header_display.width = '500px'
# Let the bike shedding begin....
self._image_box.align = "center"
self._image.padding = "10px"
self._image_box.border_style = 'solid'
self._image_box.border_radius = "5px"
self._image_box.border_color = "lightgray"
self._header_box.align = "center"
self._header_box.padding = "10px"
def set_fits_file_callback(self, demo=True, image_dir=None):
"""
Returns a callback function that sets the name of FITS file to
display and updates the widget.
The callback takes one argument, the name of the fits file, or 'demo'
to enable the display of a couple of sample images.
"""
def set_fits_file(name, fits_file):
"""
Set image and header to a particular FITS file.
Parameters
----------
fits_file : str
The name of the fits file, or 'demo' to enable the display of
a couple of sample images.
"""
if demo:
import random
place_holder_files = ['flood-flat-001R.fit',
'SA112-SF1-001R1.fit']
use_file = random.choice(place_holder_files)
full_path = os.path.join(get_data_path(), use_file)
else:
if image_dir is not None:
full_path = os.path.join(image_dir, fits_file)
else:
full_path = fits_file
with fits.open(full_path) as hdulist:
hdu = hdulist[0]
self._data = hdu.data
self._header = hdu.header
self._header_display.value = repr(self._header)
self._image.value = ndarray_to_png(self._data)
self._image_title.value = os.path.basename(full_path)
self.top.visible = True
return set_fits_file
class ImageBrowser(widgets.Box):
"""
Browse a tree of FITS images and view image/header.
Parameters
----------
collection : `ccdproc.ImageFileCollection`
Directory of images.
"""
def __init__(self, collection, allow_missing=True, *args, **kwd):
self._directory = collection.location
self._demo = kwd.pop('demo', False)
self._tree_keys = kwd.pop('keys', [])
missing = 'No value' if allow_missing else None
tree = msumastro.TableTree(collection.summary, self._tree_keys, 'file',
fill_missing=missing)
kwd['orientation'] = 'horizontal'
super(ImageBrowser, self).__init__(*args, **kwd)
self._tree_widget = ImageTree(tree)
self._fits_display = FitsViewer()
self._fits_display.top.visible = False
self.children = [self.tree_widget, self.fits_display]
# Connect the select boxes to the image displayer
self._add_handler(self.tree_widget)
@property
def tree_widget(self):
"""
Widget that represents the image tree.
"""
return self._tree_widget.top
@property
def fits_display(self):
"""
Widget that displays FITS image/header.
"""
return self._fits_display.top
def display(self):
"""
Display and format this widget.
"""
from IPython.display import display
display(self)
self.format()
def format(self):
"""
Format widget.
Must be called after the widget is displayed, and is automatically
called by the `display` method.
"""
# self.set_css('width', '100%')
self.width = '100%'
self._tree_widget.format()
self._fits_display.format()
# self.tree_widget.add_class('box-flex1')
self.tree_widget.width = '25%'
# self.fits_display.add_class('box-flex2')
self.fits_display.width = '67%'
for child in self.children:
# child.set_css('margin', '10px')
child.margin = '5px'
def _add_handler(self, node):
if isinstance(node, widgets.Select):
node.on_trait_change(
self._fits_display.set_fits_file_callback(demo=self._demo,
image_dir=self._directory),
str('value'))
return
if hasattr(node, 'children'):
for child in node.children:
self._add_handler(child)
``` |
{
"source": "jimboid/aiida-gromacs",
"score": 2
} |
#### File: aiida_gromacs/calculations/mdrun.py
```python
from aiida.common import datastructures
from aiida.engine import CalcJob
from aiida.orm import SinglefileData
from aiida.plugins import DataFactory
MdrunParameters = DataFactory('gromacs.mdrun')
class MdrunCalculation(CalcJob):
"""
AiiDA calculation plugin wrapping the 'gmx mdrun' executable.
Runs a molecular dynamics simulation and retrieves the resulting trajectory, structure, log and energy files.

"""
@classmethod
def define(cls, spec):
"""Define inputs and outputs of the calculation."""
# yapf: disable
super().define(spec)
# set default values for AiiDA options
spec.inputs['metadata']['options']['resources'].default = {
'num_machines': 1,
'num_mpiprocs_per_machine': 1,
}
spec.inputs['metadata']['options']['parser_name'].default = 'gromacs.mdrun'
spec.input('metadata.options.output_filename', valid_type=str, default='mdrun.out')
spec.input('tprfile', valid_type=SinglefileData, help='Input structure.')
spec.input('parameters', valid_type=MdrunParameters, help='Command line parameters for gmx mdrun')
spec.output('stdout', valid_type=SinglefileData, help='stdout')
spec.output('trrfile', valid_type=SinglefileData, help='Output trajectory.')
spec.output('grofile', valid_type=SinglefileData, help='Output structure file.')
spec.output('logfile', valid_type=SinglefileData, help='Output log file.')
spec.output('enfile', valid_type=SinglefileData, help='Output energy file.')
spec.output('cptfile', valid_type=SinglefileData, required=False, help='Checkpoint file.')
spec.exit_code(300, 'ERROR_MISSING_OUTPUT_FILES', message='Calculation did not produce all expected output files.')
def prepare_for_submission(self, folder):
"""
Create input files.
:param folder: an `aiida.common.folders.Folder` where the plugin should temporarily place all files
needed by the calculation.
:return: `aiida.common.datastructures.CalcInfo` instance
"""
codeinfo = datastructures.CodeInfo()
codeinfo.cmdline_params = self.inputs.parameters.cmdline_params(
tprfile=self.inputs.tprfile.filename)
codeinfo.code_uuid = self.inputs.code.uuid
codeinfo.stdout_name = self.metadata.options.output_filename
codeinfo.withmpi = self.inputs.metadata.options.withmpi
# Prepare a `CalcInfo` to be returned to the engine
calcinfo = datastructures.CalcInfo()
calcinfo.codes_info = [codeinfo]
calcinfo.local_copy_list = [
(self.inputs.tprfile.uuid, self.inputs.tprfile.filename, self.inputs.tprfile.filename),
]
calcinfo.retrieve_list = [self.metadata.options.output_filename,
self.inputs.parameters['c'],
self.inputs.parameters['e'],
self.inputs.parameters['g'],
self.inputs.parameters['o']]
if 'cpo' in self.inputs.parameters.keys():
calcinfo.retrieve_list.append(self.inputs.parameters['cpo'])
return calcinfo
``` |
{
"source": "jimbojsb/PlexConnect",
"score": 2
} |
#### File: jimbojsb/PlexConnect/ATVSettings.py
```python
import sys
from os import sep
import ConfigParser
import fnmatch
from Debug import * # dprint()
options = { \
'libraryview' :('List', 'Grid', 'Bookcase'), \
'movieview' :('Grid', 'List', 'Detailed List'), \
'homevideoview' :('Grid', 'List', 'Detailed List'), \
'actorview' :('Movies', 'Portrait'), \
'showview' :('List', 'Detailed List', 'Grid', 'Bookcase'), \
'flattenseason' :('False', 'True'), \
'seasonview' :('List', 'Coverflow'), \
'channelview' :('List', 'Grid', 'Bookcase'), \
'durationformat' :('Hours/Minutes', 'Minutes'), \
'showtitles_movies' :('Highlighted Only', 'Show All'), \
'showtitles_tvshows' :('Highlighted Only', 'Show All'), \
'showtitles_homevideos' :('Highlighted Only', 'Show All'), \
'showtitles_channels' :('Highlighted Only', 'Show All'), \
'movies_navbar_unwatched' :('checked', 'unchecked'), \
'movies_navbar_byfolder' :('checked', 'unchecked'), \
'movies_navbar_collections' :('checked', 'unchecked'), \
'movies_navbar_genres' :('checked', 'unchecked'), \
'movies_navbar_decades' :('checked', 'unchecked'), \
'movies_navbar_directors' :('checked', 'unchecked'), \
'movies_navbar_actors' :('checked', 'unchecked'), \
'movies_navbar_more' :('checked', 'unchecked'), \
'homevideos_navbar_unwatched' :('checked', 'unchecked'), \
'homevideos_navbar_byfolder' :('checked', 'unchecked'), \
'homevideos_navbar_collections' :('checked', 'unchecked'), \
'homevideos_navbar_genres' :('checked', 'unchecked'), \
'tv_navbar_unwatched' :('checked', 'unchecked'), \
'tv_navbar_genres' :('checked', 'unchecked'), \
'tv_navbar_more' :('checked', 'unchecked'), \
'transcodequality' :('1080p 40.0Mbps', \
'480p 2.0Mbps', \
'720p 3.0Mbps', '720p 4.0Mbps', \
'1080p 8.0Mbps', '1080p 10.0Mbps', '1080p 12.0Mbps', '1080p 20.0Mbps'), \
'transcoderaction' :('Auto', 'DirectPlay', 'Transcode'), \
'remotebitrate' :('720p 3.0Mbps', '720p 4.0Mbps', \
'1080p 8.0Mbps', '1080p 10.0Mbps', '1080p 12.0Mbps', '1080p 20.0Mbps', '1080p 40.0Mbps', \
'480p 2.0Mbps'), \
'phototranscoderaction' :('Auto', 'Transcode'), \
'subtitlerenderer' :('Auto', 'iOS, PMS', 'PMS'), \
'subtitlesize' :('100', '125', '150', '50', '75'), \
'audioboost' :('100', '175', '225', '300'), \
'showunwatched' :('True', 'False'), \
'showsynopsis' :('Hide', 'Show'), \
'showplayerclock' :('True', 'False'), \
'overscanadjust' :('0', '1', '2', '3', '-3', '-2', '-1'), \
'clockposition' :('Center', 'Right', 'Left'), \
'showendtime' :('True', 'False'), \
'timeformat' :('24 Hour', '12 Hour'), \
'myplex_user' :('', ), \
'myplex_auth' :('', ), \
}
class CATVSettings():
def __init__(self):
dprint(__name__, 1, "init class CATVSettings")
self.cfg = None
self.loadSettings()
# load/save config
def loadSettings(self):
dprint(__name__, 1, "load settings")
# options -> default
dflt = {}
for opt in options:
dflt[opt] = options[opt][0]
# load settings
self.cfg = ConfigParser.SafeConfigParser(dflt)
self.cfg.read(self.getSettingsFile())
def saveSettings(self):
dprint(__name__, 1, "save settings")
f = open(self.getSettingsFile(), 'wb')
self.cfg.write(f)
f.close()
def getSettingsFile(self):
return sys.path[0] + sep + "ATVSettings.cfg"
def checkSection(self, UDID):
# check for existing UDID section
sections = self.cfg.sections()
if not UDID in sections:
self.cfg.add_section(UDID)
dprint(__name__, 0, "add section {0}", UDID)
# access/modify AppleTV options
def getSetting(self, UDID, option):
self.checkSection(UDID)
dprint(__name__, 1, "getsetting {0}", self.cfg.get(UDID, option))
return self.cfg.get(UDID, option)
def setSetting(self, UDID, option, val):
self.checkSection(UDID)
self.cfg.set(UDID, option, val)
def checkSetting(self, UDID, option):
self.checkSection(UDID)
val = self.cfg.get(UDID, option)
opts = options[option]
# check val in list
found = False
for opt in opts:
if fnmatch.fnmatch(val, opt):
found = True
# if not found, correct to default
if not found:
self.cfg.set(UDID, option, opts[0])
dprint(__name__, 1, "checksetting: default {0} to {1}", option, opts[0])
def toggleSetting(self, UDID, option):
self.checkSection(UDID)
cur = self.cfg.get(UDID, option)
opts = options[option]
# find current in list
i=0
for i,opt in enumerate(opts):
if opt==cur:
break
# get next option (circle to first)
i=i+1
if i>=len(opts):
i=0
# set
self.cfg.set(UDID, option, opts[i])
def setOptions(self, option, opts):
global options
if option in options:
options[option] = opts
dprint(__name__, 1, 'setOption: update {0} to {1}', option, opts)
if __name__=="__main__":
ATVSettings = CATVSettings()
UDID = '007'
ATVSettings.checkSection(UDID)
option = 'transcodequality'
print ATVSettings.getSetting(UDID, option)
print "setSetting"
ATVSettings.setSetting(UDID, option, 'True') # error - pick default
print ATVSettings.getSetting(UDID, option)
ATVSettings.setSetting(UDID, option, '9')
print ATVSettings.getSetting(UDID, option)
print "toggleSetting"
ATVSettings.toggleSetting(UDID, option)
print ATVSettings.getSetting(UDID, option)
ATVSettings.toggleSetting(UDID, option)
print ATVSettings.getSetting(UDID, option)
ATVSettings.toggleSetting(UDID, option)
print ATVSettings.getSetting(UDID, option)
del ATVSettings
``` |
{
"source": "jimbonothing64/codewof",
"score": 3
} |
#### File: tests/users/test_utils.py
```python
from users.utils import send_invitation_email, create_invitation_plaintext, create_invitation_html
from tests.users.test_views import get_outbox_sorted
from django.test import TestCase
from django.contrib.auth import get_user_model
from django.http import HttpResponse
from django.urls import reverse
from tests.conftest import user
from django.conf import settings
from tests.codewof_test_data_generator import (
generate_users,
generate_groups,
)
from users.models import Group
User = get_user_model()
class TestCreateInvitationPlaintext(TestCase):
@classmethod
def setUpTestData(cls):
# never modify this object in tests
generate_users(user)
generate_groups()
def setUp(self):
self.john = User.objects.get(pk=1)
self.sally = User.objects.get(pk=2)
self.group_north = Group.objects.get(name="Group North")
def test_user_exists(self):
expected_url = settings.CODEWOF_DOMAIN + reverse('users:dashboard')
expected = "Hi Sally,\n\n<NAME> has invited you to join the Group 'Group North'. Click the link below to "\
"sign in. You will see your invitation in the dashboard, where you can join the group.\n\n{}"\
"\n\nThanks,\nThe Computer Science Education Research Group".format(expected_url)
self.assertEqual(create_invitation_plaintext(True, self.sally.first_name,
self.john.first_name + " " + self.john.last_name,
self.group_north.name, self.sally.email),
expected)
def test_user_does_not_exist(self):
expected_url = settings.CODEWOF_DOMAIN + reverse('account_signup')
expected = "Hi,\n\n<NAME> has invited you to join the Group 'Group North'. CodeWOF helps you maintain your "\
"programming fitness with short daily programming exercises. With a free account you can save your"\
" progress and track your programming fitness over time. Click the link below to make an account,"\
" using the email unknown<EMAIL>. You will see your invitation in the dashboard, where you can "\
"join the group. If you already have a CodeWOF account, then add <EMAIL> to your profile "\
"to make the invitation appear.\n\n{}\n\nThanks,\nThe Computer Science Education Research Group"\
.format(expected_url)
self.assertEqual(create_invitation_plaintext(False, None, self.john.first_name + " " + self.john.last_name,
self.group_north.name, "<EMAIL>"), expected)
class TestCreateInvitationHTML(TestCase):
@classmethod
def setUpTestData(cls):
# never modify this object in tests
generate_users(user)
generate_groups()
def setUp(self):
self.john = User.objects.get(pk=1)
self.sally = User.objects.get(pk=2)
self.group_north = Group.objects.get(name="Group North")
def test_user_exists_html_contains_name(self):
expected = "<p>Hi Sally,</p>"
response = HttpResponse(create_invitation_html(True, self.sally.first_name,
self.john.first_name + " " + self.john.last_name,
self.group_north.name, self.sally.email))
self.assertContains(response, expected, html=True)
def test_user_exists_html_contains_correct_message(self):
expected = "<p><NAME> has invited you to join the Group 'Group North'. Click the link below to " \
"sign in. You will see your invitation in the dashboard, where you can join the group.</p>"
response = HttpResponse(create_invitation_html(True, self.sally.first_name,
self.john.first_name + " " + self.john.last_name,
self.group_north.name, self.sally.email))
self.assertContains(response, expected, html=True)
def test_user_exists_html_contains_correct_link(self):
expected_url = settings.CODEWOF_DOMAIN + reverse('users:dashboard')
expected = f"<a href=\"{expected_url}\" style=\"color: #007bff; text-decoration: underline;\">Sign In</a>"
response = HttpResponse(create_invitation_html(True, self.sally.first_name,
self.john.first_name + " " + self.john.last_name,
self.group_north.name, self.sally.email))
self.assertContains(response, expected, html=True)
def test_user_does_not_exist_html_contains_no_name(self):
expected = "<p>Hi,</p>"
response = HttpResponse(create_invitation_html(False, None, self.john.first_name + " " + self.john.last_name,
self.group_north.name, "<EMAIL>"))
self.assertContains(response, expected, html=True)
def test_user_does_not_exist_html_contains_correct_message(self):
expected = "<p><NAME> has invited you to join the Group 'Group North'. CodeWOF helps you maintain "\
"your programming fitness with short daily programming exercises. With a free account you can "\
"save your progress and track your programming fitness over time. Click the link below to make an "\
"account, using the email <EMAIL>. You will see your invitation in the dashboard, where "\
"you can join the group. If you already have a CodeWOF account, then add <EMAIL> to your "\
"profile to make the invitation appear.</p>"
response = HttpResponse(create_invitation_html(False, None, self.john.first_name + " " + self.john.last_name,
self.group_north.name, "<EMAIL>"))
self.assertContains(response, expected, html=True)
def test_user_does_not_exist_html_contains_correct_link(self):
expected_url = settings.CODEWOF_DOMAIN + reverse('account_signup')
expected = f"<a href=\"{expected_url}\" style=\"color: #007bff; text-decoration: underline;\">Sign Up</a>"
response = HttpResponse(create_invitation_html(False, None, self.john.first_name + " " + self.john.last_name,
self.group_north.name, "<EMAIL>"))
self.assertContains(response, expected, html=True)
class TestSendInvitationEmail(TestCase):
@classmethod
def setUpTestData(cls):
# never modify this object in tests
generate_users(user)
generate_groups()
def setUp(self):
self.john = User.objects.get(pk=1)
self.sally = User.objects.get(pk=2)
self.group_north = Group.objects.get(name="Group North")
def test_email_sent_user_exists(self):
send_invitation_email(self.sally, self.john, self.group_north.name, self.sally.email)
outbox = get_outbox_sorted()
expected_url = settings.CODEWOF_DOMAIN + reverse('users:dashboard')
expected = "Hi Sally,\n\n<NAME> has invited you to join the Group 'Group North'. Click the link below to "\
"sign in. You will see your invitation in the dashboard, where you can join the group.\n\n{}"\
"\n\nThanks,\nThe Computer Science Education Research Group".format(expected_url)
self.assertEqual(len(outbox), 1)
self.assertTrue(self.sally.first_name in outbox[0].body)
self.assertTrue(expected in outbox[0].body)
def test_email_sent_user_does_not_exist(self):
send_invitation_email(None, self.john, self.group_north.name, "<EMAIL>")
outbox = get_outbox_sorted()
expected_url = settings.CODEWOF_DOMAIN + reverse('account_signup')
expected = "Hi,\n\n<NAME> has invited you to join the Group 'Group North'. CodeWOF helps you maintain your "\
"programming fitness with short daily programming exercises. With a free account you can save your"\
" progress and track your programming fitness over time. Click the link below to make an account,"\
" using the email <EMAIL>. You will see your invitation in the dashboard, where you can "\
"join the group. If you already have a CodeWOF account, then add <EMAIL> to your profile "\
"to make the invitation appear.\n\n{}\n\nThanks,\nThe Computer Science Education Research Group"\
.format(expected_url)
self.assertEqual(len(outbox), 1)
self.assertTrue(expected in outbox[0].body)
``` |
{
"source": "Jimboom7/what-a-goal-viewer",
"score": 3
} |
#### File: Jimboom7/what-a-goal-viewer/main.py
```python
import threading
import time
import math
import sys
import os
import tkinter as tk
from tkinter import ttk
import cv2
import pytesseract
from PIL import ImageGrab
import numpy as np
pytesseract.pytesseract.tesseract_cmd = r'Tesseract-OCR\tesseract.exe'
'''
Convert coordinates for different screen sizes. Only works for 16:9 format.
'''
class ScreenConverter:
def __init__(self):
self.base_x = 1920
self.value = {}
self.value['dead_x'] = 1818
self.value['dead_y'] = 52
self.value['last_x'] = 1000
self.value['last_y'] = 25
self.value['contour_area_min'] = 120
self.value['contour_height_max'] = 60
self.value['contour_height_min'] = 25
self.value['contour_width_max'] = 45
self.value['contour_width_min'] = 5
self.value['contour_diff_dist_max'] = 45
self.value['contour_diff_height_max'] = 5
self.value['contour_diff_y_max'] = 5
self.value['check_left_first_x1'] = 110
self.value['check_left_first_x2'] = 190
self.value['check_left_first_y1'] = 870
self.value['check_left_first_y2'] = 970
self.value['check_left_x1'] = 110
self.value['check_left_x2'] = 160
self.value['check_left_y1'] = 780
self.value['check_left_y2'] = 930
self.value['check_right_first_x1'] = 110
self.value['check_right_first_x2'] = 190
self.value['check_right_first_y1'] = 1325
self.value['check_right_first_y2'] = 1425
self.value['check_right_x1'] = 110
self.value['check_right_x2'] = 160
self.value['check_right_y1'] = 1065
self.value['check_right_y2'] = 1210
self.value['check_balls_x1'] = 775
self.value['check_balls_x2'] = 825
self.value['check_balls_y1'] = 933
self.value['check_balls_y2'] = 987
def convert(self, x):
factor = self.base_x / x
for v in self.value:
self.value[v] = int(self.value[v] / factor)
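# Illustration: for a 1280x720 capture, convert(1280) gives factor = 1920 / 1280 = 1.5,
# so every stored 1920-based coordinate is scaled down by 1.5 (e.g. dead_x 1818 -> 1212).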
'''
A very simple graphical user interface.
'''
class Gui:
def __init__(self, main):
self.main = main
self.root = tk.Tk(className="wag-viewer")
self.root.protocol("WM_DELETE_WINDOW", self.on_closing)
def show(self):
self.left_score_label = tk.Label(self.root, text = "0", font=("calibre", 50, "bold"), width=4)
self.left_score_label.grid(row=0)
self.right_score_label = tk.Label(self.root, text = "0", font=("calibre", 50, "bold"), width=4)
self.right_score_label.grid(row=0, column=2)
source_label = tk.Label(self.root, text="Source")
source_label.grid(row=1)
self.source = ttk.Combobox(self.root, values = ["Monitor", "Virtualcam"], state="readonly", width=10)
self.source.set("Monitor")
self.source.bind('<<ComboboxSelected>>', self.source_changed)
self.source.grid(row=1, column=2)
device_number_var = tk.IntVar()
device_number_label = tk.Label(self.root, text="Cam Device Number")
device_number_label.grid(row=2)
self.device_number = tk.Entry(self.root, textvariable=device_number_var, width=3, state="disabled")
self.device_number.grid(row=2, column=2)
self.start_button = tk.Button(self.root, text="Start", command=self.start)
self.start_button.grid(row=3)
self.pause_button = tk.Button(self.root, text="Pause", command=self.pause)
self.pause_button.grid(row=3, column=1)
self.reset_button = tk.Button(self.root, text="Reset", command=self.reset)
self.reset_button.grid(row=3, column=2)
self.reset()
update_gui_thread = threading.Thread(target=self.update_gui_thread)
update_gui_thread.start()
self.root.mainloop()
def update_gui_thread(self):
while True:
start = time.time()
self.left_score_label.config(text=self.main.left_team_score)
self.right_score_label.config(text=self.main.right_team_score)
try:
time.sleep(start - time.time() + 0.5)
except:
pass
def source_changed(self, event):
if self.source.get() == "Monitor":
self.main.use_capture_card = False
self.device_number.config(state="disabled")
else:
self.main.use_capture_card = True
self.device_number.config(state="normal")
def start(self):
self.main.device_number = self.device_number.get()
self.start_button.config(state="disabled")
self.pause_button.config(state="normal")
self.reset_button.config(state="normal")
self.source.config(state="disabled")
self.device_number.config(state="disabled")
self.main.start()
def pause(self):
self.start_button.config(state="normal")
self.pause_button.config(state="disabled")
self.reset_button.config(state="normal")
self.main.pause()
def reset(self):
self.start_button.config(state="normal")
self.pause_button.config(state="disabled")
self.reset_button.config(state="disabled")
self.source.config(state="readonly")
if self.source.get() == "Monitor":
self.device_number.config(state="disabled")
else:
self.device_number.config(state="normal")
self.main.reset()
def on_closing(self):
self.reset()
self.root.destroy()
sys.exit()
'''
Main Class. Contains all the logic.
'''
class Main:
def __init__(self):
self.DEBUG = False
self.border_size = 10
self.tesseract_confidence = 82
self.initialize()
self.update_screensize(1920)
self.use_capture_card = False
self.device_number = 0
def initialize(self):
self.running = False
self.is_last_2_min = False
self.is_dead = False
self.left_team_score = 0
self.right_team_score = 0
self.first_goal = False
self.left_score_list = [None, None, None, None, None]
self.right_score_list = [None, None, None, None, None]
self.ball_list = [0,0,0,0,0]
self.screenshot = None
def update_screensize(self, screensize):
self.screen_size = screensize
self.screen = ScreenConverter()
self.screen.convert(self.screen_size)
def set_is_dead(self, frame):
try:
if sum(frame[self.screen.value['dead_y']][self.screen.value['dead_x']]) > 500: # Checks "+" Button at top right for greyscreen
self.is_dead = False
else:
self.is_dead = True
self.ball_list.append(0)
except:
print("Failed to check death")
def set_is_last_2_min(self, frame):
try:
if (frame[self.screen.value['last_y']][self.screen.value['last_x']].item(0) > 200
and frame[self.screen.value['last_y']][self.screen.value['last_x']].item(1) > 200
and frame[self.screen.value['last_y']][self.screen.value['last_x']].item(2) < 100): # Checks color of timer at top; 200,200,100 for yellow
self.is_last_2_min = True
else:
self.is_last_2_min = False
except:
print("Failed to check last 2 minutes timer")
def check_contours(self, frame):
orig_cnts = cv2.findContours(frame, cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)[0]
mask = np.ones(frame.shape[:2], dtype="uint8") * 255
cnts = []
for c in orig_cnts:
x,y,w,h = cv2.boundingRect(c)
if all([cv2.contourArea(c) >= self.screen.value['contour_area_min'],
h <= self.screen.value['contour_height_max'],
h >= self.screen.value['contour_height_min'],
w <= self.screen.value['contour_width_max'],
w >= self.screen.value['contour_width_min'],
x > self.border_size * 1.2, x+w < (frame.shape[1] - (self.border_size * 1.2)),
y > self.border_size * 1.2, y+h < (frame.shape[0] - (self.border_size * 1.2))]): # only keep contours with good forms and not at border
cnts.append(c)
else:
cv2.drawContours(mask, [c], -1, 0, -1)
if len(cnts) == 0:
return None
i = 0
for c in cnts:
i += 1
x,y,w,h = cv2.boundingRect(c)
middle_x = x + (w / 2)
middle_y = y + (h / 2)
distance_to_middle = math.sqrt(((frame.shape[1]/2) - middle_x)**2 + ((frame.shape[0]/2) - middle_y)**2)
min_dist = 9999
max_dist_to_middle = 0
min_height_diff = 9999
min_y_diff = 9999
j = 0
for c2 in cnts: # Check distance and other relations to all other contours
j += 1
if i == j:
continue
x2,y2,w2,h2 = cv2.boundingRect(c2)
middle_x2 = x2 + (w2 / 2)
middle_y2 = y2 + (h2 / 2)
dist = math.sqrt((middle_x - middle_x2)**2 + (middle_y - middle_y2)**2)
other_distance_to_middle = math.sqrt(((frame.shape[1]/2) - middle_x2)**2 + ((frame.shape[0]/2) - middle_y2)**2)
if dist < min_dist:
min_dist = dist
if other_distance_to_middle > max_dist_to_middle:
max_dist_to_middle = other_distance_to_middle
if abs(h - h2) < min_height_diff:
min_height_diff = abs(h - h2)
if abs(y - y2) < min_y_diff:
min_y_diff = abs(y - y2)
if min_dist > self.screen.value['contour_diff_dist_max']: # Two contours far from each other: Take the one closer to middle; three or more: take the best group
if len(cnts) == 2 and distance_to_middle == max_dist_to_middle:
cv2.drawContours(mask, [c], -1, 0, -1)
if len(cnts) > 2: # Problem: If there are 3 single contours all get deleted
cv2.drawContours(mask, [c], -1, 0, -1)
if len(cnts) > 2 and (min_height_diff > self.screen.value['contour_diff_height_max'] or min_y_diff > self.screen.value['contour_diff_y_max']):
cv2.drawContours(mask, [c], -1, 0, -1)
frame = cv2.bitwise_and(frame, frame, mask=mask)
return frame
def prepare_frame_for_text(self, frame, y1, y2, x1, x2, window_name):
frame_small = frame[(y1-self.border_size):(y2+self.border_size), (x1-self.border_size):(x2+self.border_size)]
# frame_small = cv2.bilateralFilter(frame_small,9,75,75)
frame_small = cv2.copyMakeBorder(frame_small, self.border_size, self.border_size, self.border_size, self.border_size, cv2.BORDER_CONSTANT)
if window_name == "Balls":
frame_small = cv2.resize(frame_small, (0, 0), fx=1.5, fy=1.5, interpolation=cv2.INTER_AREA)
space = cv2.COLOR_RGB2HSV # Screenshot is RGB
if self.use_capture_card:
space = cv2.COLOR_BGR2HSV
frame_hsv = cv2.cvtColor(frame_small, space)
if self.is_dead:
frame_cleaned = cv2.inRange(frame_hsv, (15,40,120), (40,225,160))
else:
frame_cleaned = cv2.inRange(frame_hsv, (15,40,200), (40,225,255))
frame_cleaned = self.check_contours(frame_cleaned)
if frame_cleaned is None:
return None
frame_cleaned = cv2.erode(frame_cleaned, np.ones((2, 2), np.uint8))
frame_cleaned = cv2.dilate(frame_cleaned, np.ones((2, 2), np.uint8))
frame_final = cv2.bitwise_not(frame_cleaned) # Swap Black/White
''' # Remove cursive by shearing - doesn't help in practise
shear_value = 0.18
M = np.float32([[1, shear_value, 0],
[0, 1 , 0],
[0, 0 , 1]])
y, x = frame_final.shape
frame_final = cv2.warpPerspective(frame_final,M,(x,y)) # Shear
frame_final = frame_final[0:y, int(y*shear_value):x]
frame_final = cv2.blur(frame_final, (2,2))
'''
if self.DEBUG:
t = time.time()
cv2.imwrite("DEBUG/" + window_name + str(t) + ".png", frame_final)
# cv2.imwrite("DEBUG/" + window_name + str(t) + "_c.png", frame_small)
return frame_final
def get_number_with_confidence(self, tess_dict, conf):
try:
for i in range(0, len(tess_dict['text'])):
if int(tess_dict['conf'][i]) > conf:
number = tess_dict['text'][i]
conf = int(tess_dict['conf'][i])
return int(number)
except:
return None
def get_score(self, frame, side):
tess_config = r'--oem 3 --psm 7 -l digits -c tessedit_char_whitelist=0123456789' # psm 7: Treat image as single line
try:
tess_dict = pytesseract.image_to_data(frame, config=tess_config, output_type=pytesseract.Output.DICT)
number = self.get_number_with_confidence(tess_dict, self.tesseract_confidence)
if self.DEBUG:
print(str(tess_dict['text']) + str(tess_dict['conf']))
print(side + " " + str(number))
if number > 0 and number < 51 or (number > 50 and number < 101 and number % 2 == 0):
return number
return None
except:
return None
def get_own_balls(self, frame):
tess_config = r'--oem 3 --psm 6 -l digits -c tessedit_char_whitelist=0123456789' # psm 6 (Uniform block of text) works better here
try:
tess_dict = pytesseract.image_to_data(frame, config=tess_config, output_type=pytesseract.Output.DICT)
number = self.get_number_with_confidence(tess_dict, self.tesseract_confidence)
if self.DEBUG and number != self.ball_list[-1]:
print("Balls: " + str(number))
if number >= 0 and number < 51:
return number
return None
except:
return None
def check_scored(self, y1, y2, x1, x2, side, score_list):
small_frame = self.prepare_frame_for_text(self.screenshot, y1, y2, x1, x2, side)
score = None
if small_frame is not None:
score = self.get_score(small_frame, side)
if(score != None and
score_list[-4] != score and
score_list[-3] != score and
(score_list[-2] != score_list[-1] and # Same number needs to be already found last or second last time
(score_list[-2] == score or
score_list[-1] == score))):
if side == "Left":
self.left_team_score += score
else:
self.right_team_score += score
print (side + " Team scored " + str(score) + " points!")
print ("Left " + str(self.left_team_score) + " - " + str(self.right_team_score) + " Right")
self.first_goal = True
score_list.append(score)
return score
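# Worked example of the debouncing above (hypothetical OCR reads): if score_list ends
# [..., None, None, None, 7] and 7 is read again, the goal is counted once; once the list
# ends [..., 7, 7], further reads of 7 are ignored because score_list[-2] == score_list[-1].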
def left_thread(self):
if not self.first_goal:
self.check_scored(self.screen.value['check_left_first_x1'], self.screen.value['check_left_first_x2'],
self.screen.value['check_left_first_y1'], self.screen.value['check_left_first_y2'], "Left", self.left_score_list)
else:
self.check_scored(self.screen.value['check_left_x1'], self.screen.value['check_left_x2'],
self.screen.value['check_left_y1'], self.screen.value['check_left_y2'], "Left", self.left_score_list)
def right_thread(self):
if not self.first_goal:
self.check_scored(self.screen.value['check_right_first_x1'], self.screen.value['check_right_first_x2'],
self.screen.value['check_right_first_y1'], self.screen.value['check_right_first_y2'], "Right", self.right_score_list)
else:
self.check_scored(self.screen.value['check_right_x1'], self.screen.value['check_right_x2'],
self.screen.value['check_right_y1'], self.screen.value['check_right_y2'], "Right", self.right_score_list)
def own_balls_thread(self):
ball_frame = self.prepare_frame_for_text(self.screenshot, self.screen.value['check_balls_x1'], self.screen.value['check_balls_x2'],
self.screen.value['check_balls_y1'], self.screen.value['check_balls_y2'], "Balls")
current_balls = self.get_own_balls(ball_frame)
if current_balls is None:
return
if (self.ball_list[-4] != 0 and
self.ball_list[-3] != 0 and
self.ball_list[-2] == 0 and
self.ball_list[-1] == 0 and
current_balls == 0): # Needs 0 three times to count score
score = self.ball_list[-3]
if self.is_last_2_min:
score *= 2
self.left_team_score += score
self.first_goal = True
print ("You scored " + str(score) + " points!")
print ("Left " + str(self.left_team_score) + " - " + str(self.right_team_score) + " Right")
self.ball_list.append(current_balls)
def start(self):
self.running = True
main_thread = threading.Thread(target=self.main_thread)
main_thread.start()
def pause(self):
self.running = False
def reset(self):
self.running = False
time.sleep(0.5)
self.initialize() # The main_thread might crash, but it should be fine
def main_thread(self):
if self.use_capture_card:
num = 0
try:
num = int(self.device_number)
except:
pass
cap = cv2.VideoCapture(num)
cap.set(cv2.CAP_PROP_FRAME_WIDTH, 1920)
cap.set(cv2.CAP_PROP_FRAME_HEIGHT, 1080)
if not cap.isOpened():
print("ERROR: Cannot open camera " + str(self.device_number))
return
while self.running:
start = time.time()
screenshot = None
if self.use_capture_card:
ret, screenshot = cap.read()
else:
screenshot = ImageGrab.grab()
self.screenshot = np.array(screenshot)
if (self.screenshot.shape[1] != self.screen_size):
self.update_screensize(self.screenshot.shape[1])
self.set_is_dead(self.screenshot)
self.set_is_last_2_min(self.screenshot)
t1 = threading.Thread(target=self.left_thread)
t2 = threading.Thread(target=self.right_thread)
t3 = threading.Thread(target=self.own_balls_thread)
t1.start()
t2.start()
if not self.is_dead:
t3.start()
t1.join()
t2.join()
if not self.is_dead:
t3.join()
sleeptime = start - time.time() + 0.3
if sleeptime > 0:
time.sleep(sleeptime) # Run every 0.3 seconds
def main(self):
if not os.path.isfile(pytesseract.pytesseract.tesseract_cmd):
print("ERROR: Tesseract is missing: " + str(pytesseract.pytesseract.tesseract_cmd))
sys.exit()
self.gui = Gui(self)
self.gui.show()
if __name__ == "__main__":
main = Main()
if (len(sys.argv) > 1 and sys.argv[1] == "DEBUG"):
main.DEBUG = True
main.main()
``` |
{
"source": "JimBoonie/hydra",
"score": 2
} |
#### File: hydra/examples/ldr_to_hdr.py
```python
import os
import re
import scipy as sp
import scipy.misc
import matplotlib.pyplot as plt
import hydra.io
import hydra.gen
import hydra.tonemap
dirname = '../data/stack'
filename = 'memorial.hdr_image_list.txt'
def main():
hdr = None
with open(os.path.join(dirname, filename), 'r') as f:
lines = [ l.strip() for l in f if not l.startswith('#') ]
nimg = int(lines[0])
imgs = [ None ] * nimg
expotimes = [ 0.0 ] * nimg
for i in range(nimg):
items = re.split(' +', lines[i + 1])
imgs[i] = sp.misc.imread(os.path.join(dirname, items[0]))
expotimes[i] = float(items[1])
hdr = hydra.gen.devebec(imgs, expotimes)
tm = hydra.tonemap.durand(hdr)
tm = hydra.tonemap.gamma(tm, 1.0 / 2.2)
fig, ax = plt.subplots()
ax.imshow(tm)
ax.set_title('Generated HDR')
ax.axis('off')
plt.show()
if __name__ == '__main__':
main()
```
#### File: hydra/examples/load_and_save.py
```python
import os
import time
import hydra as hdr
rootdir = os.path.join(os.path.dirname(__file__), os.path.pardir)
filename = os.path.join(rootdir, 'data', 'memorial.hdr')
def main():
# Load HDR
start = time.time()
img = hdr.io.load(filename)
print('Load time: {:.6f} sec'.format(time.time() - start))
# Save HDR
start = time.time()
hdr.io.save('image.hdr', img)
print('Save time: {:.6f} sec'.format(time.time() - start))
if __name__ == '__main__':
main()
```
#### File: hydra/filters/bilateral.py
```python
import math
import numpy as np
def rec_filter_horizontal(I, D, sigma):
a = math.exp(-math.sqrt(2.0) / sigma)
F = I.copy()
V = np.power(a, D)
h, w, num_channels = I.shape
for i in range(1,w):
for c in range(num_channels):
F[:,i,c] = F[:,i,c] + V[:,i] * (F[:,i-1,c] - F[:,i,c])
for i in range(w-2,-1,-1):
for c in range(num_channels):
F[:,i,c] = F[:,i,c] + V[:,i+1] * (F[:,i+1,c] - F[:,i,c])
return F
def bilateral(I, sigma_s, sigma_r, num_iterations=5, J=None):
if I.ndim == 3:
img = I.copy()
else:
h, w = I.shape
img = I.reshape((h, w, 1))
if J is None:
J = img
if J.ndim == 2:
h, w = J.shape
J = np.reshape(J, (h, w, 1))
h, w, num_channels = J.shape
dIcdx = np.diff(J, n=1, axis=1)
dIcdy = np.diff(J, n=1, axis=0)
dIdx = np.zeros((h, w))
dIdy = np.zeros((h, w))
for c in range(num_channels):
dIdx[:,1:] = dIdx[:,1:] + np.abs(dIcdx[:,:,c])
dIdy[1:,:] = dIdy[1:,:] + np.abs(dIcdy[:,:,c])
dHdx = (1.0 + sigma_s / sigma_r * dIdx)
dVdy = (1.0 + sigma_s / sigma_r * dIdy)
dVdy = dVdy.T
N = num_iterations
F = img.copy()
sigma_H = sigma_s
for i in range(num_iterations):
sigma_H_i = sigma_H * math.sqrt(3.0) * (2.0 ** (N - (i + 1))) / math.sqrt(4.0 ** N - 1.0)
F = rec_filter_horizontal(F, dHdx, sigma_H_i)
F = np.swapaxes(F, 0, 1)
F = rec_filter_horizontal(F, dVdy, sigma_H_i)
F = np.swapaxes(F, 0, 1)
return F
```
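The recursive filter above is an edge-preserving smoother — the kind of filter Durand-style tone mapping uses to split an image into a smooth base layer and a residual detail layer. A minimal usage sketch (the sigma values are illustrative, not tuned, and the import path assumes the package layout from `setup.py` below):
```python
import numpy as np
from hydra.filters.bilateral import bilateral

img = np.random.rand(64, 64)                       # stand-in for a luminance image
base = bilateral(img, sigma_s=10.0, sigma_r=0.1)   # edge-preserving smoothed base layer
detail = img - base[:, :, 0]                       # residual detail layer
```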
#### File: hydra/io/pfm_format.py
```python
import struct
import numpy as np
class PFMFormat(object):
@staticmethod
def load(filename):
img = None
with open(filename, 'rb') as f:
f.readline()
w, h = f.readline().decode('ascii').strip().split(' ')
w = int(w)
h = int(h)
f.readline()
siz = h * w * 3
img = np.array(struct.unpack('f' * siz, f.read(4 * siz)))
img = img.reshape((h, w, 3))
if img is None:
raise Exception('Failed to load file "{0}"'.format(filename))
return img
@staticmethod
def save(filename, img):
h, w, dim = img.shape
with open(filename, 'wb') as f:
f.write(bytearray('PFM\n', 'ascii'))
f.write(bytearray('{0:d} {1:d}\n\n'.format(w, h), 'ascii'))
siz = h * w * 3
tmp = img.reshape(siz)
f.write(struct.pack('f' * siz, *tmp))
```
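A round-trip sketch for the `PFMFormat` helpers above (the file name is hypothetical; the import path assumes the package layout from `setup.py` below):
```python
import numpy as np
from hydra.io.pfm_format import PFMFormat

img = np.random.rand(4, 8, 3)             # small float RGB image
PFMFormat.save('example.pfm', img)        # packed as 32-bit floats
restored = PFMFormat.load('example.pfm')
assert restored.shape == (4, 8, 3)
```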
#### File: JimBoonie/hydra/setup.py
```python
from setuptools import setup
from setuptools.command.install import install as _install
class install(_install):
def run(self):
_install.run(self)
setup(
cmdclass = { 'install' : install },
name = 'hydra',
version = '0.1',
author = 'tatsy',
author_email = '<EMAIL>',
url = 'https://github.com/tatsy/hydra.git',
description = 'Python HDR image processing library.',
license = 'MIT',
classifiers = [
'Development Status :: 1 - Planning',
'Programming Language :: Python :: 3',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4'
],
packages = [
'hydra',
'hydra.core',
'hydra.eo',
'hydra.filters',
'hydra.gen',
'hydra.io',
'hydra.tonemap'
]
)
```
#### File: hydra/tests/test_core.py
```python
try:
import unittest2 as unittest
except:
import unittest
import hydra.core
from .test_helper import *
from random import *
class TestCore(unittest.TestCase):
def test_clamp(self):
for t in range(REPEAT):
r = random()
a = hydra.core.clamp(r)
self.assertLessEqual(0.0, r)
self.assertLessEqual(r, 1.0)
if 0.0 <= r and r <= 1.0:
self.assertEqual(r, a)
def test_clamp_range(self):
for t in range(REPEAT):
r = random()
l = random()
h = random()
if l > h:
l, h = h, l
a = hydra.core.clamp(r, range=(l, h))
self.assertLessEqual(l, a)
self.assertLessEqual(a, h)
if l <= r and r <= h:
self.assertEqual(r, a)
def test_clamp_raises(self):
for t in range(REPEAT):
r = random()
l = random()
h = random()
if l <= h:
l, h = h, l
with self.assertRaises(Exception):
hydra.core.clamp(r, range=(l, h))
def test_pixel(self):
p = hydra.core.Pixel(0.0, 0.0, 0.0)
self.assertEqual(p.r, 0)
self.assertEqual(p.g, 0)
self.assertEqual(p.b, 0)
self.assertEqual(p.e, 0)
p = hydra.core.Pixel(1.0, 1.0, 1.0)
self.assertEqual(p.r, 128)
self.assertEqual(p.g, 128)
self.assertEqual(p.b, 128)
self.assertEqual(p.e, 129)
def test_pixel_get(self):
p = hydra.core.Pixel(0.0, 0.0, 0.0)
self.assertEqual(p.get(0), 0)
self.assertEqual(p.get(1), 0)
self.assertEqual(p.get(2), 0)
self.assertEqual(p.get(3), 0)
with self.assertRaises(Exception):
p[4]
self.assertEqual(p[0], 0)
self.assertEqual(p[1], 0)
self.assertEqual(p[2], 0)
self.assertEqual(p[3], 0)
with self.assertRaises(Exception):
p[4]
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "jimbopants/DADA2_processing",
"score": 2
} |
#### File: jimbopants/DADA2_processing/DADA2_prep.py
```python
import subprocess
import glob
import argparse
import sys
import os
import shutil
# Check dependencies and print header:
print('\nDADA2 Cutadapt Preparation Script')
print('v1.0 Written by <NAME>, 1/8/18')
print('See docs at github.com/jimbopants/DADA2_processing\n')
if shutil.which('cutadapt') is None:
print('Cutadapt not in PATH, check environment')
sys.exit(0)
def main():
args = parse_arguments()
primers = check_and_set_primers(args)
os.makedirs(args.out_dir, exist_ok=True)
# Get reads, checks that there are an even number of files (1 fwd/rev per sample) otherwise exits.
files = []
for root, dirs, dir_files in os.walk(args.raw_dir, topdown=False):
for name in dir_files:
if '.DS' in name:
pass
else:
files.append(os.path.join(root, name))
if len(files)%2 != 0:
print('Uneven number of files. Check raw reads')
sys.exit()
# Cutadapt trim reads
name_prefix = ['F_', 'R_']
read_index = 0
for i in files:
cutadapt_process(i, args.out_dir, primers, name_prefix, read_index)
read_index +=1
read_index = read_index %2
# Break early if just running the first 2 samples for verification:
if args.verify_only:
if read_index == 0:
break
if args.names:
for i in files:
print(split_name(i))
## Subfunctions:
def split_name(file_str):
name = file_str.rsplit('/',1)[1]
name = name.split('_', 1)[0]
return name
def cutadapt_process(file, out_dir, primers, name_prefix, read_index):
"""
Uses subprocess to trim 5' adapters from the reads in file,
writes only the reads where the adapter was trimmed.
"""
name = split_name(file)
output_file = out_dir + name_prefix[read_index] + name + '.fastq'
trim_command = 'cutadapt -g {0} -o {1} --discard-untrimmed {2}'.format(primers[read_index], output_file, file)
subprocess.check_call(trim_command, shell=True)
return
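# Illustration with hypothetical paths: for a forward read "raw/S1_R1.fastq", the amoA
# primer set and out_dir "trimmed/", the command built above is roughly
#   cutadapt -g GGGGTTTCTACTGGTGGT -o trimmed/F_S1.fastq --discard-untrimmed raw/S1_R1.fastq
# so only reads whose 5' primer was found (and removed) are written to the output file.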
def parse_arguments():
parser = argparse.ArgumentParser()
# Default options:
parser.add_argument("--primer_set", help="Predefined primer sets", choices=['amoA', 'nxrB', '16S_515F_926R'])
parser.add_argument('--raw_dir', help='Directory with raw fastq reads', required=True)
parser.add_argument('--out_dir', help='Output directory')
# Allow manual primer input:
parser.add_argument('--fwd', help='New forward primer (Ignored if using the primer_set option)')
parser.add_argument('--rev', help='New reverse primer (Ignored if using the primer_set option)')
## Simple run-tests
# Option to check just the first sample for the correct primer
parser.add_argument('--verify_only',
help='Verifies the first sample contains the specified primers, prints cutadapt output and exits.',
action='store_true')
# Option to print the result of the name prefix trimming
parser.add_argument('--names',
help='Prints the trimmed names for each sample. Does not actually trim reads',
action='store_true')
# Print help if no options given.
if len(sys.argv)==1:
parser.print_help()
print("\n\nNeed command line input\n\n")
sys.exit(1)
# Parse Command Line Arguments:
try:
result = parser.parse_args()
return result
except Exception as e:
parser.print_help()
print(e)
sys.exit(0)
def check_and_set_primers(args):
"""
After parsing arguments, this function checks that a primer set was specified
and returns the actual primer sets if a default primerset was specified at the command line.
"""
if args.primer_set is None:
if all([args.fwd, args.rev]):
return args.fwd, args.rev
else:
print("\n\nNeed to enter a fwd/rev primer or a valid primer set name. Try --help at command line\n\n")
sys.exit(1)
primer_dict = {
'amoA' : ['GGGGTTTCTACTGGTGGT', 'CCCCTCKGSAAAGCCTTCTTC'],
'nxrB' : ['TACATGTGGTGGAACA', 'CGGTTCTGGTCRATCA'],
'16S_515F_926R' : ['GTGYCAGCMGCCGCGGTAA', 'CCGYCAATTYMTTTRAGTTT'] # 2016+ version.
}
return primer_dict[args.primer_set]
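# Hedged usage sketch (added; not part of the original script, paths are
# placeholders): typical invocations would look like
#   python DADA2_prep.py --primer_set 16S_515F_926R --raw_dir raw_reads/ --out_dir trimmed/
#   python DADA2_prep.py --fwd GTGYCAGCMGCCGCGGTAA --rev CCGYCAATTYMTTTRAGTTT --raw_dir raw_reads/ --out_dir trimmed/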
if __name__ == "__main__":
main()
``` |
{
"source": "jim-bo/scdecon",
"score": 2
} |
#### File: scdecon/utils/heirheat.py
```python
import random
# Third party modules #
import numpy, scipy, matplotlib, pandas
from matplotlib import pyplot
import scipy.cluster.hierarchy as sch
import scipy.spatial.distance as dist
import names
###############################################################################
# Create Custom Color Gradients #
red_black_sky = {'red': ((0.0, 0.0, 0.0), (0.5, 0.0, 0.1), (1.0, 1.0, 1.0)),
'green': ((0.0, 0.0, 0.9), (0.5, 0.1, 0.0), (1.0, 0.0, 0.0)),
'blue': ((0.0, 0.0, 1.0), (0.5, 0.1, 0.0), (1.0, 0.0, 0.0))}
red_black_blue = {'red': ((0.0, 0.0, 0.0), (0.5, 0.0, 0.1), (1.0, 1.0, 1.0)),
'green': ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
'blue': ((0.0, 0.0, 1.0), (0.5, 0.1, 0.0), (1.0, 0.0, 0.0))}
red_black_green = {'red': ((0.0, 0.0, 0.0), (0.5, 0.0, 0.1), (1.0, 1.0, 1.0)),
'blue': ((0.0, 0.0, 0.0), (1.0, 0.0, 0.0)),
'green': ((0.0, 0.0, 1.0), (0.5, 0.1, 0.0), (1.0, 0.0, 0.0))}
yellow_black_blue = {'red': ((0.0, 0.0, 0.0), (0.5, 0.0, 0.1), (1.0, 1.0, 1.0)),
'green': ((0.0, 0.0, 0.8), (0.5, 0.1, 0.0), (1.0, 1.0, 1.0)),
'blue': ((0.0, 0.0, 1.0), (0.5, 0.1, 0.0), (1.0, 0.0, 0.0))}
make_cmap = lambda x: matplotlib.colors.LinearSegmentedColormap('my_colormap', x, 256)
color_gradients = {'red_black_sky' : make_cmap(red_black_sky),
'red_black_blue' : make_cmap(red_black_blue),
'red_black_green' : make_cmap(red_black_green),
'yellow_black_blue' : make_cmap(yellow_black_blue),
'red_white_blue' : pyplot.cm.bwr,
'seismic' : pyplot.cm.seismic,
'green_white_purple' : pyplot.cm.PiYG_r,
'coolwarm' : pyplot.cm.coolwarm,}
###############################################################################
class HiearchicalHeatmap(object):
"""A common use case for biologists analyzing their gene expression data is to cluster and visualize patterns of expression in the form of a heatmap and associated dendrogram."""
row_method = 'single' # Can be: linkage, single, complete, average, weighted, centroid, median, ward
column_method = 'single' # Can be: linkage, single, complete, average, weighted, centroid, median, ward
row_metric = 'braycurtis' # Can be: see scipy documentation
column_metric = 'braycurtis' # Can be: see scipy documentation
gradient_span = 'only_max' # Can be: min_to_max, min_to_max_centered, only_max, only_min
color_gradient = 'yellow_black_blue' # Can be: see color_gradients dictionary
fig_weight = 12
fig_height = 8.5
def plot(self):
# Names #
row_header = self.frame.index
column_header = self.frame.columns
# What color to use #
cmap = color_gradients[self.color_gradient]
# Scale the max and min colors #
value_min = self.frame.min().min()
value_max = self.frame.max().max()
if self.gradient_span == 'min_to_max_centered':
value_max = max([value_max, abs(value_min)])
value_min = value_max * -1
if self.gradient_span == 'only_max': value_min = 0
if self.gradient_span == 'only_min': value_max = 0
norm = matplotlib.colors.Normalize(value_min, value_max)
# Scale the figure window size #
fig = pyplot.figure(figsize=(self.fig_weight, self.fig_height))
# Calculate positions for all elements #
# ax1, placement of dendrogram 1, on the left of the heatmap
### The second value controls the position of the matrix relative to the bottom of the view
[ax1_x, ax1_y, ax1_w, ax1_h] = [0.05, 0.22, 0.2, 0.6]
width_between_ax1_axr = 0.004
### distance between the top color bar axis and the matrix
height_between_ax1_axc = 0.004
### Sufficient size to show
color_bar_w = 0.015
# axr, placement of row side colorbar #
### second to last controls the width of the side color bar - 0.015 when showing
[axr_x, axr_y, axr_w, axr_h] = [0.31, 0.1, color_bar_w, 0.6]
axr_x = ax1_x + ax1_w + width_between_ax1_axr
axr_y = ax1_y; axr_h = ax1_h
width_between_axr_axm = 0.004
# axc, placement of column side colorbar #
        ### last one controls the height of the top color bar - 0.015 when showing
[axc_x, axc_y, axc_w, axc_h] = [0.4, 0.63, 0.5, color_bar_w]
axc_x = axr_x + axr_w + width_between_axr_axm
axc_y = ax1_y + ax1_h + height_between_ax1_axc
height_between_axc_ax2 = 0.004
# axm, placement of heatmap for the data matrix #
[axm_x, axm_y, axm_w, axm_h] = [0.4, 0.9, 2.5, 0.5]
axm_x = axr_x + axr_w + width_between_axr_axm
axm_y = ax1_y; axm_h = ax1_h
axm_w = axc_w
# ax2, placement of dendrogram 2, on the top of the heatmap #
        ### last one controls the height of the dendrogram
[ax2_x, ax2_y, ax2_w, ax2_h] = [0.3, 0.72, 0.6, 0.15]
ax2_x = axr_x + axr_w + width_between_axr_axm
ax2_y = ax1_y + ax1_h + height_between_ax1_axc + axc_h + height_between_axc_ax2
ax2_w = axc_w
# axcb - placement of the color legend #
[axcb_x, axcb_y, axcb_w, axcb_h] = [0.07, 0.88, 0.18, 0.09]
# Compute and plot top dendrogram #
if self.column_method:
d2 = dist.pdist(self.frame.transpose())
D2 = dist.squareform(d2)
ax2 = fig.add_axes([ax2_x, ax2_y, ax2_w, ax2_h], frame_on=True)
Y2 = sch.linkage(D2, method=self.column_method, metric=self.column_metric)
Z2 = sch.dendrogram(Y2)
ind2 = sch.fcluster(Y2, 0.7*max(Y2[:,2]), 'distance')
ax2.set_xticks([])
ax2.set_yticks([])
### apply the clustering for the array-dendrograms to the actual matrix data
idx2 = Z2['leaves']
self.frame = self.frame.iloc[:,idx2]
            ### reorder the flat cluster to match the order of the leaves of the dendrogram
ind2 = ind2[idx2]
else: idx2 = range(self.frame.shape[1])
# Compute and plot left dendrogram #
if self.row_method:
d1 = dist.pdist(self.frame)
D1 = dist.squareform(d1)
ax1 = fig.add_axes([ax1_x, ax1_y, ax1_w, ax1_h], frame_on=True)
Y1 = sch.linkage(D1, method=self.row_method, metric=self.row_metric)
Z1 = sch.dendrogram(Y1, orientation='right')
ind1 = sch.fcluster(Y1, 0.7*max(Y1[:,2]), 'distance')
ax1.set_xticks([])
ax1.set_yticks([])
### apply the clustering for the array-dendrograms to the actual matrix data
idx1 = Z1['leaves']
self.frame = self.frame.iloc[idx1,:]
            ### reorder the flat cluster to match the order of the leaves of the dendrogram
ind1 = ind1[idx1]
else: idx1 = range(self.frame.shape[0])
# Plot distance matrix #
axm = fig.add_axes([axm_x, axm_y, axm_w, axm_h])
axm.matshow(self.frame, aspect='auto', origin='lower', cmap=cmap, norm=norm)
axm.set_xticks([])
axm.set_yticks([])
# Add text #
new_row_header = []
new_column_header = []
for i in range(self.frame.shape[0]):
axm.text(self.frame.shape[1]-0.5, i, ' ' + row_header[idx1[i]], verticalalignment="center")
new_row_header.append(row_header[idx1[i]] if self.row_method else row_header[i])
for i in range(self.frame.shape[1]):
axm.text(i, -0.9, ' '+column_header[idx2[i]], rotation=90, verticalalignment="top", horizontalalignment="center")
new_column_header.append(column_header[idx2[i]] if self.column_method else column_header[i])
# Plot column side colorbar #
if self.column_method:
axc = fig.add_axes([axc_x, axc_y, axc_w, axc_h])
cmap_c = matplotlib.colors.ListedColormap(['r', 'g', 'b', 'y', 'w', 'k', 'm'])
dc = numpy.array(ind2, dtype=int)
dc.shape = (1,len(ind2))
axc.matshow(dc, aspect='auto', origin='lower', cmap=cmap_c)
axc.set_xticks([])
axc.set_yticks([])
        # Plot row side colorbar #
if self.row_method:
axr = fig.add_axes([axr_x, axr_y, axr_w, axr_h])
dr = numpy.array(ind1, dtype=int)
dr.shape = (len(ind1),1)
cmap_r = matplotlib.colors.ListedColormap(['r', 'g', 'b', 'y', 'w', 'k', 'm'])
axr.matshow(dr, aspect='auto', origin='lower', cmap=cmap_r)
axr.set_xticks([])
axr.set_yticks([])
# Plot color legend #
### axes for colorbar
axcb = fig.add_axes([axcb_x, axcb_y, axcb_w, axcb_h], frame_on=False)
cb = matplotlib.colorbar.ColorbarBase(axcb, cmap=cmap, norm=norm, orientation='horizontal')
axcb.set_title("colorkey")
max_cb_ticks = 5
axcb.xaxis.set_major_locator(pyplot.MaxNLocator(max_cb_ticks))
# Render the graphic #
if len(row_header)>50 or len(column_header)>50: pyplot.rcParams['font.size'] = 5
else: pyplot.rcParams['font.size'] = 8
# Return figure #
return fig, axm, axcb, cb
class TestHeatmap(HiearchicalHeatmap):
short_name = 'test_heatmap'
def data(self, M, rows, columns):
"""Create some fake data in a dataframe"""
df = pandas.DataFrame(M, index=rows, columns=columns)
return df
def plot(self, path, M, rows, columns):
self.frame = self.data(M, rows, columns)
self.path = path
fig, axm, axcb, cb = HiearchicalHeatmap.plot(self)
cb.set_label("Random value")
pyplot.savefig(self.path)
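# Hedged usage sketch (added for illustration; the matrix, labels and output
# path below are arbitrary placeholders, not from the original repo).
if __name__ == '__main__':
    demo_M = numpy.random.rand(12, 6)
    demo_rows = ['gene_%i' % i for i in range(12)]
    demo_cols = ['sample_%i' % i for i in range(6)]
    TestHeatmap().plot('demo_heatmap.pdf', demo_M, demo_rows, demo_cols)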
```
#### File: scdecon/utils/misc.py
```python
import cPickle as pickle
import logging
import subprocess
import numpy as np
import networkx as nx
import operator
import sys
import itertools
from scipy.stats import pearsonr
def run_it_uconn(wdir, script_txt, n, m, k, missing=None):
# write it.
script_file = '%s/script.sh' % wdir
with open(script_file, "wb") as fout:
fout.write(script_txt)
# run it.
try:
retval = subprocess.check_output(["sh", script_file], stderr=subprocess.STDOUT)
except subprocess.CalledProcessError as e:
txt = "UCONN-script failed: %s\n" % script_file
txt += '%s\n' % e.output
txt += '=======================\n'
logging.error(txt)
return None
# load it.
try:
C = np.load('%s/C.npy' % wdir)
S = np.load('%s/S.npy' % wdir)
except:
txt = "UCONN-script failed: %s\n" % script_file
txt += "couldn't find matrix\n"
txt += '=======================\n'
logging.error(txt)
return None
# sanity check.
if missing == None:
if C.shape != (k, n) or S.shape != (m,k):
txt = "UCONN-script failed: %s\n" % script_file
txt += "bad dim\n"
txt += "expected: C=(%i,%i), S=(%i,%i)\n" % (k,n,m,k)
            txt += "received: C=(%i,%i), S=(%i,%i)\n" % (C.shape[0], C.shape[1], S.shape[0], S.shape[1])
txt += '=======================\n'
return None
else:
if C.shape != (k + 1, n) or S.shape != (m, k + 1):
txt = "UCONN-script failed: %s\n" % script_file
txt += "bad dim\n"
txt += "expected: C=(%i,%i), S=(%i,%i)\n" % (k,n,m,k)
            txt += "received: C=(%i,%i), S=(%i,%i)\n" % (C.shape[0], C.shape[1], S.shape[0], S.shape[1])
txt += '=======================\n'
return None
# return results.
return S, C
def write_r_mat(out_file, Z):
""" writes R formated matrix"""
# open file,
with open(out_file, "wb") as fout:
# write header.
fout.write('\t'.join(['sample_%i' % i for i in range(Z.shape[1])]) + '\n')
# write data.
for i in range(Z.shape[0]):
fout.write('gene_%i\t' % i + '\t'.join(['%f' % v for v in Z[i,:]]) + '\n')
def save_pickle(out_file, the_list):
""" saves list of numpy """
with open(out_file, "wb") as fout:
pickle.dump(the_list, fout)
def load_pickle(in_file):
""" saves the lists of numpy arrays using pickle"""
with open(in_file) as fin:
data = pickle.load(fin)
return data
def match_signatures(S, T):
    ''' use pearson correlation to match columns '''
# sanity check columns.
assert S.shape[1] == T.shape[1], 'columns must have same dimension'
# labels.
labels = range(S.shape[1])
# build bipartite graph.
G = nx.Graph()
for l in labels:
G.add_node("S_%i" % l)
G.add_node("T_%i" % l)
# build edges.
for p in labels:
for q in labels:
            # compute pearsonr.
weight = pearsonr(S[:,p], T[:,q])
# add edge.
a = "S_%i" % p
b = "T_%i" % q
G.add_edge(a, b, weight=weight[0])
# find matching.
matches = nx.max_weight_matching(G, maxcardinality=True)
# record the sort order.
order = list()
for l in labels:
order.append(int(matches["T_%i" % l].replace("S_","")))
# return the final re-ordering matrix.
return order
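def _match_signatures_example():
    """ Hedged sketch (added for illustration, not part of the original pipeline):
    matching a signature matrix against a column-permuted copy of itself should
    recover the permutation; shapes and values here are arbitrary. """
    S = np.random.rand(50, 3)
    T = S[:, [2, 0, 1]]
    order = match_signatures(S, T)
    # order[l] is the column of S matched to column l of T, so order == [2, 0, 1].
    return order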
def match_labels(truth, test):
''' max-weight matching to assign labels from clustering'''
# assert they are same length.
assert truth.shape == test.shape, 'cant match unequal length predictions'
# sanity check.
a = set(np.unique(truth))
b = set(np.unique(test))
if a != b:
        logging.error("can't match on different label cardinality")
sys.exit(1)
celltypes = list(a)
# build bipartite graph.
g = nx.Graph()
for l in celltypes:
g.add_node("true_%i" % l)
g.add_node("test_%i" % l)
# add edges based on count.
for i in range(len(test)):
test_n = "test_%i" % test[i]
true_n = "true_%i" % truth[i]
if g.has_edge(test_n, true_n) == True:
g[test_n][true_n]['weight'] += 1
else:
g.add_edge(test_n, true_n, weight=1)
# find matching.
matches = nx.max_weight_matching(g)
# re-assign test.
swap = dict()
for a,b in matches.items():
if a.count("true") > 0:
x = int(a.replace("true_",""))
y = int(b.replace("test_",""))
swap[y] = x
for l in range(len(test)):
test[l] = swap[test[l]]
# get indirect sort.
indir = [x[0] for x in sorted(swap.items(), key=operator.itemgetter(1))]
# return updated labels.
return test, indir
def _rand_mix(cell_count):
''' random mix of percentages '''
    # choose one random fraction per cell type and normalise to sum to 1.
    choices = np.arange(.1,1,.1)
    picks = np.random.choice(choices, cell_count, replace=True)
picks = picks / np.sum(picks)
np.random.shuffle(picks)
# sanity
assert np.around(np.sum(picks)) == 1, """bad probability setup"""
# return it.
return picks
def load_example(args):
''' loads example using rbridge '''
assert False, 'not ported to new app'
sys.exit(1)
# load libraries.
_load_R_libraries()
# execute script.
gse_name = 'GSE20300'
query = """# load the ACR data (both exp and cbc)
data <- gedData("{gse_name}");
acr <- ExpressionMix('{gse_name}', verbose = 1);
res <- gedBlood(acr, verbose = TRUE);
# return data: X, C, labels
list(exprs(acr),coef(res),sampleNames(phenoData(acr)))
""".format(gse_name=gse_name)
pair = robjects.r(query)
# save data points.
X, C, labels = pair
data = CellMixData(X, None, C)
data.save(args.proj_key)
def _sample_S(targets, expr, p):
""" perform sampling to generate S"""
# create ordered list of categories.
cats = sorted(list(set(list(targets))))
# calculate dimensions.
m = expr.shape[1]
k = len(cats)
# create the array.
S = np.zeros((m,k), dtype=np.float)
# loop over each category.
for j in range(len(cats)):
# select just matches of this gene, category.
sub = expr[:,np.where(targets==cats[j])[0]]
# loop over each gene.
for i in range(m):
# count zeros.
genes = sub[i, :]
# sample randomly with replacement.
samples = np.random.choice(genes, size=p, replace=True)
# average.
S[i,j] = np.average(samples)
# return S
return S
def _measure_cluster(SC, Strue, sc_lbls, methods, k):
''' measure accuracy of clustering methods '''
# build results dict.
rmse_dict = dict()
match_dict = dict()
for name, meth in methods:
rmse_dict[name] = list()
match_dict[name] = list()
# loop for 100 times:
for t in range(100):
# loop over each method.
for name, meth in methods:
# cluster.
            Spred, labels, cats = meth(SC, k)
Spred = norm_cols(Spred)
# match labels.
try:
labels, reorder = match_labels(sc_lbls, labels)
except KeyError:
continue
# assert shape.
assert Strue.shape == Spred.shape, 'S not same dim'
# re-order columns in Spred.
Spred = Spred[:,reorder]
# calculate accuracy.
rmse = rmse_cols(Strue, Spred)
# score matching.
match_score = float(np.sum(sc_lbls == labels)) / sc_lbls.shape[0]
# sanity check error calculation.
'''
if match_score == 1.0 and rmse != 0.0:
logging.error("why score bad")
sys.exit(1)
'''
# save it.
rmse_dict[name].append(rmse)
match_dict[name].append(match_score)
# return em.
return rmse_dict, match_dict
def compare_cluster_avg(args):
""" compare clustering across simulation parameters"""
# load the labels.
SC = np.load(args.SC)
sc_lbls = np.load(args.sc_lbls)
# compute S.
Strue = _avg_S(sc_lbls, SC, 0)
Strue = norm_cols(Strue)
# define available methods.
methods = list()
methods.append(("random",randclust))
methods.append(("kmeans",kmeans))
methods.append(("spectral",spectral))
methods.append(("ward",ward))
# loop over each subset of single cells.
for b in np.arange(1.0,0.0,-0.1):
# pick subset.
samples = np.random.choice(range(SC.shape[1]), size=int(SC.shape[1] * b))
subSC = SC[:,samples]
sublbl = sc_lbls[samples]
# compute accuracy.
        rmse_dict, match_dict = _measure_cluster(subSC, Strue, sublbl, methods, Strue.shape[1])
# print results.
for name in rmse_dict:
print "avg", b, name, '%.8f' % np.average(np.array(rmse_dict[name])), '%.8f' % np.average(np.array(match_dict[name]))
sys.exit()
def compare_cluster_sample(args):
""" compare clustering across simulation parameters"""
# load the labels.
SC = np.load(args.SC)
sc_lbls = np.load(args.sc_lbls)
# loop over sample parameters.
for p in range(1,50,5):
# compute S.
Strue = _sample_S(sc_lbls, SC, p)
Strue = norm_cols(Strue)
# define available methods.
methods = list()
methods.append(("random",randclust))
methods.append(("kmeans",kmeans))
methods.append(("spectral",spectral))
methods.append(("ward",ward))
# compute accuracy.
        rmse_dict, match_dict = _measure_cluster(SC, Strue, sc_lbls, methods, Strue.shape[1])
# print results.
for name in rmse_dict:
print "sample_%i" % p, name, '%.8f' % np.average(np.array(rmse_dict[name])), '%.8f' % np.average(np.array(match_dict[name]))
```
#### File: scdecon/utils/plotting.py
```python
import warnings
import os
import sys
import operator
warnings.filterwarnings("ignore")
import numpy as np
#import brewer2mpl
import pandas as pd
import matplotlib
matplotlib.use('Agg')
import matplotlib.pyplot as plt
import matplotlib.pylab as plb
import matplotlib.gridspec as gridspec
import matplotlib.cm as cm
import matplotlib.colors as colors
import scipy.spatial.distance as distance
import scipy.cluster.hierarchy as sch
# violin plot
from scipy.stats import gaussian_kde
from numpy.random import normal
from numpy import arange
# scikit
from sklearn.decomposition import PCA
from sklearn import feature_selection
# application.
from utils.matops import *
from utils.misc import *
import simulation
## high-level functions ##
def pca_X_Z(X, Z, y, figure_path):
""" plots the experiment """
# create color map.
unique_vals = sorted(list(np.unique(y)))
num_colors = len(unique_vals)
cmap = plt.get_cmap('gist_rainbow')
cnorm = colors.Normalize(vmin=0, vmax=num_colors-1)
scalarMap = cm.ScalarMappable(norm=cnorm, cmap=cmap)
# do PCA
pcaX = PCA(n_components=2)
pcaZ = PCA(n_components=2)
Xt = np.transpose(X)
Zt = np.transpose(Z)
Xp = pcaX.fit(Xt).transform(Xt)
Zp = pcaZ.fit(Zt).transform(Zt)
# plot pure.
for i in unique_vals:
# get color.
color = cmap(1.*i/num_colors)
label = str(i)
# plot it.
plt.scatter(Zp[y == i, 0], Zp[y == i, 1], c=color, label=label)
# plot mixed.
    plt.scatter(Xp[:, 0], Xp[:, 1], c="black", label="mix")
# add legend.
plt.legend()
plt.savefig(figure_path)
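def _pca_X_Z_example():
    """ Hedged sketch (added for illustration; shapes and the output path are
    arbitrary placeholders, not from the original repo). """
    Z = np.random.rand(100, 30)        # 100 genes x 30 single cells
    y = np.repeat(np.arange(3), 10)    # 3 cell types, 10 cells each
    X = np.random.rand(100, 8)         # 100 genes x 8 mixture samples
    pca_X_Z(X, Z, y, 'pca_example.pdf')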
def pca_sc(args):
""" plots the experiment """
# simplify.
Z = np.load(args.SC)
y = np.load(args.sc_lbls)
labels = np.load(args.c_lbls)
figure_path = args.fig_file
# create color map.
unique_vals = sorted(list(np.unique(y)))
num_colors = len(unique_vals)
cmap = plt.get_cmap('gist_rainbow')
cnorm = colors.Normalize(vmin=0, vmax=num_colors-1)
scalarMap = cm.ScalarMappable(norm=cnorm, cmap=cmap)
# do PCA
pcaZ = PCA(n_components=2)
Zt = np.transpose(Z)
Zp = pcaZ.fit(Zt).transform(Zt)
# plot pure.
for i in unique_vals:
# get color.
color = cmap(1.*i/num_colors)
label = labels[i]
# plot it.
plt.scatter(Zp[y == i, 0], Zp[y == i, 1], c=color, label=label)
# add labels.
plt.xlabel("component 1")
plt.ylabel("component 2")
# add legend.
plt.legend()
plt.savefig(figure_path)
def plot_scatter(args):
""" plots experiment as scatter plot """
# load data.
test = load_pickle(args.test_file)
ref = load_pickle(args.ref_file)
# simplify.
n = ref['Xs'][0].shape[1]
m = ref['Xs'][0].shape[0]
q = len(ref['Xs'])
method = args.method
# create list by cell-type.
bycell = dict()
# loop over each experiment.
for X_test, Z_test, y_test, wdir, C_path, S_path, idx in _sim_gen(test['Xs'], test['Zs'], test['ys'], method, args.work_dir):
# get elements.
X_ref = ref['Xs'][idx]
Z_ref = ref['Zs'][idx]
C_ref = ref['Cs'][idx]
y_ref = ref['ys'][idx]
# load the test matrix.
if os.path.isfile(C_path):
C_test = np.load(C_path)
else:
            # silently skip missing.
continue
# round to 5 decimals.
C_ref = np.round(C_ref, decimals=5)
C_test = np.round(C_test, decimals=5)
# add by cell type.
for j in range(C_ref.shape[1]):
for l in range(C_ref.shape[0]):
if l not in bycell:
bycell[l] = [list(), list()]
bycell[l][0].append(C_ref[l,j])
bycell[l][1].append(C_test[l,j])
# create color map.
unique_vals = sorted(bycell.keys())
num_colors = len(unique_vals)
cmap = plt.get_cmap('gist_rainbow')
cnorm = colors.Normalize(vmin=0, vmax=num_colors-1)
scalarMap = cm.ScalarMappable(norm=cnorm, cmap=cmap)
# print them
for l in bycell:
# get data.
x = np.array(bycell[l][0])
y = np.array(bycell[l][1])
# plot the regression.
fit = plb.polyfit(x, y, 1)
fit_fn = plb.poly1d(fit)
# compute r^2
yhat = fit_fn(x)
ybar = np.sum(y)/len(y)
ssreg = np.sum((yhat-ybar)**2)
sstot = np.sum((y-ybar)**2)
r2 = ssreg / sstot
# compute the color.
color = cmap(1.*l/num_colors)
# plot the points.
plt.plot(x, y, '.', color=color, label='%i, r^2=%.2f' % (l,r2))
# plot the regression.
plt.plot(x, fit_fn(x), '--', color=color)
# plot middle line.
plt.plot(np.arange(0,1.1,.1), np.arange(0,1.1,.1), '-', color='black')
# add legend.
plt.legend()
plt.savefig(args.fig_file)
def gene_histo(xlist, figure_path, title):
""" plots histogram for gene """
# extract data.
data = [x[0] for x in xlist]
lbls = [x[1] for x in xlist]
# create figure.
pos = range(len(xlist))
fig = plt.figure()
ax = fig.add_subplot(111)
_violin_plot(ax, data, pos, bp=1)
# finalize.
plt.title(title)
ax.set_xticks(pos)
ax.set_xticklabels(lbls)
ax.set_ylim([0,300])
plt.savefig(figure_path)
def heirheatmap(M, row_labels, path):
""" heirarchy heatmap """
# transpose data so samples - rows, cols - genes
M = np.transpose(M)
# convert numpy to DF.
testDF = pd.DataFrame(M)
# plot it.
axi = plt.imshow(testDF,interpolation='nearest',cmap=cm.RdBu)
ax = axi.get_axes()
_clean_axis(ax)
# calculate pairwise distances for rows
pairwise_dists = distance.squareform(distance.pdist(testDF))
# cluster
clusters = sch.linkage(pairwise_dists,method='complete')
# make dendrogram.
den = sch.dendrogram(clusters,color_threshold=np.inf)
# rename row clusters
row_clusters = clusters
col_pairwise_dists = distance.squareform(distance.pdist(testDF.T))
col_clusters = sch.linkage(col_pairwise_dists,method='complete')
## plot the heatmap and dendrogram ##
# plot the results
fig = plt.figure()
heatmapGS = gridspec.GridSpec(2,2,wspace=0.0,hspace=0.0,width_ratios=[0.25,1],height_ratios=[0.25,1])
### col dendrogram ####
col_denAX = fig.add_subplot(heatmapGS[0,1])
col_denD = sch.dendrogram(col_clusters,color_threshold=np.inf)
_clean_axis(col_denAX)
### row dendrogram ###
rowGSSS = gridspec.GridSpecFromSubplotSpec(1,2,subplot_spec=heatmapGS[1,0],wspace=0.0,hspace=0.0,width_ratios=[1,0.25])
row_denAX = fig.add_subplot(rowGSSS[0,0])
row_denD = sch.dendrogram(row_clusters,color_threshold=np.inf,orientation='right')
_clean_axis(row_denAX)
### row colorbar ###
row_cbAX = fig.add_subplot(rowGSSS[0,1])
tmp = [ [x] for x in row_labels[row_denD['leaves']] ]
row_axi = row_cbAX.imshow(tmp,interpolation='nearest',aspect='auto',origin='lower')
_clean_axis(row_cbAX)
print tmp
### heatmap ###
heatmapAX = fig.add_subplot(heatmapGS[1,1])
axi = heatmapAX.imshow(testDF.ix[den['leaves'],col_denD['leaves']],interpolation='nearest',aspect='auto',origin='lower',cmap=cm.RdBu)
_clean_axis(heatmapAX)
### scale colorbar ###
scale_cbGSSS = gridspec.GridSpecFromSubplotSpec(1,2,subplot_spec=heatmapGS[0,0],wspace=0.0,hspace=0.0)
scale_cbAX = fig.add_subplot(scale_cbGSSS[0,0]) # colorbar for scale in upper left corner
cb = fig.colorbar(axi,scale_cbAX) # note that we tell colorbar to use the scale_cbAX axis
cb.set_label('Measurements')
cb.ax.yaxis.set_ticks_position('left') # move ticks to left side of colorbar to avoid problems with tight_layout
cb.ax.yaxis.set_label_position('left') # move label to left side of colorbar to avoid problems with tight_layout
cb.outline.set_linewidth(0)
fig.tight_layout()
# save figure.
plt.savefig(path)
def plot_sim(args):
""" plot the simulation """
# load testing data.
data = load_pickle(args.test_file)
Xs = data['Xs']
Zs = data['Zs']
ys = data['ys']
k = args.k
# create master array for each.
Xfull = np.zeros((Xs[0].shape[0], Xs[0].shape[1]*len(Xs)), dtype=np.float)
Zfull = np.zeros((Zs[0].shape[0], Zs[0].shape[1]*len(Zs)), dtype=np.float)
yfull = np.zeros(ys[0].shape[0]*len(ys), dtype=np.int)
# loop over each experiment.
xj = 0
zj = 0
yi = 0
for X, Z, y, wdir, C_path, S_path, idx in _sim_gen(Xs, Zs, ys, "bla"):
# copy into.
for j in range(X.shape[1]):
Xfull[:,xj] = X[:,j]
xj += 1
for j in range(Z.shape[1]):
Zfull[:,zj] = Z[:,j]
zj += 1
for i in range(y.shape[0]):
yfull[yi] = y[i]
yi += 1
# call the plot command.
pca_X_Z(Xfull, Zfull, yfull, args.fig_file)
def plot_singlecell(args):
""" plot the simulation """
# simplify parameters.
base_dir = os.path.abspath(args.base_dir)
tlist = [int(x) for x in args.tlist.split(",")]
mlist = [x for x in args.mlist.split(",")]
c = args.c
e = args.e
g = args.g
# print mlist.
print ',' + ','.join(mlist)
# loop over each singlecell.
for t in tlist:
# set the reference files.
dat_dir = "%s/data/%i_%i_%i_%i_%i_%i" % (base_dir, t*5, args.q, t, c, e, g)
ref_file= "%s/ref_%i_%i_%i_%i_%i_%i.cpickle" % (dat_dir, t*5, args.q, t, c, e, g)
test_file= "%s/test_%i_%i_%i_%i_%i_%i.cpickle" % (dat_dir, t*5, args.q, t, c, e, g)
# load them.
test = load_pickle(test_file)
ref = load_pickle(ref_file)
# set the work dir.
work_dir = "%s/work/%i_%i_%i_%i_%i_%i" % (base_dir, t*5, args.q, t, c, e, g)
# loop over each test case.
lookup = dict()
for m in mlist:
# bootstrap.
if m not in lookup:
lookup[m] = list()
# loop over instances.
for X_test, Z_test, y_test, wdir, C_path, S_path, idx in _sim_gen(test['Xs'], test['Zs'], test['ys'], m, work_dir):
# simplify.
X_ref = ref['Xs'][idx]
Z_ref = ref['Zs'][idx]
C_ref = ref['Cs'][idx]
y_ref = ref['ys'][idx]
# load the test matrix.
if os.path.isfile(C_path):
C_test = np.load(C_path)
else:
                    # silently skip missing.
continue
# round to 5 decimals.
C_ref = np.round(C_ref, decimals=5)
C_test = np.round(C_test, decimals=5)
# set the scoring function.
metric = rmse_vector
# compute column wise average.
vals = list()
for j in range(C_ref.shape[1]):
v = metric(C_ref[:,j], C_test[:,j])
vals.append(v)
total = np.average(np.array(vals))
# put into lookup.
lookup[m].append(total)
# print this row.
while lookup != {}:
byrow = list()
for m in mlist:
# add if present.
if m in lookup:
# add to this row.
if len(lookup[m]) > 0:
byrow.append('%.3f' % lookup[m].pop())
# clear if empty.
if len(lookup[m]) == 0:
del lookup[m]
# add empty.
else:
byrow.append("")
# print the row as a function of # single-cells.
print '%i,' % t + ','.join(byrow)
def plot_varygene(args):
""" plot the simulation """
# simplify parameters.
base_dir = os.path.abspath(args.base_dir)
glist = [int(x) for x in args.glist.split(",")]
mlist = [x for x in args.mlist.split(",")]
c = args.c
e = args.e
t = args.t
# print mlist.
print ',' + ','.join(mlist)
# loop over each singlecell.
for g in glist:
# set the reference files.
dat_dir = "%s/data/%i_%i_%i_%i_%i_%i" % (base_dir, t*5, args.q, t, c, e, g)
ref_file= "%s/ref_%i_%i_%i_%i_%i_%i.cpickle" % (dat_dir, t*5, args.q, t, c, e, g)
test_file= "%s/test_%i_%i_%i_%i_%i_%i.cpickle" % (dat_dir, t*5, args.q, t, c, e, g)
# load them.
test = load_pickle(test_file)
ref = load_pickle(ref_file)
# set the work dir.
work_dir = "%s/work/%i_%i_%i_%i_%i_%i" % (base_dir, t*5, args.q, t, c, e, g)
# loop over each test case.
lookup = dict()
for m in mlist:
# bootstrap.
if m not in lookup:
lookup[m] = list()
# loop over instances.
for X_test, Z_test, y_test, wdir, C_path, S_path, idx in _sim_gen(test['Xs'], test['Zs'], test['ys'], m, work_dir):
# simplify.
X_ref = ref['Xs'][idx]
Z_ref = ref['Zs'][idx]
C_ref = ref['Cs'][idx]
y_ref = ref['ys'][idx]
# load the test matrix.
if os.path.isfile(C_path):
C_test = np.load(C_path)
else:
                    # silently skip missing.
continue
# round to 5 decimals.
C_ref = np.round(C_ref, decimals=5)
C_test = np.round(C_test, decimals=5)
# set the scoring function.
metric = rmse_vector
# compute column wise average.
vals = list()
for j in range(C_ref.shape[1]):
v = metric(C_ref[:,j], C_test[:,j])
vals.append(v)
total = np.average(np.array(vals))
# put into lookup.
lookup[m].append(total)
# print this row.
while lookup != {}:
byrow = list()
for m in mlist:
# add if present.
if m in lookup:
# add to this row.
if len(lookup[m]) > 0:
byrow.append('%.4f' % lookup[m].pop())
# clear if empty.
if len(lookup[m]) == 0:
del lookup[m]
# add empty.
else:
byrow.append("")
# print the row as a function of # single-cells.
print '%i,' % g + ','.join(byrow)
def plot_truepred(args):
""" plot the simulation """
# simplify parameters.
base_dir = os.path.abspath(args.base_dir)
tlist = [int(x) for x in args.tlist.split(",")]
mlist = [x for x in args.mlist.split(",")]
c = args.c
e = args.e
g = args.g
# print mlist.
print ',' + ','.join(mlist)
# loop over each singlecell.
for t in tlist:
# set the reference files.
dat_dir = "%s/data/%i_%i_%i_%i_%i_%i" % (base_dir, t*5, args.q, t, c, e, g)
ref_file= "%s/ref_%i_%i_%i_%i_%i_%i.cpickle" % (dat_dir, t*5, args.q, t, c, e, g)
test_file= "%s/test_%i_%i_%i_%i_%i_%i.cpickle" % (dat_dir, t*5, args.q, t, c, e, g)
# load them.
test = load_pickle(test_file)
ref = load_pickle(ref_file)
# set the work dir.
work_dir = "%s/work/%i_%i_%i_%i_%i_%i" % (base_dir, t*5, args.q, t, c, e, g)
# loop over each test case.
lookup = dict()
for m in mlist:
# bootstrap.
if m not in lookup:
lookup[m] = list()
# loop over instances.
for X_test, Z_test, y_test, wdir, C_path, S_path, idx in _sim_gen(test['Xs'], test['Zs'], test['ys'], m, work_dir):
# simplify.
X_ref = ref['Xs'][idx]
Z_ref = ref['Zs'][idx]
C_ref = ref['Cs'][idx]
y_ref = ref['ys'][idx]
# load the test matrix.
if os.path.isfile(C_path):
C_test = np.load(C_path)
else:
                    # silently skip missing.
continue
# round to 5 decimals.
C_ref = np.round(C_ref, decimals=5)
C_test = np.round(C_test, decimals=5)
# set the scoring function.
metric = rmse_vector
# compute column wise average.
vals = list()
for j in range(C_ref.shape[1]):
v = metric(C_ref[:,j], C_test[:,j])
vals.append(v)
total = np.average(np.array(vals))
# put into lookup.
lookup[m].append(total)
def plot_genes(args):
""" plots expression values """
# load the data.
SC = np.load(args.SC)
sc_lbls = np.load(args.sc_lbls)
b_lbls = np.load(args.b_lbls)
c_lbls = np.load(args.c_lbls)
    # get the informative features.
clf = feature_selection.SelectKBest(score_func=feature_selection.f_classif, k=20)
clf.fit(np.transpose(SC), sc_lbls)
features = np.where(clf.get_support() == True)[0]
# simulate single-cells.
sim = simulation.SimSingleCell(SC, sc_lbls)
TMP, we = sim.sample_1(1000)
# loop over genes:
for i in features:
# set gene name.
gene_name = b_lbls[i]
# loop over each class.
xlist = list()
for c in range(len(c_lbls)):
# extract subset of SC.
SC_s = SC[:,np.where(sc_lbls == c)[0]]
TMP_s = TMP[:,np.where(we == c)[0]]
# extract subset of gene.
SC_s = SC_s[np.where(b_lbls == gene_name)[0],:]
TMP_s = TMP_s[np.where(b_lbls == gene_name)[0],:]
# make sure is 1d (some duplicate genes measured)
SC_s = np.ravel(SC_s)
TMP_s = np.ravel(TMP_s)
# store list.
xlist.append((SC_s, "%s:%s" % (str(c_lbls[c]), "t")))
xlist.append((TMP_s, "%s:%s" % (str(c_lbls[c]), "s")))
# plot it.
fname = '%s/%s.pdf' % (args.fig_dir, gene_name)
gene_histo(xlist, fname, gene_name)
def heatmap(args):
""" heatmap and clustering """
# load the data.
SC = np.load(args.SC)
sc_lbls = np.load(args.sc_lbls)
b_lbls = np.load(args.b_lbls)
c_lbls = np.load(args.c_lbls)
    # get the informative features.
clf = feature_selection.SelectKBest(score_func=feature_selection.f_classif, k=20)
clf.fit(np.transpose(SC), sc_lbls)
features = np.where(clf.get_support() == True)[0]
# extract subset.
SC = SC[features,:]
# create master S.
#S = _avg_S(sc_lbls, SC)
# make the heatmap.
print c_lbls
sys.exit()
heirheatmap(SC, sc_lbls, args.fig_path)
#graph = TestHeatmap()
#graph.plot(args.fig_path, SC, b_lbls, [str(x) for x in sc_lbls])
def plot_gene(args):
""" plots expression values """
# load the data.
SC = np.load(args.SC)
sc_lbls = np.load(args.sc_lbls)
b_lbls = np.load(args.b_lbls)
c_lbls = np.load(args.c_lbls)
# simulate single-cells.
sim = simulation.SimSingleCell(SC, sc_lbls, load=False)
TMP, we = sim.sample_1(1000)
# set gene name.
gene_name = args.gene_name
# loop over each class.
xlist = list()
for c in range(len(c_lbls)):
# extract subset of SC.
SC_s = SC[:,np.where(sc_lbls == c)[0]]
TMP_s = TMP[:,np.where(we == c)[0]]
# extract subset of gene.
SC_s = SC_s[np.where(b_lbls == gene_name)[0],:]
TMP_s = TMP_s[np.where(b_lbls == gene_name)[0],:]
# make sure is 1d (some duplicate genes measured)
SC_s = np.ravel(SC_s)
TMP_s = np.ravel(TMP_s)
# store list.
xlist.append((SC_s, "%s:%s" % (str(c_lbls[c]), "t")))
xlist.append((TMP_s, "%s:%s" % (str(c_lbls[c]), "s")))
# plot it.
fname = '%s/%s.pdf' % (args.fig_dir, gene_name)
gene_histo(xlist, fname, gene_name)
def plot_C(args):
""" evaluates the experiment for a given method """
# setup directory.
sim_dir = os.path.abspath(args.sim_dir)
mas_obj = '%s/mas.cpickle' % sim_dir
res_obj = '%s/res.cpickle' % sim_dir
c_lbls = np.load(args.c_lbls)
# extract method info.
method_name = args.method_name
# load the simulation data stuff.
master = load_pickle(mas_obj)
results = load_pickle(res_obj)
# sort the keys.
keys = sorted(results.keys(), key=operator.itemgetter(0,1,2,3,4,5))
# build the list.
true = dict()
pred = dict()
for l in range(args.k):
true[l] = list()
pred[l] = list()
# loop over each dependent.
r = -1
for dkey in keys:
# skip short keys.
#if len(dkey) != 6: continue
if len(dkey) != 7: continue
# expand the key.
n, k, e, c, r, q, m = dkey
#n, k, e, c, q, m = dkey
mkey = (n, k, e, c, r, q)
#mkey = (n, k, e, c, q)
        skey = n, k, e, c, r, m # remove reference to repeat variable
        #skey = n, k, e, c, m # remove reference to repeat variable and cell types
# skip till selected.
if n != args.n: continue
if k != args.k: continue
if e != args.e: continue
if c != args.c: continue
if m != args.m: continue
# load the true concentrations.
S_true = np.load('%s.npy' % master[mkey]['H'])
S_true = S_true[0:m,:]
C_true = np.load('%s.npy' % master[mkey]['C'])
# load the predicted.
S_pred = np.load(results[dkey][method_name]['S'])
C_pred = np.load(results[dkey][method_name]['C'])
# remap if its not DECONF
if method_name != "DECONF":
# remap to known order.
if r != -1:
C_pred, S_pred = simulation._remap_missing(C_pred, S_pred, r, k)
else:
# perform matching.
C_pred, S_pred = simulation._match_pred(C_pred, S_pred, C_true, S_true)
# add to data.
for j in range(n):
#for l in range(k):
# if l == r: continue
for l in [r]:
true[l].append(C_true[l,j])
pred[l].append(C_pred[l,j])
# cast to array.
for l in range(args.k):
true[l] = np.array(true[l])
pred[l] = np.array(pred[l])
# create color map.
num_colors = args.k
cmap = plt.get_cmap('gist_rainbow')
cnorm = colors.Normalize(vmin=0, vmax=num_colors-1)
scalarMap = cm.ScalarMappable(norm=cnorm, cmap=cmap)
# print them
for l in range(args.k):
# get data.
x = true[l]
y = pred[l]
# plot the regression.
fit = plb.polyfit(x, y, 1)
fit_fn = plb.poly1d(fit)
# compute r^2
yhat = fit_fn(x)
ybar = np.sum(y)/len(y)
ssreg = np.sum((yhat-ybar)**2)
sstot = np.sum((y-ybar)**2)
r2 = ssreg / sstot
# compute the color.
color = cmap(1.*l/num_colors)
# plot the points.
plt.plot(x, y, '.', color=color, label='%s, r^2=%.2f' % (c_lbls[l],r2))
# plot the regression.
plt.plot(x, fit_fn(x), '--', color=color)
# plot middle line.
plt.plot(np.arange(0,1.1,.1), np.arange(0,1.1,.1), '-', color='black')
# add legend.
plt.legend(numpoints=1)
plt.ylim([0, 1.0])
plt.xlim([0, 1.0])
# add labels.
plt.xlabel("observed")
plt.ylabel("predicted")
# add legend.
#plt.legend()
plt.savefig(args.fig_file)
def plot_S(args):
""" evaluates the experiment for a given method """
# setup directory.
sim_dir = os.path.abspath(args.sim_dir)
mas_obj = '%s/mas.cpickle' % sim_dir
res_obj = '%s/res.cpickle' % sim_dir
c_lbls = np.load(args.c_lbls)
b_lbls = np.load(args.b_lbls)
# extract method info.
method_name = args.method_name
# load the simulation data stuff.
master = load_pickle(mas_obj)
results = load_pickle(res_obj)
# sort the keys.
keys = sorted(results.keys(), key=operator.itemgetter(0,1,2,3,4,5))
# build the list.
true = dict()
pred = dict()
for l in range(args.k):
true[l] = list()
pred[l] = list()
    # create the gene tracker.
genet = list()
for i in range(args.m):
genet.append(list())
# loop over each dependent.
r = -1
for dkey in keys:
# skip short keys.
#if len(dkey) != 6: continue
if len(dkey) != 7: continue
# expand the key.
n, k, e, c, r, q, m = dkey
#n, k, e, c, q, m = dkey
mkey = (n, k, e, c, r, q)
#mkey = (n, k, e, c, q)
        skey = n, k, e, c, r, m # remove reference to repeat variable
        #skey = n, k, e, c, m # remove reference to repeat variable and cell types
# skip till selected.
if n != args.n: continue
if k != args.k: continue
if e != args.e: continue
if c != args.c: continue
if m != args.m: continue
# load the true concentrations.
S_true = np.load('%s.npy' % master[mkey]['H'])
S_true = S_true[0:m,:]
C_true = np.load('%s.npy' % master[mkey]['C'])
# load the predicted.
S_pred = np.load(results[dkey][method_name]['S'])
C_pred = np.load(results[dkey][method_name]['C'])
# remap if its not DECONF
if method_name != "DECONF":
# remap to known order.
if r != -1:
C_pred, S_pred = simulation._remap_missing(C_pred, S_pred, r, k)
else:
# perform matching.
C_pred, S_pred = simulation._match_pred(C_pred, S_pred, C_true, S_true)
# compute absolute difference.
s = S_true[:, r] - S_pred[:, r]
for i in range(m):
genet[i].append(s[i])
# create stuff.
tmp = list()
for i in range(args.m):
tmp.append(genet[i])
# create figure.
pos = range(args.m)
fig = plt.figure()
ax = fig.add_subplot(111)
_violin_plot(ax, tmp, pos)
# finalize.
#plt.title("stuff")
ax.set_xticks(pos)
ax.set_xticklabels(b_lbls, rotation=90)
ax.set_ylabel('absolute difference')
#ax.set_ylim([0,300])
plt.savefig(args.fig_file)
## low-level functions ##
# helper for cleaning up axes by removing ticks, tick labels, frame, etc.
def _clean_axis(ax):
"""Remove ticks, tick labels, and frame from axis"""
ax.get_xaxis().set_ticks([])
ax.get_yaxis().set_ticks([])
for sp in ax.spines.values():
sp.set_visible(False)
def _violin_plot(ax, data, pos, bp=False):
'''
create violin plots on an axis
'''
dist = max(pos)-min(pos)
w = min(0.15*max(dist,1.0),0.5)
for d,p in zip(data,pos):
try:
k = gaussian_kde(d) #calculates the kernel density
m = k.dataset.min() #lower bound of violin
M = k.dataset.max() #upper bound of violin
x = arange(m,M,(M-m)/100.) # support for violin
v = k.evaluate(x) #violin profile (density curve)
v = v/v.max()*w #scaling the violin to the available space
ax.fill_betweenx(x,p,v+p,facecolor='y',alpha=0.3)
ax.fill_betweenx(x,p,-v+p,facecolor='y',alpha=0.3)
except:
continue
if bp:
ax.boxplot(data,notch=1,positions=pos,vert=1)
```
#### File: scdecon/utils/rfuncs.py
```python
import numpy as np
import rpy2.robjects as R
import rpy2.robjects.numpy2ri
rpy2.robjects.numpy2ri.activate()
def load_R_libraries():
LIBRARIES = ("CellMix", "GEOquery")
#LIBRARIES = ("CellMix",)
load_str = ";".join(map(lambda x: "suppressMessages(library({0}))".format(x), LIBRARIES))
R.r(load_str)
return
def r2npy(m):
"""Convert an R matrix to a 2D numpy array.
Parameters
----------
m: rpy2.robject
an R matrix.
Returns
-------
triple: (numpy.array, list, list)
A triple consisting of a 2D numpy array, a list of row names, and a
list of column names.
"""
if m is None:
raise ValueError("m must be valid R matrix!")
matrix = np.array(m)
rownames = list(m.rownames) if m.rownames else []
colnames = list(m.colnames) if m.colnames else []
return matrix, rownames, colnames
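def _r2npy_example():
    """ Hedged sketch (added for illustration, not in the original module): build a
    tiny named R matrix inline and convert it; requires a working R/rpy2 setup and
    all names here are placeholders. """
    m = R.r('matrix(1:6, nrow=2, dimnames=list(c("g1","g2"), c("s1","s2","s3")))')
    matrix, rownames, colnames = r2npy(m)
    # matrix is a 2x3 numpy array, rownames == ['g1', 'g2'], colnames == ['s1', 's2', 's3'].
    return matrix, rownames, colnames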
def R_DECONF(X_path, Z_path, y_path, k, S_path, C_path, wdir):
""" run DECONF using rpy2 """
# extend paths.
Stmp = '%s/S.txt' % wdir
Ctmp = '%s/C.txt' % wdir
# run deconvolution.
txt = '''# load libraries.
#suppressMessages(library(CellMix));
#suppressMessages(library(GEOquery));
# load data.
exprsFile <- file.path("{X_path}");
exprs <- as.matrix(read.table(exprsFile, header=TRUE, sep="\\t", row.names=1, as.is=TRUE));
eset <- ExpressionSet(assayData=exprs);
# run deconvolution.
res <- ged(eset, {num}, method='deconf');
# write matrix.
write.table(coef(res), file="{Ctmp}", row.names=FALSE, col.names=FALSE)
write.table(basis(res), file="{Stmp}", row.names=FALSE, col.names=FALSE)
'''.format(X_path=X_path, Stmp=Stmp, Ctmp=Ctmp, num=k)
# execute it.
try:
R.r(txt)
except rpy2.rinterface.RRuntimeError as e:
return False, False, False
# it worked
return True, Stmp, Ctmp
def R_SSKL(X_path, Z_path, y_path, k, S_path, C_path, wdir):
    """ run ssKL deconvolution (CellMix) using rpy2 """
# extend paths.
Stmp = '%s/S.txt' % wdir
Ctmp = '%s/C.txt' % wdir
# simplify labels.
y = np.load(y_path)
lbls = ','.join(['%i' % x for x in y])
# run deconvolution.
txt = '''
# load pure single-cells
sigFile <- file.path("{Z_path}");
Z <- as.matrix(read.table(sigFile, header=TRUE, sep="\\t", row.names=1, as.is=TRUE));
Z <- Z + 1
# labels
y <- c({y});
# perform extraction.
sml <- extractMarkers(Z, data=y, method='HSD')
# load the mixture data.
exprsFile <- file.path("{X_path}");
exprs <- as.matrix(read.table(exprsFile, header=TRUE, sep="\\t", row.names=1, as.is=TRUE));
eset <- ExpressionSet(assayData=exprs);
# perform deconvolution.
res <- ged(eset, sml, 'ssKL')
# write matrix.
write.table(coef(res), file="{Ctmp}", row.names=FALSE, col.names=FALSE)
write.table(basis(res), file="{Stmp}", row.names=FALSE, col.names=FALSE)
'''.format(X_path=X_path, Z_path=Z_path, Stmp=Stmp, Ctmp=Ctmp, num=k, y=lbls)
# execute it.
try:
R.r(txt)
except rpy2.rinterface.RRuntimeError as e:
return False, False, False
# it worked
return True, Stmp, Ctmp
def R_DSA(X_path, Z_path, y_path, k, S_path, C_path, wdir):
    """ run DSA deconvolution (CellMix) using rpy2 """
# extend paths.
Stmp = '%s/S.txt' % wdir
Ctmp = '%s/C.txt' % wdir
# simplify labels.
y = np.load(y_path)
lbls = ','.join(['%i' % x for x in y])
# run deconvolution.
txt = '''
# load pure single-cells
sigFile <- file.path("{Z_path}");
Z <- as.matrix(read.table(sigFile, header=TRUE, sep="\\t", row.names=1, as.is=TRUE));
Z <- Z + 1
# labels
y <- c({y});
# perform extraction.
sml <- extractMarkers(Z, data=y, method='HSD')
# load the mixture data.
exprsFile <- file.path("{X_path}");
exprs <- as.matrix(read.table(exprsFile, header=TRUE, sep="\\t", row.names=1, as.is=TRUE));
eset <- ExpressionSet(assayData=exprs);
# perform deconvolution.
res <- ged(eset, sml, 'DSA')
# write matrix.
write.table(coef(res), file="{Ctmp}", row.names=FALSE, col.names=FALSE)
write.table(basis(res), file="{Stmp}", row.names=FALSE, col.names=FALSE)
'''.format(X_path=X_path, Z_path=Z_path, Stmp=Stmp, Ctmp=Ctmp, num=k, y=lbls)
# execute it.
try:
R.r(txt)
except rpy2.rinterface.RRuntimeError as e:
return False, False, False
# it worked
return True, Stmp, Ctmp
``` |
{
"source": "jim-bo/silp2",
"score": 2
} |
#### File: silp2/creation/bundles.py
```python
import sys
import os
import logging
import networkx as nx
import numpy as np
import scipy.stats as stats
import cPickle
import helpers.io as io
import helpers.misc as misc
### definitions ###
### functions ###
def compress_edges(MG, p, q):
''' compresses the edges '''
# check for types.
bcnts = [0, 0, 0, 0]
for z in MG[p][q]:
bcnts[MG[p][q][z]['state']] += 1
# build numpy arrays for each distance type.
bdists = list()
for i in range(4):
bdists.append(np.zeros(bcnts[i], dtype=np.float))
# populate array with distances.
bidxs = [0, 0, 0, 0]
for z in MG[p][q]:
state = MG[p][q][z]['state']
dist = MG[p][q][z]['dist']
bdists[state][bidxs[state]] = dist
bidxs[state] += 1
# compute bundle info.
devs = list()
means = list()
mins = list()
maxs = list()
for i in range(4):
if bdists[i].shape[0] <= 0:
devs.append(-1)
means.append(-1)
mins.append(-1)
maxs.append(-1)
else:
devs.append(np.std(bdists[i]))
means.append(np.mean(bdists[i]))
mins.append(bdists[i].min())
maxs.append(bdists[i].max())
# return summaries.
return bcnts, bdists, devs, means, mins, maxs
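# Hedged illustration (added; not from the original module): compress_edges expects
# a MultiGraph whose parallel p-q edges each carry an orientation 'state' in 0..3
# and a 'dist' estimate, e.g.:
#   MG = nx.MultiGraph()
#   MG.add_edge('ctg1', 'ctg2', state=0, dist=120.0)
#   MG.add_edge('ctg1', 'ctg2', state=0, dist=95.5)
#   bcnts, bdists, devs, means, mins, maxs = compress_edges(MG, 'ctg1', 'ctg2')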
def _load_reps(file_path):
''' loads repeat info from cpickle'''
# no weights.
if file_path == None:
return dict()
    # try loading from a directory of per-contig .npy files.
if os.path.isdir(file_path) == True:
reps = dict()
for f in os.listdir(file_path):
n = f.replace(".npy","")
try:
reps[n] = np.load("%s/%s" % (file_path, f))
except:
continue
return reps
# get weights.
try:
with open(file_path) as fin:
return cPickle.load(fin)
except:
logging.warning("unable to load repeat pickle, ignoring weights")
return dict()
def create_bundles(paths, args):
""" creates bundles
Parameters
----------
paths.edge_file : string
args.bundle_size : int
args.pthresh : int
args.bup : int
"""
# load repeat annotations.
repcnts = _load_reps(args.rep_file)
# load the multi graph.
MG = nx.read_gpickle(paths.edge_file)
# create bundle graph.
BG = nx.Graph()
# add nodes.
for n in MG.nodes():
BG.add_node(n, MG.node[n])
# build set of adjacencies.
adjset = set()
for p, nbrs in MG.adjacency_iter():
for q in nbrs:
adjset.add(tuple(sorted([p,q])))
# compute bundles from adjacencies.
zerod = 0
zcnt = 0
ztot = len(adjset)
for p, q in adjset:
#logging.info("progress: %d of %d" % (zcnt, ztot))
zcnt += 1
# sanity check.
if MG.node[p]['cov'] == 0.0 or MG.node[q]['cov'] == 0.0:
logging.error("how can this happen?")
sys.exit()
# bundle size check.
bsize = len(MG[p][q])
if bsize < args.bundle_size:
continue
# group by insert size.
groups = dict()
std_devs = dict()
for z in MG[p][q]:
ins_size = MG[p][q][z]['ins_size']
if ins_size not in groups:
groups[ins_size] = list()
std_devs[ins_size] = MG[p][q][z]['std_dev']
groups[ins_size].append(z)
# loop over groups.
for ins_size in groups:
# compress info.
bcnts, bdists, devs, means, mins, maxs = compress_edges(MG, p, q)
# compute weights.
cov = 1 - abs(MG.node[p]['cov'] - MG.node[q]['cov']) / (MG.node[p]['cov'] + MG.node[q]['cov'])
# swap bdists for python lists.
for i in range(len(bdists)):
bdists[i] = list(bdists[i])
# add start stop info.
poses1 = list()
poses2 = list()
for z in MG[p][q]:
tmp = MG[p][q][z]
poses1.append((tmp['left1'], tmp['right1']))
poses2.append((tmp['left2'], tmp['right2']))
# create bundle.
if BG.has_edge(p, q):
logging.error("can't have multiple insert sizes between same node")
sys.exit(1)
# zero out negative distances.
avgs = [np.average(bdists[i]) for i in range(4)]
for i in range(4):
if avgs[i] == np.nan:
bcnts[i] = 0.0
if avgs[i] < -2 * args.bundle_size:
bcnts[i] = 0.0
zerod += 1
# don't add it if no support.
if np.sum(bcnts) == 0:
continue
#BG.add_edge(p, q, bcnts=bcnts, bdists=bdists, devs=devs, means=means, mins=mins, maxs=maxs, ins_size=ins_size, std_dev=std_devs[ins_size], poses1=poses1, poses2=poses2)
BG.add_edge(p, q, bcnts=bcnts, bdists=bdists, ins_size=ins_size, std_dev=std_devs[ins_size], cov=cov)
# start the slimming.
logging.info("starting repeat based slimming")
# do repeat mods.
track_upped = 0
track_remed = 0
track_ogedg = len(BG.edges())
idxs = np.zeros(1)
if repcnts != dict():
# create repeat distrib.
repavgs = np.zeros(len(repcnts), dtype=np.dtype([('name','S256'),('avg',np.float)]))
i = 0
for name in repcnts:
# save the name.
repavgs[i]['name'] = name
# skip no repeat info.
if name not in repcnts or repcnts[name] == None:
repavgs[i]['avg'] = 0
i += 1
continue
# take the average over ins_size + 6 (std_dev)
d = args.ins_size + (6 * args.std_dev)
if repcnts[name].shape[0] < d:
repavgs[i]['avg'] = np.average(repcnts[name])
else:
r = range(0,d)+range(len(repcnts[name])-d,len(repcnts[name]))
repavgs[i]['avg'] = np.average(repcnts[name][r])
i += 1
# compute the cutoff threshold.
score = stats.scoreatpercentile(repavgs[:]['avg'], args.pthresh)
idxs = repavgs[:]['avg'] > score
# look at each bundle and see if the repeats necessitates attention.
for p, q in BG.edges():
# get index of pairs.
idp = np.where(repavgs[:]['name'] == p)[0]
idq = np.where(repavgs[:]['name'] == q)[0]
# skip if both not high.
if idxs[idp] == False and idxs[idq] == False:
continue
# get score.
scp = repavgs[idp]['avg']
scq = repavgs[idq]['avg']
# check if this bundle needs attention.
if max(scp, scq) > score:
track_upped += 1
                # it gets its minimum bundle size upped.
for i in range(len(BG[p][q]['bcnts'])):
# clear if it doesn't meet criteria.
if BG[p][q]['bcnts'][i] < args.bundle_size + args.bup:
BG[p][q]['bcnts'][i] = 0
# remove bundle if no support.
if np.sum(BG[p][q]['bcnts']) == 0:
track_remed += 1
BG.remove_edge(p,q)
else:
logging.info('no repeat information supplied')
# add repeat weights.
for p, q in BG.edges():
# create weight.
BG[p][q]['u'] = [0.0] * 4
# sum weights.
for z in MG[p][q]:
left1 = MG[p][q][z]['left1']
left2 = MG[p][q][z]['left2']
right1 = MG[p][q][z]['right1']
right2 = MG[p][q][z]['right2']
cntl = np.sum(repcnts[p][left1:left2])
cntr = np.sum(repcnts[p][right1:right2])
try:
propl = 1.0 - (float(cntl) / float(left2-left1))
propr = 1.0 - (float(cntr) / float(right2-right1))
except:
continue
# add average.
p_k = (propl + propr) / 2.0
# add it.
BG[p][q]['u'][MG[p][q][z]['state']] += p_k
# note the modifications due to filtering.
logging.info("contigs with repeat regions in %.2f threshold: %i of %i" % (args.pthresh, np.sum(idxs), len(idxs)))
logging.info("bundles effected by repeats: %i of %i" % (track_upped, track_ogedg))
logging.info("bundles removed by repeats: %i of %i" % (track_remed, track_ogedg))
logging.info("bundles removed by neg dist: %i" % (zerod))
logging.info("total bundles: %i" % (len(BG.edges())))
# write to disk.
nx.write_gpickle(BG, paths.bundle_file)
```
#### File: silp2/creation/edges.py
```python
import sys
import os
import logging
import mmap
import networkx as nx
import helpers.misc as misc
### private functions ###
class SamToken(object):
QNAME = ""
OFLAG = ""
RNAME = ""
LPOS = 0
RPOS = 0
def pop_sam(token, sam):
''' populates object '''
sam.QNAME = token[0]
sam.OFLAG = token[1]
sam.RNAME = token[2]
sam.LPOS = int(token[3])
sam.RPOS = sam.LPOS + len(token[9])
def sam_gen(file_path):
''' generator for sam files '''
# create the SAM object.
sam = SamToken()
# start the token generator.
for token, pos in token_gen(file_path, "\t"):
# fill the object.
try:
pop_sam(token, sam)
except:
continue
# yield it.
yield sam, pos
def pair_gen(file_path1, file_path2):
''' generator for sam files '''
# create the SAM object.
sama = SamToken()
samb = SamToken()
# start the token generator.
gena = token_gen(file_path1, "\t")
genb = token_gen(file_path2, "\t")
# loop over first iterator.
for tokena, posa in gena:
tokenb, posb = genb.next()
# fill the object.
pop_sam(tokena, sama)
pop_sam(tokenb, samb)
# yield it.
yield sama, samb, posa, posb
def openmm(file_path):
fin = open(file_path, "r")
mmin = mmap.mmap(fin.fileno(), 0, access=mmap.ACCESS_COPY)
return fin, mmin
def closemm(fin, mmin):
mmin.close()
fin.close()
def token_gen(file_path, delim):
''' generates tokens by delim '''
# open the file and memory map.
with open(file_path, "rb") as fin:
# begin yielding tokens.
pos = 0
for line in fin:
# yield the line.
yield line.strip().split(delim), pos
# update info.
pos += len(line)
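# Hedged usage sketch (added for illustration; the SAM paths are placeholders):
#   for sam1, sam2, pos1, pos2 in pair_gen('reads_1.sam', 'reads_2.sam'):
#       print sam1.RNAME, sam2.RNAME, sam1.LPOS, sam2.LPOS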
### public functions ###
def create_edges(paths, args):
""" creates edges
Parameters
----------
paths.node_file : file
args.sam1_file_file : file
args.sam2_file_file : file
args.pair_mode : string
args.ins_size : int
args.std_dev : int
paths.edge_file : string
"""
# load the multi graph.
EG = nx.read_gpickle(paths.node_file)
# add edges to the multigraph.
#fin1 = open(args.sam1_file, "rb")
#fin2 = open(args.sam2_file, "rb")
#for sam1, sam2, pos1, pos2 in pair_gen(fin1, fin2):
for sam1, sam2, pos1, pos2 in pair_gen(args.sam1_file, args.sam2_file):
# get distance.
p = sam1.RNAME
q = sam2.RNAME
width1 = EG.node[p]['width']
width2 = EG.node[q]['width']
# simplify.
p = sam1.RNAME
q = sam2.RNAME
op, oq = misc.get_orien(sam1, sam2, args.pair_mode)
state = misc.get_state(p, q, op, oq)
dist = misc.get_dist(p, q, state, width1, width2, sam1.LPOS, sam1.RPOS, sam2.LPOS, sam2.RPOS, args.ins_size)
# increment coverage.
EG.node[p]['cov'] += sam1.RPOS - sam1.LPOS
EG.node[q]['cov'] += sam2.RPOS - sam2.LPOS
# skip self edges.
if p == q:
continue
        # add edge accordingly.
EG.add_edge(p, q, dist=dist, state=state, left1=sam1.LPOS, right1=sam1.RPOS, left2=sam2.LPOS, right2=sam2.RPOS, ins_size=args.ins_size, std_dev=args.std_dev)
# compute average coverage.
for p in EG.nodes():
EG.node[p]['cov'] = float(EG.node[p]['cov']) / float(EG.node[p]['width'])
# ensure all edges have stuff.
for p, q in EG.edges():
if EG.node[p]['cov'] == 0.0 or EG.node[q]['cov'] == 0.0:
print p, q, EG.node[p]['cov'], EG.node[q]['cov']
for z in EG[p][q]:
print EG[p][q][z]
logging.error("shouldn't have happened")
sys.exit(1)
# write to disk.
nx.write_gpickle(EG, paths.edge_file)
```
#### File: silp2/helpers/evaluate_bundles.py
```python
import sys
import os
import logging
import networkx as nx
import numpy as np
import helpers.io as io
import helpers.misc as misc
import helpers.graphs as gphs
### parameters ###
BGRAPH_FILE = sys.argv[1]
AGP_FILE = sys.argv[2]
INSERT_SIZE = int(sys.argv[3])
STD_DEV = int(sys.argv[4])
### functions ###
def find_adj(G):
'''uses DFS from every node to find all nodes within reach.'''
# make a new graph to track edges.
UG = nx.Graph()
# loop over each node.
for n in G.nodes():
# DFS from this node.
stack = [n]
visited = set()
while len(stack) > 0:
# peek at stack.
p = stack[-1]
# add edge to this.
if UG.has_edge(n, p) == False:
UG.add_edge(n, p)
# mark as visited.
visited.add(p)
# loop over neighbors.
hit = False
for q in G.neighbors(p):
# check if we visited this already.
if q not in visited:
# find shortest path.
path = nx.shortest_path(G, n, q)
size = 0
for z in path[1:-1]:
size += G.node[z]['width']
for i in range(0, len(path[1:-1])-1):
size += G[path[i]][path[i+1]]['gap']
# see if we can reach it via dist.
if size <= INSERT_SIZE + (3 * STD_DEV):
stack.append(q)
hit = True
# continue if a hit.
if hit == True:
continue
# pop it from stack.
stack.pop()
# return graph.
return UG
### script ###
# load the graphs.
BG = nx.read_gpickle(BGRAPH_FILE)
AG = gphs.agp_graph_undirected(AGP_FILE)
# remove large edges from AGP.
to_remove = list()
for p, q in AG.edges():
if AG[p][q]['gap'] > INSERT_SIZE + (3 * STD_DEV):
to_remove.append((p,q))
AG.remove_edges_from(to_remove)
# build transitive adjacencies.
UG = find_adj(AG)
# turn edges into sets.
BGset = set([tuple(sorted([p,q])) for p,q in BG.edges()])
AGset = set([tuple(sorted([p,q])) for p,q in AG.edges()])
UGset = set([tuple(sorted([p,q])) for p,q in UG.edges()])
# count adjacencies.
basic = len(AGset.intersection(BGset))
trans = len(UGset.intersection(BGset))
# report.
print "Actual Edges: %i" % len(AG.edges())
print "Chosen Edges: %i" % len(BG.edges())
print "Basic Matching: %i" % basic
print "Transitive Matching: %i" % trans
```
#### File: silp2/helpers/to_agp.py
```python
import sys
import os
import logging
import networkx as nx
import helpers.io as io
### definitions ###
### functions ###
def write_agp(order_file, agp_file):
''' translates results to agp '''
# load the oriented graph.
SG = nx.read_gpickle(order_file)
# ensure node degree is low.
deg_list = [len(SG.neighbors(x)) for x in SG.nodes()]
if max(deg_list) > 2:
logging.error("is not a path")
sys.exit(1)
# ensure its a DAG.
if nx.is_directed_acyclic_graph(SG) == False:
logging.error("not a DAG?")
sys.exit(1)
# save it to disk.
io.to_agp(SG, agp_file)
```
#### File: silp2/optimize/order.py
```python
import sys
import logging
import numpy as np
import cplex
import math
import networkx as nx
from cplex.exceptions import CplexSolverError
class OrderIlp(object):
'''
solves bi-partite matching using ILP
'''
def __init__(self, log_file, err_file):
'''
constructor
'''
# save file ptrs.
self._log_file = log_file
self._err_file = err_file
# clear logs.
tmp = open(self._log_file, "w")
tmp.close()
tmp = open(self._err_file, "w")
tmp.close()
# set loaded var.
self._loaded = False
self._solved = False
def load(self, matching_type, DG, card_val=False):
''' loads variables from flow graph '''
# sanity check.
if self._loaded == True:
logging.error("ILP already loaded.")
sys.exit(1)
# save reference ot graph.
self._graph = DG
# initiate cplex object.
self._cpx = cplex.Cplex()
# set log files.
self._cpx.set_log_stream(self._log_file)
self._cpx.set_results_stream(self._log_file)
self._cpx.set_warning_stream(self._err_file)
self._cpx.set_error_stream(self._err_file)
# prepare lookup structures.
self._var_defined = set()
# add Xij variables.
self._add_pair_vars()
# constrain paths.
self._constrain_paths()
# build the objective.
if matching_type == "weight":
self._obj_weight()
elif matching_type == "card":
self._obj_card()
elif matching_type == "mcmw":
self._constrain_card(card_val)
self._obj_weight()
# set loaded.
self._loaded = True
def solve(self, file_path=False):
''' runs the ilp on loaded info '''
# sanity check.
if self._loaded == False:
logging.error("ILP not loaded.")
sys.exit(1)
# sanity check.
if self._solved == True:
logging.error("shouldn't solve ILP twice.")
sys.exit(1)
# write ILP to file.
if file_path != False:
self._cpx.write(file_path, filetype="lp")
# call the solve code.
try:
# call the solve method.
self._cpx.solve()
# populate solution.
self._DG = self._populate_sol()
        except CplexSolverError as e:
# if no solution found return empty sol and -1.
self._sol = None
logging.error("exception raised during solve: " + str(e))
sys.exit(1)
# set solved to true.
self._solved = True
# return solution.
#return self._sol, self._cpx.solution.get_objective_value()
return self._DG
def clear(self):
''' resets ILP completely '''
# sanity.
if self._cpx == None:
logging.error("ILP already deleted")
sys.exit(1)
# sanity.
if self._solved == False:
logging.error("ILP not solved")
sys.exit(1)
# remove cplex and other vars.
del self._cpx
del self._var_defined
self._cpx = None
# clear loaded.
self._loaded = False
self._solved = False
def _obj_weight(self):
''' sets objective '''
# loop over bundles.
for e in self._graph.edges():
# simplify.
idxa = e[0]
idxb = e[1]
# build vars.
Xij = "X#%s#%s" % (str(idxa), str(idxb))
# get weight.
Wij = self._graph[idxa][idxb]['weight']
# add to objective.
self._cpx.objective.set_linear(Xij, Wij)
# set objective type.
self._cpx.objective.set_sense(self._cpx.objective.sense.maximize)
def _obj_card(self):
''' sets objective '''
# loop over bundles.
for e in self._graph.edges():
# simplify.
idxa = e[0]
idxb = e[1]
# build vars.
Xij = "X#%s#%s" % (str(idxa), str(idxb))
# set simple weight.
self._cpx.objective.set_linear(Xij, 1)
# set objective type.
self._cpx.objective.set_sense(self._cpx.objective.sense.maximize)
def _constrain_paths(self):
''' ensures each variable has in/out degree at most 1'''
# loop over each node.
for p in self._graph.nodes():
# constrain in degree.
inds = list()
for q in self._graph.predecessors(p):
inds.append("X#%s#%s" % (str(q), str(p)))
vals = [1] * len(inds)
# build constraint.
c1 = cplex.SparsePair( ind = inds, val = vals )
# constrain out degree.
inds = list()
for q in self._graph.successors(p):
inds.append("X#%s#%s" % (str(p), str(q)))
vals = [1] * len(inds)
# build constraint.
c2 = cplex.SparsePair( ind = inds, val = vals )
# add them.
self._cpx.linear_constraints.add( \
lin_expr = [c1, c2],\
senses = ["L", "L"],\
rhs = [1, 1],\
names = ['pair', 'pair']\
)
def _constrain_card(self, card_val):
        ''' ensures paths have at least a certain cardinality'''
# loop over bundles.
inds = list()
for e in self._graph.edges():
# simplify.
idxa = e[0]
idxb = e[1]
# build vars.
inds.append("X#%s#%s" % (str(idxa), str(idxb)))
# build constraint.
c = cplex.SparsePair( ind = inds, val = [1] * len(inds) )
# add it.
self._cpx.linear_constraints.add( \
lin_expr = [c],\
senses = ["E"],\
rhs = [card_val],\
names = ['card_val']\
)
def _add_pair_vars(self):
''' adds pair variables '''
# loop over bundles.
for e in self._graph.edges():
# simplify.
idxa = e[0]
idxb = e[1]
# build vars.
Xij = "X#%s#%s" % (str(idxa), str(idxb))
# add the variables.
self._cpx.variables.add( lb = [0], ub = [1], types = ["B"], names = [Xij] )
# populate lookup.
self._var_defined.add(Xij)
def _populate_sol(self):
''' populates solution object after running '''
# loop over bundles.
elist = list()
for e in self._graph.edges():
# simplify.
idxa = e[0]
idxb = e[1]
# build vars.
Xij = "X#%s#%s" % (str(idxa), str(idxb))
# get result.
val = int(self._cpx.solution.get_values(Xij))
# add to list if chosen.
if val == 1:
elist.append((idxa, idxb))
# create a new graph.
DG = nx.DiGraph()
for n in self._graph.nodes():
DG.add_node(n, self._graph.node[n])
for p, q in elist:
DG.add_edge(p, q, self._graph[p][q])
# return the new graph.
return DG
#self._sol = elist
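# Illustrative usage sketch (not part of the original module; assumes a networkx
# DiGraph `DG` with a 'weight' attribute on every edge, and hypothetical log paths):
#   ilp = OrderIlp("order.log", "order.err")
#   ilp.load("weight", DG)
#   solution = ilp.solve()   # returns a DiGraph containing only the chosen edges
#   ilp.clear()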
``` |
{
"source": "jimboslicethat/python-katas",
"score": 4
} |
#### File: fizz_buzz/_4/fizz_buzz.py
```python
VALUE_FIZZ = "FIZZ"
VALUE_BUZZ = "BUZZ"
VALUE_FIZZBUZZ = f'{VALUE_FIZZ}{VALUE_BUZZ}'
def fizz_buzz(upper_bound: int) -> []:
_fizz_buzz = []
numbers = get_fizz_buzz_numbers(upper_bound)
for number in numbers:
value = number
if _is_fizz_buzz(number):
value = VALUE_FIZZBUZZ
elif _is_fizz(number):
value = VALUE_FIZZ
elif _is_buzz(number):
value = VALUE_BUZZ
_fizz_buzz.append(value)
return _fizz_buzz
def get_fizz_buzz_numbers(upper_bound: int) -> []:
loop_upper_bound = upper_bound + 1
numbers = range(1, loop_upper_bound)
return numbers
def _is_fizz_buzz(number: int) -> bool:
return _is_fizz(number) and _is_buzz(number)
def _is_fizz(number: int) -> bool:
return number % 3 == 0
def _is_buzz(number: int) -> bool:
return number % 5 == 0
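# Illustrative sketch (not part of the original kata): expected behaviour of the
# functions above for small upper bounds.
#   fizz_buzz(5)      -> [1, 2, 'FIZZ', 4, 'BUZZ']
#   fizz_buzz(15)[-1] -> 'FIZZBUZZ'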
```
#### File: python-katas/sandbox/csv_to_mssql_insert.py
```python
import csv
import sys
"""
Given a standard mssql exported query results csv file,
Write a function that turns each exported record into an insert statement
"""
def create_insert_script(csv_file_path, table_name, database_name, *, add_audit_information=True):
with open(csv_file_path, "r", encoding="utf-8") as csv_file:
statements = []
csv_file_reader = csv.reader(csv_file)
row_headers = next(csv_file_reader) # first row assumed to be column headers
if add_audit_information:
row_headers.extend(['createdAt', 'updatedAt']) # specific to my use case, can be removed
sql = f'INSERT INTO {database_name}.dbo.{table_name}\n' \
f' ({", ".join(row_headers)})\n' \
f'VALUES ('
for row in csv_file_reader:
column_count = len(row)
values = ''
for index, column in enumerate(row):
try:
values += f'{int(column)}'
except Exception:
if column == "NULL":
values += column
else:
column = column.replace("'", "''")
values += f"'{column}'"
if index + 1 < column_count:
values += ', '
if add_audit_information:
values += ', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP'
values += ')'
statements.append(f'{sql}{values}')
return statements
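# Illustrative sketch (hypothetical data, not from the original repo): for a csv with
# header "id,name" and a single row "1,Alpha", the function above would yield one
# statement roughly like:
#   INSERT INTO tempdb.dbo.Projects
#     (id, name, createdAt, updatedAt)
#   VALUES (1, 'Alpha', CURRENT_TIMESTAMP, CURRENT_TIMESTAMP)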
statements = create_insert_script(
    sys.argv[1] if len(sys.argv) > 1 else './project_sample.csv',
    sys.argv[2] if len(sys.argv) > 2 else 'Projects',
    sys.argv[3] if len(sys.argv) > 3 else 'tempdb')
print("--- SQL INSERT SCRIPT --")
print("SET IDENTITY_INSERT Projects ON")
print("GO")
[print(statement) for statement in statements]
print("GO")
``` |
{
"source": "jimbozhang/GigaSpeech",
"score": 3
} |
#### File: GigaSpeech/utils/gigaspeech_scoring.py
```python
import os
import argparse
conversational_filler = ['UH', 'UHH', 'UM', 'EH', 'MM', 'HM', 'AH', 'HUH', 'HA', 'ER', 'OOF', 'HEE' , 'ACH', 'EEE', 'EW']
unk_tags = ['<UNK>', '<unk>']
gigaspeech_punctuations = ['<COMMA>', '<PERIOD>', '<QUESTIONMARK>', '<EXCLAMATIONPOINT>']
gigaspeech_garbage_utterance_tags = ['<SIL>', '<NOISE>', '<MUSIC>', '<OTHER>']
non_scoring_words = conversational_filler + unk_tags + gigaspeech_punctuations + gigaspeech_garbage_utterance_tags
def asr_text_post_processing(text):
# 1. convert to uppercase
text = text.upper()
# 2. remove hyphen
# "E-COMMERCE" -> "E COMMERCE", "STATE-OF-THE-ART" -> "STATE OF THE ART"
text = text.replace('-', ' ')
# 3. remove non-scoring words from evaluation
remaining_words = []
for word in text.split():
if word in non_scoring_words:
continue
remaining_words.append(word)
return ' '.join(remaining_words)
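# Illustrative example (hypothetical input): hyphens are split into spaces and
# fillers/tags are dropped, so
#   asr_text_post_processing("state-of-the-art <COMMA> uh") -> "STATE OF THE ART"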
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="This script evaluates GigaSpeech ASR result via SCTK's tool sclite")
parser.add_argument('ref', type=str, help="sclite's standard transcription(trn) reference file")
parser.add_argument('hyp', type=str, help="sclite's standard transcription(trn) hypothesis file")
parser.add_argument('work_dir', type=str, help='working dir')
args = parser.parse_args()
if not os.path.isdir(args.work_dir):
os.mkdir(args.work_dir)
REF = os.path.join(args.work_dir, 'REF')
HYP = os.path.join(args.work_dir, 'HYP')
RESULT = os.path.join(args.work_dir, 'RESULT')
for io in [(args.ref, REF), (args.hyp, HYP)]:
with open(io[0], 'r', encoding='utf8') as fi, open(io[1], 'w+', encoding='utf8') as fo:
for line in fi:
line = line.strip()
if line:
cols = line.split()
text = asr_text_post_processing(' '.join(cols[0:-1]))
uttid_field = cols[-1]
print(F'{text} {uttid_field}', file=fo)
    os.system(F'sclite -r {REF} trn -h {HYP} trn -i swb | tee {RESULT}')  # GigaSpeech's uttid conforms to swb
```
#### File: GigaSpeech/utils/opus_to_wav.py
```python
import os
import sys
import argparse
import re
def get_args():
parser = argparse.ArgumentParser(description="""
This script is used to convert opus file into wav file.""")
    parser.add_argument('--remove-opus', action='store_true', default=False,
                        help="""If true, remove opus files""")
parser.add_argument('opus_scp', help="""Input opus scp file""")
args = parser.parse_args()
return args
def convert_opus2wav(opus_scp, rm_opus):
with open(opus_scp, 'r') as oscp:
for line in oscp:
line = line.strip()
            utt, opus_path = re.split(r'\s+', line)
wav_path = opus_path.replace('.opus', '.wav')
cmd = f'ffmpeg -y -i {opus_path} -ac 1 -ar 16000 {wav_path}'
try:
os.system(cmd)
            except Exception:
sys.exit(f'Failed to run the cmd: {cmd}')
if rm_opus is True:
os.remove(opus_path)
def main():
args = get_args()
convert_opus2wav(args.opus_scp, args.remove_opus)
if __name__ == '__main__':
main()
``` |
{
"source": "jimbozhang/lhotse",
"score": 2
} |
#### File: modes/recipes/gigaspeech.py
```python
import logging
from typing import List
import click
from lhotse.bin.modes import download, prepare
from lhotse.recipes.gigaspeech import GIGASPEECH_PARTS, download_gigaspeech, prepare_gigaspeech
from lhotse.utils import Pathlike
@prepare.command(context_settings=dict(show_default=True))
@click.argument('corpus_dir', type=click.Path(exists=True, dir_okay=True))
@click.argument('output_dir', type=click.Path())
@click.option('--subset', type=click.Choice(('auto',) + GIGASPEECH_PARTS), multiple=True,
default=['auto'], help='Which parts of Gigaspeech to download (by default XL + DEV + TEST).')
@click.option('-j', '--num-jobs', type=int, default=1,
help='How many threads to use (can give good speed-ups with slow disks).')
def gigaspeech(
corpus_dir: Pathlike,
output_dir: Pathlike,
subset: List[str],
num_jobs: int
):
"""Gigaspeech ASR data preparation."""
logging.basicConfig(level=logging.INFO)
if 'auto' in subset:
subset = 'auto'
prepare_gigaspeech(corpus_dir, output_dir=output_dir, dataset_parts=subset, num_jobs=num_jobs)
@download.command(context_settings=dict(show_default=True))
@click.argument('password', type=str)
@click.argument('target_dir', type=click.Path())
@click.option('--subset', type=click.Choice(('auto',) + GIGASPEECH_PARTS), multiple=True,
default=['auto'], help='Which parts of Gigaspeech to download (by default XL + DEV + TEST).')
@click.option('--host', type=str, default='tsinghua', help='Which host to download Gigaspeech.')
def gigaspeech(
password: str,
target_dir: Pathlike,
subset: List[str],
host: str
):
"""Gigaspeech download."""
# Convert (likely one-element) list with "auto" into a string.
logging.basicConfig(level=logging.INFO)
if 'auto' in subset:
subset = 'auto'
download_gigaspeech(password, target_dir, dataset_parts=subset, host=host)
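# Rough CLI usage sketch (assuming these commands are registered under lhotse's
# `download`/`prepare` command groups; paths and subset names are hypothetical):
#   lhotse download gigaspeech <password> /data/corpora/gigaspeech --subset XL
#   lhotse prepare gigaspeech /data/corpora/gigaspeech /data/manifests -j 4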
```
#### File: test/dataset/test_cut_transforms.py
```python
import random
from math import isclose
import pytest
from lhotse import CutSet
from lhotse.cut import MixedCut
from lhotse.dataset import CutMix, ExtraPadding
from lhotse.dataset import PerturbSpeed, PerturbVolume
from lhotse.testing.dummies import DummyManifest
def test_perturb_speed():
tfnm = PerturbSpeed(factors=[0.9, 1.1], p=0.5, randgen=random.Random(42))
cuts = DummyManifest(CutSet, begin_id=0, end_id=10)
cuts_sp = tfnm(cuts)
assert all(
# The duration will not be exactly 0.9 and 1.1 because perturb speed
# will round to a physically-viable duration based on the sampling_rate
# (i.e. round to the nearest sample count).
any(isclose(cut.duration, v, abs_tol=0.0125) for v in [0.9, 1.0, 1.1])
for cut in cuts_sp
)
def test_perturb_volume():
tfnm = PerturbVolume(factors=[0.125, 2.], p=0.5, randgen=random.Random(42))
cuts = DummyManifest(CutSet, begin_id=0, end_id=10)
cuts_vp = tfnm(cuts)
assert all(
cut.duration == 1. and
cut.start == 0. and
cut.recording.sampling_rate == 16000 and
cut.recording.num_samples == 16000 and
cut.recording.duration == 1.0 for cut in cuts_vp
)
def test_cutmix():
speech_cuts = DummyManifest(CutSet, begin_id=0, end_id=10)
for c in speech_cuts:
c.duration = 10.0
noise_cuts = DummyManifest(CutSet, begin_id=100, end_id=102)
for c in noise_cuts:
c.duration = 1.5
tfnm = CutMix(noise_cuts, snr=None, prob=1.0)
tfnm_cuts = tfnm(speech_cuts)
for c in tfnm_cuts:
assert isinstance(c, MixedCut)
assert c.tracks[0].cut.duration == 10.0
assert sum(t.cut.duration for t in c.tracks[1:]) == 10.0
@pytest.mark.parametrize('randomized', [False, True])
def test_extra_padding_frames(randomized):
cuts = DummyManifest(CutSet, begin_id=0, end_id=10)
transform = ExtraPadding(
extra_frames=4,
randomized=randomized
)
padded_cuts = transform(cuts)
# Non-randomized test -- check that all cuts are processed
# in the same way.
if not randomized:
for cut, padded in zip(cuts, padded_cuts):
# first track is for padding
assert padded.tracks[0].cut.num_frames == 2
# second track is for padding
assert padded.tracks[-1].cut.num_frames == 2
# total num frames is OK
assert padded.num_frames == cut.num_frames + 4
# Randomized test -- check that cuts have different properties.
if randomized:
nums_frames = [c.num_frames for c in padded_cuts]
assert len(set(nums_frames)) > 1
@pytest.mark.parametrize('randomized', [False, True])
def test_extra_padding_samples(randomized):
cuts = DummyManifest(CutSet, begin_id=0, end_id=10)
transform = ExtraPadding(
extra_samples=320,
randomized=randomized
)
padded_cuts = transform(cuts)
# Non-randomized test -- check that all cuts are processed
# in the same way.
if not randomized:
for cut, padded in zip(cuts, padded_cuts):
# first track is for padding
assert padded.tracks[0].cut.num_samples == 160
# second track is for padding
assert padded.tracks[-1].cut.num_samples == 160
# total num frames is OK
assert padded.num_samples == cut.num_samples + 320
# Randomized test -- check that cuts have different properties.
if randomized:
nums_samples = [c.num_samples for c in padded_cuts]
assert len(set(nums_samples)) > 1
@pytest.mark.parametrize('randomized', [False, True])
def test_extra_padding_seconds(randomized):
cuts = DummyManifest(CutSet, begin_id=0, end_id=10)
transform = ExtraPadding(
extra_seconds=0.04,
randomized=randomized
)
padded_cuts = transform(cuts)
# Non-randomized test -- check that all cuts are processed
# in the same way.
if not randomized:
for cut, padded in zip(cuts, padded_cuts):
# first track is for padding
assert padded.tracks[0].cut.duration == 0.02
# second track is for padding
assert padded.tracks[-1].cut.duration == 0.02
# total num frames is OK
assert isclose(padded.duration, cut.duration + 0.04)
# Randomized test -- check that cuts have different properties.
if randomized:
durations = [c.duration for c in padded_cuts]
assert len(set(durations)) > 1
```
#### File: test/known_issues/test_augment_with_executor.py
```python
import multiprocessing
import sys
from concurrent.futures import ProcessPoolExecutor, ThreadPoolExecutor
from functools import partial
from tempfile import TemporaryDirectory
import pytest
from lhotse import CutSet, Fbank
from lhotse.testing.fixtures import RandomCutTestCase
torchaudio = pytest.importorskip('torchaudio', minversion='0.7.1')
class TestAugmentationWithExecutor(RandomCutTestCase):
@pytest.mark.parametrize(
'exec_type',
[
# Multithreading works
ThreadPoolExecutor,
# Multiprocessing works, but only when using the "spawn" context (in testing)
pytest.param(
partial(ProcessPoolExecutor, mp_context=multiprocessing.get_context("spawn")),
marks=pytest.mark.skipif(
sys.version_info[0] == 3 and sys.version_info[1] < 7,
reason="The mp_context argument is introduced in Python 3.7"
)
),
]
)
def test_wav_augment_with_executor(self, exec_type):
cut = self.with_cut(sampling_rate=16000, num_samples=16000)
with TemporaryDirectory() as d, \
exec_type(max_workers=4) as ex:
cut_set_speed = CutSet.from_cuts(
cut.with_id(str(i)) for i in range(100)
).perturb_speed(1.1) # perturb_speed uses torchaudio SoX effect that could hang
# Just test that it runs and does not hang.
cut_set_speed_feats = cut_set_speed.compute_and_store_features(
extractor=Fbank(),
storage_path=d,
executor=ex
)
@pytest.mark.parametrize(
'exec_type',
[
# Multithreading works
ThreadPoolExecutor,
# Multiprocessing works, but only when using the "spawn" context (in testing)
pytest.param(
partial(ProcessPoolExecutor, mp_context=multiprocessing.get_context("spawn")),
marks=pytest.mark.skipif(
sys.version_info[0] == 3 and sys.version_info[1] < 7,
reason="The mp_context argument is introduced in Python 3.7"
)
),
]
)
    def test_wav_augment_with_executor_volume(self, exec_type):
cut = self.with_cut(sampling_rate=16000, num_samples=16000)
with TemporaryDirectory() as d, \
exec_type(max_workers=4) as ex:
cut_set_volume = CutSet.from_cuts(
cut.with_id(str(i)) for i in range(100)
).perturb_volume(0.125) # perturb_volume uses torchaudio SoX effect that could hang
# Just test that it runs and does not hang.
cut_set_volume_feats = cut_set_volume.compute_and_store_features(
extractor=Fbank(),
storage_path=d,
executor=ex
)
``` |
{
"source": "jimbozhang/PySpeechColab",
"score": 2
} |
#### File: PySpeechColab/test/test_common.py
```python
def test_always_pass():
assert True
def test_imports():
import speechcolab
assert speechcolab
from speechcolab import datasets
assert datasets
from speechcolab.datasets import gigaspeech
assert gigaspeech
```
#### File: test/utils/test_download.py
```python
from tempfile import NamedTemporaryFile
from speechcolab.utils import download
def test_download_from_http_to_buffer():
url = 'https://raw.githubusercontent.com/SpeechColab/PySpeechColab/main/LICENSE'
with NamedTemporaryFile(delete=True) as f:
download.download_from_http(f.name, url)
data = f.read().decode()
assert data.strip().startswith('Apache License')
def test_download_from_ftp_to_buffer():
host, username, password = '<PASSWORD>', '<PASSWORD>', 'password'
remote_path = '/pub/example/readme.txt'
with NamedTemporaryFile(delete=True) as f:
download.download_from_ftp(f.name, host, remote_path, username, password)
data = f.read().decode()
assert data.startswith('Welcome')
``` |
{
"source": "jimbrayrcp/snake_game",
"score": 4
} |
#### File: snake_game/snake_game/scoreboard.py
```python
from turtle import Turtle
FONT = ("courier", 22, "normal")
ALIGN = "center"
class Scoreboard(Turtle):
def __init__(self):
super(Scoreboard, self).__init__()
self.penup()
self.color("white")
self.setposition(0, 270)
self.hideturtle()
self.score = 0
self.update_scoreboard()
def update_scoreboard(self):
text = f"SCORE: {self.score}"
self.write(text, move=False, align=ALIGN, font=FONT)
def game_over(self):
text = f"GAME OVER"
self.setposition(0, 0)
self.write(text, move=False, align=ALIGN, font=FONT)
def add_to_score(self):
self.score += 1
self.clear()
self.update_scoreboard()
if __name__ == "__main__":
from turtle import Screen
from time import sleep
screen = Screen()
screen.setup(height=600, width=600)
screen.bgcolor("black")
screen.tracer(0)
score = Scoreboard()
sleep(3)
score.add_to_score()
screen.exitonclick()
``` |
{
"source": "jimbrayrcp/US_States_Game",
"score": 3
} |
#### File: US_States_Game/States_Memory_Game/data_worker.py
```python
import pandas
import os
import platform
import subprocess
data_dictionary = {
"state": [],
"was_correct": [],
"total_correct": []
}
class States:
def __init__(self):
self.compare = False
self.guess = ""
self.answers_given = 0
self.answers_correct = 0
def check_state(self, guess):
self.guess = guess.title()
df = pandas.read_csv("50_states.csv")
st = df.state
value = df[st == self.guess]
self.found_result(value)
self.append_answers_data()
try:
state = value.state.to_string(index=False)
# state = value.state.item()
x_state = int(value.x)
y_state = int(value.y)
return x_state, y_state, state
except KeyError as ke:
print(f"error {ke}")
except TypeError as te:
print(f"TYPE: {te}")
def found_result(self, value):
self.answers_given += 1
df = pandas.read_csv("../correct_answers.csv")
found = df[df['state'].str.contains(self.guess).to_list()]
if len(value) and not found.empty:
if not found.count()[0]:
self.compare = True
self.answers_correct = self.last_score()
self.answers_correct += 1
else:
self.compare = False
self.answers_correct = self.last_score()
elif len(value) and found.empty:
self.compare = True
self.answers_correct = self.last_score()
self.answers_correct += 1
else:
self.compare = False
@staticmethod
def last_score():
df = pandas.read_csv("../correct_answers.csv")
if df.total_correct.notnull().any():
row_max_score = df.total_correct.max()
else:
row_max_score = 0
return row_max_score
def append_answers_data(self):
data_to_save = {
"state": [self.guess],
"was_correct": [self.compare],
"total_correct": [self.answers_correct]
}
df = pandas.DataFrame(data_to_save)
df.to_csv('correct_answers.csv', mode='a', header=False, sep=",")
@staticmethod
def new_game_memory():
new_data = pandas.DataFrame(data_dictionary)
new_data.to_csv("correct_answers.csv")
@staticmethod
def states_to_learn():
df1 = pandas.read_csv("50_states.csv")
df2 = pandas.read_csv("../correct_answers.csv")
col1 = df1.state.to_list()
col2 = df2.state.to_list()
set1 = set(col1)
set2 = set(col2)
missing = list(sorted(set1 - set2))
data_to_save = {
"state": missing
}
df = pandas.DataFrame(data_to_save)
df.to_csv('states_to_learn.csv')
@staticmethod
def open_states_to_learn():
filepath = "../states_to_learn.csv"
if platform.system() == 'Darwin':
subprocess.call(('open', filepath))
elif platform.system() == 'Windows':
os.startfile(filepath)
else:
subprocess.call(('xdg-open', filepath))
if __name__ == "__main__":
states = States()
# states.new_game_memory()
# answer = "tennessee"
# state_returned = states.check_state(answer)
# if state_returned:
# print(f"RETURNED: {state_returned[0]} {state_returned[1]} {state_returned[2]}")
# states.create_answers_data()
# states.append_answers_data()
# score = states.last_score()
# print(f"SCORE RESULT {score}")
states.states_to_learn()
``` |
{
"source": "jimbrig/fx",
"score": 2
} |
#### File: python/assets/app.py
```python
from fx import fx
from flask import Flask, request, jsonify
app = Flask(__name__)
@app.route('/', methods=['POST', 'GET'])
def handle():
return fx(request)
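# Local-run sketch (assumes Flask's standard development server; not part of the
# original asset):
#   FLASK_APP=app.py flask run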
``` |
{
"source": "jimbro1000/cas2bas",
"score": 3
} |
#### File: cas2bas/bas2cas/Main.py
```python
import sys
import formats.Cas_Format
from formats.Tokeniser_Factory import find_tokeniser
from formats.Utility import find_verbosity, find_base_load_address, \
find_header_length
LOAD_ADDRESS = 0x1E00
HEADER_LENGTH = 128
def usage():
print("Dragon ASCII BASIC listing to CAS format")
print("Version 2.0.0")
print("Usage:")
print(
"bas2cas [input_filename] [output_filename] [cassette_filename] ["
"options] ")
print("Options:")
print(" -dd --dragondos : use DragonDos extended BASIC")
print(" -cc --coco : use Coco BASIC")
print(" -rd --rsdos : use Coco Rsdos extended BASIC")
print(" -b2 --basic2 : use Trs80 Basic II")
print("If none of the token options are given, Dragon tokens are used")
print(" -s --silent : suppress all console output")
print(" -q --quiet : only show errors in console")
print(" -v --verbose : show all messages")
print("Default messages are informational only")
print(" -b --base xxxx : set base load address (default is 0x1e00)")
print(" -h --header xx : set header run length")
class Main(object):
def __init__(self):
self.result = ""
self.mode = 0
        self.verbosity = 1
def run(self) -> object:
# Process parameters
if len(sys.argv) < 4:
usage()
return
filename = sys.argv[1]
output = sys.argv[2]
coded_filename = sys.argv[3]
tokeniser = find_tokeniser(sys.argv[4:])
self.verbosity = find_verbosity(sys.argv[4:])
load_address = find_base_load_address(sys.argv[4:], LOAD_ADDRESS)
header_length = find_header_length(sys.argv[4:], HEADER_LENGTH)
# Read file
with open(filename, "rb") as sourceFile:
file_data = sourceFile.read().decode()
self.report(1, f"Located program {filename}")
result, token_stream = tokeniser.parse_program(file_data, load_address)
if result == 0:
self.report(1, "file successfully encoded")
formatter = formats.Cas_Format.CasFormat(
[], tokeniser, self.verbosity
)
output_data = formatter.build_file(coded_filename, token_stream,
header_length)
with open(output, "wb") as f:
f.write(output_data)
self.report(1, f"cas file written as {output}")
else:
self.report(2, "file processing failed")
def report(self, level, message):
if level >= self.verbosity:
print(message)
def main():
app = Main()
app.run()
if __name__ == "__main__":
main()
```
#### File: cas2bas/formats/Cas_Format.py
```python
import sys
from formats.Block_Builder import FileBlock
PENDING = 0
EXPECTING_LINE_ADDRESS_HIGH = 1
EXPECTING_LINE_ADDRESS_LOW = 2
EXPECTING_LINE_NUMBER_HIGH = 3
EXPECTING_LINE_NUMBER_LOW = 4
LINE_DATA = 5
FAILED = -1
LEADER = 0x55
SYNC = 0x3C
NAME_FILE_BLOCK = 0x00
DATA_BLOCK = 0x01
END_OF_FILE_BLOCK = 0xFF
BASIC_FILE_IDENTIFIER = 0x00
DATA_FILE_IDENTIFIER = 0x01
BINARY_FILE_IDENTIFIER = 0x02
ASCII_FILE_FLAG = 0xFF
BINARY_FILE_FLAG = 0x00
CONTINUOUS_FILE = 0x00
DRAGON32_LEADER_SIZE = 128
DRAGON64_LEADER_SIZE = 256
DEFAULT_LEADER_SIZE = DRAGON32_LEADER_SIZE
FILENAME_LENGTH = 8
EOL = '\n'
class CasFormat(object):
"""Processes a file stream of byte data according to the CAS format for
BASIC source code."""
def __init__(self, file_data, tokeniser, verbosity):
self.state = PENDING
self.tokeniser = tokeniser
self.data = file_data
self.state = -1
self.byte_index = 0
self.file_name = ""
self.current_line = ""
self.listing = []
self.line_number = 0
self.next_line = 0
self.exec_address = 0
self.load_address = 0
self.verbosity = verbosity
self.leader_length = DEFAULT_LEADER_SIZE
def next_byte(self):
"""Provides the next byte from the loaded byte array.
This is a forward only operation."""
if self.byte_index < len(self.data):
value = self.data[self.byte_index]
self.byte_index += 1
return value
else:
self.report(2, "file length exceeded (" + str(self.byte_index) +
" of " + str(len(self.data)) + ")")
sys.exit(-1)
def process_header(self):
"""Processes the file header to verify file type and find file
data start point."""
head = self.next_byte()
leader_length = 0
while head == LEADER:
leader_length += 1
head = self.next_byte()
if head != SYNC:
self.report(2,
"unknown file type, invalid sync byte: " + str(head))
return -1
head = self.next_byte()
if head != NAME_FILE_BLOCK:
self.report(2, "illegal file type")
return -1
self.next_byte()
# header length - don't need it
# self.next_byte()
# this byte is unidentified
head = self.next_byte()
name_index = 0
while name_index < 8:
if head != 0x20:
self.file_name += chr(head)
head = self.next_byte()
name_index += 1
# file id byte
if head != BASIC_FILE_IDENTIFIER:
self.report(2, "not a basic listing")
return -1
head = self.next_byte()
# ascii flag
if head != ASCII_FILE_FLAG and head != BINARY_FILE_FLAG:
self.report(2, "not a valid byte format - must be ascii or binary")
return -1
head = self.next_byte()
# gap flag
if head != CONTINUOUS_FILE:
self.report(2, "not a continuous file")
return -1
head = self.next_byte()
# exec address
self.exec_address = head
head = self.next_byte()
self.exec_address = self.exec_address * 256 + head
head = self.next_byte()
# load address
self.load_address = head
head = self.next_byte()
self.load_address = self.load_address * 256 + head
self.next_byte()
# this byte is the checksum of the block (bytes+type+length)
self.state = EXPECTING_LINE_ADDRESS_HIGH
return 0
def process_file(self):
"""Processes the file body to extract the token stream.
File is in blocks so operates as a block iterator with
the content being processed in a slim state machine."""
head = self.next_byte()
while head == LEADER:
head = self.next_byte()
if head != SYNC:
self.report(2,
"unknown file type, invalid sync byte: " + str(head))
return -1
head = self.next_byte()
while head == DATA_BLOCK:
head = self.next_byte()
length = head
head = self.next_byte()
while length > 0:
length -= 1
self.build_listing(head)
head = self.next_byte()
# skip checksum byte
head = self.next_byte()
# process two leaders
if head != LEADER:
self.report(2, "invalid block leader")
return -1
head = self.next_byte()
if head != LEADER:
self.report(2, "invalid block leader")
return -1
head = self.next_byte()
if head != SYNC:
self.report(2, "unknown file type")
return -1
head = self.next_byte()
if head != END_OF_FILE_BLOCK:
self.report(2, "invalid end of file block")
return -1
self.state = 100
return self.generate_final_listing()
def build_listing(self, next_byte):
"""Turns block contents into a string formatted, de-tokenised list"""
def next_line_high():
self.next_line = next_byte * 256
self.state = EXPECTING_LINE_ADDRESS_LOW
def next_line_low():
self.next_line += next_byte
self.state = EXPECTING_LINE_NUMBER_HIGH
def line_number_high():
self.line_number = next_byte * 256
self.state = EXPECTING_LINE_NUMBER_LOW
def line_number_low():
self.line_number += next_byte
self.current_line = str(self.line_number) + " "
self.state = LINE_DATA
def process_byte():
if next_byte == 0:
self.current_line += EOL
self.listing.append(self.current_line)
self.current_line = ""
self.state = EXPECTING_LINE_ADDRESS_HIGH
else:
self.current_line += self.tokeniser.convert(next_byte)
if self.state == EXPECTING_LINE_ADDRESS_HIGH:
next_line_high()
elif self.state == EXPECTING_LINE_ADDRESS_LOW:
next_line_low()
elif self.state == EXPECTING_LINE_NUMBER_HIGH:
line_number_high()
elif self.state == EXPECTING_LINE_NUMBER_LOW:
line_number_low()
elif self.state == LINE_DATA:
process_byte()
def generate_final_listing(self):
"""Turns the list of lines into a single string."""
result = ""
return result.join(self.listing)
def report(self, level, message):
if level >= self.verbosity:
print(message)
def build_header(self, filename):
result = []
filename_list = list(filename)
del filename_list[FILENAME_LENGTH:]
while len(filename_list) < FILENAME_LENGTH:
filename_list.append(" ")
for x in range(self.leader_length):
result.append(LEADER)
block = FileBlock(NAME_FILE_BLOCK)
for x in range(FILENAME_LENGTH):
block.append(ord(filename_list[x]))
block.append(BASIC_FILE_IDENTIFIER)
block.append(BINARY_FILE_FLAG)
block.append(CONTINUOUS_FILE)
block.append(0)
block.append(0)
block.append(0x7c)
block.append(0xaf)
result += block.seal_block()
for x in range(self.leader_length):
result.append(LEADER)
return result
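    # Name block layout written above (descriptive note, derived from the code):
    # 8-character filename, file type (BASIC), ascii/binary flag, gap flag, then
    # two 16-bit address fields (here 0x0000 and 0x7CAF), mirroring the fields
    # read back in process_header().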
def build_file(self, filename, data, header_length=DEFAULT_LEADER_SIZE):
self.leader_length = header_length
result = self.build_header(filename)
loop = len(data) > 0
block = FileBlock(DATA_BLOCK)
while loop:
if block.capacity() == 0:
result += block.seal_block()
result.append(LEADER)
result.append(LEADER)
block = FileBlock(DATA_BLOCK)
block.append(data.pop(0))
loop = len(data) > 0
result += block.seal_block()
result.append(LEADER)
result.append(LEADER)
block = FileBlock(END_OF_FILE_BLOCK)
result += block.seal_block()
result.append(LEADER)
return bytearray(result)
```
#### File: cas2bas/formats/Coco_Tokens.py
```python
from formats.Dragon_Tokens import DragonToken
from formats.Utility import invert_dictionary
KEYWORD = 0
FUNCTION = 1
MAXIMUM_KEYWORD = 0xcd
MAXIMUM_FUNCTION = 0xa1
class CoCoToken(DragonToken):
"""Converts byte codes into tokens, or more accurately detokenises
a byte stream one byte at a time."""
keyword_token_dictionary = {
0x80: "FOR",
0x81: "GO",
0x82: "REM",
0x83: "'",
0x84: "ELSE",
0x85: "IF",
0x86: "DATA",
0x87: "PRINT",
0x88: "ON",
0x89: "INPUT",
0x8a: "END",
0x8b: "NEXT",
0x8c: "DIM",
0x8d: "READ",
0x8e: "RUN",
0x8f: "RESTORE",
0x90: "RETURN",
0x91: "STOP",
0x92: "POKE",
0x93: "CONT",
0x94: "LIST",
0x95: "CLEAR",
0x96: "NEW",
0x97: "CLOAD",
0x98: "CSAVE",
0x99: "OPEN",
0x9a: "CLOSE",
0x9b: "LLIST",
0x9c: "SET",
0x9d: "RESET",
0x9e: "CLS",
0x9f: "MOTOR",
0xa0: "SOUND",
0xa1: "AUDIO",
0xa2: "EXEC",
0xa3: "SKIPF",
0xa4: "TAB(",
0xa5: "TO",
0xa6: "SUB",
0xa7: "THEN",
0xa8: "NOT",
0xa9: "STEP",
0xaa: "OFF",
0xab: "+",
0xac: "-",
0xad: "*",
0xae: "/",
0xaf: "^",
0xb0: "AND",
0xb1: "OR",
0xb2: ">",
0xb3: "=",
0xb4: "<",
0xb5: "DEL",
0xb6: "EDIT",
0xb7: "TRON",
0xb8: "TROFF",
0xb9: "DEF",
0xbb: "LINE",
0xbc: "PCLS",
0xbd: "PSET",
0xbe: "PRESET",
0xbf: "SCREEN",
0xc0: "PCLEAR",
0xc1: "COLOR",
0xc2: "CIRCLE",
0xc3: "PAINT",
0xc4: "GET",
0xc5: "PUT",
0xc6: "DRAW",
0xc7: "PCOPY",
0xc8: "PMODE",
0xc9: "PLAY",
0xca: "DLOAD",
0xcb: "RENUM",
0xcc: "FN",
0xcd: "USING"
}
function_token_dictionary = {
0x80: "SGN",
0x81: "INT",
0x82: "ABS",
0x83: "USR",
0x84: "RND",
0x85: "SIN",
0x86: "PEEK",
0x87: "LEN",
0x88: "STR$",
0x89: "VAL",
0x8a: "ASC",
0x8b: "CHR$",
0x8c: "EOF",
0x8d: "JOYSTK",
0x8e: "LEFT$",
0x8f: "RIGHT$",
0x90: "MID$",
0x91: "POINT",
0x92: "INKEY$",
0x93: "MEM",
0x94: "ATN",
0x95: "COS",
0x96: "TAN",
0x97: "EXP",
0x98: "FIX",
0x99: "LOG",
0x9a: "POS",
0x9b: "SQR",
0x9c: "HEX$",
0x9d: "VARPTR",
0x9e: "INSTR",
0x9f: "TIMER",
0xa0: "PPOINT",
0xa1: "STRING$"
}
def __init__(self):
super().__init__()
self.max_keyword = MAXIMUM_KEYWORD
self.max_function = MAXIMUM_FUNCTION
self.name = "Coco Tokens"
self.keyword_dictionary = invert_dictionary(
self.keyword_token_dictionary)
self.function_dictionary = invert_dictionary(
self.function_token_dictionary)
```
#### File: cas2bas/formats/Empty_Tokens.py
```python
from formats.Utility import invert_dictionary
KEYWORD = 0
FUNCTION = 1
MAXIMUM_KEYWORD = 0x80
MAXIMUM_FUNCTION = 0x80
MAXIMUM_TOKEN_LENGTH = 7
FUNCTION_OFFSET = 0xff00
EXPECTING_LINE_NUMBER = 1
EXPECTING_INITIAL_WHITE_SPACE = 2
EXPECTING_TOKEN = 3
EXPECTING_LITERAL_OR_WHITE_SPACE = 4
EXPECTING_STRING_LITERAL = 5
EXPECTING_LITERAL_TO_EOL = 6
CLOSE_LINE = 7
MATCH_NEXT_SINGLE_CHARACTER = 5
MATCH_NUMERIC_CHARACTER = 4
MATCH_RESERVED_CHARACTER = 3
MATCH_TOKEN = 2
NO_MATCH = 1
TAB = "\t"
EOL = "\n"
CR = "\r"
SPACE = " "
STRING_DELIMITER = '"'
COLON = ":"
SEMICOLON = ";"
COMMA = ","
STRING_IDENTIFIER = "$"
OPEN_BRACKET = "("
CLOSE_BRACKET = ")"
class EmptyToken(object):
keyword_token_dictionary = {}
function_token_dictionary = {}
reserved_literals = [
SPACE,
STRING_DELIMITER,
COLON,
CR,
EOL,
SEMICOLON,
COMMA
]
def __init__(self):
self.state = KEYWORD
self.max_keyword = MAXIMUM_KEYWORD
self.max_function = MAXIMUM_FUNCTION
self.name = "Empty tokens"
self.keyword_dictionary = invert_dictionary(
self.keyword_token_dictionary)
self.function_dictionary = invert_dictionary(
self.function_token_dictionary)
def convert(self, byte):
"""Translates a byte to a string. Ascii characters are literal,
values over 127 are tokens or token sequences.
Not all token values are valid."""
if byte < 128:
return chr(byte)
if self.state == FUNCTION:
if byte <= self.max_function:
function = self.function_token_dictionary.get(byte)
self.state = KEYWORD
return function
else:
return "invalid function token"
if byte == 255:
if self.max_function > 0:
self.state = FUNCTION
return ""
else:
return "invalid extension token"
if byte <= self.max_keyword:
return self.keyword_token_dictionary.get(byte)
else:
return "invalid keyword token"
def match(self, sample):
valid = False
token = sample
if self.keyword_dictionary.get(sample) is not None:
valid = True
token = self.keyword_dictionary.get(sample)
if not valid and self.function_dictionary.get(sample) is not None:
valid = True
token = FUNCTION_OFFSET + self.function_dictionary.get(sample)
return valid, token
def is_reserved(self, sample):
result = False
reserved = None
for item in self.reserved_literals:
if item == sample:
result = True
reserved = item
return result, reserved
@staticmethod
def is_numeric(sample):
return "0" <= sample <= "9"
@staticmethod
def word_to_bytes(word):
msb = (word & 0xff00) >> 8
lsb = word & 0xff
return msb, lsb
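    # Illustrative example (not in the original source): word_to_bytes(0x1E20)
    # returns (0x1E, 0x20), i.e. the high byte followed by the low byte of a
    # 16-bit address.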
def parse_line(self, plain):
""" parse_line "assumes" that the plain input is a correctly
constructed basic program statement """
def process_line(char, line_number):
if self.is_numeric(char):
line_number += char
return 1, line_number
return 0, line_number
def process_white_space(char):
if char == SPACE or char == TAB:
return 1
else:
return 0
def append_to_stream(value, stream):
if type(value) == str:
value = ord(value)
if value > 0xff:
msb, lsb = self.word_to_bytes(value)
stream.append(msb)
stream.append(lsb)
else:
stream.append(value)
return stream
def build_token(char, sample):
is_reserved = False
numeric = self.is_numeric(char)
any_reserved, not_used = self.is_reserved(char)
sample += char
if sample == char:
is_reserved = any_reserved
valid, test_key = self.match(sample)
single_valid, single_key = self.match(char)
if numeric:
return MATCH_NUMERIC_CHARACTER, sample, None
if is_reserved:
return MATCH_RESERVED_CHARACTER, sample, None
elif valid:
return MATCH_TOKEN, sample, test_key
elif single_valid:
return MATCH_NEXT_SINGLE_CHARACTER, sample[:-1], single_key
elif any_reserved:
return MATCH_NEXT_SINGLE_CHARACTER, sample[:-1], char
else:
return NO_MATCH, sample, None
plain_array = list(plain)
state = EXPECTING_LINE_NUMBER
line = ""
statement = []
token = ""
result = 0
loop = len(plain_array) > 0
next_char = plain_array.pop(0)
while loop:
if state == EXPECTING_LINE_NUMBER:
outcome, line = process_line(next_char, line)
if outcome == 0:
state = EXPECTING_INITIAL_WHITE_SPACE
else:
next_char = plain_array.pop(0)
elif state == EXPECTING_INITIAL_WHITE_SPACE:
outcome = process_white_space(next_char)
if outcome == 0:
state = EXPECTING_TOKEN
else:
next_char = plain_array.pop(0)
elif state == EXPECTING_TOKEN:
outcome, token, key = build_token(next_char, token)
if outcome == MATCH_NEXT_SINGLE_CHARACTER:
while len(token) > 0:
statement = append_to_stream(
token[0], statement
)
token = token[1:]
if key == EOL or key == CR:
state = CLOSE_LINE
elif key == COLON:
statement = append_to_stream(key, statement)
token = ""
next_char = plain_array.pop(0)
elif key == STRING_DELIMITER:
state = EXPECTING_STRING_LITERAL
statement = append_to_stream(key, statement)
token = ""
next_char = plain_array.pop(0)
elif key == SEMICOLON \
or key == COMMA:
statement = append_to_stream(key, statement)
token = ""
next_char = plain_array.pop(0)
else:
statement = append_to_stream(key, statement)
token = ""
next_char = plain_array.pop(0)
elif outcome == MATCH_NUMERIC_CHARACTER:
while len(token) > 0:
statement = append_to_stream(
token[0], statement
)
token = token[1:]
token = ""
next_char = plain_array.pop(0)
elif outcome == MATCH_RESERVED_CHARACTER:
if token == COLON \
or token == SEMICOLON \
or token == COMMA:
statement = append_to_stream(token, statement)
token = ""
next_char = plain_array.pop(0)
elif token == EOL or token == CR:
state = CLOSE_LINE
elif token == SPACE:
statement = append_to_stream(token, statement)
next_char = plain_array.pop(0)
token = ""
elif token == STRING_DELIMITER:
statement = append_to_stream(token, statement)
next_char = plain_array.pop(0)
token = ""
state = EXPECTING_STRING_LITERAL
elif outcome == MATCH_TOKEN:
state = EXPECTING_TOKEN
if token == "ELSE":
if statement[-1:][0] != ord(COLON):
statement = append_to_stream(COLON, statement)
elif token == "REM":
state = EXPECTING_LITERAL_TO_EOL
elif token == "'":
state = EXPECTING_LITERAL_TO_EOL
elif token == "DATA":
state = EXPECTING_LITERAL_TO_EOL
token = ""
statement = append_to_stream(key, statement)
next_char = plain_array.pop(0)
elif outcome == NO_MATCH:
if next_char == STRING_IDENTIFIER \
or next_char == OPEN_BRACKET \
or next_char == CLOSE_BRACKET:
while len(token) > 0:
statement = append_to_stream(
token[0], statement
)
token = token[1:]
token = ""
next_char = plain_array.pop(0)
elif state == EXPECTING_LITERAL_OR_WHITE_SPACE:
reserved, token = self.is_reserved(next_char)
if reserved:
if token == EOL:
state = CLOSE_LINE
elif token == CR:
next_char = plain_array.pop(0)
elif token == ":":
statement = append_to_stream(token, statement)
state = EXPECTING_TOKEN
next_char = plain_array.pop(0)
token = ""
elif token == '"':
statement = append_to_stream(token, statement)
state = EXPECTING_STRING_LITERAL
next_char = plain_array.pop(0)
else:
statement = append_to_stream(next_char, statement)
next_char = plain_array.pop(0)
elif state == EXPECTING_STRING_LITERAL:
reserved, token = self.is_reserved(next_char)
if token == EOL or token == CR:
statement = append_to_stream(0, statement)
loop = False
result = -1
elif token == STRING_DELIMITER:
statement = append_to_stream(token, statement)
state = EXPECTING_TOKEN
token = ""
next_char = plain_array.pop(0)
else:
statement = append_to_stream(next_char, statement)
next_char = plain_array.pop(0)
elif state == EXPECTING_LITERAL_TO_EOL:
if next_char == EOL:
state = CLOSE_LINE
elif next_char == CR:
next_char = plain_array.pop(0)
else:
statement = append_to_stream(next_char, statement)
next_char = plain_array.pop(0)
elif state == CLOSE_LINE:
statement = append_to_stream(0, statement)
loop = False
result = 0
return result, line, statement
def parse_program(self, program, load_address):
def extract_line(plain_text):
next_eol = plain_text.find(EOL)
if next_eol == -1:
if len(plain_text) > 0:
next_line = plain_text + EOL
else:
next_line = ""
remaining = ""
else:
next_line = plain_text[:next_eol + 1]
remaining = plain_text[next_eol + 1:]
return next_line, remaining
result = 0
loop = len(program) > 0
stream = []
load_address += 1
while loop:
sample, program = extract_line(program)
if len(sample) > 0:
result, line_number, line_bytes = self.parse_line(sample)
if result == 0:
load_address += 4 + len(line_bytes)
msb, lsb = self.word_to_bytes(load_address)
stream += [msb, lsb]
msb, lsb = self.word_to_bytes(int(line_number))
stream += [msb, lsb]
stream += line_bytes
loop = result == 0 and len(program) > 0
stream += [0, 0]
return result, bytearray(stream)
```
#### File: cas2bas/tests/test_Dragon_Tokens.py
```python
import pytest
from formats import Dragon_Dos_Tokens
from formats import Dragon_Tokens
tokeniser = Dragon_Tokens.DragonToken()
dos_tokeniser = Dragon_Dos_Tokens.DragonDosToken()
EOL = chr(10)
@pytest.fixture(autouse=True)
def before_each():
tokeniser.state = Dragon_Tokens.KEYWORD
dos_tokeniser.state = Dragon_Tokens.KEYWORD
@pytest.mark.parametrize("test_input,expected",
[(0x96, "CLEAR"), (0xa9, "TROFF"), (0x83, "'")])
def test_when_a_single_byte_token_is_supplied_return_the_token_keyword(
test_input,
expected):
actual = tokeniser.convert(test_input)
assert actual == expected
def test_given_a_255_byte_set_extended_mode_and_return_none():
expected = ''
token_value = 0xFF
actual = tokeniser.convert(token_value)
assert actual == expected
assert tokeniser.state == Dragon_Tokens.FUNCTION
@pytest.mark.parametrize("test_input,expected",
[(0x93, "JOYSTK"), (0xA1, "USR"), (0x9C, "VARPTR")])
def test_when_a_two_byte_token_is_supplied_return_the_token_keyword(
test_input,
expected):
tokeniser.convert(0xFF)
actual = tokeniser.convert(test_input)
assert actual == expected
@pytest.mark.parametrize("test_input,expected",
[(0x41, "A"), (0x42, "B"), (0x53, "S"), (0x28, "(")])
def test_when_an_ascii_value_byte_is_supplied_return_the_equivalent_character(
test_input,
expected):
actual = tokeniser.convert(test_input)
assert actual == expected
def test_when_an_illegal_single_byte_token_supplied_return_an_error_message():
expected = "invalid keyword token"
token_value = 0xFE
actual = tokeniser.convert(token_value)
assert actual == expected
def test_when_an_illegal_double_byte_token_supplied_return_an_error_message():
expected = "invalid function token"
token_value = 0xB0
tokeniser.convert(0xFF)
actual = tokeniser.convert(token_value)
assert actual == expected
@pytest.mark.parametrize("test_input,expected",
[(0x96, "CLEAR"), (0xCE, "AUTO"),
(0xDD, "MERGE"), (0xE7, "SWAP")])
def test_when_a_dos_token_is_used_return_a_dos_keyword(test_input, expected):
actual = dos_tokeniser.convert(test_input)
assert actual == expected
@pytest.mark.parametrize("test_input,expected",
[(0x93, "JOYSTK"), (0xA2, "LOF"),
(0xA4, "ERL"), (0xA8, "FRE$")])
def test_when_a_two_byte_dos_token_is_supplied_return_the_token_function(
test_input,
expected):
dos_tokeniser.convert(0xFF)
actual = dos_tokeniser.convert(test_input)
assert actual == expected
@pytest.mark.parametrize("test_input,expected",
[("invalid", (False, "invalid")),
("bad", (False, "bad"))])
def test_when_invalid_string_is_supplied_to_match_return_a_false_string_tuple(
test_input,
expected):
actual = tokeniser.match(test_input)
assert actual == expected
@pytest.mark.parametrize("test_input,expected",
[("PRINT", (True, 0x87)),
("USING", (True, 0xcd))])
def test_when_a_known_keyword_string_is_supplied_return_a_true_token_tuple(
test_input,
expected):
actual = tokeniser.match(test_input)
assert actual == expected
@pytest.mark.parametrize("test_input,expected",
[("SGN", (True, 0xff80)), ("VARPTR", (True, 0xff9c))])
def test_when_a_known_function_string_is_supplied_return_a_true_token_tuple(
test_input,
expected):
actual = tokeniser.match(test_input)
assert actual == expected
def test_given_a_valid_line_string_build_a_tokenised_string():
sample = "10 STOP\n"
result, line, actual = tokeniser.parse_line(sample)
assert result == 0
assert line == "10"
assert actual == [0x92, 0]
def test_given_an_input_without_a_terminated_string_result_is_negative():
sample = '10 PRINT"HELLO WORL\n'
result, line, actual = tokeniser.parse_line(sample)
assert result == -1
def test_given_a_goto_statement_result_is_correctly_two_tokens():
sample = "10 GOTO 10\n"
result, line, actual = tokeniser.parse_line(sample)
assert result == 0
assert line == "10"
assert actual == [0x81, 0xbc, 0x20, 0x31, 0x30, 0]
def test_given_a_gosub_statment_result_is_correctly_two_tokens():
sample = "10 GOSUB20\n"
result, line, actual = tokeniser.parse_line(sample)
assert result == 0
assert line == "10"
assert actual == [0x81, 0xbd, 0x32, 0x30, 0]
def test_given_a_valid_program_build_a_token_stream():
load_address = 0x1E20
sample = '10 PRINT"HELLO WORLD";\n20 GOTO 10\n'
result, stream = tokeniser.parse_program(sample, load_address)
assert result == 0
assert len(stream) > 0
def test_given_a_variable_assignment_result_is_correctly_encoded():
sample = "10 A=B+C\n"
result, line, actual = tokeniser.parse_line(sample)
assert result == 0
assert line == "10"
assert actual == [0x41, 0xcb, 0x42, 0xc3, 0x43, 0]
def test_given_multiple_sublines_encode_tokens_correctly():
expected = [0x80, 0x49, 0xcb, 0x31, 0xbc, 0x31, 0x32, 0x3a,
0x8d, 0x41, 0x24, 0x28, 0x49, 0x29, 0x3a,
0x8b, 0x3a,
0x80, 0x49, 0xcb, 0x31, 0xbc, 0x37, 0x3a,
0x8d, 0x42, 0x24, 0x28, 0x49, 0x29, 0x2c, 0x42,
0x4d, 0x28, 0x49, 0x29, 0x3a,
0x8b, 0x3a,
0x80, 0x49, 0xcb, 0x31, 0xbc, 0x37, 0x3a,
0x8d, 0x43, 0x24, 0x28, 0x49, 0x29, 0x3a,
0x8b, 0x3a,
0x81, 0xbd, 0x32, 0x30, 0x32, 0x30, 0x00]
sample = '20 FORI=1TO12:READA$(I):NEXT:FORI=1TO7:READB$(I),' \
'BM(I):NEXT:FORI=1TO7:READC$(I):NEXT:GOSUB2020\n'
result, line, actual = tokeniser.parse_line(sample)
assert result == 0
assert line == "20"
assert actual == expected
def test_string_literal_at_end_of_subline_is_one_byte():
expected = [0x87, 0x22, 0x48, 0x45, 0x4c, 0x4c, 0x4f, 0x20,
0x57, 0x4f, 0x52, 0x4c, 0x44, 0x22, 0x3b, 0x3a,
0x81, 0xbc, 0x31, 0x30, 0]
sample = '10 PRINT"HELLO WORLD";:GOTO10\n'
result, line, actual = tokeniser.parse_line(sample)
assert result == 0
assert line == "10"
assert actual == expected
def test_dont_add_extra_colon_before_else_if_not_needed():
expected = [0x85, 0x41, 0x53, 0x20, 0xBF, 0x31, 0x38, 0x30,
0x3A, 0x84, 0x31, 0x30, 0x30, 0x00]
sample = "300 IFAS THEN180:ELSE100\n"
result, line, actual = tokeniser.parse_line(sample)
assert result == 0
assert line == "300"
assert actual == expected
def test_semicolon_resets_token_search():
expected = [0x87, 0x40, 0x36, 0x34, 0x2C, 0x22, 0x58, 0x20,
0x43, 0x4F, 0x2D, 0x4F, 0x52, 0x44, 0x22, 0x3B,
0xFF, 0x8C, 0x28, 0x33, 0x32, 0x32, 0x36, 0x36,
0x29, 0x00]
sample = '310 PRINT@64,"X CO-ORD";PEEK(32266)\n'
result, line, actual = tokeniser.parse_line(sample)
assert result == 0
assert line == "310"
assert actual == expected
def test_comma_resets_token_search():
expected = [0x87, 0x40, 0x36, 0x34, 0x2C, 0x22, 0x22, 0x3A,
0x87, 0x3A, 0x87, 0x20, 0xCD, 0x22, 0x25, 0x20,
0x20, 0x20, 0x25, 0x23, 0x23, 0x23, 0x2C, 0x23,
0x23, 0x23, 0x2C, 0x23, 0x23, 0x23, 0x2C, 0x23,
0x23, 0x23, 0x22, 0x3B, 0x22, 0x43, 0x41, 0x53,
0x48, 0x3A, 0x22, 0x2C, 0xFF, 0x8C, 0x28, 0x33,
0x32, 0x30, 0x35, 0x33, 0x29, 0xC5, 0x31, 0x36,
0x37, 0x37, 0x37, 0x32, 0x31, 0x36, 0xC3, 0xFF,
0x8C, 0x28, 0x33, 0x32, 0x30, 0x35, 0x34, 0x29,
0xC5, 0x36, 0x35, 0x35, 0x33, 0x36, 0xC3, 0xFF,
0x8C, 0x28, 0x33, 0x32, 0x30, 0x35, 0x35, 0x29,
0xC5, 0x32, 0x35, 0x36, 0xC3, 0xFF, 0x8C, 0x28,
0x33, 0x32, 0x30, 0x35, 0x36, 0x29, 0x00]
sample = '380 PRINT@64,"":PRINT:PRINT USING"% %###,###,###,###";' \
'"CASH:",PEEK(32053)*16777216+PEEK(32054)*65536+PEEK(32055)' \
'*256+PEEK(32056)\n'
result, line, actual = tokeniser.parse_line(sample)
assert result == 0
assert line == "380"
assert actual == expected
def test_dollar_always_means_end_of_string_variable_or_token():
expected = [0x85, 0xFF, 0x96, 0x28, 0x42, 0x24, 0x28, 0x49,
0x29, 0x2C, 0x31, 0x29, 0xCB, 0x41, 0x53, 0x24,
0xBF, 0x49, 0x54, 0xCB, 0x49, 0x3A, 0x49, 0xCB,
0x31, 0x30, 0x00]
sample = '510 IFLEFT$(B$(I),1)=AS$THENIT=I:I=10\n'
result, line, actual = tokeniser.parse_line(sample)
assert result == 0
assert line == "510"
assert actual == expected
def test_parentheses_are_delimiters_too():
expected = [0x85, 0x20, 0x41, 0x53, 0xCC, 0x30, 0x20, 0xC9,
0x20, 0x41, 0x53, 0xCA, 0x42, 0x4D, 0x28, 0x49,
0x54, 0x29, 0xBF, 0x35, 0x34, 0x30, 0x00]
sample = '550 IF AS<0 OR AS>BM(IT)THEN540\n'
result, line, actual = tokeniser.parse_line(sample)
assert result == 0
assert line == "550"
assert actual == expected
def test_end_of_program_needs_double_null():
expected = bytearray(b'\x1e\x0b\x00\n\x81\xbc 10\x00\x00\x00')
sample = "10 GOTO 10\n"
result, actual = tokeniser.parse_program(sample, 0x1e00)
assert result == 0
assert actual == expected
```
#### File: cas2bas/tests/test_Utility.py
```python
import pytest
from formats.Utility import invert_dictionary, find_verbosity
def test_given_an_empty_dictionary_invert_dictionary_returns_empty():
source = {}
expected = {}
actual = invert_dictionary(source)
assert actual == expected
def test_given_a_dictionary_invert_dictionary_returns_inverse_dictionary():
source = {
0x80: "FOR",
0x81: "GO"
}
expected = {
"FOR": 0x80,
"GO": 0x81
}
actual = invert_dictionary(source)
assert actual == expected
@pytest.mark.parametrize("test_input", ["-s", "--silent"])
def test_given_a_silent_option_find_verbosity_sets_to_silent(test_input):
opts = [test_input]
actual = find_verbosity(opts)
assert actual == 3
@pytest.mark.parametrize("test_input", ["--quiet", "-q"])
def test_given_a_quiet_option_find_verbosity_sets_to_quiet(test_input):
opts = [test_input]
actual = find_verbosity(opts)
assert actual == 2
@pytest.mark.parametrize("test_input", ["--verbose", "-v"])
def test_given_a_verbose_option_find_verbosity_sets_to_noisy(test_input):
opts = [test_input]
actual = find_verbosity(opts)
assert actual == 0
@pytest.mark.parametrize("test_input", ["--any", "-dd"])
def test_not_given_any_verbosity_options_find_verbosity_sets_to_normal(
test_input):
opts = [test_input]
actual = find_verbosity(opts)
assert actual == 1
``` |
{
"source": "jimbrowne/pynagios",
"score": 3
} |
#### File: pynagios/pynagios/perf_data.py
```python
import re
from pynagios.range import Range
class PerfData(object):
"""
This class represents performance data for a response. Since
performance data has a non-trivial response format, this class
is meant to ease the formation of performance data.
"""
def __init__(self, label, value, uom=None, warn=None, crit=None,
minval=None, maxval=None):
"""Creates a new object representing a single performance data
item for a Nagios response.
Performance data is extra key/value data that can be returned
along with a response. The performance data is not used immediately
by Nagios itself, but can be extracted by 3rd party tools and can
often be helpful additional information for system administrators
to view. The `label` can be any string, but `value` must be a
numeric value.
Raises :class:`ValueError` if any of the parameters are invalid.
The exact nature of the error is in the human readable message
attribute of the exception.
:Parameters:
- `label`: Label for the performance data. This must be a
string.
- `value`: Value of the data point. This must be a number whose
characters are in the class of `[-0-9.]`
- `uom` (optional): Unit of measure. This must only be `%`, `s`
            for seconds, `c` for continuous data, or a unit of bit space
measurement ('b', 'kb', etc.)
- `warn` (optional): Warning range for this metric.
- `crit` (optional): Critical range for this metric.
- `minval` (optional): Minimum value possible for this metric,
if one exists.
- `maxval` (optional): Maximum value possible for this metric,
if one exists.
"""
self.label = label
self.value = value
self.uom = uom
self.warn = warn
self.crit = crit
self.minval = minval
self.maxval = maxval
@property
def value(self):
"""The value of this metric."""
return self._value
@value.setter
def value(self, value):
if value is None:
raise ValueError("value must not be None")
elif not self._is_valid_value(value):
raise ValueError("value must be in class [-0-9.]")
self._value = value
@property
def warn(self):
"""
The warning range of this metric. This return value of this
will always be a :py:class:`~pynagios.range.Range` object, even
if it was set with a string.
"""
return self._warn
@warn.setter
def warn(self, value):
if value is not None and not isinstance(value, Range):
value = Range(value)
self._warn = value
@property
def crit(self):
"""
The critical range of this metric. This return value of this
will always be a :py:class:`~pynagios.range.Range` object,
even if it was set with a string.
"""
return self._crit
@crit.setter
def crit(self, value):
if value is not None and not isinstance(value, Range):
value = Range(value)
self._crit = value
@property
def minval(self):
"""
The minimum value possible for this metric. This doesn't make
a lot of sense if the `uom` is '%', since that is obviously going
to be 0, but this will return whatever was set.
"""
return self._minval
@minval.setter
def minval(self, value):
if not self._is_valid_value(value):
raise ValueError("minval must be in class [-0-9.]")
self._minval = value
@property
def maxval(self):
"""
The maximum value possible for this metric. This doesn't make
a lot of sense if the `uom` is '%', since that is obviously going
to be 100, but this will return whatever was set.
"""
return self._maxval
@maxval.setter
def maxval(self, value):
if not self._is_valid_value(value):
raise ValueError("maxval must be in class [-0-9.]")
self._maxval = value
@property
def uom(self):
"""
The unit of measure (UOM) for this metric.
"""
return self._uom
@uom.setter
def uom(self, value):
valids = ['', 's', '%', 'b', 'kb', 'mb', 'gb', 'tb', 'c']
if value is not None and not str(value).lower() in valids:
raise ValueError("uom must be in: %s" % valids)
self._uom = value
def __str__(self):
"""
Returns the proper string format that should be outputted
in the plugin response string. This format is documented in
depth in the Nagios developer guidelines, but in general looks
like this:
| 'label'=value[UOM];[warn];[crit];[min];[max]
"""
# Quotify the label
label = self._quote_if_needed(self.label)
# Check for None in each and make it empty string if so
uom = self.uom or ''
warn = self.warn or ''
crit = self.crit or ''
minval = self.minval or ''
maxval = self.maxval or ''
# Create the proper format and return it
return "%s=%s%s;%s;%s;%s;%s" % (label, self.value, uom, warn, crit, minval, maxval)
def _is_valid_value(self, value):
"""
Returns boolean noting whether a value is in the proper value
format which certain values for the performance data must adhere to.
"""
value_format = re.compile(r"[-0-9.]+$")
return value is None or value_format.match(str(value))
def _quote_if_needed(self, value):
"""
This handles single quoting the label if necessary. The reason that
this is not done all the time is so that characters can be saved
since Nagios only reads 80 characters and one line of stdout.
"""
if '=' in value or ' ' in value or "'" in value:
# Quote the string and replace single quotes with double single
# quotes and return that
return "'%s'" % value.replace("'", "''")
else:
return value
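# Illustrative usage sketch (constructor arguments only; the exact string rendering
# of the warn/crit ranges depends on pynagios' Range formatting):
#   metric = PerfData("time", 0.27, uom="s", warn="5", crit="10", minval=0)
#   str(metric)   # -> "time=0.27s;...;...;0;" following the format documented above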
```
#### File: pynagios/pynagios/status.py
```python
class Status(object):
"""
Encapsulates a Nagios status, which holds a name and
an exit code.
"""
def __init__(self, name, exit_code):
"""
Creates a new status object for Nagios with the given name and
exit code.
**Note**: In general, this should never be called since the standard
statuses are exported from ``pynagios``.
"""
assert isinstance(exit_code, int)
assert isinstance(name, str)
self.name = name
self.exit_code = exit_code
def __repr__(self):
return "Status(name=%s, exit_code=%d)" % (repr(self.name), self.exit_code)
def __lt__(self, other):
return (self.exit_code < other.exit_code)
def __eq__(self, other):
return (self.exit_code == other.exit_code)
def __ne__(self, other):
return (self.exit_code != other.exit_code)
def __gt__(self, other):
return (self.exit_code > other.exit_code)
```
#### File: pynagios/tests/test_status.py
```python
import pytest
from pynagios.status import Status
class TestStatus(object):
def test_status_comparison(self):
"""
        Tests the rich comparison operators of the Status class
"""
a = Status('OK',0)
b = Status('OK',0)
assert a == b
        assert a is not b
assert Status('Test',0) < Status('Test',1)
assert Status('Test',1) > Status('Test',0)
``` |
{
"source": "jimbudarz/alibi",
"score": 3
} |
#### File: alibi/api/interfaces.py
```python
import abc
import json
from collections import ChainMap
from typing import Any
import logging
from functools import partial
import pprint
import attr
import numpy as np
logger = logging.getLogger(__name__)
# default metadata
def default_meta() -> dict:
return {
"name": None,
"type": [],
"explanations": [],
"params": {},
}
class AlibiPrettyPrinter(pprint.PrettyPrinter):
"""
    Overrides the built-in dictionary pretty representation to look more similar to the external
    prettyprinter library.
"""
_dispatch = {}
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# `sort_dicts` kwarg was only introduced in Python 3.8 so we just override it here.
# Before Python 3.8 the printing was done in insertion order by default.
self._sort_dicts = False
def _pprint_dict(self, object, stream, indent, allowance, context, level):
# Add a few newlines and the appropriate indentation to dictionary printing
# compare with https://github.com/python/cpython/blob/3.9/Lib/pprint.py
write = stream.write
indent += self._indent_per_level
write('{\n' + ' ' * (indent + 1))
if self._indent_per_level > 1:
write((self._indent_per_level - 1) * ' ')
length = len(object)
if length:
if self._sort_dicts:
items = sorted(object.items(), key=pprint._safe_tuple)
else:
items = object.items()
self._format_dict_items(items, stream, indent, allowance + 1,
context, level)
write('}\n' + ' ' * (indent - 1))
_dispatch[dict.__repr__] = _pprint_dict
alibi_pformat = partial(AlibiPrettyPrinter().pformat)
@attr.s
class Explainer(abc.ABC):
"""
Base class for explainer algorithms
"""
meta = attr.ib(default=attr.Factory(default_meta), repr=alibi_pformat) # type: dict
def __attrs_post_init__(self):
# add a name to the metadata dictionary
self.meta["name"] = self.__class__.__name__
# expose keys stored in self.meta as attributes of the class.
for key, value in self.meta.items():
setattr(self, key, value)
@abc.abstractmethod
def explain(self, X: Any) -> "Explanation":
pass
def reset_predictor(self, predictor: Any) -> None:
raise NotImplementedError
def _update_metadata(self, data_dict: dict, params: bool = False) -> None:
"""
Updates the metadata of the explainer using the data from the `data_dict`. If the params option
is specified, then each key-value pair is added to the metadata `'params'` dictionary.
Parameters
----------
data_dict
Contains the data to be stored in the metadata.
params
            If True, the method updates the `'params'` attribute of the metadata.
"""
if params:
for key in data_dict.keys():
self.meta['params'].update([(key, data_dict[key])])
else:
self.meta.update(data_dict)
class FitMixin(abc.ABC):
@abc.abstractmethod
def fit(self, X: Any) -> "Explainer":
pass
@attr.s
class Explanation:
"""
Explanation class returned by explainers.
"""
meta = attr.ib(repr=alibi_pformat) # type: dict
data = attr.ib(repr=alibi_pformat) # type: dict
def __attrs_post_init__(self):
"""
Expose keys stored in self.meta and self.data as attributes of the class.
"""
for key, value in ChainMap(self.meta, self.data).items():
setattr(self, key, value)
def to_json(self) -> str:
"""
Serialize the explanation data and metadata into a json format.
Returns
-------
String containing json representation of the explanation
"""
return json.dumps(attr.asdict(self), cls=NumpyEncoder)
@classmethod
def from_json(cls, jsonrepr) -> "Explanation":
"""
Create an instance of an Explanation class using a json representation of the Explanation.
Parameters
----------
jsonrepr
json representation of an explanation
Returns
-------
An Explanation object
"""
dictrepr = json.loads(jsonrepr)
try:
meta = dictrepr['meta']
data = dictrepr['data']
except KeyError:
logger.exception("Invalid explanation representation")
return cls(meta=meta, data=data)
def __getitem__(self, item):
"""
This method is purely for deprecating previous behaviour of accessing explanation
data via items in the returned dictionary.
"""
import warnings
msg = "The Explanation object is not a dictionary anymore and accessing elements should " \
"be done via attribute access. Accessing via item will stop working in a future version."
warnings.warn(msg, DeprecationWarning, stacklevel=2)
return getattr(self, item)
class NumpyEncoder(json.JSONEncoder):
def default(self, obj):
if isinstance(
obj,
(
np.int_,
np.intc,
np.intp,
np.int8,
np.int16,
np.int32,
np.int64,
np.uint8,
np.uint16,
np.uint32,
np.uint64,
),
):
return int(obj)
elif isinstance(obj, (np.float_, np.float16, np.float32, np.float64)):
return float(obj)
elif isinstance(obj, (np.ndarray,)):
return obj.tolist()
return json.JSONEncoder.default(self, obj)
```
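A minimal round-trip sketch for the `Explanation` container above, assuming the module is importable as `alibi.api.interfaces`. It shows that numpy arrays in `data` are serialized through `NumpyEncoder` and come back as plain lists, while keys remain accessible as attributes.
```python
import numpy as np
from alibi.api.interfaces import Explanation

exp = Explanation(meta={"name": "DummyExplainer"},
                  data={"attributions": np.zeros((2, 3))})
serialized = exp.to_json()                    # ndarray -> nested list via NumpyEncoder
restored = Explanation.from_json(serialized)  # meta/data keys re-exposed as attributes
assert restored.meta["name"] == "DummyExplainer"
assert restored.data["attributions"] == [[0.0, 0.0, 0.0], [0.0, 0.0, 0.0]]
```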
#### File: explainers/tests/test_integrated_gradients.py
```python
import numpy as np
import pytest
from alibi.explainers import IntegratedGradients
from alibi.api.interfaces import Explanation
import tensorflow as tf
from tensorflow.keras import Model
# generate some dummy data
N = 100
N_TRAIN = 90
N_FEATURES = 4
N_TEST = N - N_TRAIN
BASELINES = [None, 1, np.random.rand(N_TEST, N_FEATURES)]
X = np.random.rand(N, N_FEATURES)
X_train, X_test = X[:N_TRAIN, :], X[N_TRAIN:, :]
# multi inputs features
X0 = np.random.rand(N, 10, N_FEATURES)
X_multi_inputs = [X0, X]
X_train_multi_inputs, X_test_multi_inputs = [X0[:N_TRAIN, :], X[:N_TRAIN, :]], [X0[N_TRAIN:, :], X[N_TRAIN:, :]]
BASELINES_MULTI_INPUTS = [None, [1, 1],
[np.random.random(X_test_multi_inputs[0].shape),
np.random.random(X_test_multi_inputs[1].shape)]]
# regression labels
y_regression = X[:, 0] + X[:, 1]
y_train_regression = y_regression[:N_TRAIN]
# classification labels
y_classification_ordinal = (X[:, 0] + X[:, 1] > 1).astype(int)
y_classification_categorical = tf.keras.utils.to_categorical(y_classification_ordinal)
y_train_classification_ordinal = y_classification_ordinal[:N_TRAIN]
y_train_classification_categorical = y_classification_categorical[:N_TRAIN, :]
test_labels = y_classification_ordinal[N_TRAIN:]
# integral method used shouldn't affect wrapper functionality
INTEGRAL_METHODS = ['gausslegendre', 'riemann_middle']
@pytest.fixture()
def ffn_model(request):
"""
Simple feed-forward model with configurable data, loss function, output activation and dimension
"""
config = request.param
inputs = tf.keras.Input(shape=config['X_train'].shape[1:])
x = tf.keras.layers.Dense(20, activation='relu')(inputs)
x = tf.keras.layers.Dense(20, activation='relu')(x)
outputs = tf.keras.layers.Dense(config['output_dim'], activation=config['activation'])(x)
if config.get('squash_output', False):
outputs = tf.keras.layers.Reshape(())(outputs)
model = tf.keras.models.Model(inputs=inputs, outputs=outputs)
model.compile(loss=config['loss'],
optimizer='adam')
model.fit(config['X_train'], config['y_train'], epochs=1, batch_size=256, verbose=0)
return model
@pytest.fixture()
def ffn_model_multi_inputs(request):
"""
Simple multi-inputs feed-forward model with configurable data, loss function, output activation and dimension
"""
config = request.param
input0 = tf.keras.Input(shape=config['X_train_multi_inputs'][0].shape[1:])
input1 = tf.keras.Input(shape=config['X_train_multi_inputs'][1].shape[1:])
x = tf.keras.layers.Flatten()(input0)
x = tf.keras.layers.Concatenate()([x, input1])
x = tf.keras.layers.Dense(20, activation='relu')(x)
outputs = tf.keras.layers.Dense(config['output_dim'], activation=config['activation'])(x)
if config.get('squash_output', False):
outputs = tf.keras.layers.Reshape(())(outputs)
model = tf.keras.models.Model(inputs=[input0, input1], outputs=outputs)
model.compile(loss=config['loss'],
optimizer='adam')
model.fit(config['X_train_multi_inputs'], config['y_train'], epochs=1, batch_size=256, verbose=0)
return model
@pytest.fixture()
def ffn_model_subclass(request):
"""
Simple subclassed feed-forward model with configurable data, loss function, output activation and dimension
"""
config = request.param
class Linear(Model):
def __init__(self, output_dim, activation):
super(Linear, self).__init__()
self.dense_1 = tf.keras.layers.Dense(20, activation='relu')
self.dense_2 = tf.keras.layers.Dense(20, activation='relu')
self.dense_3 = tf.keras.layers.Dense(output_dim, activation)
def call(self, inputs):
x = self.dense_1(inputs)
x = self.dense_2(x)
outputs = self.dense_3(x)
return outputs
model = Linear(config['output_dim'], activation=config['activation'])
model.compile(loss=config['loss'],
optimizer='adam')
model.fit(config['X_train'], config['y_train'], epochs=1, batch_size=256, verbose=1)
return model
@pytest.fixture()
def ffn_model_subclass_list_input(request):
"""
Simple subclassed, multi-input feed-forward model with configurable data,
loss function, output activation and dimension
"""
config = request.param
class Linear(Model):
def __init__(self, output_dim, activation):
super(Linear, self).__init__()
self.flat = tf.keras.layers.Flatten()
self.concat = tf.keras.layers.Concatenate()
self.dense_1 = tf.keras.layers.Dense(20, activation='relu')
self.dense_2 = tf.keras.layers.Dense(output_dim, activation)
def call(self, inputs):
inp0 = self.flat(inputs[0])
inp1 = self.flat(inputs[1])
x = self.concat([inp0, inp1])
x = self.dense_1(x)
outputs = self.dense_2(x)
return outputs
model = Linear(config['output_dim'], activation=config['activation'])
model.compile(loss=config['loss'],
optimizer='adam')
model.fit(config['X_train_multi_inputs'], config['y_train'], epochs=1, batch_size=256, verbose=1)
return model
@pytest.fixture()
def ffn_model_sequential(request):
"""
Simple sequential feed-forward model with configurable data, loss function, output activation and dimension
"""
config = request.param
layers = [
tf.keras.layers.InputLayer(input_shape=config['X_train'].shape[1:]),
tf.keras.layers.Dense(20, activation='relu'),
tf.keras.layers.Dense(config['output_dim'], activation=config['activation'])
]
if config.get('squash_output', False):
layers.append(tf.keras.layers.Reshape(()))
model = tf.keras.models.Sequential(layers)
model.compile(loss=config['loss'],
optimizer='adam')
model.fit(config['X_train'], config['y_train'], epochs=1, batch_size=256, verbose=1)
return model
@pytest.mark.parametrize('ffn_model_sequential', [({'output_dim': 2,
'activation': 'softmax',
'loss': 'categorical_crossentropy',
'X_train': X_train,
'y_train': y_train_classification_categorical})], indirect=True)
@pytest.mark.parametrize('method', INTEGRAL_METHODS, ids='method={}'.format)
@pytest.mark.parametrize('baselines', BASELINES)
def test_integrated_gradients_model_sequential(ffn_model_sequential, method, baselines):
model = ffn_model_sequential
ig = IntegratedGradients(model, n_steps=50, method=method)
explanations = ig.explain(X_test,
baselines=baselines,
target=test_labels)
assert isinstance(explanations, Explanation)
assert explanations['data']['attributions'][0].shape == X_test.shape
assert 'deltas' in explanations['data'].keys()
assert explanations['data']['deltas'].shape[0] == X_test.shape[0]
assert 'predictions' in explanations['data'].keys()
assert explanations['data']['predictions'].shape[0] == X_test.shape[0]
@pytest.mark.parametrize('ffn_model_subclass', [({'output_dim': 2,
'activation': 'softmax',
'loss': 'categorical_crossentropy',
'X_train': X_train,
'y_train': y_train_classification_categorical})], indirect=True)
@pytest.mark.parametrize('method', INTEGRAL_METHODS, ids='method={}'.format)
@pytest.mark.parametrize('baselines', BASELINES)
def test_integrated_gradients_model_subclass(ffn_model_subclass, method, baselines):
model = ffn_model_subclass
ig = IntegratedGradients(model, n_steps=50, method=method)
explanations = ig.explain(X_test,
baselines=baselines,
target=test_labels)
assert isinstance(explanations, Explanation)
assert explanations['data']['attributions'][0].shape == X_test.shape
assert 'deltas' in explanations['data'].keys()
assert explanations['data']['deltas'].shape[0] == X_test.shape[0]
assert 'predictions' in explanations['data'].keys()
assert explanations['data']['predictions'].shape[0] == X_test.shape[0]
@pytest.mark.parametrize('ffn_model_subclass_list_input', [({'output_dim': 2,
'activation': 'softmax',
'loss': 'categorical_crossentropy',
'X_train_multi_inputs': X_train_multi_inputs,
'y_train': y_train_classification_categorical})],
indirect=True)
@pytest.mark.parametrize('method', INTEGRAL_METHODS, ids='method={}'.format)
@pytest.mark.parametrize('baselines', BASELINES_MULTI_INPUTS)
def test_integrated_gradients_model_subclass_list_input(ffn_model_subclass_list_input, method, baselines):
model = ffn_model_subclass_list_input
ig = IntegratedGradients(model, n_steps=50, method=method)
explanations = ig.explain(X_test_multi_inputs,
baselines=baselines,
target=test_labels)
assert isinstance(explanations, Explanation)
assert max([len(x) for x in X_test_multi_inputs]) == min([len(x) for x in X_test_multi_inputs])
assert (max([len(x) for x in explanations['data']['attributions']]) ==
min([len(x) for x in explanations['data']['attributions']]))
assert len(explanations['data']['attributions'][0]) == N_TEST
assert len(X_test_multi_inputs[0]) == N_TEST
attrs = explanations['data']['attributions']
for i in range(len(attrs)):
assert attrs[i].shape == X_test_multi_inputs[i].shape
assert 'deltas' in explanations['data'].keys()
assert explanations['data']['deltas'].shape[0] == N_TEST
assert 'predictions' in explanations['data'].keys()
assert explanations['data']['predictions'].shape[0] == N_TEST
@pytest.mark.parametrize('ffn_model_multi_inputs', [({'output_dim': 2,
'activation': 'softmax',
'loss': 'categorical_crossentropy',
'X_train_multi_inputs': X_train_multi_inputs,
'y_train': y_train_classification_categorical})], indirect=True)
@pytest.mark.parametrize('method', INTEGRAL_METHODS, ids='method={}'.format)
@pytest.mark.parametrize('baselines', BASELINES_MULTI_INPUTS)
def test_integrated_gradients_binary_classification_multi_inputs(ffn_model_multi_inputs, method, baselines):
model = ffn_model_multi_inputs
ig = IntegratedGradients(model, n_steps=50, method=method)
explanations = ig.explain(X_test_multi_inputs,
baselines=baselines,
target=test_labels)
assert isinstance(explanations, Explanation)
assert max([len(x) for x in X_test_multi_inputs]) == min([len(x) for x in X_test_multi_inputs])
assert (max([len(x) for x in explanations['data']['attributions']]) ==
min([len(x) for x in explanations['data']['attributions']]))
assert len(explanations['data']['attributions'][0]) == N_TEST
assert len(X_test_multi_inputs[0]) == N_TEST
attrs = explanations['data']['attributions']
for i in range(len(attrs)):
assert attrs[i].shape == X_test_multi_inputs[i].shape
assert 'deltas' in explanations['data'].keys()
assert explanations['data']['deltas'].shape[0] == N_TEST
assert 'predictions' in explanations['data'].keys()
assert explanations['data']['predictions'].shape[0] == N_TEST
@pytest.mark.parametrize('ffn_model_multi_inputs', [({'output_dim': 1,
'activation': 'sigmoid',
'loss': 'binary_crossentropy',
'X_train_multi_inputs': X_train_multi_inputs,
'y_train': y_train_classification_ordinal})], indirect=True)
@pytest.mark.parametrize('method', INTEGRAL_METHODS)
@pytest.mark.parametrize('baselines', BASELINES_MULTI_INPUTS)
def test_integrated_gradients_binary_classification_single_output_multi_inputs(ffn_model_multi_inputs,
method,
baselines):
model = ffn_model_multi_inputs
ig = IntegratedGradients(model, n_steps=50, method=method)
explanations = ig.explain(X_test_multi_inputs,
baselines=baselines,
target=test_labels)
assert isinstance(explanations, Explanation)
assert max([len(x) for x in X_test_multi_inputs]) == min([len(x) for x in X_test_multi_inputs])
assert (max([len(x) for x in explanations['data']['attributions']]) ==
min([len(x) for x in explanations['data']['attributions']]))
assert len(explanations['data']['attributions'][0]) == N_TEST
assert len(X_test_multi_inputs[0]) == N_TEST
attrs = explanations['data']['attributions']
for i in range(len(attrs)):
assert attrs[i].shape == X_test_multi_inputs[i].shape
assert 'deltas' in explanations['data'].keys()
assert explanations['data']['deltas'].shape[0] == N_TEST
assert 'predictions' in explanations['data'].keys()
assert explanations['data']['predictions'].shape[0] == N_TEST
@pytest.mark.parametrize('ffn_model', [({'output_dim': 2,
'activation': 'softmax',
'loss': 'categorical_crossentropy',
'X_train': X_train,
'y_train': y_train_classification_categorical})], indirect=True)
@pytest.mark.parametrize('method', INTEGRAL_METHODS, ids='method={}'.format)
@pytest.mark.parametrize('baselines', BASELINES)
def test_integrated_gradients_binary_classification(ffn_model, method, baselines):
model = ffn_model
ig = IntegratedGradients(model, n_steps=50, method=method)
explanations = ig.explain(X_test,
baselines=baselines,
target=test_labels)
assert isinstance(explanations, Explanation)
assert explanations['data']['attributions'][0].shape == X_test.shape
assert 'deltas' in explanations['data'].keys()
assert explanations['data']['deltas'].shape[0] == X_test.shape[0]
assert 'predictions' in explanations['data'].keys()
assert explanations['data']['predictions'].shape[0] == X_test.shape[0]
@pytest.mark.parametrize('ffn_model', [({'output_dim': 1,
'activation': 'sigmoid',
'loss': 'binary_crossentropy',
'X_train': X_train,
'y_train': y_train_classification_ordinal})], indirect=True)
@pytest.mark.parametrize('method', INTEGRAL_METHODS)
@pytest.mark.parametrize('baselines', BASELINES)
def test_integrated_gradients_binary_classification_single_output(ffn_model, method, baselines):
model = ffn_model
ig = IntegratedGradients(model, n_steps=50, method=method)
explanations = ig.explain(X_test,
baselines=baselines,
target=test_labels)
assert isinstance(explanations, Explanation)
assert explanations['data']['attributions'][0].shape == X_test.shape
assert 'deltas' in explanations['data'].keys()
assert explanations['data']['deltas'].shape[0] == X_test.shape[0]
assert 'predictions' in explanations['data'].keys()
assert explanations['data']['predictions'].shape[0] == X_test.shape[0]
@pytest.mark.parametrize('ffn_model', [({'output_dim': 1,
'activation': 'sigmoid',
'loss': 'binary_crossentropy',
'X_train': X_train,
'y_train': y_train_classification_ordinal,
'squash_output': True})], indirect=True)
@pytest.mark.parametrize('method', INTEGRAL_METHODS)
@pytest.mark.parametrize('baselines', BASELINES)
def test_integrated_gradients_binary_classification_single_output_squash_output(ffn_model, method, baselines):
model = ffn_model
ig = IntegratedGradients(model, n_steps=50, method=method)
explanations = ig.explain(X_test,
baselines=baselines,
target=test_labels)
assert isinstance(explanations, Explanation)
assert explanations['data']['attributions'][0].shape == X_test.shape
assert 'deltas' in explanations['data'].keys()
assert explanations['data']['deltas'].shape[0] == X_test.shape[0]
assert 'predictions' in explanations['data'].keys()
assert explanations['data']['predictions'].shape[0] == X_test.shape[0]
@pytest.mark.parametrize('ffn_model', [({'output_dim': 2,
'activation': 'softmax',
'loss': 'categorical_crossentropy',
'X_train': X_train,
'y_train': y_train_classification_categorical})], indirect=True)
@pytest.mark.parametrize('method', INTEGRAL_METHODS)
@pytest.mark.parametrize('layer_nb', (None, 1))
@pytest.mark.parametrize('baselines', BASELINES)
def test_integrated_gradients_binary_classification_layer(ffn_model, method, layer_nb, baselines):
model = ffn_model
if layer_nb is not None:
layer = model.layers[layer_nb]
else:
layer = None
ig = IntegratedGradients(model, layer=layer,
n_steps=50, method=method)
explanations = ig.explain(X_test,
baselines=baselines,
target=test_labels)
assert isinstance(explanations, Explanation)
if layer is not None:
layer_out = layer(X_test).numpy()
assert explanations['data']['attributions'][0].shape == layer_out.shape
else:
assert explanations['data']['attributions'][0].shape == X_test.shape
assert 'deltas' in explanations['data'].keys()
assert explanations['data']['deltas'].shape[0] == X_test.shape[0]
assert 'predictions' in explanations['data'].keys()
assert explanations['data']['predictions'].shape[0] == X_test.shape[0]
@pytest.mark.parametrize('ffn_model', [({'output_dim': 1,
'activation': 'linear',
'loss': 'mean_squared_error',
'X_train': X_train,
'y_train': y_train_regression})], indirect=True)
@pytest.mark.parametrize('method', INTEGRAL_METHODS)
@pytest.mark.parametrize('baselines', BASELINES)
def test_integrated_gradients_regression(ffn_model, method, baselines):
model = ffn_model
ig = IntegratedGradients(model, n_steps=50, method=method)
explanations = ig.explain(X_test,
baselines=baselines,
target=None)
assert isinstance(explanations, Explanation)
assert explanations['data']['attributions'][0].shape == X_test.shape
assert 'deltas' in explanations['data'].keys()
assert explanations['data']['deltas'].shape[0] == X_test.shape[0]
assert 'predictions' in explanations['data'].keys()
assert explanations['data']['predictions'].shape[0] == X_test.shape[0]
@pytest.mark.skip(reason='Not testing as multi-layers will not be supported in the future')
@pytest.mark.parametrize('ffn_model', [({'output_dim': 2,
'activation': 'softmax',
'loss': 'categorical_crossentropy',
'X_train': X_train,
'y_train': y_train_classification_categorical})], indirect=True)
@pytest.mark.parametrize('method', INTEGRAL_METHODS)
@pytest.mark.parametrize('baselines', BASELINES)
def test_integrated_gradients_binary_classification_multi_layer(ffn_model, method, baselines):
model = ffn_model
layer = [model.layers[1], model.layers[2]]
ig = IntegratedGradients(model, layer=layer,
n_steps=50, method=method)
explanations = ig.explain(X_test,
baselines=baselines,
target=test_labels)
assert isinstance(explanations, Explanation)
@pytest.mark.skip(reason='Not testing as multi-layers will not be supported in the future')
@pytest.mark.parametrize('ffn_model_subclass', [({'output_dim': 2,
'activation': 'softmax',
'loss': 'categorical_crossentropy',
'X_train': X_train,
'y_train': y_train_classification_categorical})], indirect=True)
@pytest.mark.parametrize('method', INTEGRAL_METHODS)
@pytest.mark.parametrize('baselines', BASELINES)
def test_integrated_gradients_binary_classification_multi_layer_subclassed(ffn_model_subclass, method, baselines):
model = ffn_model_subclass
layer = [model.layers[0], model.layers[1]]
ig = IntegratedGradients(model, layer=layer,
n_steps=50, method=method)
explanations = ig.explain(X_test,
baselines=baselines,
target=test_labels)
assert isinstance(explanations, Explanation)
``` |
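A condensed usage sketch mirroring the fixtures above, for orientation only: synthetic data and a one-epoch model, so the attribution values themselves carry no meaning.
```python
import numpy as np
import tensorflow as tf
from alibi.explainers import IntegratedGradients

X = np.random.rand(100, 4).astype(np.float32)
y = tf.keras.utils.to_categorical((X[:, 0] + X[:, 1] > 1).astype(int))

model = tf.keras.Sequential([
    tf.keras.layers.Dense(20, activation='relu', input_shape=(4,)),
    tf.keras.layers.Dense(2, activation='softmax'),
])
model.compile(loss='categorical_crossentropy', optimizer='adam')
model.fit(X, y, epochs=1, verbose=0)

ig = IntegratedGradients(model, n_steps=50, method='gausslegendre')
explanation = ig.explain(X[:10], baselines=None, target=y[:10].argmax(axis=1))
print(explanation.data['attributions'][0].shape)  # (10, 4): one attribution per input feature
```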
{
"source": "jimbuho/django-brain",
"score": 3
} |
#### File: django-brain/brainutils/ajax.py
```python
import uuid
class AjaxBase:
params = {}
exist_all = True
DEBUG = False
def ID(self):
"""
Description
        Returns a unique ID to be used by the rest of the methods,
        so parameters are not read with values from another request.
:return:
"""
id = uuid.uuid4().int & (1<<32)-1
id = str(id)[:8]
if self.DEBUG:print('==> {AJAX REQUEST #', id,'}')
return id
def GET(self, id, request, attributes):
"""
GET Params
Description
        Given a list of attribute names, builds a dictionary
        with the parameters passed via GET.
@param attributes List - String
"""
for p in attributes:
if p in request.GET:
self.params['%s-%s' % (p, id)] = request.GET.get(p)
else:
if self.DEBUG:print('<== [AjaxBase.GET] No fue dado', p)
self.exist_all = False
def E(self, id, names=None):
"""
Exist
Description
        Checks whether a list of parameters exists in params.
        :param names:
        :return: True if all exist, False if at least one is missing
"""
if names:
for name in names:
key = '%s-%s' % (name, id)
if key not in self.params:
return False
return True
else:
return self.exist_all
def V(self, id, name):
"""
Value
Description
        Returns the value of a parameter.
:param name:
:return:
"""
key = '%s-%s' % (name, id)
if key in self.params:
return self.params[key]
return None
def fail(self, error_msg='', method='', info=None):
"""
Fail
Description
        Builds an error response.
:param error_msg:
:param method:
:return:
"""
info = info if info else self.params
return {
'success': False,
'message': error_msg,
'info':info,
'method': method,
}
def success(self, info=None, method=''):
"""
        Success
        Description
        Builds a success response.
:param info: Dictionary
:param method:
:return:
"""
info = info if info else self.params
return {
'success': True,
'message':'OK',
'info':info,
'method':method
}
```
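A hypothetical Django view sketch of the intended `AjaxBase` flow; the view name and parameter name are illustrative, not part of the package.
```python
from django.http import JsonResponse
from brainutils.ajax import AjaxBase

def product_detail(request):
    ajax = AjaxBase()
    rid = ajax.ID()                         # unique id scoping this request's parameters
    ajax.GET(rid, request, ['product_id'])  # collect the expected GET parameters
    if not ajax.E(rid, ['product_id']):
        return JsonResponse(ajax.fail('product_id is required', method='product_detail'))
    return JsonResponse(ajax.success({'product_id': ajax.V(rid, 'product_id')},
                                     method='product_detail'))
```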
#### File: django-brain/brainutils/configuration.py
```python
from . import models
class ConfigurationManager:
"""
Configuration Manager
===================
Description
    Manager for configuration variables. This class provides access
    to those variables and keeps them cached in memory.
    A change made to a configuration value from the admin site is only
    reflected after the application server is restarted.
"""
    # Default value used when a configuration variable is created
    DEFAULT_VALUE = '0'
    # Maximum length of configuration names
    MAX_NAME_SIZE = 50
    # Maximum length of configuration descriptions
    MAX_DESC_SIZE = 150
    # Default value used for the work-mode configuration
    MAIN_CONFIG_WORMODE_DEFAULT = 'TEST'
    # Value used when the system is in maintenance mode
    MAIN_CONFIG_WORMODE_MAINTENANCE = 'MANT'
    # Dictionary of the main Multipasajes configurations
    # The first value in each list is always the default
MAIN_CONFIGURATION = [
{
'name': 'common.workmode',
'desc': 'Multipasajes Execution Mode',
'values': [MAIN_CONFIG_WORMODE_DEFAULT, 'PROD']
}
]
def get_value(self, name, description='N/A', default=None):
"""
Get Value
Description
        Given the name, returns the value of a configuration
        record; values are kept cached in memory.
        :return:
        String -- the configuration value
"""
try:
value = self.DEFAULT_VALUE
if hasattr(get_value, 'values'):
value_dict = get_value.values
else:
value_dict = {}
if name in value_dict:
value = value_dict[name]
else:
values = models.Configuration.objects.get_active(name=name)
if values.count() > 0:
value = values.first().value
value_dict[name] = value
get_value.values = value_dict
else:
self.create_default(name, description, default)
value = default
return value
except Exception as e:
return ''
def create_default(self, name, description, default):
"""
Create Default
Description
        Creates a default configuration parameter when it does not exist,
        so it becomes available for the administrator to enter the
        correct value.
        @param name:
        @param description:
        @param default: default value used when the variable is created
"""
try:
if len(name) <= self.MAX_NAME_SIZE:
is_config, main_config = self.is_main_config(name)
if is_config:
models.Configuration.objects.create(name=name, value=main_config['values'][0],
description=self.get_main_config_description(main_config))
else:
value = default if default else ''
models.Configuration.objects.create(name=name, value=value,
description=description[:self.MAX_DESC_SIZE]
if description else 'N/A')
except Exception as e:
print('<-- Error al crear la configuracion por defecto: %s' % str(e))
def is_main_config(self, name):
"""
Is Main Config
Description
        Checks whether the configuration is one of the main configurations.
        @param name:
        @return True if it is a main configuration
"""
for mc in self.MAIN_CONFIGURATION:
if mc['name'] == name:
return True, mc
return False, None
def get_main_config_description(self, main_config):
"""
Get Main Config Description
Description
        Builds a description that includes the possible values of the configuration.
        @param main_config:
        @return String -- description in the standard format
"""
return '%s [%s]' % (main_config['desc'], ','.join([v for v in main_config['values']]))
cmanager = ConfigurationManager()
def get_value(name, default=None, description=None):
"""
Get Value
Description
    Shortcut for reading configuration values.
    :param name: unique name of the parameter
    :return: String -- the value
"""
return ConfigurationManager().get_value(name, description=description, default=default)
def get_float(name, default=None, description=None):
"""
Get Float
Description
    Shortcut for reading a configuration value as a float.
    :param name: unique name of the parameter
    :return: Float -- the value
"""
try:
return float(ConfigurationManager().get_value(name, description=description, default=default))
except:
return 0
def get_integer(name, default=None, description=None):
"""
    Get Integer
    Description
    Shortcut for reading a configuration value as an integer.
    :param name: unique name of the parameter
    :return: Integer -- the value
"""
try:
return int(ConfigurationManager().get_value(name, description=description, default=default))
except:
return 0
def isTESTMode():
"""
Is Test Mode
Description
    Checks whether the system configuration is in TEST mode.
    :return: True if we are in TEST mode
"""
return str(ConfigurationManager().get_value('common.workmode')) == \
str(ConfigurationManager.MAIN_CONFIG_WORMODE_DEFAULT)
def isMaintenanceMode():
"""
Is Maintenance Mode
Description
    Checks whether the system configuration is in MAINTENANCE mode.
    :return: True if we are in MAINTENANCE mode
"""
return str(ConfigurationManager().get_value('common.workmode')) == \
str(ConfigurationManager.MAIN_CONFIG_WORMODE_MAINTENANCE)
```
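A short usage sketch of the helpers above; the variable names and defaults are illustrative (a missing variable is created with the given default by `create_default`).
```python
from brainutils import configuration

mode = configuration.get_value('common.workmode')             # e.g. 'TEST' or 'PROD'
fee = configuration.get_float('payments.fee', default='2.5')  # parse errors fall back to 0

if configuration.isMaintenanceMode():
    print('Site is in maintenance mode')
```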
#### File: django-brain/brainutils/context.py
```python
from . import configuration
from . import models
def load_context(request):
"""
Load Context
Description
    Loads the main template context variables.
:param request:
:return:
"""
IS_TEST_MODE = configuration.isTESTMode()
IS_MAINTENANCE = configuration.isMaintenanceMode()
try:
LANGUAGES = models.Language.objects.get_active()
except:
LANGUAGES = []
return {
'IS_TEST_MODE' : IS_TEST_MODE,
'IS_MAINTENANCE' : IS_MAINTENANCE,
'LANGUAGES' : LANGUAGES
}
```
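For the processor above to run, it must be registered in the Django settings. A sketch of the relevant `TEMPLATES` entry follows; the dotted path assumes the module is importable as `brainutils.context`.
```python
TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.request',
                'brainutils.context.load_context',  # exposes IS_TEST_MODE, IS_MAINTENANCE, LANGUAGES
            ],
        },
    },
]
```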
#### File: django-brain/brainutils/gen_engine.py
```python
import os
from django.template.loader import render_to_string
from django.apps import apps
from . import gen_models
DJANGO_APPS_PREFIX = 'django.contrib'
def get_apps():
"""
Get All Apps in Context
:return:
"""
for app_config in apps.get_app_configs():
yield app_config.name, app_config
class Template:
"""
Template Render
"""
def __init__(self, app_name, template_name, ext='py', mode=None):
"""
        Sets up the renderer for the given app.
:param app_name:
"""
self.app_name = app_name
self.template_name = template_name
self.ext = ext
self.mode = mode
installed_apps = dict(get_apps())
self.app = installed_apps.get(app_name)
if self.app is None:
raise Exception('App {} is not available'.format(app_name))
def render(self):
"""
        Renders the given template into the requested file.
:return:
"""
if self.ext == 'py':
to_path = os.path.join(self.app.path, '%s.py' % self.template_name)
rendered = render_to_string('code/%s_template.html' % self.template_name,
{'models': gen_models.Models(self.app), 'app': self.app})
with open(to_path, 'w') as f:
f.write(rendered)
else: # html
models = gen_models.Models(self.app)
for model in models:
path = os.path.join(self.app.path, 'templates', self.app.name)
try:
os.mkdir(path)
except:
pass
if self.template_name == 'delete':
file_name = '%s_delete_confirm.html' % str(model.name).lower()
else:
file_name = '%s_%s.html' % (self.template_name, str(model.name).lower())
to_path = os.path.join(path, file_name)
rendered = render_to_string('code/%s_html_template.html' % self.template_name,
{'model': model, 'app': self.app, 'mode': self.mode})
with open(to_path, 'w') as f:
f.write(rendered)
class AdminGenerator:
"""
    Generator of admin classes for an app
"""
def __init__(self, app_name):
"""
Constructor
:param app_name:
"""
self.app_name = app_name
def generate(self):
"""
        Builds the admin module.
:return:
"""
Template(self.app_name, 'admin').render()
print('<== Success Generated [Admins] for', self.app_name)
class CRUDGenerator:
"""
    Generator of CRUD classes for an app
"""
def __init__(self, app_name):
"""
Constructor
:param app_name:
"""
self.app_name = app_name
def generate(self):
"""
        Builds the forms, urls and views modules.
:return:
"""
Template(self.app_name, 'forms').render()
print('<== Success Generated FORMS for', self.app_name)
Template(self.app_name, 'urls').render()
print('<== Success Generated URLS for', self.app_name)
Template(self.app_name, 'views').render()
print('<== Success Generated VIEWS for', self.app_name)
print('<== Success Generated [all CRUD] for', self.app_name)
class CRUDHTMLGenerator:
"""
    Generator of CRUD HTML templates for an app
"""
def __init__(self, app_name):
"""
Constructor
:param app_name:
"""
self.app_name = app_name
def generate(self):
"""
        Builds the CRUD HTML templates.
:return:
"""
        list_mode = str(input('Choose a list template mode: [A] Table, [B] Article:'))
list_mode = list_mode.upper() if list_mode else None
list_mode = 'article' if list_mode and list_mode == 'B' else 'table'
Template(self.app_name, 'create', 'html').render()
print('<== Success Generated CREATE HTML for', self.app_name)
Template(self.app_name, 'update', 'html').render()
print('<== Success Generated UPDATE HTML for', self.app_name)
Template(self.app_name, 'list', 'html', list_mode).render()
print('<== Success Generated LIST HTML for', self.app_name)
Template(self.app_name, 'delete', 'html').render()
print('<== Success Generated DELETE HTML for', self.app_name)
print('<== Success Generated [all CRUD HTML] for', self.app_name)
class GeneratorEngine:
"""
    Code generation engine
"""
def run(self, option, app_name=None):
"""
        Generates code for an app, of the kind given by option.
:param option:
:param app_name:
:return:
"""
if option == 'admin':
if app_name:
AdminGenerator(app_name).generate()
else:
apps = get_apps()
for k,v in apps:
if DJANGO_APPS_PREFIX not in k:
yes_not = str(input('Do you want to generate code for app %s? '
'(This gonna replace your code if exists) [y/N]:' % k))
if yes_not == 'y':
AdminGenerator(k).generate()
elif option == 'crud':
if app_name:
CRUDGenerator(app_name).generate()
else:
apps = get_apps()
for k,v in apps:
if DJANGO_APPS_PREFIX not in k:
yes_not = str(input('Do you want to generate code for app %s? '
'(This gonna replace your code if exists: views, forms and urls) [y/N]:' % k))
if yes_not == 'y':
CRUDGenerator(k).generate()
elif option == 'crud_html':
if app_name:
CRUDHTMLGenerator(app_name).generate()
else:
apps = get_apps()
for k,v in apps:
if DJANGO_APPS_PREFIX not in k:
yes_not = str(input('Do you want to generate html for app %s? '
'(This gonna replace your code if exists: update, create, list, delete) [y/N]:' % k))
if yes_not == 'y':
CRUDHTMLGenerator(k).generate()
```
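A usage sketch of the engine above; the app name is illustrative. When `app_name` is omitted, the engine iterates over all non-Django apps and asks for confirmation before overwriting files.
```python
from brainutils.gen_engine import GeneratorEngine

engine = GeneratorEngine()
engine.run('admin', app_name='catalog')      # renders admin.py for the app
engine.run('crud', app_name='catalog')       # renders forms.py, urls.py and views.py
engine.run('crud_html', app_name='catalog')  # renders the CRUD HTML templates (prompts for list mode)
```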
#### File: brainutils/templatetags/butils.py
```python
from django import template
from django.utils.safestring import mark_safe
from brainutils import messages, configuration
register = template.Library()
@register.simple_tag
def display_message(request, name):
"""
Display
Description
    Tag that renders a message previously stored in the database
    (and cached in memory) on screen, formatted as HTML.
    :param request: current request
    :param name: name of the message
    :return: String -- HTML message
"""
language = messages.languages.get_language(request)
return mark_safe( messages.get_message(name, language) )
@register.simple_tag
def display_configuration(name, default=None):
"""
Display
Description
    Tag that renders a configuration value previously stored in the database
    (and cached in memory) on screen, formatted as HTML.
    :param name: name of the configuration
    :param default: default value
    :return: String -- HTML value
"""
return mark_safe( configuration.get_value(name, default) )
```
#### File: django-brain/brainutils/vmixins.py
```python
from django.utils.decorators import method_decorator
from django.contrib.auth.decorators import login_required
from django.urls import reverse
from django.http import HttpResponseRedirect
class LoginRequiredSecurityMixin:
"""
    Base class that validates that the user is logged in
"""
@method_decorator(login_required(login_url='/accounts/login/'))
def dispatch(self, request, *args, **kwargs):
"""
        Default dispatch
:param request:
:param args:
:param kwargs:
:return:
"""
return super().dispatch(request, *args, **kwargs)
class SpecialSecurityMixin:
"""
    Base class that checks access security for the view inheriting from
    this class using a custom test.
    The inheriting class must implement:
    security_test(profile): returns True/False
    get_fail_security_test_url(): returns a URL
"""
def security_test(self, profile):
"""
To Override
:param profile:
:return:
"""
return profile is not None
def get_fail_security_test_url(self, profile):
"""
To Override
:return:
"""
return reverse('home')
@method_decorator(login_required(login_url='/accounts/login/'))
def dispatch(self, request, *args, **kwargs):
"""
        If the security condition is not met, redirects to the
        security-failure URL.
:param request:
:param args:
:param kwargs:
:return:
"""
profile = self.request.user.customerprofile
if not self.security_test(profile):
return HttpResponseRedirect(redirect_to=self.get_fail_security_test_url(profile))
return super().dispatch(request, *args, **kwargs)
``` |
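A sketch of a view built on `SpecialSecurityMixin`; the view, template, URL name and profile attribute are illustrative assumptions.
```python
from django.urls import reverse
from django.views.generic import TemplateView
from brainutils.vmixins import SpecialSecurityMixin

class OrderListView(SpecialSecurityMixin, TemplateView):
    template_name = 'orders/list.html'

    def security_test(self, profile):
        # only verified customers may see their orders
        return profile is not None and getattr(profile, 'is_verified', False)

    def get_fail_security_test_url(self, profile):
        return reverse('home')
```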
{
"source": "jimbunny/AdminSystem",
"score": 2
} |
#### File: backend/common/email.py
```python
from flask_mail import Message
import yagmail
from flask import current_app
def send_email(to, subject, template):
yagmail.SMTP(current_app.config['MAIL_USERNAME'], current_app.config['MAIL_PASSWORD']).send(to, subject, template)
# msg = Message(
# subject,
# recipients=[to],
# html=template,
# sender=current_app.config['MAIL_DEFAULT_SENDER']
# )
# mail.send(msg)
```
#### File: backend/common/token.py
```python
from itsdangerous import URLSafeTimedSerializer
from flask import current_app
def generate_confirmation_token(email):
serializer = URLSafeTimedSerializer(current_app.config['SECRET_KEY'])
return serializer.dumps(email, salt=current_app.config['SECURITY_PASSWORD_SALT'])
def confirm_token(token, expiration=3600):
serializer = URLSafeTimedSerializer(current_app.config['SECRET_KEY'])
try:
email = serializer.loads(
token,
salt=current_app.config['SECURITY_PASSWORD_SALT'],
max_age=expiration
)
except Exception as e:
return False
return email
```
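A round-trip sketch for the helpers above, assuming the module is importable as `common.token`; it must run inside a Flask application context because both functions read `SECRET_KEY` and `SECURITY_PASSWORD_SALT` from `current_app.config`.
```python
from common.token import generate_confirmation_token, confirm_token

token = generate_confirmation_token('user@example.com')
email = confirm_token(token, expiration=3600)  # the email, or False if the token is invalid or expired
```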
#### File: AdminSystem/backend/manager.py
```python
from gevent import monkey; monkey.patch_all()
import logging.handlers
from abc import ABC
from flask_script import Manager
from flask_migrate import Migrate, MigrateCommand
from gunicorn.app.base import BaseApplication
from gunicorn.six import iteritems
from multiprocessing import cpu_count
from app import create_app, db
from config import config
import os
import logging
from logging.handlers import RotatingFileHandler
level_relations = {
'DEBUG':logging.DEBUG,
'INFO':logging.INFO,
'WARNING':logging.WARNING,
'ERROR':logging.ERROR,
'CRIT':logging.CRITICAL
}  # mapping from log level names to logging levels
app = create_app(config)
manager = Manager(app)
migrate = Migrate(app, db)
manager.add_command('db', MigrateCommand)
log_dirs = app.config.get('LOG_DIR_PATH', 'logs')
if not os.path.exists(log_dirs):
os.makedirs(log_dirs)
formatter = logging.Formatter('%(asctime)s - %(filename)s[line:%(lineno)d] - %(levelname)s: %(message)s')
class StandaloneApplication(BaseApplication, ABC):
"""
    Gunicorn server launcher class
"""
def __init__(self, application, options):
self.application = application
self.options = options or {}
super(StandaloneApplication, self).__init__()
def load_config(self):
config = dict([(key, value) for key, value in iteritems(self.options)
if key in self.cfg.settings and value is not None])
for key, value in iteritems(config):
self.cfg.set(key.lower(), value)
def load(self):
return self.application
@manager.command
def run():
"""
    Production-mode startup command.
To use: python3 manager.py run
"""
# app.logger.setLevel(app.config.get('LOG_LEVEL', logging.INFO))
    # set the log level
    logging.basicConfig(level=level_relations.get(app.config.get('LOG_LEVEL', 'INFO')))
    # create the log handler: log file path, max size per file and number of backups to keep
file_log_handler = RotatingFileHandler(os.path.join(log_dirs, 'prod.logs'),
maxBytes=app.config.get('LOG_FILE_MAX_BYTES', 1024 * 1024 * 100),
backupCount=app.config.get('LOG_FILE_BACKUP_COUNT', 10))
    # set the log format on the handler
    file_log_handler.setFormatter(formatter)
    # attach the handler to the global logger
logging.getLogger().addHandler(file_log_handler)
service_config = {
'bind': app.config.get('BIND', '0.0.0.0:5555'),
'workers': app.config.get('WORKERS', cpu_count() * 2 + 1),
'worker_class': 'gevent',
'worker_connections': app.config.get('WORKER_CONNECTIONS', 10000),
'backlog': app.config.get('BACKLOG', 2048),
'timeout': app.config.get('TIMEOUT', 60),
'loglevel': app.config.get('LOG_LEVEL', 'info'),
'pidfile': app.config.get('PID_FILE', 'run.pid'),
}
StandaloneApplication(app, service_config).run()
@manager.command
def debug():
"""
    Debug-mode startup command.
To use: python3 manager.py debug
"""
logging.basicConfig(level=level_relations.get(app.config.get('LOG_LEVEL', 'DEBUG')))
file_log_handler = RotatingFileHandler(os.path.join(log_dirs, 'debug.logs'),
maxBytes=app.config.get('LOG_FILE_MAX_BYTES', 1024 * 1024),
backupCount=app.config.get('LOG_FILE_BACKUP_COUNT', 1))
file_log_handler.setFormatter(formatter)
logging.getLogger().addHandler(file_log_handler)
app.run(debug=True, port=5555)
if __name__ == '__main__':
manager.run()
```
#### File: backend/models/carts.py
```python
from sqlalchemy import and_
from flask import current_app
from . import db
from .base import BaseModel
from sqlalchemy.exc import SQLAlchemyError
from werkzeug.security import generate_password_hash, check_password_hash
from sqlalchemy.sql import func
import time
class CartsModel(db.Model, BaseModel):
__tablename__ = 'carts'
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(250), nullable=False)
no = db.Column(db.String(50), nullable=False)
updateTime = db.Column(db.Integer, default=int(time.time()))
def __init__(self, email, no):
self.no = no
self.email = email
def __str__(self):
return "Carts(id='%s')" % self.id
def paginate(self, page, per_page):
return self.query.paginate(page=page, per_page=per_page, error_out=False)
def get_id(self):
return db.session.query(func.max(self.id)).all()
def get(self, _id):
return self.query.filter_by(id=_id).first()
def add(self, product):
db.session.add(product)
return session_commit()
def update(self):
return session_commit()
def delete(self, no):
self.query.filter(self.no == no).delete(synchronize_session=False)
return session_commit()
def session_commit():
try:
db.session.commit()
except SQLAlchemyError as e:
db.session.rollback()
reason = str(e)
current_app.logger.info(e)
return reason
```
#### File: backend/models/roles.py
```python
from flask import current_app
from . import db
from .base import BaseModel
from sqlalchemy.exc import SQLAlchemyError
from werkzeug.security import generate_password_hash, check_password_hash
import time
class RolesModel(db.Model, BaseModel):
__tablename__ = 'roles'
id = db.Column(db.Integer, primary_key=True)
# department = db.Column(db.String(250), unique=True, nullable=False)
# departmentID = db.Column(db.String(250), unique=True, nullable=False)
# postion = db.Column(db.String(250), unique=True, nullable=False)
# postionID = db.Column(db.String(250), unique=True, nullable=False)
description = db.Column(db.String(50), nullable=False)
permission = db.Column(db.String(50), default='test', nullable=False)
def __init__(self, description, permission):
# self.department = department
# self.departmentID = departmentID
# self.postion = postion
# self.postionID = postionID
self.description = description
self.permission = permission
def __str__(self):
return "Roles(id='%s')" % self.id
def paginate(self, page, per_page):
return self.query.paginate(page=page, per_page=per_page, error_out=False)
def filter_by_description(self, description):
return self.query.filter(self.description.like("%" + description + "%")).all()
def filter_by_permission(self, permission):
return self.query.filter(self.permission.like("%" + permission + "%")).all()
def get(self, _id):
return self.query.filter_by(id=_id).first()
def add(self, role):
db.session.add(role)
return session_commit()
def update(self):
return session_commit()
def delete(self, ids):
# self.query.filter_by(id=id).delete()
self.query.filter(self.id.in_(ids)).delete(synchronize_session=False)
return session_commit()
def session_commit():
try:
db.session.commit()
except SQLAlchemyError as e:
db.session.rollback()
reason = str(e)
current_app.logger.info(e)
return reason
```
#### File: backend/resources/auths.py
```python
from datetime import datetime, timedelta
from flask_restful import Resource
from common.jwt_util import generate_jwt
import jwt, datetime, time
from flask import current_app, g
from models.users import UsersModel
from common import code, pretty_result
from datetime import datetime
class AuthorizationResource(Resource):
"""
    Login authentication
"""
@staticmethod
def _generate_tokens(user_id, with_refresh_token=True):
"""
        Generates the token and refresh_token.
        :param user_id: user id
:return: token, refresh_token
"""
        # issue the JWT
        now = datetime.utcnow()
        expiry = now + timedelta(hours=current_app.config['JWT_EXPIRY_HOURS'])  # short-lived token
# expiry = now + timedelta(seconds=10)
token = generate_jwt({'user_id': user_id, 'refresh': False}, expiry)
refresh_token = None
if with_refresh_token:
            refresh_expiry = now + timedelta(days=current_app.config['JWT_REFRESH_DAYS'])  # long-lived token
            # refresh_expiry = now + timedelta(seconds=20)  # long-lived token
refresh_token = generate_jwt({'user_id': user_id, 'refresh': True}, refresh_expiry)
return token, refresh_token
def post(self, username, password):
"""
        User login: creates the tokens.
"""
userInfo = UsersModel.query.filter_by(username=username).first()
if (userInfo is None):
return pretty_result(code.ERROR, data='', msg='找不到用户')
else:
if (UsersModel.check_password(UsersModel, userInfo.password, password)):
login_time = int(time.time())
userInfo.login_time = login_time
UsersModel.update(UsersModel)
user_id = userInfo.id
token, refresh_token = self._generate_tokens(user_id)
return pretty_result(code.OK, data={'access_token': token, 'refresh_token': refresh_token}, msg='登录成功')
else:
return pretty_result(code.ERROR, data='', msg='密码不正确')
    # additional PUT endpoint to refresh the token
    def put(self):
        """
        Refreshes the token.
"""
user_id = g.user_id
if user_id and g.is_refresh_token:
token, refresh_token = self._generate_tokens(user_id, with_refresh_token=False)
return pretty_result(code.OK, data={'access_token': token})
else:
return pretty_result(code.AUTHORIZATION_ERROR, data='', msg='Wrong refresh token.')
# does not use the refresh-token re-issue mechanism
class AuthResource(Resource):
@staticmethod
def encode_auth_token(user_id, login_time):
"""
        Generates the authentication token.
:param user_id: int
:param login_time: int(timestamp)
:return: string
"""
try:
payload = {
'exp': datetime.datetime.utcnow() + datetime.timedelta(days=0, seconds=10),
'iat': datetime.datetime.utcnow(),
'iss': 'jim',
'data': {
'id': user_id,
'login_time': login_time
}
}
return jwt.encode(
payload,
# config.SECRET_KEY,
current_app.config.get('SECRET_KEY', ''),
algorithm='HS256'
)
except Exception as e:
return e
@staticmethod
def decode_auth_token(auth_token):
"""
        Validates the token.
:param auth_token:
:return: integer|string
"""
try:
# payload = jwt.decode(auth_token, app.config.get('SECRET_KEY'), leeway=datetime.timedelta(seconds=10))
            # expiration check disabled
payload = jwt.decode(auth_token, current_app.config.get('SECRET_KEY', ''), options={'verify_exp': False})
if ('data' in payload and 'id' in payload['data']):
return payload
else:
raise jwt.InvalidTokenError
except jwt.ExpiredSignatureError:
return 'Token过期'
except jwt.InvalidTokenError:
return '无效Token'
def authenticate(self, username, password):
"""
        User login: on success, returns a token and writes the login time to the database; on failure, returns the reason.
:param password:
:return: json
"""
userInfo = UsersModel.query.filter_by(username=username).first()
if (userInfo is None):
return pretty_result(code.OK, data='', msg='找不到用户')
else:
if (UsersModel.check_password(UsersModel, userInfo.password, password)):
login_time = int(time.time())
userInfo.login_time = login_time
UsersModel.update(UsersModel)
token = self.encode_auth_token(userInfo.id, login_time)
return pretty_result(code.OK, data=token.decode(), msg='登录成功')
else:
return pretty_result(code.OK, data='', msg='密码不正确')
def identify(self, request):
"""
        User authorization check.
:return: list
"""
data = ''
msg = ''
status = code.AUTHORIZATION_ERROR
auth_header = request.headers.get('Authorization')
if (auth_header):
auth_tokenArr = auth_header.split(" ")
if (not auth_tokenArr or auth_tokenArr[0] != 'JWT' or len(auth_tokenArr) != 2):
msg = '请传递正确的验证头信息'
else:
auth_token = auth_tokenArr[1]
payload = self.decode_auth_token(auth_token)
if not isinstance(payload, str):
user = UsersModel.get(UsersModel, payload['data']['id'])
if (user is None):
msg = '找不到该用户信息'
else:
if (user.login_time == payload['data']['login_time']):
status = code.OK
data = user.id
msg = '请求成功'
else:
msg = 'Token已更改,请重新登录获取'
else:
msg = payload
else:
msg = '没有提供认证token'
return pretty_result(status, data=data, msg=msg)
```
#### File: backend/resources/carts.py
```python
from flask import g
from flask_restful import Resource, inputs
from flask_restful.reqparse import RequestParser
from sqlalchemy.exc import SQLAlchemyError
from app import hash_ids
from models import db
from common import code, pretty_result
from models.carts import CartsModel
from models.products import ProductsModel
from common.decorators import login_required
import datetime
from config import config
class CartsResource(Resource):
"""
    Cart list resource class
"""
def __init__(self):
self.parser = RequestParser()
@login_required
def get(self):
self.parser.add_argument("email", type=inputs.regex(r'(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)'),
required=True, location="args",
help='email format is incorrect')
args = self.parser.parse_args()
cartInfo = CartsModel.query.filter_by(email=args.email).all()
data=[]
for item in cartInfo:
if (datetime.datetime.now() - item.update_time).total_seconds() < config.countDown:
data.append(item)
return pretty_result(code.OK, data={"length": len(data)}, msg="get cart count successful!")
@login_required
def post(self):
self.parser.add_argument("email", type=inputs.regex(r'(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)'),
required=True, location="json",
help='email format is incorrect')
args = self.parser.parse_args()
cartInfo = CartsModel.query.filter_by(email=args.email).all()
data = []
for item in cartInfo:
countDown = (datetime.datetime.now() - item.update_time).total_seconds()
if countDown < config.countDown:
product = ProductsModel.query.filter_by(no=item.no).first()
url = ''
pictures = product.picture.split(",")
for i in pictures:
url = config.domain + "/api/v1/pictureManagement/get?type=product&id=" + i
break
data.append(
{
'id': product.id,
'no': product.no,
'name': product.name,
'description': product.description,
'position': product.position,
'picture': url,
'gender': product.gender,
'size': product.size,
'age': product.age,
'Pclass': product.Pclass,
'type': product.type,
'status': product.status,
'inPrice': product.inPrice,
'outPrice': product.outPrice,
'price': product.price,
'level': product.level,
'showTimes': product.showTimes,
'remark': product.remark,
'time': (config.countDown - countDown) * 1000,
'updateUser': product.updateUser,
'updateTime': product.update_time.strftime("%m/%d/%Y %H:%M:%S")
}
)
return pretty_result(code.OK, data=data, msg="get cart info successful!")
@login_required
def put(self):
self.parser.add_argument("cartItemIds", type=list, location="json", help='cartItemIds format is incorrect')
args = self.parser.parse_args()
if args.cartItemIds:
productInfo = ProductsModel.get_ids(ProductsModel, args.cartItemIds)
data = []
for product in productInfo:
url = ''
countDown = (datetime.datetime.now() - product.update_time).total_seconds()
pictures = product.picture.split(",")
for i in pictures:
url = config.domain + "/api/v1/pictureManagement/get?type=product&id=" + i
break
data.append(
{
'id': product.id,
'no': product.no,
'name': product.name,
'description': product.description,
'position': product.position,
'picture': url,
'gender': product.gender,
'size': product.size,
'age': product.age,
'Pclass': product.Pclass,
'type': product.type,
'status': product.status,
'inPrice': product.inPrice,
'outPrice': product.outPrice,
'price': product.price,
'level': product.level,
'showTimes': product.showTimes,
'remark': product.remark,
'time': (config.countDown - countDown) * 1000,
'updateUser': product.updateUser,
'updateTime': product.update_time.strftime("%m/%d/%Y %H:%M:%S")
}
)
return pretty_result(code.OK, data=data, msg="get create order info successful!")
else:
return pretty_result(code.OK, data=[], msg="get create order info successful!")
@login_required
def delete(self):
self.parser.add_argument("no", type=str, required=True, location="json", help='no is required')
args = self.parser.parse_args()
CartsModel.delete(CartsModel, args.no)
return pretty_result(code.OK, msg='购物车商品信息删除成功!')
```
#### File: backend/resources/orders.py
```python
from flask import g
from flask_restful import Resource, inputs
from flask_restful.reqparse import RequestParser
from sqlalchemy.exc import SQLAlchemyError
from app import hash_ids
from common.utils import get_order_code
from common import code, pretty_result
from models.orders import OrdersModel
from models.products import ProductsModel
from models.mallUsers import MallUsersModel
from common.decorators import login_required
import math
import time
from config import config
class OrdersResource(Resource):
"""
    Orders list resource class
"""
def __init__(self):
self.parser = RequestParser()
@login_required
def get(self):
self.parser.add_argument("email", type=inputs.regex(r'(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)'),
required=True, location="args",
help='email format is incorrect')
self.parser.add_argument("status", type=str, required=True, location="args",
help='status format is incorrect')
self.parser.add_argument("pageNo", type=int, required=True, location="args",
help='pageNo format is incorrect')
self.parser.add_argument("pageSize", type=int, required=True, location="args",
help='pageSize format is incorrect')
args = self.parser.parse_args()
if args.status == "all":
orderInfo = OrdersModel.paginate(OrdersModel, args.pageNo, args.pageSize)
elif args.status == "noDelivery":
orderInfo = OrdersModel.paginate_by_status(OrdersModel,'noDelivery', args.pageNo, args.pageSize)
else:
orderInfo = OrdersModel.paginate_by_status(OrdersModel,'delivered', args.pageNo, args.pageSize)
data=[]
if orderInfo.items:
for item in orderInfo.items:
OrderItemVOS = []
productList = item.productList.split(',')
for i in productList:
productInfo = ProductsModel.query.filter_by(id=i).first()
OrderItemVOS.append(productInfo.to_dict())
data.append({
'no': item.no,
'totalPrice': item.totalPrice,
'username': item.username,
'phone': item.phone,
'payType': item.payType,
'status': item.status,
'productList': item.productList,
'addressId': item.addressId,
'createTime': item.createTime,
'OrderItemVOS': OrderItemVOS
})
totalCount = len(data)
result = {
'pageNo': args.pageNo,
'pageSize': args.pageSize,
'totalCount': totalCount,
'items': data,
'totalPage': math.ceil(totalCount/args.pageSize)
}
return pretty_result(code.OK, data=result, msg="get order info successful!")
@login_required
def post(self):
self.parser.add_argument("email", type=inputs.regex(r'(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)'),
required=True, location="json",
help='email format is incorrect')
self.parser.add_argument("totalPrice", type=int, required=True, location="json", help='totalPrice format is incorrect')
self.parser.add_argument("username", type=str, required=True, location="json",
help='username format is incorrect')
self.parser.add_argument("phone", type=str, required=True, location="json",
help='phone format is incorrect')
self.parser.add_argument("payType", type=str, required=True, location="json",
help='payType format is incorrect')
self.parser.add_argument("productList", type=list, required=True, location="json",
help='productList format is incorrect')
self.parser.add_argument("addressId", type=str, required=True, location="json",
help='addressId format is incorrect')
args = self.parser.parse_args()
mallUsersInfo = MallUsersModel.query.filter_by(email=args.email).first()
if mallUsersInfo.balance < args.totalPrice:
return pretty_result(code.ERROR, msg="create order failed, balance ont enough!")
else:
for i in args.productList:
proudctInfo = ProductsModel.query.filter_by(id=i).first()
proudctInfo.status = "solded"
ProductsModel.update(proudctInfo)
mallUsersInfo.balance = mallUsersInfo.balance - args.totalPrice
MallUsersModel.update(mallUsersInfo)
order = OrdersModel(email=args.email,no=get_order_code(), username=args.username, totalPrice=args.totalPrice, phone=args.phone.replace("-", ""),
couponPrice=0, payType=args.payType, status="noDelivery", productList=','.join([str(x) for x in args.productList]),
addressId=args.addressId)
OrdersModel.add(OrdersModel, order)
return pretty_result(code.OK, msg="create order info successful!")
@login_required
def put(self):
self.parser.add_argument("no", type=inputs.regex(r'(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)'),
required=True, location="json",
help='no format is incorrect')
self.parser.add_argument("status", type=str, required=True, location="json",
help='status format is incorrect')
args = self.parser.parse_args()
orderInfo = OrdersModel.query.filter_by(no=args.no).first()
orderInfo.status = args.status
OrdersModel.update(orderInfo)
return pretty_result(code.OK, msg="update order status info successful!")
class OrdersByNoResource(Resource):
"""
    Orders list resource class (lookup by order no)
"""
def __init__(self):
self.parser = RequestParser()
@login_required
def get(self):
self.parser.add_argument("no", type=str, required=True, location="args",
help='no format is incorrect')
args = self.parser.parse_args()
orderInfo = OrdersModel.query.filter_by(no=args.no).first()
OrderItemVOS = []
productList = orderInfo.productList.split(',')
for i in productList:
productInfo = ProductsModel.query.filter_by(id=i).first()
OrderItemVOS.append(productInfo.to_dict())
data ={
'no': orderInfo.no,
'totalPrice': orderInfo.totalPrice,
'phone': orderInfo.phone,
'payType': orderInfo.payType,
'status': orderInfo.status,
'productList': orderInfo.productList,
'addressId': orderInfo.addressId,
"createTime": orderInfo.createTime,
'OrderItemVOS': OrderItemVOS
}
return pretty_result(code.OK, data=data, msg="get order info successful!")
```
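The order resources above only define the handlers; the snippet below is a minimal, hypothetical client sketch of calling the GET endpoint. The base URL, route, and auth header are assumptions, since the `Api` registration and the `login_required` implementation are not shown here.

```python
import requests

# Hypothetical route; use whatever path the orders resource is registered under.
ORDERS_URL = "http://localhost:5000/api/v1/orders"

params = {
    "email": "buyer@example.com",   # must satisfy the email regex above
    "status": "noDelivery",          # "all", "noDelivery", anything else is treated as delivered
    "pageNo": 1,
    "pageSize": 10,
}
# The Authorization header is illustrative; login_required decides the real scheme.
resp = requests.get(ORDERS_URL, params=params, headers={"Authorization": "token <jwt>"})
print(resp.json())
```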
#### File: backend/resources/packageManagement.py
```python
from flask import g
from models.packages import PackageModel
from models.carts import CartsModel
from flask_restful import Resource, inputs
from models.mallUsers import MallUsersModel
from flask_restful.reqparse import RequestParser
from common import code, pretty_result
from common.decorators import login_required
from models.logs import LogsModel
from werkzeug.datastructures import FileStorage
from config import config
from models.products import ProductsModel
import datetime
import os
filePath = r'./download/package/'
if not os.path.exists(filePath):
os.makedirs(filePath)
class PackageManagementResource(Resource):
"""
    Package management resource class
"""
def __init__(self):
self.parser = RequestParser()
# @login_required
def get(self):
"""
        Get the package management list info
:return: json
"""
self.parser.add_argument("pageNo", type=int, required=True, location="args",
help='pageNo is required')
self.parser.add_argument("pageSize", type=int, required=True, location="args", help='pageSize is required')
self.parser.add_argument("name", type=str, required=True, location="args", help='name is required')
self.parser.add_argument("Pclass[]", type=str, location="args", action='append', help='Pclass is required')
self.parser.add_argument("age", type=str, required=True, location="args", help='age is required')
self.parser.add_argument("size", type=str, required=True, location="args", help='size is required')
self.parser.add_argument("gender", type=str, required=True, location="args", help='gender is required')
args = self.parser.parse_args()
# package_list = PackageModel.paginate(PackageModel, args.pageNo, args.pageSize)
items = []
# totalCount = package_list.total
# package_list = package_list.items
# if args.name and args.Pclass:
Pclass = args['Pclass[]']
if(not args['Pclass[]']):
Pclass = ['coat', 'pants', 'skirt']
package_list = PackageModel.filter_by_name_gender_size_age_Pclass(PackageModel, args.name, args.gender, args.size, args.age, Pclass)
# package_list = PackageModel.filter_by_name_Pclass(PackageModel, args.name, args.Pclass)
totalCount = len(package_list)
for package in package_list:
fileList = []
fileList2 = []
url = config.domain +"/api/v1/pictureManagement/get?type=package&id=" + package.picture
fileList.append({"name": package.picture, "url": url})
description = package.description.split(",")
for i in description:
url2 = config.domain + "/api/v1/pictureManagement/get?type=package&id=" + i
fileList2.append({"name": i, "url": url2})
items.append(
{
'id': package.id,
'name': package.name,
'no': package.no,
'type': package.type,
'gender': package.gender,
'size': package.size,
'age': package.age,
'Pclass': package.Pclass,
'count': package.count,
# 'price': package.price,
# 'total': package.total,
'point': package.point,
'description': fileList2,
'picture': fileList,
'remark': package.remark,
'updateUser': package.updateUser,
'updateTime': package.update_time.strftime("%m/%d/%Y %H:%M:%S")
}
)
data = {
'pageNo': args.pageNo,
'pageSize': args.pageSize,
'totalCount': totalCount,
'items': items
}
return pretty_result(code.OK, data=data, msg='套餐配置信息获取成功!')
@login_required
def post(self):
self.parser.add_argument("name", type=str, required=True, location="form", help='name is required')
self.parser.add_argument("no", type=str, required=True, location="form", help='no is required')
self.parser.add_argument("type", type=str, required=True, location="form", help='type is required')
self.parser.add_argument("gender", type=str, required=True, location="form", help='gender is required')
self.parser.add_argument("size", type=str, required=True, location="form", help='size is required')
self.parser.add_argument("age", type=str, required=True, location="form", help='age is required')
self.parser.add_argument("Pclass", type=str, required=True, location="form", help='Pclass is required')
self.parser.add_argument("count", type=str, required=True, location="form", help='count is required')
# self.parser.add_argument("price", type=int, required=True, location="form", help='price is required')
# self.parser.add_argument("total", type=int, required=True, location="form", help='total is required')
self.parser.add_argument("point", type=int, required=True, location="form", help='point is required')
self.parser.add_argument("picture", type=FileStorage, required=True, location='files', action='append',
help='picture is required')
self.parser.add_argument("description", type=FileStorage, required=True, location='files', action='append',
help='description is required')
self.parser.add_argument("remark", type=str, location="form", help='remark is required')
self.parser.add_argument("updateUser", type=str, required=True, location="form", help='updateUser is required')
self.parser.add_argument("removeList", type=str, required=True, location="form", help='removelist is required')
self.parser.add_argument("removeList2", type=str, required=True, location="form", help='removelist2 is required')
args = self.parser.parse_args()
packageInfo = PackageModel.query.filter_by(name=args.name).all()
if packageInfo:
return pretty_result(code.ERROR, msg='该套餐名称管理已经被添加!')
removeList = args.removeList.split(",")
pictureList = ''
for item in args.picture:
if item.filename in removeList:
continue
new_fname = filePath + str(item.filename) + '.png'
item.save(new_fname)
pictureList = pictureList + str(item.filename) + ","
pictureList = pictureList[:-1]
removeList2 = args.removeList2.split(",")
pictureList2 = ''
for item in args.description:
if item.filename in removeList2:
continue
new_fname = filePath + str(item.filename) + '.png'
item.save(new_fname)
pictureList2 = pictureList2 + str(item.filename) + ","
pictureList2 = pictureList2[:-1]
Package = PackageModel(name=args.name, no=args.no, gender=args.gender, size=args.size, age=args.age,
Pclass=args.Pclass, count=args.count, type=args.type, description=pictureList2,
point=args.point, picture=pictureList, remark=args.remark, updateUser=args.updateUser)
PackageModel.add(PackageModel, Package)
if Package.id:
content = str({"name": args.name, "gender": args.gender,"no":args.no, "size": args.size, "age": args.age,
"Pclass": args.Pclass, "count":args.count, "type": args.type, "description": pictureList2,
"remark": args.remark, "point": args.point, "picture":pictureList,"updateUser": args.updateUser})
log = LogsModel(username=args.updateUser, model="package", action="add", content=content)
LogsModel.add(LogsModel, log)
return pretty_result(code.OK, msg='套餐管理信息添加成功!')
else:
return pretty_result(code.ERROR, msg='套餐管理信息添加失败!')
@login_required
def put(self):
self.parser.add_argument("id", type=int, required=True, location="form", help='id is required')
self.parser.add_argument("no", type=str, required=True, location="form", help='no is required')
self.parser.add_argument("name", type=str, required=True, location="form", help='name is required')
self.parser.add_argument("type", type=str, required=True, location="form", help='type is required')
self.parser.add_argument("gender", type=str, required=True, location="form", help='gender is required')
self.parser.add_argument("size", type=str, required=True, location="form", help='size is required')
self.parser.add_argument("age", type=str, required=True, location="form", help='age is required')
self.parser.add_argument("Pclass", type=str, required=True, location="form", help='Pclass is required')
self.parser.add_argument("count", type=str, required=True, location="form", help='count is required')
# self.parser.add_argument("price", type=int, required=True, location="form", help='price is required')
# self.parser.add_argument("total", type=int, required=True, location="form", help='total is required')
self.parser.add_argument("picture", type=FileStorage, location='files', action='append',
help='picture is file')
self.parser.add_argument("description", type=FileStorage, location='files', action='append',
help='description is file')
self.parser.add_argument("point", type=int, required=True, location="form", help='point is required')
self.parser.add_argument("remark", type=str, location="form", help='remark is required')
self.parser.add_argument("updateUser", type=str, required=True, location="form", help='updateUser is required')
self.parser.add_argument("removeList", type=str, required=True, location="form", help='removelist is required')
self.parser.add_argument("removeList2", type=str, required=True, location="form", help='removelist2 is required')
args = self.parser.parse_args()
packageInfo = PackageModel.query.filter_by(name=args.name).all()
for item in packageInfo:
if item.id != args.id:
return pretty_result(code.ERROR, msg='该套餐管理已经被添加!')
packageInfo = PackageModel.query.filter_by(id=args.id).first()
packagePictureList = packageInfo.picture.split(",")
removeList = args.removeList.split(",")
pictureList = ''
for j in removeList:
if j in packagePictureList:
packagePictureList.remove(j)
old_fname = filePath + str(j) + '.png'
if os.path.exists(old_fname):
os.remove(old_fname)
else:
print(str(j) + " the file does not exist")
if args.picture:
for item in args.picture:
if item.filename in removeList:
continue
new_fname = filePath + str(item.filename) + '.png'
item.save(new_fname)
packagePictureList.append(str(item.filename))
pictureList = ','.join(packagePictureList)
packagePictureList2 = packageInfo.description.split(",")
removeList2 = args.removeList2.split(",")
pictureList2 = ''
for j in removeList2:
if j in packagePictureList2:
packagePictureList2.remove(j)
old_fname = filePath + str(j) + '.png'
if os.path.exists(old_fname):
os.remove(old_fname)
else:
print(str(j) + " the file does not exist")
if args.description:
for item in args.description:
if item.filename in removeList2:
continue
new_fname = filePath + str(item.filename) + '.png'
item.save(new_fname)
packagePictureList2.append(str(item.filename))
pictureList2 = ','.join(packagePictureList2)
packageInfo.id = args.id
packageInfo.no = args.no
packageInfo.name = args.name
packageInfo.type = args.type
packageInfo.gender = args.gender
packageInfo.size = args.size
packageInfo.age = args.age
packageInfo.Pclass = args.Pclass
# packageInfo.price = args.price
# packageInfo.total = args.total
packageInfo.point = args.point
packageInfo.picture = pictureList
packageInfo.description = pictureList2
packageInfo.remark = args.remark
packageInfo.updateUser = args.updateUser
PackageModel.update(packageInfo)
content = str({"name": args.name, "no":args.no, "type": args.type, "gender": args.gender,
"size": args.size, "age": args.age, "Pclass": args.Pclass,
"point": args.point,"picture": pictureList, "description": pictureList2, "remark": args.remark,
"updateUser": args.updateUser})
log = LogsModel(username=args.updateUser, model="package", action="edit", content=content)
LogsModel.add(LogsModel, log)
return pretty_result(code.OK, msg='套餐管理信息更新成功!')
@login_required
def delete(self):
self.parser.add_argument("ids", type=list, required=True, location="json", help='ids is required')
self.parser.add_argument("updateUser", type=str, required=True, location="json", help='updateUser is required')
self.parser.add_argument("content", type=list, required=True, location="json", help='content is required')
args = self.parser.parse_args()
PackageModel.delete(PackageModel, args.ids)
for item in args.ids:
packageInfo = PackageModel.query.filter_by(id=item).first()
packagePictureList = packageInfo.picture.split(",")
for j in packagePictureList:
old_fname = filePath + str(j) + '.png'
if os.path.exists(old_fname):
os.remove(old_fname)
else:
print(str(j) + " the file does not exist")
packagePictureList2 = packageInfo.description.split(",")
for j in packagePictureList2:
old_fname = filePath + str(j) + '.png'
if os.path.exists(old_fname):
os.remove(old_fname)
else:
print(str(j) + " the file does not exist")
content = str(args.content)
if len(str(args.content)) > 500:
content = str(args.ids)
log = LogsModel(username=args.updateUser, model="package", action="delete", content=content)
LogsModel.add(LogsModel, log)
return pretty_result(code.OK, msg='套餐管理信息删除成功!')
class PackageManagementNameResource(Resource):
"""
    Package name resource class
"""
def __init__(self):
self.parser = RequestParser()
@login_required
def get(self):
"""
        Check whether the package name already exists
:return: json
"""
self.parser.add_argument("name", type=str, required=True, location="args",
help='name is required')
args = self.parser.parse_args()
packageInfo = PackageModel.query.filter_by(name=args.name).all()
if packageInfo:
return pretty_result(code.OK, data=False, msg='该套餐管理名称已经被添加!')
return pretty_result(code.OK, data=True, msg='该套餐管理名称不存在!')
class PackageDetailResource(Resource):
"""
    Package detail resource class
"""
def __init__(self):
self.parser = RequestParser()
def get(self):
"""
        Get package detail info
:return: json
"""
self.parser.add_argument("no", type=str, required=True, location="args",
help='no is required')
args = self.parser.parse_args()
package_list = PackageModel.query.filter_by(no=args.no).all()
items = []
for package in package_list:
fileList = []
fileList2 = []
url = config.domain + "/api/v1/pictureManagement/get?type=package&id=" + package.picture
fileList.append({"name": package.picture, "url": url})
description = package.description.split(",")
for i in description:
url2 = config.domain + "/api/v1/pictureManagement/get?type=package&id=" + i
fileList2.append({"name": i, "url": url2})
items.append(
{
'id': package.id,
'no': package.no,
'name': package.name,
'type': package.type,
'gender': package.gender,
'size': package.size,
'age': package.age,
'Pclass': package.Pclass,
'count': package.count,
# 'price': package.price,
# 'total': package.total,
'point': package.point,
'picture': fileList,
'description': fileList2,
'remark': package.remark,
'updateUser': package.updateUser,
'updateTime': package.update_time.strftime("%m/%d/%Y %H:%M:%S")
}
)
if items:
return pretty_result(code.OK, data=items, msg='Get package detail successful!')
else:
            return pretty_result(code.ERROR, data=[], msg='The requested package does not exist!')
class PackageIDResource(Resource):
"""
    Package No generation resource class
"""
def __init__(self):
self.parser = RequestParser()
@login_required
def get(self):
"""
        Generate the next package No for the given type
:return: json
"""
self.parser.add_argument("type", type=str, required=True, location="args", help='ids is required')
args = self.parser.parse_args()
id = PackageModel.get_id(PackageModel)
no = ""
if not id[0][0]:
no = args.type + "%04d" % 1
else:
no = args.type + "%04d" % (id[0][0] + 1)
return pretty_result(code.OK, data=no, msg='获取套餐No信息成功!')
class PackageAwardResource(Resource):
"""
    Package award (lucky draw) resource class
"""
def __init__(self):
self.parser = RequestParser()
@login_required
def post(self):
"""
        Draw products for a package (lucky draw)
:return: json
"""
self.parser.add_argument("gender", type=str, required=True, location="json",
help='gender is required')
self.parser.add_argument("size", type=str, required=True, location="json",
help='size is required')
self.parser.add_argument("age", type=str, required=True, location="json",
help='age is required')
self.parser.add_argument("Pclass", type=str, required=True, location="json",
help='Pclass is required')
self.parser.add_argument("count", type=int, required=True, location="json",
help='count is required')
self.parser.add_argument("point", type=int, required=True, location="json",
help='point is required')
self.parser.add_argument("email", type=inputs.regex(r'(^[a-zA-Z0-9_.+-]+@[a-zA-Z0-9-]+\.[a-zA-Z0-9-.]+$)'),
required=True, location="json",
help='email format is incorrect')
args = self.parser.parse_args()
products = ProductsModel.filter_by_gender_size_age_Pclass(ProductsModel, args.gender, args.size, args.age, args.Pclass)
i = 0
showData = []
data=[]
for item in products:
print(item)
if i >= args.count:
break
if item.status == "unsold":
i = i + 1
showData.append(item)
if i < args.count:
for item in products:
if i >= args.count:
break
if item.status == "online":
if (datetime.datetime.now() - item.update_time).total_seconds() > config.countDown:
i = i + 1
showData.append(item)
if i < args.count:
            return pretty_result(code.ERROR, data=[], msg='Get award info failed, not enough products!')
else:
for j in showData:
                # update the product's show count and status
productsInfo = ProductsModel.query.filter_by(id=j.id).first()
productsInfo.status = 'online'
productsInfo.showTimes = productsInfo.showTimes + 1
ProductsModel.update(productsInfo)
url = ''
pictures = productsInfo.picture.split(",")
for i in pictures:
url = config.domain + "/api/v1/pictureManagement/get?type=product&id=" + i
break
data.append(
{
'id': productsInfo.id,
'no': productsInfo.no,
'position': productsInfo.position,
'picture': url,
'gender': productsInfo.gender,
'size': productsInfo.size,
'age': productsInfo.age,
'Pclass': productsInfo.Pclass,
'type': productsInfo.type,
'status': productsInfo.status,
'inPrice': productsInfo.inPrice,
'outPrice': productsInfo.outPrice,
'price': productsInfo.price,
'level': productsInfo.level,
'showTimes': productsInfo.showTimes,
'remark': productsInfo.remark,
'updateUser': productsInfo.updateUser,
'updateTime': productsInfo.update_time.strftime("%m/%d/%Y %H:%M:%S")
}
)
Cart = CartsModel(email=args.email, no=productsInfo.no)
CartsModel.add(CartsModel, Cart)
mallUserInfo = MallUsersModel.query.filter_by(email=args.email).first()
mallUserInfo.point = mallUserInfo.point - args.point
MallUsersModel.update(mallUserInfo)
return pretty_result(code.OK, data=data, msg='Get award info successful!')
```
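The package resources above are plain Flask-RESTful `Resource` subclasses; a hedged sketch of how they would typically be wired into an application is shown below. The URL paths and prefix are illustrative assumptions, not the project's actual routing.

```python
from flask import Flask
from flask_restful import Api

# Hypothetical registration; the real project wires these up in its own app factory.
app = Flask(__name__)
api = Api(app, prefix="/api/v1")

api.add_resource(PackageManagementResource, "/packageManagement")
api.add_resource(PackageManagementNameResource, "/packageManagement/name")
api.add_resource(PackageDetailResource, "/packageManagement/detail")
api.add_resource(PackageIDResource, "/packageManagement/id")
api.add_resource(PackageAwardResource, "/packageManagement/award")

if __name__ == "__main__":
    app.run(debug=True)
```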
#### File: backend/tests/testlogger.py
```python
import logging
import unittest
class lgtest(unittest.TestCase):
    logging.basicConfig(filename='../LOG/' + __name__ + '.logs',
                        format='[%(asctime)s-%(filename)s-%(levelname)s:%(message)s]',
                        level=logging.DEBUG, filemode='a',
                        datefmt='%Y-%m-%d %I:%M:%S %p')
def test(self):
logging.error("这是一条error信息的打印")
logging.info("这是一条info信息的打印")
logging.warning("这是一条warn信息的打印")
logging.debug("这是一条debug信息的打印")
if __name__=='__main__':
unittest.main()
``` |
{
"source": "jimbunny/AIVideo",
"score": 2
} |
#### File: AIVideo/backend/app.py
```python
from flask import Flask, jsonify
from flask import abort
import sys
sys.path.append('./jieba/')
import jieba
import jieba.analyse  # extract_tags lives in the analyse submodule and needs an explicit import
# from jieba.test.extract_tags_with_weight import get_tag_with_weight
app = Flask(__name__)
@app.route('/')
def index():
return "Hello, Flask!"
@app.route('/api/TFIDF/<string:content>', methods=['GET'])
def get_task(content, topK=3):
    # Reject empty input before running keyword extraction
    if len(content) == 0:
        abort(404)
    # tags = get_tag_with_weight(content, withWeight=True, topK=topK)
    tags = jieba.analyse.extract_tags(content, topK=topK, withWeight=True)
    return jsonify({'data': tags, 'code': 1, 'msg': 'OK'})
if __name__ == '__main__':
app.run(debug=True)
``` |
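A short client-side sketch of the TF-IDF endpoint above; the host and port are assumptions (Flask's debug defaults), and the sample sentence is purely illustrative.

```python
import requests
from urllib.parse import quote

content = "今天天气很好,我们一起去公园散步吧"
resp = requests.get("http://127.0.0.1:5000/api/TFIDF/" + quote(content))
# Expected shape: {'code': 1, 'msg': 'OK', 'data': [[keyword, weight], ...]}
print(resp.json())
```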
{
"source": "jimbunny/cartoonVideo",
"score": 2
} |
#### File: cartoonVideo/combineVideo/combine_bk.py
```python
import tensorflow as tf
import cv2
# Deeplab Demo
import os
import tarfile
from matplotlib import gridspec
import matplotlib.pyplot as plt
import numpy as np
from PIL import Image
import tempfile
from six.moves import urllib
class DeepLabModel(object):
"""
    Load the pretrained DeepLab model and
    run inference on images.
"""
INPUT_TENSOR_NAME = 'ImageTensor:0'
OUTPUT_TENSOR_NAME = 'SemanticPredictions:0'
INPUT_SIZE = 513
FROZEN_GRAPH_NAME = 'frozen_inference_graph'
def __init__(self, tarball_path):
"""
Creates and loads pretrained deeplab model.
"""
self.graph = tf.Graph()
graph_def = None
# Extract frozen graph from tar archive.
tar_file = tarfile.open(tarball_path)
for tar_info in tar_file.getmembers():
if self.FROZEN_GRAPH_NAME in os.path.basename(tar_info.name):
file_handle = tar_file.extractfile(tar_info)
graph_def = tf.GraphDef.FromString(file_handle.read())
break
tar_file.close()
if graph_def is None:
raise RuntimeError('Cannot find inference graph in tar archive.')
with self.graph.as_default():
tf.import_graph_def(graph_def, name='')
self.sess = tf.Session(graph=self.graph)
def run(self, image):
"""
Runs inference on a single image.
Args:
image: A PIL.Image object, raw input image.
Returns:
resized_image: RGB image resized from original input image.
seg_map: Segmentation map of `resized_image`.
"""
width, height = image.size
# resize_ratio = 1.0 * self.INPUT_SIZE / max(width, height)
resize_ratio = 1
target_size = (int(resize_ratio * width), int(resize_ratio * height))
resized_image = image.convert('RGB').resize(target_size, Image.ANTIALIAS)
batch_seg_map = self.sess.run(
self.OUTPUT_TENSOR_NAME,
feed_dict={self.INPUT_TENSOR_NAME: [np.asarray(resized_image)]})
seg_map = batch_seg_map[0]
return resized_image, seg_map
def run2(self, image):
width = image.shape[1]
height = image.shape[0]
resize_ratio = 1.0 * self.INPUT_SIZE / max(width, height)
target_size = (int(resize_ratio * width), int(resize_ratio * height))
b, g, r = cv2.split(image)
img_rgb = cv2.merge([r, g, b])
resized_image = cv2.resize(img_rgb, target_size, interpolation=cv2.INTER_CUBIC)
batch_seg_map = self.sess.run(self.OUTPUT_TENSOR_NAME,
feed_dict={self.INPUT_TENSOR_NAME: [np.asarray(resized_image)]})
seg_map = batch_seg_map[0]
return resized_image, seg_map
def create_pascal_label_colormap():
colormap = np.zeros((256, 3), dtype=int)
ind = np.arange(256, dtype=int)
for shift in reversed(range(8)):
for channel in range(3):
colormap[:, channel] |= ((ind >> channel) & 1) << shift
ind >>= 3
return colormap
def label_to_color_image(label):
if label.ndim != 2:
raise ValueError('Expect 2-D input label')
colormap = create_pascal_label_colormap()
if np.max(label) >= len(colormap):
raise ValueError('label value too large.')
return colormap[label]
def load_model():
model_path = './deeplabv3_mnv2_pascal_train_aug_2018_01_29.tar.gz'#'deeplab_model.tar.gz'
MODEL = DeepLabModel(model_path)
print('model loaded successfully!')
return MODEL
model = load_model()
def combine_backend(pic, bk, to_dir, i):
src = cv2.imread(pic)
# Read image with Image
src_view = cv2.imread(bk)
resized_im, seg_map = model.run2(src)
resized_view = cv2.resize(src_view,(resized_im.shape[1],resized_im.shape[0]))
resized_view = cv2.medianBlur(resized_view,11)
# seg_image = label_to_color_image(seg_map).astype(np.uint8)
# seg_map = cv2.GaussianBlur(np.uint8(seg_map),(11,11),0)
src_resized = cv2.resize(src,(resized_im.shape[1],resized_im.shape[0]))
# seg_image = cv2.GaussianBlur(seg_image,(11,11),0)
bg_img = np.zeros_like(src_resized)
    # copy the original background
bg_img[seg_map == 0] = src_resized[seg_map == 0]
blured_bg = cv2.GaussianBlur(bg_img,(11,11),0)
result = np.zeros_like(bg_img)
    # composite: keep the person from the frame, use the blurred original background
result[seg_map > 0] = resized_im[seg_map > 0]
result[seg_map == 0] = blured_bg[seg_map == 0]
    # background replacement and compositing
result_2 = np.zeros_like(bg_img)
result_2[seg_map > 0] = src_resized[seg_map > 0]
result_2[seg_map == 0] = resized_view[seg_map == 0]
# cv2.imwrite('D:\\pythonpractice\\bkkkkkkkkkkkkkk.jpg', result_2)
cv2.imwrite(to_dir + str(i) + '.jpg', result_2)
# cv2.imshow('src',src)
# cv2.imshow('resized_im',resized_im)
# cv2.imshow("seg_image",seg_image)
# cv2.imshow('bg_image',bg_img)
# cv2.imshow('blured_bg',blured_bg)
# cv2.imshow('result',result)
# cv2.imshow('result_2', result_2)
#
# cv2.waitKey()
# cv2.destroyAllWindows()
def combine_bk(bk, img_dir, to_dir):
print("开始照片换背景!")
all_images = [os.path.join(img_dir, i) for i in os.listdir(img_dir)]
a = 0
for i in all_images:
print("正在换背景第" + str(a) + '张')
a += 1
combine_backend(i, bk, to_dir, a)
print("照片换背景完成!")
if __name__ == '__main__':
file1 = 'D:\\pythonpractice\\from\\1.jpg'
    file2 = 'D:\\pythonpractice\\from\\1.jpg'
    # The original snippet stopped here; a call like this (output dir is illustrative) would exercise it:
    combine_backend(file1, file2, 'D:\\pythonpractice\\out\\', 1)
```
#### File: cartoonVideo/combineVideo/combine_video.py
```python
import cv2
import os
from os.path import isfile, join
def convert_frames_to_video(pathIn, pathOut, fps):
frame_array = []
files = [f for f in os.listdir(pathIn) if isfile(join(pathIn, f))]
    # for sorting the frame files numerically (assumes names like "<index>.jpg")
    files.sort(key=lambda x: int(os.path.splitext(x)[0]))
for i in range(len(files)):
filename = pathIn + files[i]
# reading each files
img = cv2.imread(filename)
height, width, layers = img.shape
size = (width, height)
# inserting the frames into an image array
frame_array.append(img)
out = cv2.VideoWriter(pathOut, cv2.VideoWriter_fourcc(*'DIVX'), fps, size)
for i in range(len(frame_array)):
# writing to a image array
out.write(frame_array[i])
out.release()
def combine_video(pathIn, pathOut, fps):
print("开始合成视频!")
convert_frames_to_video(pathIn, pathOut, fps)
print('视频已经合成完')
if __name__ == '__main__':
pathIn = 'D:\\pythonpractice\\out_test\\'
pathOut = 'D:\\pythonpractice\\video.avi'
    fps = 60.0
    # The original snippet stopped here; this call runs the conversion:
    combine_video(pathIn, pathOut, fps)
```
#### File: cartoonVideo/combineVideo/read_video_info.py
```python
import cv2
def read_video_info(file):
cap = cv2.VideoCapture(file)
    # get the video resolution
    size = (int(cap.get(cv2.CAP_PROP_FRAME_WIDTH)), int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT)))
    # output codec; X264 is an option on Linux
    fourcc = cv2.VideoWriter_fourcc(*'MJPG')
    # video frame rate
    fps = cap.get(cv2.CAP_PROP_FPS)
print("视频size:" + str(size))
print("视频编码:" + str(fourcc))
print("视频的FPS:" + str(fps))
return size, fourcc, fps
``` |
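Read together, the three cartoonVideo modules above suggest a simple frames-to-video flow: read the source video's FPS, swap the background on each extracted frame, then stitch the processed frames back into a video. A hedged sketch under those assumptions (all paths are placeholders, frame extraction itself is not shown in this section, and the imports assume the scripts sit in the same directory):

```python
from read_video_info import read_video_info
from combine_bk import combine_bk
from combine_video import combine_video

src_video = "./input/source.avi"     # original video
frames_dir = "./frames/"             # frames previously extracted from src_video
background = "./bk/beach.jpg"        # new background image
swapped_dir = "./out_frames/"        # background-replaced frames land here
out_video = "./output/video.avi"

size, fourcc, fps = read_video_info(src_video)   # reuse the source frame rate
combine_bk(background, frames_dir, swapped_dir)  # per-frame background replacement
combine_video(swapped_dir, out_video, fps)       # stitch frames into the final video
```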
{
"source": "jimbunny/LuckyBlindBox",
"score": 2
} |
#### File: api/models/base.py
```python
from sqlalchemy import inspect, orm
from datetime import datetime
from . import db
class BaseModel(db.Model):
"""
data base class
"""
__abstract__ = True
# status = Column(SmallInteger, default=1)
id = db.Column(db.Integer, primary_key=True, autoincrement=True)
is_delete = db.Column(db.BOOLEAN, default=False)
create_time = db.Column(db.DATETIME(6), default=datetime.now)
update_time = db.Column(db.DATETIME(6), default=datetime.now, onupdate=datetime.now)
def __init__(self):
# self.create_time = int(datetime.now().timestamp())
pass
def __getitem__(self, item):
return getattr(self, item)
@property
def create_datetime(self):
if self.create_time:
return datetime.fromtimestamp(self.create_time)
else:
return None
def set_attrs(self, attrs_dict):
for key, value in attrs_dict.items():
if hasattr(self, key) and key != 'id':
setattr(self, key, value)
def delete(self):
self.is_delete = True
def keys(self):
return self.fields
def hide(self, *keys):
for key in keys:
self.fields.remove(key)
return self
def append(self, *keys):
for key in keys:
self.fields.append(key)
return self
class MixinJSONSerializer:
@orm.reconstructor
def init_on_load(self):
self._fields = []
# self._include = []
self._exclude = []
self._set_fields()
self.__prune_fields()
def _set_fields(self):
pass
def __prune_fields(self):
columns = inspect(self.__class__).columns
if not self._fields:
all_columns = set(columns.keys())
self._fields = list(all_columns - set(self._exclude))
def hide(self, *args):
for key in args:
self._fields.remove(key)
return self
def keys(self):
return self._fields
def __getitem__(self, key):
return getattr(self, key)
```
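A minimal sketch of how a concrete model is expected to build on `BaseModel`; the table and column names here are illustrative, and the import paths assume the `models` package shown above is importable.

```python
from sqlalchemy import orm
from models import db
from models.base import BaseModel

class DemoModel(BaseModel):
    __tablename__ = 'demos'

    name = db.Column(db.String(50), nullable=False)
    secret = db.Column(db.String(50), nullable=True)

    @orm.reconstructor
    def __init__(self):
        # `fields` drives dict-style serialization via keys()/__getitem__
        self.fields = ['id', 'name', 'secret', 'create_time']

# Typical use once a row has been loaded:
#   demo = DemoModel.query.first()
#   dict(demo.hide('secret'))   # -> {'id': ..., 'name': ..., 'create_time': ...}
```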
#### File: resources/admin/roles.py
```python
from models.adminRoles import AdminRolesModel
from flask_restful import Resource
from flask_restful.reqparse import RequestParser
from common import code, pretty_result
from common.decorators import login_required
from app import app
import json, os
router_file_path = os.path.join(os.path.dirname(app.instance_path), "config", 'router')
class RoleResource(Resource):
"""
roles resource class
"""
def __init__(self):
self.parser = RequestParser()
@login_required
def get(self):
"""
get permission info list
:return: json
"""
self.parser.add_argument("pageNo", type=int, required=True, location="args",
help='pageNo is required')
self.parser.add_argument("pageSize", type=int, required=True, location="args", help='pageSize is required')
self.parser.add_argument("description", type=str, location="args", help='description is required')
args = self.parser.parse_args()
role_list = AdminRolesModel.paginate(AdminRolesModel, args.pageNo, args.pageSize, description=args.description,
not_permission="SuperAdmin")
data = {
'pageNo': args.pageNo,
'pageSize': args.pageSize,
'totalCount': role_list.total,
'items': role_list.items
}
return pretty_result(code.OK, data=data, msg='get permission info successful!')
@login_required
def post(self):
"""
create permission info
:return: json
"""
self.parser.add_argument("permission", type=str, required=True, location="json", help='permission is required')
self.parser.add_argument("description", type=str, required=True, location="json", help='description is required')
args = self.parser.parse_args()
rolePermissionInfo = AdminRolesModel.is_permission(AdminRolesModel,permission=args.permission)
if rolePermissionInfo:
            return pretty_result(code.ERROR, msg='the permission code already exists!')
role = AdminRolesModel()
role.description = args.description
role.permission = args.permission
AdminRolesModel.add(AdminRolesModel, role)
if role.id:
return pretty_result(code.OK, msg='add permission code successful!')
else:
return pretty_result(code.ERROR, data='', msg='add permission code failed!')
@login_required
def put(self):
self.parser.add_argument("id", type=int, required=True, location="json", help='id is required')
self.parser.add_argument("permission", type=str, required=True, location="json", help='permission is required')
self.parser.add_argument("description", type=str, required=True, location="json", help='description is required')
args = self.parser.parse_args()
roleInfo = AdminRolesModel.get(AdminRolesModel, _id=args.id)
roleInfo.description = args.description
roleInfo.permission = args.permission
AdminRolesModel.update(roleInfo)
return pretty_result(code.OK, msg='update permission code successful!')
def delete_menu_permission(self, routeLIst, permission):
if len(routeLIst):
for item in routeLIst:
if permission in item.get("meta").get("permissions"):
item.get("meta").get("permissions").remove(permission)
if item.get("children"):
self.delete_menu_permission(item.get("children"), permission)
return routeLIst
@login_required
def delete(self):
self.parser.add_argument("ids", type=list, required=True, location="json", help='ids is required')
args = self.parser.parse_args()
filePath = os.path.join(router_file_path, 'router.json')
with open(filePath, 'r', encoding='utf-8') as load_f:
load_dict = json.load(load_f)
menuList = []
for _id in args.ids:
roleInfo = AdminRolesModel.get(AdminRolesModel, _id=_id)
menuList = self.delete_menu_permission(load_dict["data"], roleInfo.permission)
data = {
"code": 200,
"msg": "success",
"data": menuList
}
with open(filePath, 'w', encoding='utf-8') as file:
file.write(json.dumps(data))
AdminRolesModel.delete(AdminRolesModel, args.ids)
return pretty_result(code.OK, msg='delete permission code successful!')
class PermissionResource(Resource):
"""
permissions resource class
"""
def __init__(self):
self.parser = RequestParser()
@login_required
def post(self):
"""
check permission code is exit
:return: json
"""
self.parser.add_argument("permission", type=str, required=True, location="json", help='permission is required')
args = self.parser.parse_args()
rolePermissionInfo = AdminRolesModel.is_permission(AdminRolesModel, permission=args.permission)
if rolePermissionInfo:
return pretty_result(code.OK, data={"status": True}, msg="permission is exit!")
else:
return pretty_result(code.OK, data={"status": False}, msg="successful!")
``` |
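`RoleResource.delete_menu_permission` walks the nested router tree and strips a permission code at every depth before the pruned tree is written back to `router.json`. A tiny self-contained illustration of the same traversal (plain function, made-up route data):

```python
def strip_permission(routes, permission):
    # Same traversal shape as RoleResource.delete_menu_permission
    for item in routes:
        perms = item.get("meta", {}).get("permissions", [])
        if permission in perms:
            perms.remove(permission)
        if item.get("children"):
            strip_permission(item["children"], permission)
    return routes

router = [
    {"path": "/admin", "meta": {"permissions": ["SuperAdmin", "Editor"]},
     "children": [{"path": "/admin/roles", "meta": {"permissions": ["Editor"]}}]},
]
print(strip_permission(router, "Editor"))
# 'Editor' is removed at every depth; only 'SuperAdmin' survives.
```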
{
"source": "jimbunny/wedding-invitation",
"score": 3
} |
#### File: common/BK/dbSearch.py
```python
#!/usr/bin/env python
#-*- coding:utf-8 -*-
# author:jingtongyu
# datetime:2021/6/29 上午10:40
# software: PyCharm
# # Simple queries
# print(session.query(User).all())
# print(session.query(User.name, User.fullname).all())
# print(session.query(User, User.name).all())
#
# # Conditional queries
# print(session.query(User).filter_by(name='user1').all())
# print(session.query(User).filter(User.name == "user").all())
# print(session.query(User).filter(User.name.like("user%")).all())
#
# # Multi-condition queries
# print(session.query(User).filter(and_(User.name.like("user%"), User.fullname.like("first%"))).all())
# print(session.query(User).filter(or_(User.name.like("user%"), User.password != None)).all())
#
# # Raw SQL filtering
# print(session.query(User).filter("id>:id").params(id=1).all())
#
# # Join queries
# print(session.query(User, Address).filter(User.id == Address.user_id).all())
# print(session.query(User).join(User.addresses).all())
# print(session.query(User).outerjoin(User.addresses).all())
#
# # Aggregate queries
# print(session.query(User.name, func.count('*').label("user_count")).group_by(User.name).all())
# print(session.query(User.name, func.sum(User.id).label("user_id_sum")).group_by(User.name).all())
#
# # Subqueries
# stmt = session.query(Address.user_id, func.count('*').label("address_count")).group_by(Address.user_id).subquery()
# print(session.query(User, stmt.c.address_count).outerjoin((stmt, User.id == stmt.c.user_id)).order_by(User.id).all())
#
# #exists
# print(session.query(User).filter(exists().where(Address.user_id == User.id)))
# print(session.query(User).filter(User.addresses.any()))
# # Query returning only selected fields
# person = session.query(Person.name, Person.created_at,
# Person.updated_at).filter_by(name="zhongwei").order_by(
# Person.created_at).first()
#
# # Count total records
# from sqlalchemy import func
#
# # count User records, without
# # using a subquery.
# session.query(func.count(User.id))
#
# # return count of user "id" grouped
# # by "name"
# session.query(func.count(User.id)).\
# group_by(User.name)
#
# from sqlalchemy import distinct
#
# # count distinct "name" values
# session.query(func.count(distinct(User.name)))
# def to_dict(self):
# model_dict = dict(self.__dict__)
# del model_dict['_sa_instance_state']
# for key, value in model_dict.items():
# if isinstance(value, date):
# model_dict[key] = value.strftime('%Y-%m-%d')
# if key == 'picture':
# urls = model_dict[key].split(",")
# model_dict[key] = setting.domain + "/api/v1/pictureManagement/get?type=product&id=" + urls[0]
#
# return model_dict
#
#
# def dobule_to_dict(self):
# result = {}
# for key in self.__mapper__.c.keys():
# if getattr(self, key) is not None:
# result[key] = str(getattr(self, key))
# else:
# result[key] = getattr(self, key)
# return result
#
#
# # Used together with to_dict
# def to_json(all_vendors):
# v = [ven.dobule_to_dict() for ven in all_vendors]
# return v
```
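The notes above are all commented out; the following is a small self-contained sketch of a few of those query patterns against an in-memory SQLite database. The `User` model and rows here are illustrative, not the project's, and the import paths assume SQLAlchemy 1.4+.

```python
from sqlalchemy import create_engine, Column, Integer, String, func, and_
from sqlalchemy.orm import declarative_base, sessionmaker

Base = declarative_base()

class User(Base):
    __tablename__ = 'users'
    id = Column(Integer, primary_key=True)
    name = Column(String(50))
    fullname = Column(String(100))

engine = create_engine('sqlite://')          # in-memory database
Base.metadata.create_all(engine)
session = sessionmaker(bind=engine)()

session.add_all([User(name='user1', fullname='first user'),
                 User(name='user2', fullname='second user')])
session.commit()

# Simple and conditional queries
print(session.query(User.name, User.fullname).all())
print(session.query(User).filter(User.name.like('user%')).all())
print(session.query(User).filter(and_(User.name.like('user%'),
                                      User.fullname.like('first%'))).all())

# Aggregate query
print(session.query(User.name, func.count('*').label('user_count'))
      .group_by(User.name).all())
```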
#### File: migrations/versions/ea937f8dea77_v1.py
```python
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = 'ea937f8dea77'
down_revision = None
branch_labels = None
depends_on = None
def upgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.create_table('admin_roles',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('is_delete', sa.BOOLEAN(), nullable=True),
sa.Column('create_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('update_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('description', sa.String(length=50), nullable=False),
sa.Column('permission', sa.String(length=10), nullable=False),
sa.PrimaryKeyConstraint('id')
)
op.create_table('admin_users',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('is_delete', sa.BOOLEAN(), nullable=True),
sa.Column('create_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('update_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('email', sa.String(length=50), nullable=False),
sa.Column('username', sa.String(length=50), nullable=False),
sa.Column('_password', sa.String(length=250), nullable=False),
sa.Column('permission', sa.String(length=50), nullable=False),
sa.Column('avatar', sa.String(length=250), nullable=False),
sa.Column('login_time', sa.DATETIME(timezone=6), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('email'),
sa.UniqueConstraint('username')
)
op.create_table('classifications',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('is_delete', sa.BOOLEAN(), nullable=True),
sa.Column('create_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('update_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('name', sa.String(length=50), nullable=False),
sa.Column('rank', sa.Integer(), nullable=False),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('logs',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('is_delete', sa.BOOLEAN(), nullable=True),
sa.Column('create_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('username', sa.String(length=50), nullable=False),
sa.Column('model', sa.String(length=20), nullable=False),
sa.Column('action', sa.String(length=10), nullable=False),
sa.Column('content', sa.String(length=500), nullable=False),
sa.Column('update_time', sa.DateTime(timezone=6), nullable=True),
sa.PrimaryKeyConstraint('id')
)
op.create_table('packages',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('is_delete', sa.BOOLEAN(), nullable=True),
sa.Column('create_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('name', sa.String(length=25), nullable=False),
sa.Column('no', sa.String(length=50), nullable=False),
sa.Column('_type', sa.String(length=50), nullable=False),
sa.Column('cover_img', sa.String(length=100), nullable=False),
sa.Column('detail_img', sa.String(length=100), nullable=False),
sa.Column('status', sa.BOOLEAN(), nullable=False),
sa.Column('price', sa.Integer(), nullable=False),
sa.Column('rank', sa.Integer(), nullable=False),
sa.Column('product_list', sa.String(length=100), nullable=False),
sa.Column('remark', sa.String(length=500), nullable=True),
sa.Column('update_user_id', sa.Integer(), nullable=False),
sa.Column('update_time', sa.DATETIME(timezone=6), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name')
)
op.create_table('products',
sa.Column('id', sa.Integer(), autoincrement=True, nullable=False),
sa.Column('is_delete', sa.BOOLEAN(), nullable=True),
sa.Column('create_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('no', sa.String(length=25), nullable=False),
sa.Column('name', sa.String(length=50), nullable=False),
sa.Column('cover_img', sa.String(length=100), nullable=False),
sa.Column('detail_img', sa.String(length=100), nullable=False),
sa.Column('description_img', sa.String(length=100), nullable=False),
sa.Column('classification', sa.String(length=50), nullable=False),
sa.Column('status', sa.BOOLEAN(), nullable=False),
sa.Column('in_price', sa.Integer(), nullable=False),
sa.Column('out_price', sa.Integer(), nullable=False),
sa.Column('count', sa.Integer(), nullable=False),
sa.Column('rank', sa.Integer(), nullable=False),
sa.Column('level', sa.String(length=10), nullable=False),
sa.Column('position', sa.String(length=100), nullable=False),
sa.Column('package_list', sa.String(length=100), nullable=False),
sa.Column('remark', sa.String(length=500), nullable=True),
sa.Column('update_user_id', sa.Integer(), nullable=False),
sa.Column('update_time', sa.DATETIME(timezone=6), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('name'),
sa.UniqueConstraint('no')
)
op.create_table('tests',
sa.Column('is_delete', sa.BOOLEAN(), nullable=True),
sa.Column('create_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('update_time', sa.DATETIME(timezone=6), nullable=True),
sa.Column('id', sa.Integer(), nullable=False),
sa.Column('username', sa.String(length=250), nullable=False),
sa.Column('login_time', sa.DateTime(), nullable=True),
sa.PrimaryKeyConstraint('id'),
sa.UniqueConstraint('username')
)
# ### end Alembic commands ###
def downgrade():
# ### commands auto generated by Alembic - please adjust! ###
op.drop_table('tests')
op.drop_table('products')
op.drop_table('packages')
op.drop_table('logs')
op.drop_table('classifications')
op.drop_table('admin_users')
op.drop_table('admin_roles')
# ### end Alembic commands ###
```
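A hedged sketch of applying this revision programmatically through Alembic's command API; the ini path is an assumption, and a Flask-Migrate project would usually drive this with `flask db upgrade` instead.

```python
from alembic import command
from alembic.config import Config

cfg = Config("migrations/alembic.ini")   # path is illustrative
command.upgrade(cfg, "head")             # apply migrations up to the latest revision
# command.downgrade(cfg, "-1")           # roll back one revision if needed
```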
#### File: api/models/appUsers.py
```python
from flask import current_app
from . import db
from .base import BaseModel
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy import orm
from config.setting import config
from werkzeug.security import generate_password_hash, check_password_hash
import datetime
avatarConfig = config.avatar
class AppUsersModel(BaseModel):
__tablename__ = 'app_users'
id = db.Column(db.Integer, primary_key=True)
email = db.Column(db.String(25), unique=True, nullable=False)
username = db.Column(db.String(25), nullable=False)
phone = db.Column(db.String(25), nullable=True)
password = db.Column(db.String(250), nullable=True)
description = db.Column(db.String(50), nullable=True)
role = db.Column(db.String(10), default='user', nullable=False)
facebook = db.Column(db.String(25), default='', nullable=True)
avatar = db.Column(db.String(100), default=avatarConfig, nullable=False)
login_time = db.Column(db.DateTime, default=datetime.datetime.now)
@orm.reconstructor
def __init__(self):
self.fields = ['id', 'email', 'username', 'phone', 'description', 'role', 'facebook', 'avatar', 'login_time']
def __str__(self):
return "app_users(id='%s')" % self.id
def set_password(self, password):
return generate_password_hash(password)
def check_password(self, hash, password):
return check_password_hash(hash, password)
def paginate(self, page, per_page):
return self.query.paginate(page=page, per_page=per_page, error_out=False)
def filter_by_username(self, username):
return self.query.filter(self.username.like("%" + username + "%")).all()
def get(self, _id):
return self.query.filter_by(id=_id).first()
def add(self, user):
db.session.add(user)
return session_commit()
def update(self):
return session_commit()
def delete(self, ids):
# self.query.filter_by(id=id).delete()
self.query.filter(self.id.in_(ids)).delete(synchronize_session=False)
return session_commit()
def session_commit():
try:
db.session.commit()
except SQLAlchemyError as e:
db.session.rollback()
reason = str(e)
current_app.logger.info(e)
return reason
```
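A short sketch of the password helpers on the model above; the instance is created only to call the helpers, the values are illustrative, and the import assumes the application's `models` package is importable.

```python
from models.appUsers import AppUsersModel

user = AppUsersModel()
hashed = user.set_password("s3cret")            # wraps werkzeug's generate_password_hash
print(user.check_password(hashed, "s3cret"))    # True
print(user.check_password(hashed, "wrong"))     # False
```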
#### File: api/models/templates.py
```python
from flask import current_app
from . import db
from .base import BaseModel
from sqlalchemy.exc import SQLAlchemyError
from sqlalchemy import orm
from config.setting import config
import datetime
avatarConfig = config.avatar
class TemplatesModel(BaseModel):
__tablename__ = 'templates'
id = db.Column(db.Integer, primary_key=True)
name = db.Column(db.String(200), nullable=False)
description = db.Column(db.String(50), nullable=True)
pageViews = db.Column(db.String(10), default='8888', nullable=False)
price = db.Column(db.String(25), default='', nullable=True)
avatar = db.Column(db.String(100), default='', nullable=False)
@orm.reconstructor
def __init__(self):
self.fields = ['id', 'name', 'description', 'pageViews', 'price', 'avatar']
def __str__(self):
return "templates(id='%s')" % self.id
def paginate(self, page, per_page):
return self.query.paginate(page=page, per_page=per_page, error_out=False)
def filter_by_name(self, name):
return self.query.filter(self.name.like("%" + name + "%")).all()
def get(self, _id):
return self.query.filter_by(id=_id).first()
def add(self, user):
db.session.add(user)
return session_commit()
def update(self):
return session_commit()
def delete(self, ids):
# self.query.filter_by(id=id).delete()
self.query.filter(self.id.in_(ids)).delete(synchronize_session=False)
return session_commit()
def session_commit():
try:
db.session.commit()
except SQLAlchemyError as e:
db.session.rollback()
reason = str(e)
current_app.logger.info(e)
return reason
```
#### File: resources/admin/upload.py
```python
from flask import Response
from flask_restful import Resource
from flask_restful.reqparse import RequestParser
from common import code, pretty_result
from flask import make_response, render_template, abort, request
import os
import json
import math
root = os.path.abspath(os.path.join(os.getcwd()))
headers = {'Content-Type': 'text/html'}
class UploadTemplateResource(Resource):
"""
    Template configuration upload resource class
"""
def __init__(self):
self.parser = RequestParser()
def get(self):
"""
        Render the template-configuration upload page.
        :return: HTML response
"""
try:
return make_response(render_template('uploadTemplate.html'), 200, headers)
except Exception as e:
abort(404)
def post(self):
        # Get the uploaded file from the 'file' form field
        file = request.files['file']
        # Save under a fixed name (secure_filename would strip the non-ASCII characters)
file_name = 'management.json'
dirs = os.path.join('data', 'template')
if not os.path.exists(dirs):
os.makedirs(dirs)
file.save(os.path.join('data', 'template', file_name))
return pretty_result(code.OK)
class UploadSwipeResource(Resource):
"""
    Swipe configuration upload resource class
"""
def __init__(self):
self.parser = RequestParser()
def get(self):
"""
        Render the swipe-configuration upload page.
        :return: HTML response
"""
try:
return make_response(render_template('uploadSwipe.html'), 200, headers)
except Exception as e:
abort(404)
def post(self):
        # Get the uploaded file from the 'file' form field
        file = request.files['file']
        # Save under a fixed name (secure_filename would strip the non-ASCII characters)
file_name = 'swipe.json'
dirs = os.path.join('data', 'template')
if not os.path.exists(dirs):
os.makedirs(dirs)
file.save(os.path.join('data', 'template', file_name))
return pretty_result(code.OK)
class UploadProductResource(Resource):
"""
    Product configuration upload resource class
"""
def __init__(self):
self.parser = RequestParser()
def get(self):
"""
        Render the product-configuration upload page.
        :return: HTML response
"""
try:
return make_response(render_template('uploadProduct.html'), 200, headers)
except Exception as e:
abort(404)
def post(self):
        # Get the uploaded file from the 'file' form field
        file = request.files['file']
        # Save under a fixed name (secure_filename would strip the non-ASCII characters)
file_name = 'product.json'
dirs = os.path.join('data', 'template')
if not os.path.exists(dirs):
os.makedirs(dirs)
file.save(os.path.join('data', 'template', file_name))
return pretty_result(code.OK)
class TemplatesResource(Resource):
"""
    templates resource class
"""
def __init__(self):
self.parser = RequestParser()
def get(self):
"""
        get template list info
:return: json
"""
self.parser.add_argument("pageNo", type=int, required=True, location="args",
help='pageNo is required')
self.parser.add_argument("pageSize", type=int, required=True, location="args", help='pageSize is required')
self.parser.add_argument("color", type=str, required=True, location="args", help='color is required')
args = self.parser.parse_args()
load_dict = {}
with open(os.path.join(root, "data", "template", "management.json"), 'r', encoding="utf8") as load_f:
load_dict = json.load(load_f)
temp = []
if args.color == 'all':
temp = load_dict.get("data")
        else:
            for item in load_dict.get("data"):
                if args.color in item.get('tmpColor'):
                    temp.append(item)
data = {
'pageNo': args.pageNo,
'pageSize': args.pageSize,
'totalCount': len(temp),
'totalPages': math.ceil(len(temp)/args.pageSize),
'items': temp[(args.pageNo-1)*args.pageSize: args.pageNo*args.pageSize]
}
return pretty_result(code.OK, data=data, msg='get template info successful!')
``` |
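A hedged sketch of pushing a template-configuration file to the upload endpoint above with `requests`; the route path is an assumption, since the `Api` registration is not shown in this file.

```python
import requests

# Hypothetical route; use whatever path UploadTemplateResource is registered under.
url = "http://localhost:5000/api/v1/uploadTemplate"
with open("management.json", "rb") as f:
    resp = requests.post(url, files={"file": f})   # the form field must be named 'file'
print(resp.status_code, resp.json())
```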
{
"source": "jimbydamonk/jenkins-job-builder-addons",
"score": 2
} |
#### File: jenkins-job-builder-addons/jenkins_jobs_addons/views.py
```python
import xml.etree.ElementTree as XML
import jenkins_jobs.modules.base
def all_view(parser, xml_parent, data):
"""
All view
:arg bool filter-executors: only those build executors will be shown that
could execute the jobs in this view.
:arg bool filter-queue: only jobs in this view will be shown in the queue.
    :arg bool folder: Whether or not this view is in a folder.
Example:
.. literalinclude:: /../tests/views/fixtures/all_view.yaml
"""
view = XML.SubElement(xml_parent, 'hudson.model.AllView')
XML.SubElement(view, 'name').text = 'All'
in_folder = data.get('folder', False)
owner_attrs = dict()
if in_folder:
owner_attrs['class'] = 'com.cloudbees.hudson.plugins.folder.Folder'
owner_attrs['reference'] = '../../..'
XML.SubElement(view, 'owner', attrib=owner_attrs)
executors = data.get('filter-executors', False)
XML.SubElement(view, 'filterExecutors').text = str(executors).lower()
queue = data.get('filter-queue', False)
XML.SubElement(view, 'filterQueue').text = str(queue).lower()
properties_attributes = dict()
properties_attributes['class'] = 'hudson.model.View$PropertyList'
XML.SubElement(view, 'properties', attrib=properties_attributes)
def delivery_pipeline_view(parser, xml_parent, data):
"""
Delivery Pipeline View requires the Jenkins `Delivery Pipeline Plugin.
<https://wiki.jenkins-ci.org/display/JENKINS/Delivery+Pipeline+Plugin>`_
:arg bool filter-executors: only those build executors will be shown that
could execute the jobs in this view.
:arg bool filter-queue: only jobs in this view will be shown in the queue.
    :arg bool folder: Whether or not this view is in a folder.
:arg str name: The name of this view.
:arg dict components: The components (jobs) for this pipeline:
* **name** (str): Name of the pipeline, usually the name of the
component or product.
* **first-job** (str): First job in the pipeline. Usually the
build/compile job. The build number/build
display name will be used as the version in
later tasks or stages. If using folders, it
should be a full path to the job.
:arg int number-of-pipelines: Number of pipelines instances shown for each
pipeline.
:arg bool show-aggregated-pipeline: Show an aggregated view where each
stage shows the latest version being
executed.
:arg int number-of-columns: Number of columns used for showing pipelines.
Useful for multiple components in the view to
show them beside each others.
    :arg str sorting: How to sort the pipeline in the view.
Only applicable for several pipelines.
Can be sorted by latest activity or by name.
:arg int update-interval: How often will the view be updated in seconds.
:arg bool allow-pipeline-start: Start a new pipeline build.
    :arg bool allow-manual-triggers: If a task is manual ("Build other projects
        (manual step)" from the Build Pipeline Plugin), show a trigger button.
:arg bool allow-rebuild: Rebuild a task.
    :arg bool show-avatars: Show avatar pictures instead of names of the people
        involved in a pipeline instance. Use the `Avatar Plugin
        <https://wiki.jenkins-ci.org/display/JENKINS/Avatar+Plugin>`_
        or the `Gravatar Plugin.
<https://wiki.jenkins-ci.org/display/JENKINS/Gravatar+plugin>`_ or
similar to set avatar picture for contributors.
:arg bool show-changes: Show SCM change log for the first job in the
pipeline. If Repository browser is configured, link to change will be
created to the repository browser.
:arg bool show-description: Show build description connected to a task.
:arg bool show-promotions: Show promotions from the `Promoted Builds
Plugin.
        <https://wiki.jenkins-ci.org/display/JENKINS/Promoted+Builds+Plugin>`_
:arg bool show-total-buildtime: Show total build time of a pipeline.
If there are multiple routes in a pipeline, total build time is
calculated as the sum of the build times in the longest route.
:arg str css-url: Possibility to override CSS for the normal view.
Enter the full url to the custom CSS.
:arg str fullscreen-css-url: Possibility to override CSS for the
fullscreen view. Enter the full url to the custom CSS.
:arg list regexp-first-jobs: Find jenkins job matching regular expression.
^build-(.+?)-project
Example:
.. literalinclude:: /../tests/views/fixtures/delivery_pipeline.yaml
"""
delivery_pipeline = 'se.diabol.jenkins.pipeline.DeliveryPipelineView'
view = XML.SubElement(xml_parent, delivery_pipeline)
in_folder = data.get('folder', False)
owner_attrs = dict()
if in_folder:
owner_attrs['class'] = 'com.cloudbees.hudson.plugins.folder.Folder'
owner_attrs['reference'] = '../../..'
XML.SubElement(view, 'owner', attrib=owner_attrs)
XML.SubElement(view, 'name').text = data.get('name')
executors = data.get('filter-executors', False)
XML.SubElement(view, 'filterExecutors').text = str(executors).lower()
queue = data.get('filter-queue', False)
XML.SubElement(view, 'filterQueue').text = str(queue).lower()
properties_attributes = dict()
properties_attributes['class'] = 'hudson.model.View$PropertyList'
XML.SubElement(view, 'properties', attrib=properties_attributes)
xml_components = XML.SubElement(view, 'componentSpecs')
components = data.get('components', [])
for component in components:
spec_class = "se.diabol.jenkins.pipeline."\
"DeliveryPipelineView_-ComponentSpec"
component_spec = XML.SubElement(xml_components, spec_class)
name = component.get('name')
XML.SubElement(component_spec, 'name').text = name
first_job = component.get('first-job')
XML.SubElement(component_spec, 'firstJob').text = first_job
number_of_pipelines = str(data.get('number-of-pipelines', 3))
XML.SubElement(
view, 'noOfPipelines').text = number_of_pipelines
aggregated_pipeline_raw = data.get('show-aggregated-pipeline', False)
aggregated_pipeline = str(aggregated_pipeline_raw).lower()
XML.SubElement(view, 'showAggregatedPipeline').text = aggregated_pipeline
number_of_columns = str(data.get('number-of-columns', 1))
XML.SubElement(view, 'noOfColumns').text = number_of_columns
sorting_options = ['none', 'Name', 'LatestActivity']
sorting = data.get('sorting', 'none')
if sorting not in sorting_options:
raise ValueError('sorting must be one of {} '.format(sorting_options))
if sorting == 'none':
XML.SubElement(view, 'sorting').text = 'none'
else:
XML.SubElement(
view, 'sorting'
).text = 'se.diabol.jenkins.pipeline.sort.{}Comparator'.format(sorting)
show_avatars = data.get('show-avatars', False)
XML.SubElement(view, 'showAvatars').text = str(show_avatars).lower()
update_interval = str(data.get('update-interval', 1))
XML.SubElement(view, 'updateInterval').text = update_interval
show_changes = str(data.get('show-changes', False)).lower()
XML.SubElement(view, 'showChanges').text = str(show_changes).lower()
manual_triggers = str(data.get('allow-manual-triggers', False)).lower()
XML.SubElement(view, 'allowManualTriggers').text = manual_triggers
total_build_time = str(data.get('show-total-buildtime', False)).lower()
XML.SubElement(view, 'showTotalBuildTime').text = total_build_time
allow_rebuild = str(data.get('allow-rebuild', False)).lower()
XML.SubElement(view, 'allowRebuild').text = allow_rebuild
pipeline_start = str(data.get('allow-pipeline-start', False)).lower()
XML.SubElement(view, 'allowPipelineStart').text = pipeline_start
show_description = str(data.get('show-description', False)).lower()
XML.SubElement(view, 'showDescription').text = show_description
show_promotions = str(data.get('show-promotions', False)).lower()
XML.SubElement(view, 'showPromotions').text = show_promotions
xml_jobs = XML.SubElement(view, 'regexpFirstJobs')
jobs = data.get('regexp-first-jobs', [])
for job in jobs:
xml_job = XML.SubElement(xml_jobs, 'se.diabol.jenkins.pipeline.'
'DeliveryPipelineView_-RegExpSpec')
XML.SubElement(xml_job, 'regexp').text = job
    XML.SubElement(view, 'fullScreenCss').text = data.get('fullscreen-css-url')
    XML.SubElement(view, 'embeddedCss').text = data.get('css-url')
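# A hedged, comment-only sketch (not part of the original module) of exercising the
# builder above in isolation. The data dict mirrors what a YAML fixture would
# deserialize to, and all values are illustrative:
#
#   root = XML.Element('views')
#   delivery_pipeline_view(None, root, {
#       'name': 'my-pipeline',
#       'components': [{'name': 'app', 'first-job': 'app-build'}],
#       'number-of-pipelines': 5,
#       'sorting': 'Name',
#   })
#   print(XML.tostring(root).decode())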
def build_pipeline_view(parser, xml_parent, data):
"""
Build Pipeline View requires the Jenkins `Build Pipeline Plugin.
<https://wiki.jenkins-ci.org/display/JENKINS/Build+Pipeline+Plugin>`_
:arg bool filter-executors: only those build executors will be shown that
could execute the jobs in this view.
:arg bool filter-queue: only jobs in this view will be shown in the queue.
    :arg bool folder: Whether or not this view is in a folder.
:arg str name: The name of this view.
:arg str first-job: Select the initial or parent Job in the build
pipeline view.
:arg int display-number-of-builds: Select the number of build pipelines to
display in the view.
:arg str build-view-title: The title of this view.
:arg str console-output-link-style: One the following:
* **This Window**
* **New Window**
* **Light Box** (default)
:arg bool trigger-only-latest-job: Select this option to restrict the
display of a Trigger button to only the most recent successful build
pipelines. This option will also limit retries to just unsuccessful
builds of the most recent build pipelines.
* **True**: Only the most recent successful builds displayed on the
view will have a manual trigger button for the next build
in the pipeline.
* **False**: All successful builds displayed on the view will have a
manual trigger button for the next build in the pipeline.
:arg bool always-allow-manual-trigger: Select this option if you want to
be able to execute again a successful pipeline step. If the build is
parameterized, this will re-execute the step using the same parameter
values that were used when it was previously executed.
:arg bool start-with-parameters: Sets the plugin's ``startsWithParameters``
option.
:arg bool show-pipeline-definition-in-headers: Select this option if you want
to show the pipeline definition header in the pipeline view. If this option
is not selected, then a pipeline that has never been run will not show
any details about its jobs and will appear as a blank form. Job details will
only appear after the pipeline has been run at least once.
:arg bool show-pipeline-parameters-in-header: Select this option if you
want to display the parameters used to run the latest successful job
in the pipeline's project headers.
:arg bool show-pipeline-parameters: Select this option if you want to
display the parameters used to run the first job in each pipeline's
revision box.
:arg int refresh-frequency: Frequency, in seconds, at which the Build
Pipeline Plugin updates the build cards
:arg str css-url: Link to override style sheet
Example:
.. literalinclude:: /../tests/views/fixtures/build_pipeline_view.yaml
"""
build_pipeline = 'au.com.centrumsystems.hudson.plugin.'\
'buildpipeline.BuildPipelineView'
view = XML.SubElement(xml_parent, build_pipeline)
in_folder = data.get('folder', False)
owner_attrs = dict()
if in_folder:
owner_attrs['class'] = 'com.cloudbees.hudson.plugins.folder.Folder'
owner_attrs['reference'] = '../../..'
XML.SubElement(view, 'owner', attrib=owner_attrs)
XML.SubElement(view, 'name').text = data.get('name')
executors = data.get('filter-executors', False)
XML.SubElement(view, 'filterExecutors').text = str(executors).lower()
queue = data.get('filter-queue', False)
XML.SubElement(view, 'filterQueue').text = str(queue).lower()
properties_attributes = dict()
properties_attributes['class'] = 'hudson.model.View$PropertyList'
XML.SubElement(view, 'properties', attrib=properties_attributes)
grid_attrs = dict()
grid_attrs['class'] = 'au.com.centrumsystems.hudson.plugin.buildpipeline.'\
'DownstreamProjectGridBuilder'
grid = XML.SubElement(view, 'gridBuilder', attrib=grid_attrs)
first_job = data.get('first-job', None)
XML.SubElement(grid, 'firstJob').text = first_job
display_number_of_builds = str(data.get('display-number-of-builds', 10))
XML.SubElement(view, 'noOfDisplayedBuilds').text = display_number_of_builds
build_view_title = data.get('build-view-title')
XML.SubElement(view, 'buildViewTitle').text = build_view_title
console_output_links = ['This Window', 'New Window', 'Light Box']
console_output_link_style = data.get(
'console-output-link-style', 'Light Box')
if console_output_link_style not in console_output_links:
raise ValueError('console-output-link-style must '
'be one of {}'.format(console_output_links))
XML.SubElement(
view, 'consoleOutputLinkStyle'
).text = console_output_link_style
XML.SubElement(view, 'cssUrl').text = data.get('css-url')
job = XML.SubElement(view, 'triggerOnlyLatestJob')
job.text = str(data.get('trigger-only-latest-job', False)).lower()
manual_trigger = data.get('always-allow-manual-trigger', False)
manual_trigger = str(manual_trigger).lower()
XML.SubElement(
view, 'alwaysAllowManualTrigger'
).text = manual_trigger
params = str(data.get('show-pipeline-parameters', False)).lower()
XML.SubElement(view, 'showPipelineParameters').text = params
headers_raw = data.get('show-pipeline-parameters-in-header', False)
headers = str(headers_raw).lower()
XML.SubElement(
view, 'showPipelineParametersInHeaders'
).text = headers
start_with_params = str(data.get('start-with-parameters', False)).lower()
XML.SubElement(
view, 'startsWithParameters'
).text = start_with_params
refresh_freq = data.get('refresh-frequency', 3)
XML.SubElement(view, 'refreshFrequency').text = str(refresh_freq)
show_def_raw = data.get('show-pipeline-definition-in-headers', False)
show_def = str(show_def_raw).lower()
XML.SubElement(view, 'showPipelineDefinitionHeader').text = show_def
class Views(jenkins_jobs.modules.base.Base):
sequence = 20
component_type = 'view'
component_list_type = 'views'
def gen_xml(self, parser, xml_parent, data):
views = XML.SubElement(xml_parent, 'views')
for view in data.get('views', []):
self.registry.dispatch('view', parser, views, view)
```
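For orientation, here is a small hypothetical driver (not part of the repository) that feeds `build_pipeline_view` a data dict of the kind jenkins-job-builder would build from YAML. The keys and values are illustrative assumptions taken from the docstring above, and `parser` is passed as `None` because the function body never uses it.
```python
# Hypothetical usage sketch; assumes the module is importable as
# jenkins_jobs_addons.views (see setup.py below).
import xml.etree.ElementTree as ET

from jenkins_jobs_addons import views

root = ET.Element('root')
data = {
    'name': 'my-pipeline',                    # view name
    'first-job': 'build-app',                 # initial job of the pipeline
    'display-number-of-builds': 5,
    'build-view-title': 'App Pipeline',
    'console-output-link-style': 'Light Box',
    'refresh-frequency': 10,
}
views.build_pipeline_view(parser=None, xml_parent=root, data=data)
print(ET.tostring(root).decode())
```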
#### File: jimbydamonk/jenkins-job-builder-addons/setup.py
```python
from setuptools.command.test import test as TestCommand
try:
from setuptools import setup
except ImportError:
from distutils.core import setup
with open('README.rst') as readme_file:
readme = readme_file.read()
with open('HISTORY.rst') as history_file:
history = history_file.read().replace('.. :changelog:', '')
requirements = [
# TODO: put package requirements here
]
test_requirements = [
# TODO: put package test requirements here
]
class Tox(TestCommand):
user_options = [('tox-args=', 'a', "Arguments to pass to tox")]
def initialize_options(self):
TestCommand.initialize_options(self)
self.tox_args = None
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = []
self.test_suite = True
def run_tests(self):
# import here, because outside the eggs aren't loaded
import tox
import shlex
args = self.tox_args
if args:
args = shlex.split(self.tox_args)
tox.cmdline(args=args)
setup(
name='jenkins-job-builder-addons',
version='1.0.5',
description="A suite of jenkins job builder addons",
long_description=readme + '\n\n' + history,
author="<NAME>",
author_email='<EMAIL>',
url='https://github.com/jimbydamonk/jenkins-job-builder-addons',
packages=['jenkins_jobs_addons'],
include_package_data=True,
install_requires=requirements,
license="Apache",
zip_safe=False,
keywords='jenkins ',
classifiers=[
'Development Status :: 2 - Pre-Alpha',
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Natural Language :: English',
"Programming Language :: Python :: 2",
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
],
test_suite='tests',
tests_require=['tox'] + test_requirements,
cmdclass={'test': Tox},
entry_points={
'jenkins_jobs.projects': [
'folder=jenkins_jobs_addons.folders:Folder',
],
'jenkins_jobs.views': [
'all=jenkins_jobs_addons.views:all_view',
'build_pipeline=jenkins_jobs_addons.views:build_pipeline_view',
'delivery_pipeline=jenkins_jobs_addons.'
'views:delivery_pipeline_view'
],
'jenkins_jobs.modules': [
'views=jenkins_jobs_addons.views:Views'
]
},
)
``` |
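The `entry_points` section above is what lets jenkins-job-builder discover these add-ons once the package is installed. As a rough, hypothetical illustration (Python 3.10+), the registered entry points in those groups can be listed with the standard library:
```python
# Not part of the repository: list whatever is installed under the
# entry-point groups declared in setup() above.
from importlib.metadata import entry_points

for group in ("jenkins_jobs.views", "jenkins_jobs.modules", "jenkins_jobs.projects"):
    for ep in entry_points().select(group=group):
        print(group, "->", ep.name, "=", ep.value)
```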
{
"source": "jimcarreer/dinao",
"score": 3
} |
#### File: dinao/backend/base.py
```python
import logging
from abc import ABC, abstractmethod
from contextlib import contextmanager
from dataclasses import dataclass
from typing import List, Tuple
from urllib.parse import parse_qs, urlparse
from dinao.backend.errors import ConfigurationError
@dataclass
class ColumnDescriptor:
"""Describes a column in a result set."""
name: str
type_code: int
display_size: int = None
internal_size: int = None
precision: int = None
scale: int = None
null_ok: bool = None
class ResultSet:
"""Basic interface definition for result sets (a.k.a rows) returned from Database queries."""
def __init__(self, cursor):
"""Construct a result set.
:param cursor: the underlying DB API 2.0 cursor being wrapped by this object.
"""
self._cursor = cursor
self._columns = None
self._description = None
def fetchone(self) -> Tuple:
"""Fetch one result tuple from the underlying cursor.
If no results are left, None is returned.
:returns: a tuple representing a result row or None
"""
return self._cursor.fetchone()
def fetchall(self) -> List[Tuple]:
"""Fetch the *remaining* result tuples from the underlying cursor.
If no results are left, an empty list is returned.
:returns: a list of tuples that are the remaining results of the underlying cursor.
"""
return self._cursor.fetchall()
@property
def description(self) -> Tuple[ColumnDescriptor]:
"""Return a sequence of column descriptions representing the result set.
:returns: a tuple of ColumnDescriptors
"""
if not self._description:
self._description = tuple([ColumnDescriptor(*d) for d in self._cursor.description])
return self._description
@property
def rowcount(self) -> int:
"""Return the row count of the result set.
:returns: the integer count of the rows in the result set
"""
return self._cursor.rowcount
class Connection(ABC):
"""Basic interface definition for a database connection."""
def __init__(self, cnx, auto_commit: bool = True):
"""Construct a Connection object.
:param cnx: the inner DB API 2.0 connection this object wraps
:param auto_commit: should calls to execute() be automatically committed, defaults to True
"""
self.logger = logging.getLogger(__name__)
self._cnx = cnx
self._auto_commit = auto_commit
@property
def autocommit(self):
"""Whether or not commit is called after every call to query(...) and execute(...)."""
return self._auto_commit
@autocommit.setter
def autocommit(self, value: bool):
self._auto_commit = value
def commit(self):
"""Commit changes for this connection / transaction to the database."""
self._cnx.commit()
def rollback(self):
"""Rollback changes for this connection / transaction to the database."""
self._cnx.rollback()
@abstractmethod
def _execute(self, cursor, sql: str, params: tuple = None):
pass # pragma: no cover
@contextmanager
def query(self, sql: str, params: tuple = None) -> ResultSet:
"""Execute the given SQL as a statement with the given parameters. Provide the results as context.
:param sql: the SQL statement(s) to execute
:param params: the values to bind to the execution of the given SQL
:returns: a result set representing the query's results
"""
cursor = self._cnx.cursor()
self._execute(cursor, sql, params)
try:
yield ResultSet(cursor)
finally:
cursor.close()
def execute(self, sql: str, params: tuple = None, commit: bool = None) -> int:
"""Execute the given SQL as a statement with the given parameters and return the affected row count.
:param sql: the SQL statement(s) to execute
:param params: the values to bind to the execution of the given SQL
:param commit: commit the changes to the database after execution, defaults to value given in constructor
"""
commit = commit if commit is not None else self._auto_commit
cursor = self._cnx.cursor()
self._execute(cursor, sql, params)
affected = cursor.rowcount
if commit:
self.commit()
cursor.close()
return affected
class ConnectionPool(ABC):
"""Basic interface definition for a pool of database connections."""
def __init__(self, db_url: str):
"""Construct a connection pool for the given connection URL.
The db_url is expected to be in the following format::
"{dialect}+{driver}://{username}:{password}@{hostname}:{port}/{db_name}?{optional_args}"
:param db_url: a url with the described format
"""
self.logger = logging.getLogger(__name__)
self._raw_db_url = db_url
self._db_url = urlparse(self._raw_db_url)
self._args = parse_qs(self._db_url.query)
@staticmethod
def _strict_bool(value: str):
if value.lower() not in ["true", "false"]:
raise ValueError(f"Cannot cast '{value}' to bool")
return value.lower() == "true"
def _raise_for_unexpected_args(self):
unexpected = ",".join(self._args.keys())
if unexpected:
raise ConfigurationError(f"Unexpected argument(s): {unexpected}")
def _get_arg(self, name: str, expected_type, default=None):
if name not in self._args:
self.logger.debug(f"No '{name}' specified, defaulting to {default}")
return default
caster = expected_type if expected_type is not bool else self._strict_bool
try:
if caster != list:
assert len(self._args.get(name)) == 1
return caster(self._args.pop(name)[0])
return self._args.pop(name)
except AssertionError as x:
raise ConfigurationError(f"Invalid argument '{name}': only a single value must be specified") from x
except ValueError as x:
raise ConfigurationError(f"Invalid argument '{name}': must be {expected_type.__name__}") from x
@property
@abstractmethod
def mung_symbol(self) -> str:
"""Return the symbol used when replacing variable specifiers in templated SQL."""
pass # pragma: no cover
@abstractmethod
def lease(self) -> Connection:
"""Lease a connection from the underlying pool."""
pass # pragma: no cover
@abstractmethod
def release(self, cnx: Connection):
"""Release a connection back to the underlying pool."""
pass # pragma: no cover
@abstractmethod
def dispose(self):
"""Close the pool and clean up any resources it was using."""
pass # pragma: no cover
```
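As a quick illustration of the db_url format documented in `ConnectionPool.__init__`, the sketch below (not from the repository) decomposes a made-up URL with the same stdlib calls the constructor uses:
```python
from urllib.parse import parse_qs, urlparse

# Example URL in the "{dialect}+{driver}://..." form described above;
# host, credentials and database name are invented.
raw = "postgresql+psycopg2://user:secret@db.example.com:5432/appdb?schema=public&pool_max_conn=5"
url = urlparse(raw)

print(url.scheme)                  # 'postgresql+psycopg2' -> dialect and driver
print(url.username, url.password)  # 'user' 'secret'
print(url.hostname, url.port)      # 'db.example.com' 5432
print(url.path.strip("/"))         # 'appdb' -> database name
print(parse_qs(url.query))         # {'schema': ['public'], 'pool_max_conn': ['5']}
```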
#### File: dinao/backend/postgres.py
```python
from dinao.backend.base import Connection, ConnectionPool
from dinao.backend.errors import BackendNotInstalledError, ConfigurationError
class ConnectionPSQLPsycopg2(Connection):
"""Implementation of Connection for Psycopg2."""
def _execute(self, cursor, sql: str, params: tuple = None):
cursor.execute(query=sql, vars=params)
class ConnectionPoolPSQLPsycopg2(ConnectionPool):
"""Implementation of ConnectionPool for Psycopg2."""
def __init__(self, db_url: str):
"""Construct a connection pool for the given connection URL.
The db_url is expected to be in the following format::
"postgres+psycopg2://{username}:{password}@{hostname}:{port}/{db_name}?{optional_args}"
Supported `optional_args` include:
* schema, a list of strings that sets the search path of the connections, defaults to "public"
* pool_min_conn, an integer specifying the minimum connections to keep in the pool, defaults to 1
* pool_max_conn, an integer specifying the maximum connections to keep in the pool, defaults to 1
* pool_threaded, a boolean specifying a threaded pool should be used, defaults to False
:param db_url: a url with the described format
:raises: ConfigurationError, BackendNotInstalledError
"""
super().__init__(db_url)
try:
import psycopg2.pool
except ModuleNotFoundError: # pragma: no cover
issue = "Module psycopg2 not installed, cannot create connection pool"
raise BackendNotInstalledError(issue)
self._cnx_kwargs = self._make_cnx_kwargs()
self._pool_class = psycopg2.pool.SimpleConnectionPool
if self._get_arg("pool_threaded", bool, False):
self._pool_class = psycopg2.pool.ThreadedConnectionPool
self._pool_impl = None
self._raise_for_unexpected_args()
self._pool = self._pool_class(**self._cnx_kwargs)
def _make_cnx_kwargs(self):
dbname = self._db_url.path.strip("/")
if not dbname:
raise ConfigurationError("Database name is required but missing")
schema = ",".join(self._get_arg("schema", list, ["public"]))
max_c = self._get_arg("pool_max_conn", int, 1)
min_c = self._get_arg("pool_min_conn", int, 1)
if max_c < min_c:
raise ConfigurationError("The argument pool_max_conn must be greater or equal to pool_min_conn")
return {
"maxconn": max_c,
"minconn": min_c,
"dbname": dbname,
"user": self._db_url.username,
"password": self._db_url.password,
"host": self._db_url.hostname,
"port": self._db_url.port,
"options": f"-c search_path={schema}",
}
def lease(self) -> Connection: # noqa: D102
inner_cnx = self._pool.getconn()
return ConnectionPSQLPsycopg2(inner_cnx)
def release(self, cnx: Connection): # noqa: D102
self._pool.putconn(cnx._cnx)
def dispose(self): # noqa: D102
if not self._pool.closed:
self._pool.closeall()
@property
def mung_symbol(self) -> str: # noqa: D102
return "%s"
```
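A hedged usage sketch (not from the repository): it assumes psycopg2 is installed, a PostgreSQL server is reachable at the invented URL, and that the `create_connection_pool` factory imported by the tests below routes plain `postgresql://` URLs to this backend.
```python
from dinao.backend import create_connection_pool

pool = create_connection_pool("postgresql://user:secret@localhost:5432/appdb?schema=public")
cnx = pool.lease()
try:
    # query() is a context manager that wraps the cursor in a ResultSet.
    with cnx.query("SELECT version()") as results:
        print(results.fetchone())
finally:
    pool.release(cnx)
    pool.dispose()
```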
#### File: tests/backend/test_util.py
```python
from dinao.backend import create_connection_pool
from dinao.backend.errors import ConfigurationError, UnsupportedBackendError
import pytest
@pytest.mark.parametrize(
"db_uri, match, except_class",
[
("://user:pass@host:4444", "No database backend specified", ConfigurationError),
("oracle://user:pass@host:4444", "not supported", UnsupportedBackendError),
("postgresql+psycopg3://user:pass@host:4444", "not supported", UnsupportedBackendError),
("postgresql://user:pass@host:4444", "name is required but missing", ConfigurationError),
("postgresql://user:pass@host:4444/dbname?pool_max_conn=ABC", "must be int", ConfigurationError),
("postgresql://user:pass@host:4444/dbname?pool_min_conn=ABC", "must be int", ConfigurationError),
("postgresql://user:pass@host:4444/dbname?pool_threaded=ABC", "must be bool", ConfigurationError),
("postgresql://user:pass@host:4444/dbname?weird=XYZ", "Unexpected argument", ConfigurationError),
(
"postgresql://user:pass@host:4444/dbname?weird=XYZ&schema=s1&schema=s2&schema=s3",
"Unexpected argument",
ConfigurationError,
),
(
"postgresql://user:pass@host:4444/dbname?weird=JUNK&pool_threaded=True",
"Unexpected argument",
ConfigurationError,
),
(
"postgresql://user:pass@host:4444/dbname?pool_min_conn=2&pool_max_conn=1",
"pool_max_conn must be greater or equal to pool_min_conn",
ConfigurationError,
),
(
"postgresql://user:pass@host:4444/dbname?pool_min_conn=1&pool_min_conn=2",
"single value",
ConfigurationError,
),
("sqlite3+invalid://test.db", "not supported", UnsupportedBackendError),
("sqlite3://test.db?schema=test", "Unexpected argument", ConfigurationError),
],
)
def test_backend_create_rejection(db_uri: str, match: str, except_class):
"""Tests bad db_url are rejected by create_connection_pool."""
with pytest.raises(except_class, match=match):
create_connection_pool(db_uri)
```
#### File: tests/binding/test_binders_errors.py
```python
from typing import Generator, List, Tuple, Union
from dinao.backend import Connection
from dinao.binding import FunctionBinder
from dinao.binding.binders import BoundedGeneratingQuery
from dinao.binding.errors import (
BadReturnTypeError,
CannotInferMappingError,
FunctionAlreadyBoundError,
MissingTemplateArgumentError,
MultipleConnectionArgumentError,
NoPoolSetError,
PoolAlreadySetError,
TemplateError,
)
from dinao.binding.templating import Template
import pytest
from tests.binding.mocks import MockConnection, MockConnectionPool
def test_cannot_infer_generic(binder_and_pool: Tuple[FunctionBinder, MockConnectionPool]):
"""Tests that binding a function to typed generics raises an error."""
binder, _ = binder_and_pool
with pytest.raises(CannotInferMappingError, match="Unable to determine mapper for typing.Union"):
@binder.query("SELECT * FROM table")
def raises_cannot_infer() -> Union[str, int]:
pass # pragma: no cover
def test_cannot_infer_nested_generic(binder_and_pool: Tuple[FunctionBinder, MockConnectionPool]):
"""Tests that binding a function to typed generics as row types raises."""
binder, _ = binder_and_pool
with pytest.raises(CannotInferMappingError, match="Unable to determine row mapper for typing.List\\[str\\]"):
@binder.query("SELECT * FROM table")
def raises_cannot_infer_row_type() -> List[List[str]]:
pass # pragma: no cover
def test_binding_generator_throws(binder_and_pool: Tuple[FunctionBinder, MockConnectionPool]):
"""Tests that binding a function to generate when send type and return type are specified."""
binder, pool = binder_and_pool
with pytest.raises(CannotInferMappingError, match="Only yield_type"):
@binder.query("SELECT some_num FROM table LIMIT 3")
def generating_query_bad() -> Generator[int, int, int]:
pass # pragma: no cover
def test_bounded_generating_query_throws(binder_and_pool: Tuple[FunctionBinder, MockConnectionPool]):
"""Tests that BoundedGeneratingQuery raises if not bound to a generator."""
binder, pool = binder_and_pool
def not_a_generator() -> int:
pass # pragma: no cover
with pytest.raises(BadReturnTypeError, match="Expected results type to be Generator"):
BoundedGeneratingQuery(binder, Template("SELECT * FROM table"), not_a_generator)
def test_binder_execute_bad_type(binder_and_pool: Tuple[FunctionBinder, MockConnectionPool]):
"""Tests that binding a function specifying an invalid return type for execution raises an exception."""
binder, _ = binder_and_pool
with pytest.raises(BadReturnTypeError, match="can only return None or int"):
@binder.execute("INSERT INTO TABLE (#{arg1})")
def should_raise(arg1: str) -> List:
pass # pragma: no cover
def test_binder_raises_for_template(binder_and_pool: Tuple[FunctionBinder, MockConnectionPool]):
"""Tests that a bad template causes an error at binding time."""
binder, _ = binder_and_pool
with pytest.raises(TemplateError, match="#{arg1"):
@binder.execute("INSERT INTO table #{arg1")
def should_raise_0(arg1: str) -> int:
pass # pragma: no cover
def test_double_binding_raises(binder_and_pool: Tuple[FunctionBinder, MockConnectionPool]):
"""Tests that binding a function more than once results in an error."""
binder, _ = binder_and_pool
match = "has already been bounded by"
with pytest.raises(FunctionAlreadyBoundError, match=match):
@binder.execute("UPDATE table SET col = #{arg1}")
@binder.execute("INSERT INTO TABLE (#{arg1})")
def should_raise_1(arg1: str):
pass # pragma: no cover
with pytest.raises(FunctionAlreadyBoundError, match=match):
@binder.execute("UPDATE table SET col = #{arg1}")
@binder.query("SELECT * FROM table WHERE col = #{arg1})")
def should_raise_2(arg1: str):
pass # pragma: no cover
with pytest.raises(FunctionAlreadyBoundError, match=match):
@binder.execute("UPDATE table SET col = #{arg1}")
@binder.transaction()
def should_raise_3(arg1: str):
pass # pragma: no cover
def test_args_mismatch_raises(binder_and_pool: Tuple[FunctionBinder, MockConnectionPool]):
"""Tests an error is raised if a template is bound to a function without a matching argument."""
binder, _ = binder_and_pool
with pytest.raises(MissingTemplateArgumentError, match="specified in template but is not an argument of"):
@binder.execute("INSERT INTO table (#{arg})")
def should_raise_4(some_arg: str):
pass # pragma: no cover
def test_binder_raises_for_no_pool():
"""Tests an error is raised when a bind has no pool but an operation requiring one is performed."""
binder = FunctionBinder()
@binder.execute("INSERT INTO table (#{arg})")
def test_bound_execute(arg: str):
pass # pragma: no cover
with pytest.raises(NoPoolSetError, match="No connection pool"):
test_bound_execute("testing")
with pytest.raises(NoPoolSetError, match="No connection pool"):
with binder.connection() as cnx: # noqa: F841
pass # pragma: no cover
def test_binder_raises_for_pool_set_twice(binder_and_pool: Tuple[FunctionBinder, MockConnectionPool]):
"""Tests an error is raised when a binder has its pool set twice."""
binder, _ = binder_and_pool
pool = MockConnectionPool([])
with pytest.raises(PoolAlreadySetError, match="only be set once"):
binder.pool = pool
def test_binder_raises_for_double_connection_arg(binder_and_pool: Tuple[FunctionBinder, MockConnectionPool]):
"""Tests an error is raised when a bound function specifies it would like more than one connection."""
binder, _ = binder_and_pool
with pytest.raises(MultipleConnectionArgumentError, match="Connection argument specified multiple times for"):
@binder.transaction()
def should_raise_5(cnx1: Connection, cnx2: MockConnection):
pass # pragma: no cover
``` |
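For contrast with the failure cases above, here is a minimal sketch of a successful binding. It assumes dinao is installed and a real pool is reachable; the table and column names are invented, and the `#{name}` template syntax mirrors the templates used in these tests.
```python
from dinao.backend import create_connection_pool
from dinao.binding import FunctionBinder

binder = FunctionBinder()
binder.pool = create_connection_pool("postgresql://user:secret@localhost:5432/appdb")

@binder.execute("INSERT INTO greetings (name) VALUES (#{name})")
def add_greeting(name: str) -> int:
    pass  # the body is not what runs; the bound template is executed

print(add_greeting("world"))  # affected row count
```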
{
"source": "jim-cassidy/Tkinter-Sqlite-Template",
"score": 4
} |
#### File: Tkinter-Sqlite-Template/module/main.py
```python
import numpy as np
import matplotlib.pyplot as plt
## import scikit-learn
import sklearn
from sklearn.linear_model import Ridge
from sklearn.preprocessing import PolynomialFeatures
from sklearn.pipeline import make_pipeline
## we will be using sqlite
import sqlite3
from sqlite3 import Error
## here is our tkinter import
from tkinter import *
## here is the date and time function import
import time
import datetime
class MyWindow:
def __init__(self, win):
self.lbl1=Label(win, text='Write Label')
self.t1=Entry(bd=3)
self.lbl1.place(x=100, y=50)
self.t1.place(x=200, y=50)
self.b1=Button(win, text='Press Button', command=self.select)
self.b1.place(x=100, y=150)
self.lb=Listbox(win, height=10, selectmode='multiple')
self.lb.place(x=100, y=250)
### --- select
def select(self):
self.lb.delete(0, END)
conn = None
try:
conn = sqlite3.connect("students.db")
except Error as e:
print(e)
cur = conn.cursor()
cur.execute("SELECT * FROM STUDENTS")
rows = cur.fetchall()
for row in rows:
showid = str(row[0])
showname = str(row[1])
showdate = str(row[2])
showdate1 = showdate[0:4]
showdate2 = showdate[4:6]
showdate3 = showdate[6:8]
showfulldate = showdate1 + " " + showdate2 + " " + showdate3
showline = showid + " " + showname + " " + showfulldate
print ("name: " , row[1] )
self.lb.insert(END,showline)
window=Tk()
mywin=MyWindow(window)
window.title('Cambridge SAT score Prediction')
window.geometry("800x600+10+10")
window.mainloop()
``` |
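The `select` method above expects a `students.db` file containing a `STUDENTS` table whose rows carry an id, a name, and a date stored as a `YYYYMMDD` string. The helper below is a hypothetical way to create such a database for testing; the column names are guesses inferred from how `row[0]`, `row[1]` and `row[2]` are used.
```python
import sqlite3

def create_students_db(path="students.db"):
    """Create a minimal STUDENTS table matching what select() reads."""
    conn = sqlite3.connect(path)
    cur = conn.cursor()
    cur.execute(
        "CREATE TABLE IF NOT EXISTS STUDENTS ("
        "id INTEGER PRIMARY KEY, "
        "name TEXT, "
        "reg_date TEXT)"  # date kept as a YYYYMMDD string, e.g. '20240115'
    )
    cur.execute(
        "INSERT INTO STUDENTS (name, reg_date) VALUES (?, ?)",
        ("Ada Lovelace", "20240115"),
    )
    conn.commit()
    conn.close()

if __name__ == "__main__":
    create_students_db()
```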
{
"source": "JimCatacora/bespa",
"score": 3
} |
#### File: bespa/mwh_tvs/action_distance.py
```python
import numpy as np
def get_dist_mr_to_stride(idx, stride, st_delta, stdir, maxidx):
d = 0
if stdir == 'stay':
return d
elif stdir == 'left':
d += st_delta
if stride > idx:
acts = maxidx - stride
d += 2 * acts
else:
acts = (stride if stride >= idx else maxidx) - idx
d += 2 * acts
d += st_delta
return d
def get_dist_mr_at_edge(goal, start, rval, stdir, carry, maxclr):
if stdir == 'stay':
if rval == 0:
g = goal
d1 = g - start
d2 = maxclr - g + start + 4
else:
g = goal
d1 = abs(g - start) + 3
d2 = maxclr - g + start + 1
elif stdir == 'left':
if rval == 0:
g = goal if not carry else (maxclr + 1 + goal)
d1 = g - start
d2 = abs(maxclr + 1 - g + start) + 3
else:
g = goal if not carry else (maxclr + 1 + goal)
d1 = abs(g - start) + 3
d2 = abs(maxclr - g + start + 1)
else:
if rval == 0:
g = goal
d1 = abs(g - start) + 2
d2 = maxclr - g + start + 2
else:
g = goal if not carry else -1
d1 = abs(g - start) + 3
d2 = maxclr - g + start + 3
if d1 <= d2:
return d1
else:
return d2
def get_dist_mr_stride_dir(station, stride, st_delta, stdir, carry, maxidx, maxclr, done, best_d):
doneidx, doneval = done
tv = [0]
ds = [0]
for i, s in enumerate(station):
n = len(tv)
if n == 0:
break
for _ in range(n):
v = tv.pop(0)
d = ds.pop(0)
#goup
if v == 1:
if i == doneidx and i < maxidx:
egd = get_dist_mr_at_edge(s, v, doneval, stdir, carry, maxclr)
std = get_dist_mr_to_stride(i + (1 if stdir == 'right' else 0), stride, st_delta, stdir, maxidx)
total_d = d + egd + std
if total_d < best_d:
best_d = total_d
elif s > 0:
if i == maxidx:
std = get_dist_mr_to_stride(i, stride, st_delta, stdir, maxidx)
total_d = d + s - 1 + std
if total_d < best_d:
best_d = total_d
else:
new_d = d + s
if new_d < best_d:
tv.append(1)
ds.append(new_d)
elif n == 1 and (i < doneidx or doneidx == maxidx):
if s > 0:
if i == maxidx:
std = get_dist_mr_to_stride(i, stride, st_delta, stdir, maxidx)
total_d = d + s + std
if total_d < best_d:
best_d = total_d
else:
new_d = d + s + 1
if new_d < best_d:
tv.append(1)
ds.append(new_d)
#godown
if v == 0:
if i == doneidx and i < maxidx:
egd = get_dist_mr_at_edge(s, v, doneval, stdir, carry, maxclr)
std = get_dist_mr_to_stride(i + (1 if stdir == 'right' else 0), stride, st_delta, stdir, maxidx)
total_d = d + egd + std
if total_d < best_d:
best_d = total_d
elif s > 0:
if i == maxidx:
std = get_dist_mr_to_stride(i, stride, st_delta, stdir, maxidx)
total_d = d + maxclr - s + 1 + std
if total_d < best_d:
best_d = total_d
else:
new_d = d + maxclr - s + 2
if new_d < best_d:
tv.append(0)
ds.append(new_d)
else:
if i == maxidx:
std = get_dist_mr_to_stride(i, stride, st_delta, stdir, maxidx)
total_d = d + std
if total_d < best_d:
best_d = total_d
else:
new_d = d + 1
if new_d < best_d:
tv.append(1)
ds.append(new_d)
elif n == 1 and (i < doneidx or doneidx == maxidx):
if s > 1:
if i == maxidx:
std = get_dist_mr_to_stride(i, stride, st_delta, stdir, maxidx)
total_d = d + maxclr - s + 2 + std
if total_d < best_d:
best_d = total_d
else:
new_d = d + maxclr - s + 3
if new_d < best_d:
tv.append(0)
ds.append(new_d)
elif s == 0:
if i == maxidx:
std = get_dist_mr_to_stride(i, stride, st_delta, stdir, maxidx)
total_d = d + 1 + std
if total_d < best_d:
best_d = total_d
else:
new_d = d + 2
if new_d < best_d:
tv.append(1)
ds.append(new_d)
if len(ds) > 1:
if ds[0] != ds[1]:
deli = ds.index(max(ds))
del tv[deli]
del ds[deli]
return best_d
def get_distance_moving_right(station, stride, colors, best_d, stdir='both'):
stnlen = len(station)
maxidx = stnlen - 1
maxclr = colors - 1
if all([s == 0 for s in station]):
d1 = stride
d2 = stnlen - stride
if d1 <= d2:
return d1 * 2
else:
return d2 * 2
elif all([s == maxclr for s in station]):
d1 = stride
d2 = maxidx - stride
if d1 <= d2:
return d1 * 2 + 1
else:
return d2 * 2 + 1
doneval = station[-1]
if doneval in [0, maxclr]:
doneidx = 0
for s in reversed(station):
if s == doneval:
doneidx += 1
else:
break
doneidx = maxidx - doneidx
else:
doneidx = maxidx
if stride == doneidx:
best_d = get_dist_mr_stride_dir(station, stride, 0, 'stay', 0, maxidx, maxclr, (doneidx, doneval), best_d)
else:
#stride_right
if stdir in ['both', 'right']:
if stride < doneidx:
st_delta = stride + 1
adj_station = []
c = 0
carry = 0
for i, s in enumerate(station):
rep = i <= stride
if not rep and c == 0:
adj_station.extend(station[i:])
break
offset = 1 if rep else 0
adj_s = s - offset - c
if adj_s < 0:
adj_s += colors
c = 1
else:
c = 0
if i == doneidx:
carry = c
adj_station.append(adj_s)
elif stride > doneidx:
st_delta = 0
carry = 0
adj_station = station[:]
best_d = get_dist_mr_stride_dir(adj_station, stride, st_delta, 'right', carry, maxidx, maxclr, (doneidx, doneval), best_d)
#stride_left
if stdir in ['both', 'left']:
steq = stride if stride < doneidx else -1
st_delta = doneidx - steq
adj_station = []
c = 0
carry = 0
for i, s in enumerate(station):
rep = i > steq and i <= doneidx
offset = 1 if rep else 0
adj_s = s + offset + c
if adj_s > maxclr:
adj_s -= colors
c = 1
else:
c = 0
if i == doneidx:
carry = c
adj_station.append(adj_s)
if i >= doneidx and c == 0:
adj_station.extend(station[i + 1:])
break
best_d = get_dist_mr_stride_dir(adj_station, stride, st_delta, 'left', carry, maxidx, maxclr, (doneidx, doneval), best_d)
return best_d
def get_dist_ml_to_stride(idx, stride, st_delta, stdir, extra=0):
d = 0
if stdir == 'left':
acts = idx - (stride + 1 if stride < idx else 1)
d += 2 * acts
d += st_delta
else:
d += st_delta
acts = stride if stride > 0 and stride < idx else 0
if extra > 0:
d += extra + 2 * (acts - 1)
else:
d += 2 * acts
return d
def get_dist_ml_stride_dir(station, stride, st_delta, stdir, initvdx, maxidx, maxclr, doneidx, best_d, extra=0):
v0, d0 = initvdx
done = maxidx - doneidx
off = [0]
ds = [d0]
if v0 == 0:
for i, s in enumerate(reversed(station[1:])):
n = len(off)
if n == 0:
break
o1 = o2 = off.pop(0)
d1 = d2 = ds.pop(0)
if n > 1:
o2 = off.pop(0)
d2 = ds.pop(0)
if i == done:
std = get_dist_ml_to_stride(doneidx, stride, st_delta, stdir, extra)
up_d, down_d = d1 + s, d2 + maxclr - s + 1 + o2
total_d = min(up_d, down_d) + std
if total_d < best_d:
best_d = total_d
break
else:
if s == maxclr:
up_d = d2 + 1 + o2
down_d = up_d + 2
else:
up_d = d1 + s + 2
down_d = d2 + maxclr - s + 1 + o2
if min(up_d, down_d) < best_d:
if down_d - up_d > 1:
off.append(1)
ds.append(up_d)
elif up_d >= down_d:
off.append(-1)
ds.append(down_d)
else:
off.append(1)
ds.append(up_d)
off.append(-1)
ds.append(down_d)
else:
for i, s in enumerate(reversed(station[1:])):
n = len(off)
if n == 0:
break
o1 = o2 = off.pop(0)
d1 = d2 = ds.pop(0)
if n > 1:
o2 = off.pop(0)
d2 = ds.pop(0)
if i == done:
std = get_dist_ml_to_stride(doneidx, stride, st_delta, stdir, extra)
up_d, down_d = d1 + s + 1 + o1, d2 + maxclr - s
total_d = min(up_d, down_d) + std
if total_d < best_d:
best_d = total_d
break
else:
if s == maxclr:
up_d = down_d = d2 + 2
else:
up_d = d1 + s + 3 + o1
down_d = d2 + maxclr - s
if min(up_d, down_d) < best_d:
if up_d - down_d > 1:
off.append(1)
ds.append(down_d)
elif down_d >= up_d:
off.append(-1)
ds.append(up_d)
else:
off.append(-1)
ds.append(up_d)
off.append(1)
ds.append(down_d)
return best_d
def get_distance_moving_left(station, stride, colors, best_d, stdir='both', doedge=True):
stnlen = len(station)
maxidx = stnlen - 1
maxclr = colors - 1
if all([s == 0 for s in station]):
d1 = stride
d2 = stnlen - stride
if d1 <= d2:
return d1 * 2
else:
return d2 * 2
elif all([s == maxclr for s in station]):
d1 = stride
d2 = maxidx - stride
if d1 <= d2:
return d1 * 2 + 1
else:
return d2 * 2 + 1
doneidx = 1
s0 = station[0]
s1 = station[1]
if s1 in [0, maxclr]:
for s in station[1:]:
if s == s1:
doneidx += 1
else:
break
if doneidx > maxidx:
best_d = get_distance_moving_right(station, stride, colors, best_d)
else:
if s1 == 0 and doedge:
s0_d1 = s0 + 2
s0_rep1 = s0_d1 - 1
s0_d2 = maxclr - s0 + 4
s0_rep2 = 4 - s0_d2
elif s1 == maxclr and doedge:
s0_d1 = s0 + 5
s0_rep1 = s0_d1 - 4
s0_d2 = maxclr - s0 + 1
s0_rep2 = 1 - s0_d2
else:
s0_d1 = s0 + 2
s0_rep1 = s0_d1 - 1
s0_d2 = maxclr - s0 + 1
s0_rep2 = 1 - s0_d2
rep_off = 0
if stride == doneidx:
if s1 in [0, maxclr] and doedge:
tv = [s1]
if s0_d1 <= s0_d2:
ds = [s0_d1]
else:
ds = [s0_d2]
else:
if abs(s0_d1 - s0_d2) > 0:
if s0_d1 < s0_d2:
tv = [0]
ds = [s0_d1]
else:
tv = [maxclr]
ds = [s0_d2]
else:
tv = [0, maxclr]
ds = [s0_d1, s0_d2]
for v, d in zip(tv, ds):
best_d = get_dist_ml_stride_dir(station, stride, 0, 'right', (v, d), maxidx, maxclr, doneidx, best_d)
else:
#stride_left
if stdir in ['both', 'left']:
stpos = stride > doneidx
steq = stride if stpos else (maxidx + 1)
st_delta = maxidx - steq + 2
adj_station = []
rep_off = int(stpos)
c = 0
for i, s in enumerate(station):
if stpos:
rep = i == doneidx or i == 0 or i > stride
else:
rep = i == doneidx
if i > doneidx and c == 0:
adj_station.extend(station[i:])
break
offset = 1 if rep else 0
adj_s = s + offset + c
if adj_s > maxclr:
adj_s -= colors
c = 1 if i > 0 else 0
else:
c = 0
adj_station.append(adj_s)
adj_rep1 = s0_rep1 + rep_off
abs_rep1 = abs(adj_rep1)
adj_d1 = s0_d1 + abs_rep1 - abs(s0_rep1)
adj_rep2 = s0_rep2 + rep_off
abs_rep2 = abs(adj_rep2)
adj_d2 = s0_d2 + abs_rep2 - abs(s0_rep2)
if s1 in [0, maxclr] and doedge:
tv = [s1]
if adj_d1 <= adj_d2:
ds = [adj_d1]
else:
ds = [adj_d2]
else:
if abs(adj_d1 - adj_d2) > 0:
if adj_d1 < adj_d2:
tv = [0]
ds = [adj_d1]
else:
tv = [maxclr]
ds = [adj_d2]
else:
tv = [0, maxclr]
ds = [adj_d1, adj_d2]
for v, d in zip(tv, ds):
best_d = get_dist_ml_stride_dir(adj_station, stride, st_delta, 'left', (v, d), maxidx, maxclr, doneidx, best_d)
#stride_right
if stdir in ['both', 'right']:
if s1 == 0 and not (stride > 0 and stride < doneidx) and doedge:
s0_d1 = s0 + 2
s0_rep1 = s0_d1 - 1
s0_d2 = maxclr - s0 + 4
s0_rep2 = 4 - s0_d2
elif s1 == maxclr and not (stride > 0 and stride < doneidx) and doedge:
s0_d1 = s0 + 5
s0_rep1 = s0_d1 - 4
s0_d2 = maxclr - s0 + 1
s0_rep2 = 1 - s0_d2
else:
s0_d1 = s0 + 2
s0_rep1 = s0_d1 - 1
s0_d2 = maxclr - s0 + 1
s0_rep2 = 1 - s0_d2
stpos = stride > doneidx
steq = stride if stpos else stnlen
st_delta = steq - doneidx
adj_station = []
c = 0
rep_off = 0
if not stpos:
adj_s0 = s0 - 1
if adj_s0 < 0:
adj_s0 += colors
adj_station.append(adj_s0)
rep_off = -1
else:
adj_station.append(s0)
for i, s in enumerate(station):
if i == 0:
continue
rep = i > doneidx and i <= steq
offset = 1 if rep else 0
adj_s = s - offset - c
if adj_s < 0:
adj_s += colors
c = 1
else:
c = 0
adj_station.append(adj_s)
if i >= steq and c == 0:
adj_station.extend(station[i + 1:])
break
adj_rep1 = s0_rep1 + rep_off
abs_rep1 = abs(adj_rep1)
adj_d1 = s0_d1 + abs_rep1 - abs(s0_rep1)
adj_rep2 = s0_rep2 + rep_off
abs_rep2 = abs(adj_rep2)
adj_d2 = s0_d2 + abs_rep2 - abs(s0_rep2)
extras = []
if s1 in [0, maxclr] and not (stride > 0 and stride < doneidx) and doedge:
extras.append(0)
tv = [s1]
if adj_d1 <= adj_d2:
ds = [adj_d1]
else:
ds = [adj_d2]
else:
if s1 in [0, maxclr] and (stride > 0 and stride < doneidx):
if abs(adj_d1 - adj_d2) > 0:
tv = [s1]
if adj_d1 < adj_d2:
ds = [adj_d1]
if s1 == maxclr:
extras.append(3)
else:
extras.append(0)
else:
ds = [adj_d2]
if s1 == 0:
extras.append(1)
else:
extras.append(0)
else:
tv = [s1, s1]
ds = [adj_d1, adj_d2]
if s1 == 0:
extras.extend([0, 1])
else:
extras.extend([3, 0])
else:
if abs(adj_d1 - adj_d2) > 0:
extras.append(0)
if adj_d1 < adj_d2:
tv = [0]
ds = [adj_d1]
else:
tv = [maxclr]
ds = [adj_d2]
else:
tv = [0, maxclr]
ds = [adj_d1, adj_d2]
extras.extend([0, 0])
for v, d, xt in zip(tv, ds, extras):
best_d = get_dist_ml_stride_dir(adj_station, stride, st_delta, 'right', (v, d), maxidx, maxclr, doneidx, best_d, xt)
return best_d
def get_windows(station, colors):
max_idx = len(station) - 1
max_symbol = colors - 1
w = False
windows = []
winval = 0
window_start = 0
for i, d in enumerate(station[1:]):
if not w and (d == 0 or d == max_symbol):
window_start = i
winval = d
w = True
elif w and d != winval:
windows.append([window_start, i + 1, winval])
if d in [0, max_symbol]:
window_start = i
winval = d
w = True
else:
w = False
if w:
windows.append([window_start, max_idx + 1, winval])
return windows
def action_distance(station, stride, colors):
stnlen = len(station)
maxidx = stnlen - 1
maxclr = colors - 1
if all([s == 0 for s in station]):
d1 = stride
d2 = stnlen - stride
if d1 <= d2:
return d1 * 2
else:
return d2 * 2
elif all([s == maxclr for s in station]):
d1 = stride
d2 = maxidx - stride
if d1 <= d2:
return d1 * 2 + 1
else:
return d2 * 2 + 1
else:
#all right or left
best_d = np.inf
best_d = get_distance_moving_right(station, stride, colors, best_d)
best_d = get_distance_moving_left(station, stride, colors, best_d)
windows = get_windows(station, colors)
#print(windows)
for (lowedge, highedge, winval) in windows:
if lowedge == 0 or highedge == stnlen:
continue
#first right then left
#stride in place
if stride == highedge:
adj_station = [s if i <= lowedge else winval for i, s in enumerate(station)]
dp = get_distance_moving_right(adj_station, maxidx, colors, best_d, stdir='left')
if dp < best_d:
best_d = get_dist_ml_stride_dir(station, stride, 0, 'right', (winval, dp), maxidx, maxclr, stride, best_d)
else:
#stride right
if stride > highedge:
adj_station = [s if i <= lowedge else winval for i, s in enumerate(station)]
dp = get_distance_moving_right(adj_station, maxidx, colors, best_d, stdir='left')
if dp < best_d:
st_delta = stride - highedge
adj_station = []
c = 0
for i, s in enumerate(station):
if i <= highedge:
adj_station.append(s)
continue
rep = i <= stride
offset = 1 if rep else 0
adj_s = s - offset - c
if adj_s < 0:
adj_s += colors
c = 1
else:
c = 0
adj_station.append(adj_s)
if not rep and c == 0:
adj_station.extend(station[i + 1:])
break
best_d = get_dist_ml_stride_dir(adj_station, stride, st_delta, 'right', (winval, dp), maxidx, maxclr, highedge, best_d)
else:
if stride < lowedge:
steps_forward = stride + 1
steps_end = 0
adj_station = []
c = 0
for i, s in enumerate(station):
if i <= stride:
adj_station.append(s)
continue
rep = i <= lowedge
offset = 1 if rep else 0
if i <= lowedge:
adj_s = s + offset + c
else:
adj_s = winval + offset + c
if adj_s > maxclr:
adj_s -= colors
c = 1
else:
c = 0
adj_station.append(adj_s)
if not rep and c == 0:
adj_station.extend([winval] * (maxidx - i))
break
else:
steps_forward = lowedge + 1
steps_end = stride - lowedge
adj_station = [s if i <= lowedge else winval for i, s in enumerate(station)]
dp = get_distance_moving_right(adj_station, lowedge, colors, best_d)
if dp < best_d:
steps_back = lowedge + 1
dp += steps_back
st_delta = maxidx - highedge
adj_station = []
c = 0
for i, s in enumerate(station):
if i <= highedge:
adj_station.append(s)
continue
adj_s = s - 1 - c
if adj_s < 0:
adj_s += colors
c = 1
else:
c = 0
adj_station.append(adj_s)
dp = get_dist_ml_stride_dir(adj_station, maxidx, st_delta, 'right', (winval, dp), maxidx, maxclr, highedge, best_d)
if dp < best_d:
dp += steps_forward
dp += steps_end
if dp < best_d:
best_d = dp
#stride left
if stride >= lowedge and stride < highedge:
adj_station = [s if i <= lowedge else winval for i, s in enumerate(station)]
dp = get_distance_moving_right(adj_station, maxidx, colors, best_d, stdir='left')
if dp < best_d:
st_delta = 1
adj_station = []
c = 0
for i, s in enumerate(station):
if i < highedge:
adj_station.append(s)
continue
rep = i == highedge
offset = 1 if rep else 0
adj_s = s + offset + c
if adj_s > maxclr:
adj_s -= colors
c = 1
else:
c = 0
adj_station.append(adj_s)
if not rep and c == 0:
adj_station.extend(station[i + 1:])
break
best_d = get_dist_ml_stride_dir(adj_station, stride, st_delta, 'left', (winval, dp), maxidx, maxclr, highedge, best_d)
else:
steq = stride if stride < lowedge else -1
adj_station = []
c = 0
for i, s in enumerate(station):
if i > lowedge and c == 0:
adj_station.extend([winval] * (maxidx - i + 1))
break
offset = (1 if i <= steq else 2) if i <= lowedge else 0
if i <= lowedge:
adj_s = s + offset + c
else:
adj_s = winval + offset + c
if adj_s > maxclr:
adj_s -= colors
c = 1
else:
c = 0
adj_station.append(adj_s)
dp = get_distance_moving_right(adj_station, lowedge, colors, best_d)
if dp < best_d:
steps_back = lowedge + 1
dp += steps_back
adj_station = []
c = 0
for i, s in enumerate(station):
if i < highedge:
adj_station.append(s)
continue
rep = i == highedge
offset = 1 if rep else 0
adj_s = s + offset + c
if adj_s > maxclr:
adj_s -= colors
c = 1
else:
c = 0
adj_station.append(adj_s)
if not rep and c == 0:
adj_station.extend(station[i + 1:])
break
steps_end = 0
if stride > highedge and stride < maxidx:
steps_end = maxidx - stride
prev_station = adj_station[:]
adj_station = []
c = 0
for i, s in enumerate(prev_station):
if i <= stride:
adj_station.append(s)
continue
adj_s = s + 1 + c
if adj_s > maxclr:
adj_s -= colors
c = 1
else:
c = 0
adj_station.append(adj_s)
dp = get_dist_ml_stride_dir(adj_station, highedge, 0, 'left', (winval, dp), maxidx, maxclr, highedge, best_d)
if dp < best_d:
dp += 1
steps_back = highedge - lowedge - 1
dp += 2 * steps_back
steps_end += (lowedge - stride) if stride < lowedge else (lowedge + 1)
dp += steps_end
if dp < best_d:
best_d = dp
#first left then right
if stride == lowedge:
doneidx = highedge
rep_off = -1
adj_station = []
c = 0
for i, s in enumerate([s if i == 0 or i >= highedge else 0 for i, s in enumerate(station)]):
rep = i > doneidx or i == 0
offset = 1 if rep else 0
adj_s = s - offset - c
if adj_s < 0:
adj_s += colors
c = 1
else:
c = 0
adj_station.append(adj_s)
s0 = adj_station[0]
s0_d1 = s0 + 2
s0_rep1 = s0_d1 - 1
s0_d2 = maxclr - s0 + 1
s0_rep2 = 1 - s0_d2
st_delta = stnlen - doneidx
if abs(s0_d1 - s0_d2) > 0:
if s0_d1 < s0_d2:
tv = [0]
ds = [s0_d1]
else:
tv = [maxclr]
ds = [s0_d2]
else:
tv = [0, maxclr]
ds = [s0_d1, s0_d2]
prev_station = adj_station[:]
for v, d in zip(tv, ds):
carryath = 0
if (v == 0 or (v == maxclr and s0 == maxclr)):
if winval == maxclr:
carryath = 1
else:
if winval == 0:
carryath = -1
adj_station = []
c = 0
for i, s in enumerate(prev_station):
if i < highedge:
adj_station.append(s)
continue
offset = carryath if i == highedge else 0
adj_s = s + offset + c
if adj_s > maxclr:
adj_s -= colors
c = 1
elif adj_s < 0:
adj_s += colors
c = -1
else:
c = 0
adj_station.append(adj_s)
dp = get_dist_ml_stride_dir(adj_station, 0, st_delta, 'right', (v, d), maxidx, maxclr, doneidx, best_d)
if dp < best_d:
carryat1 = int(v == maxclr and not (v == maxclr and s0 == maxclr))
adj_station = []
c = 0
for i, s in enumerate(station):
if i == 0:
adj_station.append(0)
continue
offset = carryat1 if i == 1 else 0
adj_s = (s if i <= lowedge else winval) + offset + c
if adj_s > maxclr:
adj_s -= colors
c = 1
else:
c = 0
adj_station.append(adj_s)
if i >= lowedge and c == 0:
adj_station.extend([winval] * (maxidx - i))
break
dp2 = get_distance_moving_right(adj_station, lowedge, colors, best_d, stdir='left')
dp += dp2
if dp < best_d:
best_d = dp
else:
#stride right
doneidx = highedge
rep_off = -1
adj_station = []
c = 0
for i, s in enumerate([s if i == 0 or i >= highedge else 0 for i, s in enumerate(station)]):
if stride >= lowedge and stride < highedge:
rep = i > doneidx or i == 0
offset = 1 if rep else 0
elif stride >= highedge:
offset = 2 if i > highedge and i <= stride else (1 if i == 0 or i == highedge or i > stride else 0)
else:
offset = 2 if i > highedge or i == 0 else (1 if i == highedge else 0)
adj_s = s - offset - c
if adj_s < 0:
adj_s += colors
c = 1
else:
c = 0
adj_station.append(adj_s)
s0 = adj_station[0]
s0_d1 = s0 + 2
s0_rep1 = s0_d1 - 1
s0_d2 = maxclr - s0 + 1
s0_rep2 = 1 - s0_d2
st_delta = stnlen - doneidx
if abs(s0_d1 - s0_d2) > 0:
if s0_d1 < s0_d2:
tv = [0]
ds = [s0_d1]
else:
tv = [maxclr]
ds = [s0_d2]
else:
tv = [0, maxclr]
ds = [s0_d1, s0_d2]
prev_station = adj_station[:]
for v, d in zip(tv, ds):
carryath = 0
if (v == 0 or (v == maxclr and s0 == maxclr)):
if winval == maxclr:
carryath = 1
else:
if winval == 0:
carryath = -1
adj_station = []
c = 0
for i, s in enumerate(prev_station):
if i < highedge:
adj_station.append(s)
continue
offset = carryath if i == highedge else 0
adj_s = s + offset + c
if adj_s > maxclr:
adj_s -= colors
c = 1
elif adj_s < 0:
adj_s += colors
c = -1
else:
c = 0
adj_station.append(adj_s)
dp = get_dist_ml_stride_dir(adj_station, 0, st_delta, 'right', (v, d), maxidx, maxclr, doneidx, best_d)
if dp < best_d:
carryat1 = int(v == maxclr and not (v == maxclr and s0 == maxclr))
adj_station = []
c = 0
for i, s in enumerate(station):
if i == 0:
adj_station.append(0)
continue
if stride > 0 and stride <= lowedge:
if i == 1:
offset = (1 if i > 0 and i <= stride else 0) - carryat1
else:
offset = 1 if i > 0 and i <= stride else 0
else:
offset = -carryat1 if i == 1 else 0
adj_s = (s if i <= lowedge else winval) - offset - c
if adj_s < 0:
adj_s += colors
c = 1
elif adj_s > maxclr:
adj_s -= colors
c = -1
else:
c = 0
adj_station.append(adj_s)
dp2 = get_distance_moving_right(adj_station, lowedge + 1, colors, best_d)
dp += dp2
if dp < best_d:
steps_forward = (stride if stride >= lowedge + 1 and stride < highedge else highedge) - lowedge - 1
dp += steps_forward * 2
steps_end = (stride - highedge + 1) if stride >= highedge else ((stride + 1) if stride <= lowedge else 0)
dp += steps_end
if dp < best_d:
best_d = dp
#stride left
doneidx = highedge
rep_off = -1
adj_station = []
c = 0
for i, s in enumerate([s if i == 0 or i >= highedge else 0 for i, s in enumerate(station)]):
if stride < lowedge:
rep = i > doneidx or i == 0
offset = 1 if rep else 0
elif stride >= highedge:
offset = 1 if i > highedge and i <= stride else 0
else:
offset = -1 if i == highedge else 0
adj_s = s - offset - c
if adj_s < 0:
adj_s += colors
c = 1
elif adj_s > maxclr:
adj_s -= colors
c = -1
else:
c = 0
adj_station.append(adj_s)
s0 = adj_station[0]
s0_d1 = s0 + 2
s0_rep1 = s0_d1 - 1
s0_d2 = maxclr - s0 + 1
s0_rep2 = 1 - s0_d2
st_delta = stnlen - doneidx
if abs(s0_d1 - s0_d2) > 0:
if s0_d1 < s0_d2:
tv = [0]
ds = [s0_d1]
else:
tv = [maxclr]
ds = [s0_d2]
else:
tv = [0, maxclr]
ds = [s0_d1, s0_d2]
prev_station = adj_station[:]
for v, d in zip(tv, ds):
carryath = 0
if (v == 0 or (v == maxclr and s0 == maxclr and stride < lowedge)):
if winval == maxclr:
carryath = 1
else:
if winval == 0:
carryath = -1
adj_station = []
c = 0
for i, s in enumerate(prev_station):
if i < highedge:
adj_station.append(s)
continue
offset = carryath if i == highedge else 0
adj_s = s + offset + c
if adj_s > maxclr:
adj_s -= colors
c = 1
elif adj_s < 0:
adj_s += colors
c = -1
else:
c = 0
adj_station.append(adj_s)
dp = get_dist_ml_stride_dir(adj_station, 0, st_delta, 'right', (v, d), maxidx, maxclr, doneidx, best_d)
if dp < best_d:
carryat1 = int(v == maxclr and not (v == maxclr and s0 == maxclr and stride < lowedge))
adj_station = []
c = 0
for i, s in enumerate(station):
if i == 0:
adj_station.append(0)
continue
if stride < lowedge:
if i == 1:
offset = (1 if i > stride and i <= lowedge else 0) + carryat1
else:
offset = 1 if i > stride and i <= lowedge else 0
else:
if i == 1:
offset = (1 if i <= lowedge else 0) + carryat1
else:
offset = 1 if i <= lowedge else 0
adj_s = (s if i <= lowedge else winval) + offset + c
if adj_s > maxclr:
adj_s -= colors
c = 1
elif adj_s < 0:
adj_s += colors
c = -1
else:
c = 0
adj_station.append(adj_s)
dp2 = get_distance_moving_right(adj_station, lowedge, colors, best_d)
dp += dp2
if dp < best_d:
steps_back = (lowedge - stride) if stride < lowedge else lowedge
if stride > lowedge:
steps_back += (maxidx - stride + 1) if stride >= highedge - 1 else (maxidx - highedge + 2)
dp += steps_back
steps_end = (highedge - 1 - stride) if stride > lowedge and stride < highedge - 1 else 0
dp += steps_end
if dp < best_d:
best_d = dp
return best_d
``` |
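The file carries no docstrings, so the sketch below records how its entry points appear to be called: `station` is a list of cell values in `[0, colors - 1]`, `stride` is a position index, and `action_distance` returns the smallest action count it finds. The import path and the interpretation of the arguments are assumptions based on the file location and the code itself.
```python
# Illustrative only; not part of the repository.
from mwh_tvs.action_distance import action_distance, get_windows

station = [2, 0, 0, 3, 3]   # cell values in [0, colors - 1]
colors = 4

# Runs of 0 or (colors - 1) detected after the first cell.
print(get_windows(station, colors))              # [[0, 3, 0], [2, 5, 3]]

# Smallest action count found for this stride position.
print(action_distance(station, stride=1, colors=colors))
```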
{
"source": "jimcbl/ppl_hcmut_assignment",
"score": 3
} |
#### File: ppl_hcmut_assignment/Assignment4/CodeGenerator.py
```python
from Utils import *
from StaticCheck import *
from StaticError import *
from Emitter import Emitter
from Frame import Frame
from abc import ABC, abstractmethod
class CodeGenerator(Utils):
def __init__(self):
self.libName = "io"
def init(self):
return [Symbol("getInt", MType(list(), IntType()), CName(self.libName)),
Symbol("putInt", MType([IntType()], VoidType()), CName(self.libName)),
Symbol("putIntLn", MType([IntType()], VoidType()), CName(self.libName)),
Symbol("getFloat", MType(list(), FloatType()), CName(self.libName)),
Symbol("putFloat", MType([FloatType()], VoidType()), CName(self.libName)),
Symbol("putFloatLn", MType([FloatType()], VoidType()), CName(self.libName)),
Symbol("putBool", MType([BoolType()], VoidType()), CName(self.libName)),
Symbol("putBoolLn", MType([BoolType()], VoidType()), CName(self.libName)),
Symbol("putString", MType([StringType()], VoidType()), CName(self.libName)),
Symbol("putStringLn", MType([StringType()], VoidType()), CName(self.libName)),
Symbol("putLn", MType(list(), VoidType()), CName(self.libName))
]
def gen(self, ast, dir_):
#ast: AST
#dir_: String
gl = self.init()
gc = CodeGenVisitor(ast, gl, dir_)
gc.visit(ast, None)
# class StringType(Type):
# def __str__(self):
# return "StringType"
# def accept(self, v, param):
# return None
class ArrayPointerType(Type):
def __init__(self, ctype):
#ctype: Type
self.eleType = ctype
def __str__(self):
return "ArrayPointerType({0})".format(str(self.eleType))
def accept(self, v, param):
return None
class ClassType(Type):
def __init__(self,cname):
self.cname = cname
def __str__(self):
return "Class({0})".format(str(self.cname))
def accept(self, v, param):
return None
class SubBody():
def __init__(self, frame, sym):
#frame: Frame
#sym: List[Symbol]
self.frame = frame
self.sym = sym
class Access():
def __init__(self, frame, sym, isLeft, isFirst):
#frame: Frame
#sym: List[Symbol]
#isLeft: Boolean
#isFirst: Boolean
self.frame = frame
self.sym = sym
self.isLeft = isLeft
self.isFirst = isFirst
class Val(ABC):
pass
class Index(Val):
def __init__(self, value):
#value: Int
self.value = value
class CName(Val):
def __init__(self, value):
#value: String
self.value = value
class CodeGenVisitor(BaseVisitor, Utils):
def __init__(self, astTree, env, dir_):
#astTree: AST
#env: List[Symbol]
#dir_: File
self.astTree = astTree
self.env = env
self.className = "MPClass"
self.path = dir_
self.emit = Emitter(self.path + "/" + self.className + ".j")
def visitProgram(self, ast, c):
#ast: Program
#c: Any
self.emit.printout(self.emit.emitPROLOG(self.className, "java.lang.Object"))
e = SubBody(None, self.env)
var_lst = [x for x in ast.decl if type(x) is VarDecl]
func_lst = [x for x in ast.decl if type(x) is FuncDecl]
for x in var_lst:
e = self.visit(x, e)
for i in func_lst:
lst = [x.varType for x in i.param]
e.sym.append(Symbol(i.name.name, MType(lst,i.returnType), CName(self.className)))
for x in func_lst:
self.visit(x, e)
# generate default constructor
self.genMETHOD(FuncDecl(Id("<init>"), list(), list(), list(),None), c, Frame("<init>", VoidType))
self.emit.emitEPILOG()
return c
def genMETHOD(self, consdecl, o, frame):
#consdecl: FuncDecl
#o: Any
#frame: Frame
isInit = consdecl.returnType is None
isMain = consdecl.name.name == "main" and len(consdecl.param) == 0 and type(consdecl.returnType) is VoidType
returnType = VoidType() if isInit else consdecl.returnType
methodName = "<init>" if isInit else consdecl.name.name
intype = [ArrayPointerType(StringType())] if isMain else [x.varType for x in consdecl.param]
mtype = MType(intype, returnType)
self.emit.printout(self.emit.emitMETHOD(methodName, mtype, not isInit, frame))
frame.enterScope(True)
glenv = o
# Generate code for parameter declarations
if isInit:
self.emit.printout(self.emit.emitVAR(frame.getNewIndex(), "this", ClassType(self.className), frame.getStartLabel(), frame.getEndLabel(), frame))
if isMain:
self.emit.printout(self.emit.emitVAR(frame.getNewIndex(), "args", ArrayPointerType(StringType()), frame.getStartLabel(), frame.getEndLabel(), frame))
for x in consdecl.param + consdecl.local:
glenv = self.visit(x,SubBody(frame, glenv.sym))
body = consdecl.body
self.emit.printout(self.emit.emitLABEL(frame.getStartLabel(), frame))
# Generate code for statements
if isInit:
self.emit.printout(self.emit.emitREADVAR("this", ClassType(self.className), 0, frame))
self.emit.printout(self.emit.emitINVOKESPECIAL(frame))
list(map(lambda x: self.visit(x, SubBody(frame, glenv.sym)), body))
self.emit.printout(self.emit.emitLABEL(frame.getEndLabel(), frame))
if type(returnType) is VoidType:
self.emit.printout(self.emit.emitRETURN(VoidType(), frame))
self.emit.printout(self.emit.emitENDMETHOD(frame))
frame.exitScope()
def visitFuncDecl(self, ast, o):
#ast: FuncDecl
#o: Any
if ast.name.name.lower()=="main":
ast.name.name = "main"
subctxt = o
frame = Frame(ast.name, ast.returnType)
self.genMETHOD(ast, subctxt, frame)
# return SubBody(None, [Symbol(ast.name, MType(lst, ast.returnType), CName(self.className))] + subctxt.sym)
def visitVarDecl(self, ast, o):
ctxt = o
if ctxt.frame is not None:
frame = ctxt.frame
index = frame.getNewIndex()
txt = self.emit.emitVAR(index,ast.variable.name,ast.varType,frame.getStartLabel(), frame.getEndLabel(),frame)
self.emit.printout(txt)
return SubBody(ctxt.frame,[Symbol(ast.variable.name, ast.varType, index)]+ctxt.sym)
else:
txt = self.emit.emitATTRIBUTE(ast.variable.name,ast.varType,False,None)
self.emit.printout(txt)
return SubBody(None, ctxt.sym+[Symbol(ast.variable.name,ast.varType, CName(self.className))])
def visitWhile(self, ast, o):
ctxt = o
frame = o.frame
sym = o.sym
frame.enterLoop()
self.emit.printout(self.emit.emitLABEL(frame.getContinueLabel(),frame))
expcode, exptyp = self.visit(ast.exp, Access(frame, sym, False, True))
self.emit.printout(expcode)
self.emit.printout(self.emit.jvm.emitIFEQ(frame.getBreakLabel()))
list(map(lambda x: self.visit(x, SubBody(frame, sym)), ast.sl))
self.emit.printout(self.emit.emitGOTO(frame.getContinueLabel(),frame))
self.emit.printout(self.emit.emitLABEL(frame.getBreakLabel(),frame))
frame.exitLoop()
def visitFor(self, ast ,o):
#id:Id
#expr1,expr2:Expr
#loop:list(Stmt)
#up:Boolean #True => increase; False => decrease
ctxt = o
frame = ctxt.frame
frame.enterLoop()
exp1, exp1typ = self.visit(ast.expr1,Access(frame, ctxt.sym, False, False))
self.emit.printout(exp1)
idstore,idtyp = self.visit(ast.id, Access(frame, ctxt.sym, True, False))
self.emit.printout(idstore)
idload,idtypnew = self.visit(ast.id, Access(frame, ctxt.sym, False, False))
# First time through: pre-adjust the loop counter before the loop starts
self.emit.printout(idload + self.emit.emitPUSHICONST(1,frame))
#if up -1, if downto +1
if ast.up:
self.emit.printout(self.emit.emitADDOP('-', IntType(), frame))
else:
self.emit.printout(self.emit.emitADDOP('+', IntType(), frame))
self.emit.printout(idstore)
self.emit.printout(self.emit.emitLABEL(frame.getContinueLabel(), frame))
exp2, exp2typ = self.visit(ast.expr2,Access(frame, ctxt.sym, False, False))
if ast.up:
self.emit.printout(idload + self.emit.emitPUSHICONST(1,frame)+self.emit.emitADDOP('+', IntType(), frame))
self.emit.printout(idstore)
else:
self.emit.printout(idload + self.emit.emitPUSHICONST(1,frame)+self.emit.emitADDOP('-', IntType(), frame))
self.emit.printout(idstore)
if ast.up:
self.emit.printout(idload + exp2 + self.emit.emitREOP("<=", IntType(), frame))
else:
self.emit.printout(idload + exp2 + self.emit.emitREOP(">=", IntType(), frame))
self.emit.printout(self.emit.jvm.emitIFEQ(frame.getBreakLabel()))
list(map(lambda x: self.visit(x, SubBody(frame, ctxt.sym)), ast.loop))
self.emit.printout(self.emit.emitGOTO(frame.getContinueLabel(),frame))
self.emit.printout(self.emit.emitLABEL(frame.getBreakLabel(), frame))
frame.exitLoop()
def visitIf(self, ast, o):
ctxt = o
frame = ctxt.frame
labelExit = frame.getNewLabel()
exprcode, exptyp = self.visit(ast.expr,Access(frame, ctxt.sym, False, False))
self.emit.printout(exprcode)
flagThen = self.checkFuncNoReturn(ast.thenStmt)
if len(ast.elseStmt) == 0:
self.emit.printout(self.emit.jvm.emitIFEQ(labelExit))
list(map(lambda x: self.visit(x, SubBody(frame, ctxt.sym)), ast.thenStmt))
if not flagThen:
self.emit.printout(self.emit.emitGOTO(labelExit,frame))
else:
labelElse = frame.getNewLabel()
flagElse = self.checkFuncNoReturn(ast.elseStmt)
self.emit.printout(self.emit.jvm.emitIFEQ(labelElse))
list(map(lambda x: self.visit(x, SubBody(frame, ctxt.sym)), ast.thenStmt))
if not flagThen:
self.emit.printout(self.emit.emitGOTO(labelExit,frame))
self.emit.printout(self.emit.emitLABEL(labelElse,frame))
list(map(lambda x: self.visit(x, SubBody(frame, ctxt.sym)), ast.elseStmt))
if not flagElse:
self.emit.printout(self.emit.emitGOTO(labelExit,frame))
self.emit.printout(self.emit.emitLABEL(labelExit,frame))
def checkFuncNoReturn(self, stmts):
check = False
for i in stmts:
if type(i) is If:
a = self.checkFuncNoReturn(i.thenStmt)
b = self.checkFuncNoReturn(i.elseStmt) if i.elseStmt != [] else False
check = a and b
if type(i) is With:
check = self.checkFuncNoReturn(i.stmt)
if type(i) is Return:
check = True
return check
def visitCallStmt(self, ast, o):
#ast: CallStmt
#o: Any
ctxt = o
frame = ctxt.frame
sym = self.lookup(ast.method.name.lower(), ctxt.sym, lambda x: x.name.lower())
for x in ctxt.sym:
if x.name.lower() == sym.name.lower():
ast.method.name = x.name
# print(ast.method.name)
cname = sym.value.value
ctype = sym.mtype
i = 0
in_ = ("", list())
for x in ast.param:
str1, typ1 = self.visit(x, Access(frame, ctxt.sym, False, True))
if type(typ1) is IntType and type(sym.mtype.partype[i]) is FloatType:
in_ = (in_[0] + str1 + self.emit.emitI2F(frame), in_[1]+[typ1])
else:
in_ = (in_[0] + str1, in_[1]+[typ1])
i += 1
self.emit.printout(in_[0])
self.emit.printout(self.emit.emitINVOKESTATIC(cname + "/" + ast.method.name, ctype, frame))
def visitCallExpr(self, ast, o):
#ast: CallExpr
#o: Any
ctxt = o
frame = ctxt.frame
sym = self.lookup(ast.method.name.lower(), ctxt.sym, lambda x: x.name.lower())
for x in ctxt.sym:
if x.name.lower() == sym.name.lower():
ast.method.name = x.name
cname = sym.value.value
ctype = sym.mtype
i = 0
in_ = ("", list())
for x in ast.param:
str1, typ1 = self.visit(x, Access(frame, ctxt.sym, False, True))
if type(typ1) is IntType and type(sym.mtype.partype[i]) is FloatType:
in_ = (in_[0] + str1 + self.emit.emitI2F(frame), in_[1]+[typ1])
else:
in_ = (in_[0] + str1, in_[1]+[typ1])
i += 1
return in_[0] + self.emit.emitINVOKESTATIC(cname + "/" + sym.name, ctype, frame), ctype.rettype
def visitBreak(self, ast, o):
ctxt = o
frame = ctxt.frame
brkLabel = frame.getBreakLabel()
self.emit.printout(self.emit.emitGOTO(brkLabel,frame))
def visitContinue(self, ast, o):
ctxt = o
frame = ctxt.frame
conLabel = frame.getContinueLabel()
self.emit.printout(self.emit.emitGOTO(conLabel,frame))
def visitAssign(self, ast, o):
ctxt = o
frame = ctxt.frame
nenv = ctxt.sym
right,righttyp = self.visit(ast.exp, Access(frame, nenv, False, True))
left,lefttyp = self.visit(ast.lhs, Access(frame, nenv, True, False))
self.emit.printout(right)
if type(righttyp) is IntType and type(lefttyp) is FloatType:
self.emit.printout(self.emit.emitI2F(frame))
self.emit.printout(left)
return
def visitBinaryOp(self, ast, o):
ctxt = o
frame = ctxt.frame
#lexeme = ast.op
leftcode, lefttyp = self.visit(ast.left, o)
rightcode, righttyp = self.visit(ast.right, o)
retyp = lefttyp
result = ""
if ast.op in ['+', '-']:
if type(lefttyp) is type(righttyp):
return leftcode + rightcode + self.emit.emitADDOP(ast.op, lefttyp, frame), retyp
else:
retyp = FloatType()
if type(lefttyp) is IntType:
return leftcode + self.emit.emitI2F(frame) + rightcode + self.emit.emitADDOP(ast.op, retyp, frame), retyp
else:
return leftcode + rightcode + self.emit.emitI2F(frame) + self.emit.emitADDOP(ast.op, retyp, frame), retyp
elif ast.op == '*':
if type(lefttyp) is type(righttyp):
return leftcode + rightcode + self.emit.emitMULOP(ast.op, lefttyp, frame), retyp
else:
retyp = FloatType()
if type(lefttyp) is IntType:
return leftcode + self.emit.emitI2F(frame) + rightcode + self.emit.emitMULOP(ast.op, retyp, frame), retyp
else:
return leftcode + rightcode + self.emit.emitI2F(frame) + self.emit.emitMULOP(ast.op, retyp, frame), retyp
elif ast.op == '/':
retyp = FloatType()
if type(lefttyp) is type(righttyp):
if type(lefttyp) is IntType:
return leftcode + self.emit.emitI2F(frame) + rightcode + self.emit.emitI2F(frame) + self.emit.emitMULOP(ast.op, retyp, frame), retyp
else:
return leftcode + rightcode + self.emit.emitMULOP(ast.op, retyp, frame), retyp
else:
if type(lefttyp) is IntType:
return leftcode + self.emit.emitI2F(frame) + rightcode + self.emit.emitMULOP(ast.op, retyp, frame), retyp
else:
return leftcode + rightcode + self.emit.emitI2F(frame) + self.emit.emitMULOP(ast.op, retyp, frame), retyp
elif ast.op.lower() == "div":
return leftcode + rightcode + self.emit.emitDIV(frame), IntType()
elif ast.op.lower() == "mod":
return leftcode + rightcode + self.emit.emitMOD(frame), IntType()
elif ast.op.lower() == "and":
return leftcode + rightcode + self.emit.emitANDOP(frame), BoolType()
elif ast.op.lower() == "or":
return leftcode + rightcode + self.emit.emitOROP(frame), BoolType()
elif ast.op in ['>','>=','<','<=','<>','=']:
retyp = BoolType()
if type(lefttyp) is type(righttyp):
return leftcode + rightcode + self.emit.emitREOP(ast.op, lefttyp, frame), retyp
else:
if type(lefttyp) is IntType:
return leftcode + self.emit.emitI2F(frame) + rightcode + self.emit.emitREOP(ast.op, FloatType(), frame), retyp
else:
return leftcode + rightcode + self.emit.emitI2F(frame) + self.emit.emitREOP(ast.op, FloatType(), frame), retyp
#TODO andthen & orelse
#if 5 > 3 and then 2 > 1
elif ast.op.lower() == 'andthen':
retyp = BoolType()
labelLz = frame.getNewLabel()
# labelTh = frame.getNewLabel()
result += leftcode
result += self.emit.emitDUP(frame)
result += self.emit.jvm.emitIFEQ(labelLz)
result += rightcode
result += self.emit.emitANDOP(frame)
result += self.emit.emitLABEL(labelLz,frame)
return result, retyp
elif ast.op.lower() == 'orelse':
retyp = BoolType()
labelLz = frame.getNewLabel()
result += leftcode
result += self.emit.emitDUP(frame)
result += self.emit.jvm.emitIFNE(labelLz)
result += rightcode
result += self.emit.emitOROP(frame)
result += self.emit.emitLABEL(labelLz,frame)
return result, retyp
# TODO: visitWith
def visitWith(self, ast, o):
ctxt = o
frame = ctxt.frame
sym = ctxt.sym
frame.enterScope(False)
labelSta = frame.getStartLabel()
labelEnd = frame.getEndLabel()
for x in ast.decl:
# print(type(sym))
if type(sym) is SubBody:
sym = self.visit(x,SubBody(frame, sym.sym))
else:
sym = self.visit(x,SubBody(frame, sym))
self.emit.printout(self.emit.emitLABEL(labelSta,frame))
list(map(lambda x: self.visit(x, SubBody(frame, sym.sym)), ast.stmt))
self.emit.printout(self.emit.emitLABEL(labelEnd,frame))
frame.exitScope()
def visitUnaryOp(self, ast, o):
ctxt = o
frame = ctxt.frame
unacode,unatyp = self.visit(ast.body,o)
if ast.op == '-':
return unacode + self.emit.emitNEGOP(unatyp,frame), unatyp
if ast.op.lower() == 'not':
return unacode + self.emit.emitNOT(BoolType(),frame), unatyp
def visitIntLiteral(self, ast, o):
#ast: IntLiteral
#o: Any
ctxt = o
frame = ctxt.frame
return self.emit.emitPUSHICONST(ast.value, frame), IntType()
def visitFloatLiteral(self, ast, o):
#ast: FloatLiteral
#o: Any
ctxt = o
frame = ctxt.frame
return self.emit.emitPUSHFCONST(str(ast.value), frame), FloatType()
def visitBooleanLiteral(self, ast, o):
#ast: BooleanLiteral
#o: Any
ctxt = o
frame = ctxt.frame
return self.emit.emitPUSHICONST(str(ast.value).lower(), frame), BoolType()
def visitStringLiteral(self, ast, o):
ctxt = o
frame = ctxt.frame
return self.emit.emitPUSHCONST('"' + ast.value + '"',StringType(), frame), StringType()
def visitReturn(self, ast, o):
ctxt = o
frame = ctxt.frame
refunctyp = frame.returnType
if ast.expr:
expcode,exptyp = self.visit(ast.expr, Access(frame,o.sym,False, True))
self.emit.printout(expcode)
if type(exptyp) is not type(refunctyp) and type(refunctyp) is FloatType:
self.emit.printout(self.emit.emitI2F(frame))
self.emit.printout(self.emit.emitRETURN(refunctyp,frame))
def visitId(self, ast, o):
ctxt = o
frame = ctxt.frame
isLeft = ctxt.isLeft
sym = self.lookup(ast.name.lower(), ctxt.sym, lambda x: x.name.lower())
if isLeft:
if type(sym.value) is CName:
name = self.className+"/"+sym.name
return self.emit.emitPUTSTATIC(name,sym.mtype,frame), sym.mtype
else:
return self.emit.emitWRITEVAR(sym.name,sym.mtype,sym.value,frame), sym.mtype
else:
if type(sym.value) is CName:
name =self.className+"/"+sym.name
return self.emit.emitGETSTATIC(name,sym.mtype,frame), sym.mtype
else:
return self.emit.emitREADVAR(sym.name,sym.mtype,sym.value,frame), sym.mtype
``` |
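The arithmetic branches of `visitBinaryOp` above all follow one promotion rule: when both operands share a type the opcode is emitted directly, otherwise the integer operand is widened with `emitI2F` and the result type becomes float, while `/` always yields a float. The sketch below restates that decision in isolation; the plain string type names and the `promote` helper are illustrative stand-ins and are not part of the generator.

```python
def promote(op: str, left: str, right: str):
    """Return (convert_left, convert_right, result_type) for an arithmetic op."""
    if op == '/':
        # real division always produces a float; widen any int operand
        return (left == 'int', right == 'int', 'float')
    if left == right:
        # same types: no conversion, result keeps the operand type
        return (False, False, left)
    # mixed int/float: widen the int side, result is float
    return (left == 'int', right == 'int', 'float')


if __name__ == '__main__':
    assert promote('+', 'int', 'float') == (True, False, 'float')
    assert promote('*', 'float', 'float') == (False, False, 'float')
    assert promote('/', 'int', 'int') == (True, True, 'float')
```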
{
"source": "jimccann-rh/nutanix-NC2",
"score": 2
} |
#### File: jimccann-rh/nutanix-NC2/clusternutanixvm-pruner.py
```python
import datetime
import logging
import os
import socket
import subprocess
import sys
# trunk-ignore(flake8/F401)
import time
from base64 import b64encode
from pathlib import Path
# trunk-ignore(flake8/F401)
import click
import requests
import urllib3
from dateutil import parser
from dotenv import load_dotenv
from clusternutanix import nc2_cluster_status
logging.basicConfig(stream=sys.stderr, level=logging.INFO)
DELETE_THRESHOLD = 24  #: threshold, in hours, beyond which VMs are deleted
def determine_age_in_hours(date_string) -> int:
"""Determine age of an object in hours"""
start_date = parser.parse(date_string)
end_date = datetime.datetime.now(datetime.timezone.utc)
age = end_date - start_date
object_age = age.total_seconds() / 3600
return object_age
def is_expired(object_age: float) -> bool:
"""Check if the object age is above the threshold and return either True or False"""
logging.debug("object age in hours : " + str(object_age))
return object_age - DELETE_THRESHOLD > 0
def check_key_exist(test_dict, key):
try:
value = test_dict[key]
logging.debug(value)
return True
except KeyError:
return False
def vms_prune(): # noqa: max-complexity=12
"""Prune VMs in the cluster"""
ncs = nc2_cluster_status()
if (ncs != "hibernated") and (ncs == "running"):
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
# load the script configuration
env_path_env = Path(".") / ".env"
load_dotenv(dotenv_path=env_path_env)
PE_PORT = os.getenv("PE_PORT")
PE_USERNAME = os.getenv("PE_USERNAME")
PE_PASSWORD = os.getenv("PE_PASSWORD")
OUTPUT = os.getenv("OUTPUT")
VM_EXCEPTIONS = os.getenv("VM_EXCEPTIONS").split(",")
PRISMCENTRAL_VMDESC = os.getenv("PRISMCENTRAL_VMDESC")
if not VM_EXCEPTIONS:
logging.info(
"*** DANGER *** Prism Centrals (all of them) should be in this list; ABORTING *********"
)
sys.exit(1)
else:
logging.info("exception list is assumed to contain Prism Central's uuid")
# load the script configuration
env_path_nc2 = Path(OUTPUT) / "NC2clusterinfo.txt"
load_dotenv(dotenv_path=env_path_nc2)
PE_IP = os.getenv("PE_IP")
PE_LB = None
PE_LB = os.getenv("PE_LB")
if PE_LB is not None:
PE_IP = socket.gethostbyname(PE_LB)
logging.info(PE_IP)
logging.info("Waiting for cluster IP to come on-line.")
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(300) # 300 Second Timeout
result = sock.connect_ex((PE_IP, int(PE_PORT)))
if result == 0:
logging.info("port OPEN")
else:
logging.info("port CLOSED, connect_ex returned: " + str(result))
# # ping to see if cluster IP is up it should come up after the cluster is online
# cmd = ["ping", "-c2", "-W 5", PE_IP]
# done = False
# timeout = (
# 1000 # default time out after 1000 times, set to -1 to disable timeout
# )
# while not done and timeout:
# response = subprocess.Popen(cmd, stdout=subprocess.PIPE)
# stdout, stderr = response.communicate()
# if response.returncode == 0:
# logging.info("Server up!")
# done = True
# else:
# sys.stdout.write(".")
# timeout -= 1
# if not done:
# logging.info("\nCluster failed to respond")
# Get logged in and get list of vms
request_url = "https://%s:%s/api/nutanix/v3/vms/list" % (PE_IP, PE_PORT)
encoded_credentials = b64encode(
bytes(f"{PE_USERNAME}:{PE_PASSWORD}", encoding="ascii")
).decode("ascii")
auth_header = f"Basic {encoded_credentials}"
payload = '{"kind":"vm"}'
headers = {
"Accept": "application/json",
"Content-Type": "application/json",
"Authorization": f"{auth_header}",
"cache-control": "no-cache",
}
response = requests.request(
"post", request_url, data=payload, headers=headers, verify=False
)
logging.debug(response.status_code)
logging.debug(response.text)
info = response.json()
logging.info("*************")
# logging.info(info['entities'])
todelete = ""
toberemove = ""
for entity in info["entities"]:
logging.debug(entity)
vm_uuid = entity["metadata"]["uuid"]
vm_name = entity["spec"]["name"]
vm_creation = entity["metadata"]["creation_time"]
key_to_lookup = "description"
if check_key_exist(entity["spec"], key_to_lookup):
vm_description = entity["spec"]["description"]
else:
vm_description = ""
logging.debug("VM found in cluster")
logging.debug(
f"********* {vm_uuid} {vm_creation} {vm_name} {vm_description} **************"
)
howlong = determine_age_in_hours(vm_creation)
logging.debug("age in hours: " + str(howlong))
deleteme = is_expired(howlong)
logging.debug(deleteme)
# todelete = " ".join([todelete, vm_uuid]).lstrip()
if deleteme:
todelete = f"{todelete} {vm_uuid}".lstrip()
# logging.info(todelete)
listtodelete = todelete.split(" ")
# logging.info(listtodelete)
# fail safe in case prism central not in vm_exceptions list in .env
if vm_description == PRISMCENTRAL_VMDESC:
VM_EXCEPTIONS.append(vm_uuid)
logging.debug(f"These VMs are exceptions and will not be pruned: {VM_EXCEPTIONS}")
toberemove = list(set(listtodelete) - set(VM_EXCEPTIONS))
# print("TOREMOVE***" + str(toberemove))
logging.debug(f"These VMs will be pruned *** {toberemove}")
else:
logging.debug(f"nothing to be done to this vm {vm_name} {vm_uuid}")
logging.debug("*******************")
for x in toberemove:
logging.info(f"DELETED {x}")
request_url2 = "https://%s:%s/api/nutanix/v3/vms/%s" % (
PE_IP,
PE_PORT,
x,
)
# danger
response2 = requests.request(
"delete", request_url2, data=payload, headers=headers, verify=False
)
logging.info(response2)
# time.sleep(1)
count = len(toberemove)
logging.info("There were " + str(count) + " VMs REMOVED!")
# if __name__ == "__main__":
# vms_prune()
def main() -> None:
vms_prune()
if __name__ == "__main__":
main()
``` |
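The pruning decision in the script above reduces to two small steps: parse the VM's `creation_time`, measure its age in hours against the current UTC time, and treat anything older than `DELETE_THRESHOLD` as expired. Here is that check on its own; the timestamp is an invented example value.

```python
import datetime

from dateutil import parser

DELETE_THRESHOLD = 24  # hours


def age_in_hours(created: str) -> float:
    """Age of an ISO-8601 timestamp, in hours, relative to now (UTC)."""
    delta = datetime.datetime.now(datetime.timezone.utc) - parser.parse(created)
    return delta.total_seconds() / 3600


def is_expired(created: str) -> bool:
    return age_in_hours(created) > DELETE_THRESHOLD


print(is_expired("2020-01-01T00:00:00Z"))  # True: far older than 24 hours
```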
{
"source": "jimchan932/data_structures",
"score": 4
} |
#### File: jimchan932/data_structures/mazeSearch.py
```python
from enum import Enum
class Action(Enum):
Left = 1
Right = 2
Up = 3
Down = 4
Init = 5
class Location(Enum):
Empty = 1
Wall = 2
Start = 3
Destination = 4
class Maze:
def __init__(self, file_name):
self.mazeMap = []
i = 0
with open(file_name, 'r') as file:
for lines in file:
rowList = []
for j in range(0, len(lines)-1):
if lines[j] == '1':
rowList.append(Location.Wall)
elif lines[j] == '0':
rowList.append(Location.Empty)
elif lines[j] == 'S':
self.startX = j
self.startY = i
rowList.append(Location.Start)
elif lines[j] == 'E':
self.destinationX = j
self.destinationY = i
rowList.append(Location.Destination)
j = j + 1
self.mazeMap.append(rowList)
i = i + 1
def getLocation(self, coordX, coordY):
return self.mazeMap[coordY][coordX]
class Node:
def __init__(self, action, location_x, location_y, parent_cost = 0):
self.action = action
if(self.action == Action.Init):
self.x = location_x
self.y = location_y
self.path_cost = 0
else:
if(self.action == Action.Left):
self.x = location_x-1
self.y = location_y
elif(self.action == Action.Right):
self.x = location_x+1
self.y = location_y
elif(self.action == Action.Up):
self.x = location_x
self.y = location_y-1
elif(self.action == Action.Down):
self.x = location_x
self.y = location_y+1
self.path_cost = parent_cost+1
def getParentCoordinates(self):
if(self.action == Action.Left):
return (self.x+1, self.y)
if(self.action == Action.Right):
return (self.x-1, self.y)
if(self.action == Action.Up):
return (self.x, self.y+1)
if(self.action == Action.Down):
return (self.x, self.y-1)
if(self.action == Action.Init):
return (self.x, self.y)
def breadthFirstSearch(filename):
maze = Maze(filename)
initNode = Node(Action.Init, maze.startX, maze.startY)
frontier = [] # queue
frontier.append(initNode)
explored = []
while(frontier): # while frontier is not empty
parentNode = frontier.pop(0)
#print("X: %d" % parentNode.x)
#print("Y: %d" % parentNode.y)
explored.append(parentNode)
possibleActions = [Action.Left, Action.Right, Action.Up, Action.Down]
if(parentNode.action != Action.Init):
possibleActions.remove(oppositeDirection(parentNode.action))
for action in possibleActions:
childNode = Node(action, parentNode.x, parentNode.y, parentNode.path_cost)
if(maze.mazeMap[childNode.y][childNode.x] == Location.Wall
or maze.getLocation(childNode.x, childNode.y) == Location.Start):
continue
else:
flag1 = False
flag2 = False
for node in frontier:
if node.x == childNode.x and node.y == childNode.y:
flag1 = True
for node in explored:
if node.x == childNode.x and node.y == childNode.y:
flag2 = True
if(not (flag1 or flag2)):
if(maze.getLocation(childNode.x, childNode.y) == Location.Destination): # goal test
solution = []
backTrackNodeCoordX, backTrackNodeCoordY = childNode.getParentCoordinates()
while(maze.getLocation(backTrackNodeCoordX, backTrackNodeCoordY) != Location.Start):
for exploredNode in explored:
if(exploredNode.x == backTrackNodeCoordX and exploredNode.y == backTrackNodeCoordY):
solution.append(exploredNode.action)
backTrackNodeCoordX, backTrackNodeCoordY = exploredNode.getParentCoordinates()
return solution
frontier.append(childNode)
return [] # failure
def depthFirstSearch(filename):
maze = Maze(filename)
initNode = Node(Action.Init, maze.startX, maze.startY)
stack = [] # stack (LIFO)
stack.append(initNode)
explored = []
while(stack):
parentNode = stack.pop()
#print("X: %d" % parentNode.x)
#print("Y: %d" % parentNode.y)
flag1 = False
flag2 = False
for node in stack:
if node.x == parentNode.x and node.y == parentNode.y:
flag1 = True
for node in explored:
if node.x == parentNode.x and node.y == parentNode.y:
flag2 = True
if(not (flag1 or flag2)):
if(maze.getLocation(parentNode.x, parentNode.y) == Location.Destination): # goal test
print(parentNode.path_cost)
solution = []
backTrackNodeCoordX, backTrackNodeCoordY = parentNode.getParentCoordinates()
while(maze.getLocation(backTrackNodeCoordX, backTrackNodeCoordY) != Location.Start):
for exploredNode in explored:
if(exploredNode.x == backTrackNodeCoordX and exploredNode.y == backTrackNodeCoordY):
solution.append(exploredNode.action)
backTrackNodeCoordX, backTrackNodeCoordY = exploredNode.getParentCoordinates()
return solution
explored.append(parentNode)
possibleActions = [Action.Left, Action.Right, Action.Up, Action.Down]
if(parentNode.action != Action.Init):
possibleActions.remove(oppositeDirection(parentNode.action))
for action in possibleActions:
childNode = Node(action, parentNode.x, parentNode.y, parentNode.path_cost)
if(maze.mazeMap[childNode.y][childNode.x] == Location.Wall
or maze.getLocation(childNode.x, childNode.y) == Location.Start):
continue
else:
stack.append(childNode)
return []
class DLSResult(Enum):
Cutoff = 1
Failure = 2
Success = 3
# recursive DLS
# input: maze, explored (initially empty), node, limit,
# output: explored, (cutoff, success, or failure)
def oppositeDirection(direction):
if direction == Action.Left:
return Action.Right
elif direction == Action.Right:
return Action.Left
elif direction == Action.Up:
return Action.Down
elif direction == Action.Down:
return Action.Up
def recursiveDLS(maze, node, limit, explored):
if(maze.getLocation(node.x, node.y) == Location.Destination): # is Solution
return explored, node, DLSResult.Success
elif limit == 0: return explored, node, DLSResult.Cutoff
else:
cutoff_occured = False
possibleActions = [Action.Left, Action.Right, Action.Up, Action.Down]
if(node.action != Action.Init):
possibleActions.remove(oppositeDirection(node.action))
for action in possibleActions:
childNode = Node(action, node.x, node.y, node.path_cost)
exploredFlag = False
if(maze.mazeMap[childNode.y][childNode.x] == Location.Wall
or maze.getLocation(childNode.x, childNode.y) == Location.Start):
continue
for exploredNode in explored:
if(exploredNode.x == childNode.x and exploredNode.y == childNode.y):
exploredFlag = True
if(exploredFlag): continue
explored.append(childNode) # add childNode to epxlored
explored, destinationNode, result = recursiveDLS(maze, childNode, limit-1, explored)
if result == DLSResult.Cutoff: cutoff_occured = True
elif result != DLSResult.Failure: return explored, destinationNode, result
if(cutoff_occured): return explored, node, DLSResult.Cutoff
else: return explored, node, DLSResult.Failure
def depthLimitedSearch(filename, limit):
maze = Maze(filename)
initNode = Node(Action.Init, maze.startX, maze.startY)
explored, destinationNode, result = recursiveDLS(maze, initNode, limit, [])
if(result == DLSResult.Cutoff):
print("Cutoff reached")
return []
elif(result == DLSResult.Failure):
print("No solutions")
return []
elif(result == DLSResult.Success):
solution = []
backTrackNodeCoordX, backTrackNodeCoordY = destinationNode.getParentCoordinates()
while(maze.getLocation(backTrackNodeCoordX, backTrackNodeCoordY) != Location.Start):
for exploredNode in explored:
if(exploredNode.x == backTrackNodeCoordX and exploredNode.y == backTrackNodeCoordY):
solution.append(exploredNode.action)
backTrackNodeCoordX, backTrackNodeCoordY = exploredNode.getParentCoordinates()
return solution
def printSolution(solution):
if(solution == []): return
print("Start->", end='')
for step in reversed(solution):
if(step == Action.Left):
print("Left->", end='')
if(step == Action.Right):
print("Right->", end='')
if(step == Action.Up):
print("Up->", end='')
if(step == Action.Down):
print("Down->", end='')
print("Destination")
def main():
solution1 = breadthFirstSearch(r'C:\users\jimmy\Desktop\aicourse\MazeData.txt')
print(len(solution1))
print("Breadth First Search")
printSolution(solution1)
solution2 = depthFirstSearch(r'C:\users\jimmy\Desktop\aicourse\MazeData.txt')
print("Depth First Search")
print(len(solution2))
printSolution(solution2)
print("Depth Limited Search")
solution3 = depthLimitedSearch(r'C:\users\jimmy\Desktop\aicourse\MazeData.txt', 78)
printSolution(solution3)
main()
``` |
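Both search routines in `mazeSearch.py` are built on the same frontier/explored pattern; the breadth-first variant additionally guarantees a shortest path because nodes are expanded in FIFO order. The reduced sketch below shows that pattern on a plain 0/1 grid (0 = free, 1 = wall), with a parent map doubling as the explored set; the grid and coordinates are example data only.

```python
from collections import deque


def bfs(grid, start, goal):
    """Shortest path on a grid of 0s (free) and 1s (walls), as (x, y) tuples."""
    rows, cols = len(grid), len(grid[0])
    frontier = deque([start])
    parent = {start: None}  # doubles as the explored set
    while frontier:
        cur = frontier.popleft()
        if cur == goal:
            path = []
            while cur is not None:  # walk parent links back to the start
                path.append(cur)
                cur = parent[cur]
            return path[::-1]
        x, y = cur
        for nxt in ((x - 1, y), (x + 1, y), (x, y - 1), (x, y + 1)):
            nx, ny = nxt
            if 0 <= nx < cols and 0 <= ny < rows and grid[ny][nx] == 0 and nxt not in parent:
                parent[nxt] = cur
                frontier.append(nxt)
    return []  # no path found


print(bfs([[0, 0, 0], [1, 1, 0], [0, 0, 0]], (0, 0), (0, 2)))
```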
{
"source": "JimChengLin/AsyncDB2",
"score": 3
} |
#### File: AsyncDB2/AsyncDB/Engine.py
```python
from asyncio import ensure_future, Lock, sleep, get_event_loop
from bisect import insort, bisect, bisect_left
from collections import UserList
from contextlib import suppress
from multiprocessing import Process
from os import rename, remove
from os.path import getsize, isfile
from pickle import load, UnpicklingError
from struct import pack, unpack
from .Allocator import Allocator
from .AsyncFile import AsyncFile
from .Node import IndexNode, ValueNode
from .TaskQue import TaskQue, Task
class SortedList(UserList):
def append(self, item):
insort(self.data, item)
OP = b'\x00'
ED = b'\x01'
MIN_DEGREE = 128
class BasicEngine:
# Basic transaction handling
def __init__(self, filename: str):
if not isfile(filename):
with open(filename, 'wb') as file:
# indicator
file.write(OP)
# root
file.write(pack('Q', 9))
self.root = IndexNode(is_leaf=True)
self.root.dump(file)
else:
with open(filename, 'rb+') as file:
if file.read(1) == OP:
file.close()
p = Process(target=repair, args=(filename,))
p.start()
p.join()
return self.__init__(filename)
else:
ptr = unpack('Q', file.read(8))[0]
file.seek(ptr)
self.root = IndexNode(file=file)
file.seek(0)
file.write(OP)
self.allocator = Allocator()
self.async_file = AsyncFile(filename)
self.command_que = SortedList()
self.file = open(filename, 'rb+', buffering=0)
self.lock = Lock()
self.on_interval = (0, 1)
self.on_write = False
self.task_que = TaskQue()
def malloc(self, size: int) -> int:
def is_inside(ptr: int) -> bool:
if self.on_write:
begin, end = self.on_interval
return min(ptr + size, end) - max(ptr, begin) >= 0
ptr = self.allocator.malloc(size)
if ptr and is_inside(ptr):
self.free(ptr, size)
ptr = 0
if not ptr:
ptr = self.async_file.size
if is_inside(ptr):
ptr += 1
self.async_file.size += 1
self.async_file.size += size
return ptr
def free(self, ptr: int, size: int):
self.allocator.free(ptr, size)
def time_travel(self, token: Task, node: IndexNode):
address = node.nth_value_ads(0)
for i in range(len(node.ptrs_value)):
ptr = self.task_que.get(token, address, node.ptr)
if ptr:
node.ptrs_value[i] = ptr
address += 8
if not node.is_leaf:
for i in range(len(node.ptrs_child)):
ptr = self.task_que.get(token, address, node.ptr)
if ptr:
node.ptrs_child[i] = ptr
address += 8
def a_command_done(self, token: Task):
token.command_num -= 1
if token.command_num == 0:
self.task_que.clean()
if not self.task_que.que and self.lock.locked():
self.lock.release()
# cumulation
def do_cum(self, token: Task, free_nodes, command_map):
def func():
for node in free_nodes:
self.free(node.ptr, node.size)
token.free_param = func
for ptr, param in command_map.items():
data, depend = param if isinstance(param, tuple) else (param, 0)
self.ensure_write(token, ptr, data, depend)
self.time_travel(token, self.root)
self.root = self.root.clone()
def ensure_write(self, token: Task, ptr: int, data: bytes, depend=0):
async def coro():
while self.command_que:
ptr, token, data, depend = self.command_que.pop(0)
cancel = depend and self.task_que.is_canceled(token, depend)
if not cancel:
cancel = self.task_que.is_canceled(token, ptr)
if not cancel:
# make sure the write boundaries are not adjacent
self.on_interval = (ptr - 1, ptr + len(data) + 1)
await self.async_file.write(ptr, data)
self.a_command_done(token)
self.on_write = False
if not self.on_write:
self.on_write = True
ensure_future(coro())
# sorted by ptr and token.id
self.command_que.append((ptr, token, data, depend))
token.command_num += 1
def close(self):
self.file.seek(0)
self.file.write(ED)
self.file.close()
self.async_file.close()
def repair(filename: str):
async def coro():
temp = '__' + filename
engine = Engine(temp)
size = getsize(filename)
with open(filename, 'rb') as file:
file.seek(9)
while file.tell() != size:
indicator = file.read(1)
if indicator != ED:
continue
with suppress(EOFError, UnpicklingError):
item = load(file)
if isinstance(item, tuple) and len(item) == 2:
engine.set(*item)
await sleep(0)
if engine.task_que.que:
await engine.lock.acquire()
await engine.lock.acquire()
engine.close()
remove(filename)
rename(temp, filename)
loop = get_event_loop()
loop.run_until_complete(coro())
class Engine(BasicEngine):
# B-Tree core
async def get(self, key):
token = self.task_que.create(is_active=False)
token.command_num += 1
async def travel(ptr: int):
init = self.task_que.get(token, ptr, is_active=False)
if not init:
init = await self.async_file.exec(ptr, lambda f: IndexNode(file=f))
index = bisect(init.keys, key)
if init.keys[index - 1] == key:
ptr = self.task_que.get(token, init.nth_value_ads(index - 1), init.ptr) or init.ptrs_value[index - 1]
val = await self.async_file.exec(ptr, lambda f: ValueNode(file=f))
assert val.key == key
self.a_command_done(token)
return val.value
elif not init.is_leaf:
ptr = self.task_que.get(token, init.nth_child_ads(index), init.ptr) or init.ptrs_child[index]
return await travel(ptr)
else:
return self.a_command_done(token)
# the root's ptrs are kept up to date in real time
index = bisect(self.root.keys, key)
if index - 1 >= 0 and self.root.keys[index - 1] == key:
ptr = self.root.ptrs_value[index - 1]
val = await self.async_file.exec(ptr, lambda f: ValueNode(file=f))
assert val.key == key
self.a_command_done(token)
return val.value
elif not self.root.is_leaf:
return await travel(self.root.ptrs_child[index])
else:
return self.a_command_done(token)
def set(self, key, value):
token = self.task_que.create(is_active=True)
free_nodes = []
# {..., ptr: data OR (data, depend)}
command_map = {}
def replace(address: int, ptr: int, depend: int):
self.file.seek(ptr)
org_val = ValueNode(file=self.file)
if org_val.value != value:
# write the new Val
val = ValueNode(key, value)
self.file.seek(self.async_file.size)
val.dump(self.file)
self.async_file.size += val.size
# set the status byte to 0
self.file.seek(org_val.ptr)
self.file.write(OP)
# free
free_nodes.append(org_val)
# sync
self.task_que.set(token, address, org_val.ptr, val.ptr)
# command
self.ensure_write(token, address, pack('Q', val.ptr), depend)
self.do_cum(token, free_nodes, command_map)
def split(address: int, par: IndexNode, child_index: int, child: IndexNode, depend: int):
org_par = par.clone()
org_child = child.clone()
# give half of the data to the sibling
mi = (len(child.keys) - 1) // 2 + 1
sibling = IndexNode(is_leaf=child.is_leaf)
sibling.keys = child.keys[mi:]
sibling.ptrs_value = child.ptrs_value[mi:]
del child.keys[mi:]
del child.ptrs_value[mi:]
if not sibling.is_leaf:
sibling.ptrs_child = child.ptrs_child[mi:]
del child.ptrs_child[mi:]
# the parent needs one value
par.keys.insert(child_index, child.keys.pop())
par.ptrs_value.insert(child_index, child.ptrs_value.pop())
# allocate space
child_b = bytes(child)
sibling_b = bytes(sibling)
child.ptr = self.malloc(child.size)
sibling.ptr = self.malloc(sibling.size)
par.ptrs_child[child_index] = child.ptr
par.ptrs_child.insert(child_index + 1, sibling.ptr)
par_b = bytes(par)
par.ptr = self.malloc(par.size)
# updates complete
# free
free_nodes.extend((org_par, org_child))
# sync
_ = None
for ptr, head, tail in ((address, org_par.ptr, par.ptr),
(org_par.ptr, org_par, _), (org_child.ptr, org_child, _),
(par.ptr, _, par), (child.ptr, _, child), (sibling.ptr, _, sibling)):
self.task_que.set(token, ptr, head, tail)
# command
command_map.update({address: (pack('Q', par.ptr), depend),
par.ptr: par_b, child.ptr: child_b, sibling.ptr: sibling_b})
cursor = self.root
address = 1
depend = 0
# the root is full
if len(cursor.keys) == 2 * MIN_DEGREE - 1:
# create a new root
root = IndexNode(is_leaf=False)
root.ptrs_child.append(self.root.ptr)
split(address, root, 0, self.root, depend)
self.root = cursor = root
index = bisect(cursor.keys, key)
# check whether the key already exists
if cursor.keys and cursor.keys[index - 1] == key:
return replace(cursor.nth_value_ads(index - 1), cursor.ptrs_value[index - 1], cursor.ptr)
# walk down until a leaf node is reached
while not cursor.is_leaf:
index = bisect(cursor.keys, key)
ptr = cursor.ptrs_child[index]
child = self.task_que.get(token, ptr)
if not child:
self.file.seek(ptr)
child = IndexNode(file=self.file)
self.time_travel(token, child)
i = bisect_left(child.keys, key)
if i < len(child.keys) and child.keys[i] == key:
return replace(child.nth_value_ads(i), child.ptrs_value[i], child.ptr)
if len(child.keys) == 2 * MIN_DEGREE - 1:
split(address, cursor, index, child, depend)
if cursor.keys[index] < key:
# the path moves to the sibling, which must already exist in task_que
index += 1
ptr = cursor.ptrs_child[index]
child = self.task_que.get(token, ptr)
address = cursor.nth_child_ads(index)
depend = cursor.ptr
cursor = child
# a leaf node has been reached
val = ValueNode(key, value)
val_b = bytes(val)
val.ptr = self.malloc(val.size)
self.file.seek(val.ptr)
self.file.write(val_b)
org_cursor = cursor.clone()
index = bisect(cursor.keys, key)
cursor.keys.insert(index, val.key)
cursor.ptrs_value.insert(index, val.ptr)
cursor_b = bytes(cursor)
cursor.ptr = self.malloc(cursor.size)
# updates complete
# free
free_nodes.append(org_cursor)
# sync
_ = None
for ptr, head, tail in ((address, org_cursor.ptr, cursor.ptr),
(org_cursor.ptr, org_cursor, _), (cursor.ptr, _, cursor)):
self.task_que.set(token, ptr, head, tail)
# command
command_map.update({address: (pack('Q', cursor.ptr), depend), cursor.ptr: cursor_b})
self.do_cum(token, free_nodes, command_map)
def pop(self, key):
token = self.task_que.create(is_active=True)
free_nodes = []
command_map = {}
def indicate(val: ValueNode):
self.file.seek(val.ptr)
self.file.write(OP)
free_nodes.append(val)
def fetch(ptr: int) -> IndexNode:
result = self.task_que.get(token, ptr)
if not result:
self.file.seek(ptr)
result = IndexNode(file=self.file)
self.time_travel(token, result)
return result
def left_to_right(address: int, par: IndexNode, val_index: int,
left_child: IndexNode, right_child: IndexNode, depend: int):
org_par = par.clone()
org_left = left_child.clone()
org_right = right_child.clone()
# in-memory update
last_val_key = left_child.keys.pop()
last_val_ptr = left_child.ptrs_value.pop()
val_key = par.keys[val_index]
val_ptr = par.ptrs_value[val_index]
par.keys[val_index] = last_val_key
par.ptrs_value[val_index] = last_val_ptr
right_child.keys.insert(0, val_key)
right_child.ptrs_value.insert(0, val_ptr)
if not left_child.is_leaf:
last_ptr_child = left_child.ptrs_child.pop()
right_child.ptrs_child.insert(0, last_ptr_child)
# allocate space
left_b = bytes(left_child)
right_b = bytes(right_child)
left_child.ptr = self.malloc(left_child.size)
right_child.ptr = self.malloc(right_child.size)
par.ptrs_child[val_index] = left_child.ptr
par.ptrs_child[val_index + 1] = right_child.ptr
par_b = bytes(par)
par.ptr = self.malloc(par.size)
# updates complete
# free
free_nodes.extend((org_par, org_left, org_right))
# sync
_ = None
for ptr, head, tail in ((address, org_par.ptr, par.ptr),
(org_par.ptr, org_par, _), (par.ptr, _, par),
(org_left.ptr, org_left, _), (left_child.ptr, _, left_child),
(org_right.ptr, org_right, _), (right_child.ptr, _, right_child)):
self.task_que.set(token, ptr, head, tail)
# command
command_map.update({address: (pack('Q', par.ptr), depend),
par.ptr: par_b, left_child.ptr: left_b, right_child.ptr: right_b})
def right_to_left(address: int, par: IndexNode, val_index: int,
left_child: IndexNode, right_child: IndexNode, depend: int):
org_par = par.clone()
org_left = left_child.clone()
org_right = right_child.clone()
# in-memory update
first_val_key = right_child.keys.pop(0)
first_val_ptr = right_child.ptrs_value.pop(0)
val_key = par.keys[val_index]
val_ptr = par.ptrs_value[val_index]
par.keys[val_index] = first_val_key
par.ptrs_value[val_index] = first_val_ptr
left_child.keys.append(val_key)
left_child.ptrs_value.append(val_ptr)
if not right_child.is_leaf:
first_ptr_child = right_child.ptrs_child.pop(0)
left_child.ptrs_child.append(first_ptr_child)
# allocate space
left_b = bytes(left_child)
right_b = bytes(right_child)
left_child.ptr = self.malloc(left_child.size)
right_child.ptr = self.malloc(right_child.size)
par.ptrs_child[val_index] = left_child.ptr
par.ptrs_child[val_index + 1] = right_child.ptr
par_b = bytes(par)
par.ptr = self.malloc(par.size)
# updates complete
# free
free_nodes.extend((org_par, org_left, org_right))
# sync
_ = None
for ptr, head, tail in ((address, org_par.ptr, par.ptr),
(org_par.ptr, org_par, _), (par.ptr, _, par),
(org_left.ptr, org_left, _), (left_child.ptr, _, left_child),
(org_right.ptr, org_right, _), (right_child.ptr, _, right_child)):
self.task_que.set(token, ptr, head, tail)
# command
command_map.update({address: (pack('Q', par.ptr), depend),
par.ptr: par_b, left_child.ptr: left_b, right_child.ptr: right_b})
def merge_left(address: int, par: IndexNode, val_index: int,
left_child: IndexNode, cursor: IndexNode, depend: int):
org_par = par.clone()
org_cursor = cursor.clone()
# in-memory update
val_key = par.keys.pop(val_index)
val_ptr = par.ptrs_value.pop(val_index)
del par.ptrs_child[val_index]
cursor.keys = [*left_child.keys, val_key, *cursor.keys]
cursor.ptrs_value = [*left_child.ptrs_value, val_ptr, *cursor.ptrs_value]
if not left_child.is_leaf:
cursor.ptrs_child = [*left_child.ptrs_child, *cursor.ptrs_child]
# allocate space
cursor_b = bytes(cursor)
cursor.ptr = self.malloc(cursor.size)
par.ptrs_child[val_index] = cursor.ptr
par_b = bytes(par)
par.ptr = self.malloc(par.size)
# updates complete
# free
free_nodes.extend((org_par, org_cursor, left_child))
# sync
_ = None
for ptr, head, tail in ((address, org_par.ptr, par.ptr),
(org_par.ptr, org_par, _), (par.ptr, _, par),
(org_cursor.ptr, org_cursor, _), (cursor.ptr, _, cursor),
(left_child.ptr, left_child, _)):
self.task_que.set(token, ptr, head, tail)
# command
command_map.update({address: (pack('Q', par.ptr), depend), par.ptr: par_b, cursor.ptr: cursor_b})
def merge_right(address: int, par: IndexNode, val_index: int,
cursor: IndexNode, right_child: IndexNode, depend: int):
org_par = par.clone()
org_cursor = cursor.clone()
# in-memory update
val_key = par.keys.pop(val_index)
val_ptr = par.ptrs_value.pop(val_index)
del par.ptrs_child[val_index + 1]
cursor.keys.extend((val_key, *right_child.keys))
cursor.ptrs_value.extend((val_ptr, *right_child.ptrs_value))
if not cursor.is_leaf:
cursor.ptrs_child.extend(right_child.ptrs_child)
# allocate space
cursor_b = bytes(cursor)
cursor.ptr = self.malloc(cursor.size)
par.ptrs_child[val_index] = cursor.ptr
par_b = bytes(par)
par.ptr = self.malloc(par.size)
# updates complete
# free
free_nodes.extend((org_par, org_cursor, right_child))
# sync
_ = None
for ptr, head, tail in ((address, org_par.ptr, par.ptr),
(org_par.ptr, org_par, _), (par.ptr, _, par),
(org_cursor.ptr, org_cursor, _), (cursor.ptr, _, cursor),
(right_child.ptr, right_child, _)):
self.task_que.set(token, ptr, head, tail)
# command
command_map.update({address: (pack('Q', par.ptr), depend), par.ptr: par_b, cursor.ptr: cursor_b})
def travel(address: int, init: IndexNode, key, depend: int):
index = bisect(init.keys, key) - 1
def key_in_leaf():
org_init = init.clone()
self.file.seek(init.ptrs_value[index])
val = ValueNode(file=self.file)
# in-memory update
del init.keys[index]
del init.ptrs_value[index]
# allocate space
init_b = bytes(init)
init.ptr = self.malloc(init.size)
# free
indicate(val)
free_nodes.append(org_init)
# sync
_ = None
for ptr, head, tail in ((address, org_init.ptr, init.ptr),
(org_init.ptr, org_init, _), (init.ptr, _, init)):
self.task_que.set(token, ptr, head, tail)
# command
command_map.update({address: (pack('Q', init.ptr), depend), init.ptr: init_b})
return val.value
def root_empty(successor: IndexNode):
free_nodes.append(self.root)
_ = None
for ptr, head, tail in ((address, self.root.ptr, successor.ptr),
(self.root.ptr, self.root, _), (successor.ptr, _, successor)):
self.task_que.set(token, ptr, head, tail)
command_map[address] = pack('Q', successor.ptr)
self.root = successor
# the key has been located
if index >= 0 and init.keys[index] == key:
# the key is in a leaf node
if init.is_leaf:
return key_in_leaf()
# the key is in an internal node
else:
left_ptr = init.ptrs_child[index]
left_child = fetch(left_ptr)
right_ptr = init.ptrs_child[index + 1]
right_child = fetch(right_ptr)
# left child has >= t keys
if len(left_child.keys) >= MIN_DEGREE:
left_to_right(address, init, index, left_child, right_child, depend)
return travel(init.nth_child_ads(index + 1), right_child, key, init.ptr)
# right child has >= t keys
elif len(right_child.keys) >= MIN_DEGREE:
right_to_left(address, init, index, left_child, right_child, depend)
return travel(init.nth_child_ads(index), left_child, key, init.ptr)
# both children have < t keys
else:
merge_left(address, init, index, left_child, right_child, depend)
if len(self.root.keys) == 0:
root_empty(right_child)
return travel(init.nth_child_ads(index), right_child, key, init.ptr)
# keep searching downwards
elif not init.is_leaf:
index += 1
ptr = init.ptrs_child[index]
cursor = fetch(ptr)
# the target child has < t keys
if len(cursor.keys) < MIN_DEGREE:
left_sibling = right_sibling = None
if index - 1 >= 0:
left_ptr = init.ptrs_child[index - 1]
left_sibling = fetch(left_ptr)
# left sibling has >= t keys
if len(left_sibling.keys) >= MIN_DEGREE:
left_to_right(address, init, index - 1, left_sibling, cursor, depend)
return travel(init.nth_child_ads(index), cursor, key, init.ptr)
if index + 1 < len(init.ptrs_child):
right_ptr = init.ptrs_child[index + 1]
right_sibling = fetch(right_ptr)
# right sibling has >= t keys
if len(right_sibling.keys) >= MIN_DEGREE:
right_to_left(address, init, index, cursor, right_sibling, depend)
return travel(init.nth_child_ads(index), cursor, key, init.ptr)
# neither sibling has >= t keys
if left_sibling:
index -= 1
merge_left(address, init, index, left_sibling, cursor, depend)
else:
merge_right(address, init, index, cursor, right_sibling, depend)
if len(self.root.keys) == 0:
root_empty(cursor)
return travel(init.nth_child_ads(index), cursor, key, init.ptr)
travel(1, self.root, key, 0)
self.do_cum(token, free_nodes, command_map)
async def items(self, item_from=None, item_to=None, max_len=0, reverse=False):
assert item_from <= item_to if item_from and item_to else True
token = self.task_que.create(is_active=False)
token.command_num += 1
result = []
async def travel(init: IndexNode):
async def get_item(index: int):
ptr = init.ptrs_value[index]
val = await self.async_file.exec(ptr, lambda f: ValueNode(file=f))
return val.key, val.value
async def get_child(index: int) -> IndexNode:
ptr = init.ptrs_child[index]
child = self.task_que.get(token, ptr, is_active=False)
if not child:
child = await self.async_file.exec(ptr, lambda f: IndexNode(file=f))
self.time_travel(token, child)
return child
# lo_key >= item_from
# hi_key > item_to
lo = 0 if item_from is None else bisect_left(init.keys, item_from)
hi = len(init.keys) if item_to is None else bisect(init.keys, item_to)
extend = not init.is_leaf and (item_from is None or lo == len(init.keys) or init.keys[lo] > item_from)
if not reverse and extend:
await travel(await get_child(lo))
for i in range(lo, hi) if not reverse else reversed(range(lo, hi)):
if reverse and not init.is_leaf:
await travel(await get_child(i + 1))
if max_len and len(result) >= max_len:
return
item = await get_item(i)
result.append(item)
if not reverse and not init.is_leaf:
await travel(await get_child(i + 1))
if reverse and extend:
await travel(await get_child(lo))
await travel(self.root)
self.a_command_done(token)
return result
```
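`Engine.get`, `Engine.set`, and the `SortedList` wrapper above all lean on the standard `bisect` module: `bisect` picks the child slot to descend into, `bisect_left` tests for an exact key match, and `insort` keeps a list ordered on insertion. A small standalone illustration with arbitrary key values:

```python
from bisect import bisect, bisect_left, insort

keys = [10, 20, 30, 40]

# Child index while descending the tree: the number of keys <= the search key.
print(bisect(keys, 25))  # 2

# Exact-match test, as used before overwriting an existing value.
i = bisect_left(keys, 30)
print(i < len(keys) and keys[i] == 30)  # True

# Insertion that keeps the list sorted (the same idea as SortedList.append).
insort(keys, 25)
print(keys)  # [10, 20, 25, 30, 40]
```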
#### File: JimChengLin/AsyncDB2/ManualTest.py
```python
from asyncio import get_event_loop
from AsyncDB import AsyncDB
M = 10000
FILE = 'Test.db'
async def write():
db = AsyncDB(FILE)
for i in range(M):
db[i] = i
print('set', i)
async def read():
db = AsyncDB(FILE)
for i in range(M):
value = await db[i]
print('get', value)
def main():
loop = get_event_loop()
loop.run_until_complete(write())
# loop.run_until_complete(read())
if __name__ == '__main__':
main()
``` |
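One detail worth calling out in `Engine.py` above is the one-byte open/closed indicator: `BasicEngine.__init__` writes `OP` while the database file is in use, `close` writes `ED`, and finding `OP` on startup means the previous run did not shut down cleanly, so `repair` replays the value log into a fresh file. Below is a stripped-down sketch of just that handshake, using an example file name rather than anything from the library:

```python
OP, ED = b'\x00', b'\x01'


def open_db(path: str) -> bool:
    """Return True if the previous session closed cleanly, then mark the file in use."""
    try:
        with open(path, 'rb+') as f:
            clean = f.read(1) != OP
            f.seek(0)
            f.write(OP)  # mark as "in use"
    except FileNotFoundError:
        with open(path, 'wb') as f:
            f.write(OP)
        clean = True
    return clean


def close_db(path: str) -> None:
    with open(path, 'rb+') as f:
        f.write(ED)  # mark as cleanly closed


print(open_db('demo.db'))  # True on first run
close_db('demo.db')
print(open_db('demo.db'))  # True again: the previous close was clean
```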
{
"source": "JimChr-R4GN4R/FilesCrypterSavior",
"score": 3
} |
#### File: JimChr-R4GN4R/FilesCrypterSavior/FCS.py
```python
FCS_Version = 3.0 # DON'T REMOVE OR MOVE THIS LINE
import os
from os import path
import json
import hashlib
from Crypto.Cipher import AES
from Crypto.Util.Padding import pad, unpad
from Crypto.Random import get_random_bytes
ERRORS = {
'[F-0]':"Files should not be more than 64GB.",
'[F-1]':"You have selected files thar are more than 64GB.",
'[F-2]':"Some files you selected do not exist.",
'[F_UC-0]':"Please check back later or contact with R4GN4R. Probably there is an update on the way!",
'[F_UC-1]':"Please check your internet connection.",
'[F_UC-2]':"HTTP Error.",
'[F_UC-3]':"Error Connecting.",
'[F_UC-4]':"Timeout Error.",
'[DB_KNS-0]':"Something is wrong with the DB file.",
'[DB_KNS-1]':"DB file does not exist.",
'[DB_DBFC-0]':"Something is wrong with the DB file.",
'[DB_DKNC-0]':"Something is wrong with DB file.",
'[AES_E-0]':"This key has been used already. Please enter a unique one.",
'[AES_E-1]':"Please enter a key up to 32 characters.",
'[AES_E-2]':"Please enter a key up to 32 characters.",
'[AES_E-3]':"File does not exist.",
'[AES_D-0]':"Please enter key and nonce to decrypt.",
'[AES_D-1]':"Please check your key's hex format.",
'[AES_D-2]':"Please check your nonce's hex format.",
'[AES_D-3]':"Please check your key and nonce.",
'[AES_D-4]':"Please enter a key up to 32 characters.",
'[AES_D-5]':"Your key or nonce is incorrect.",
'[AES_D-6]':"This file is not encrypted.",
'[UI_DBB-0]':'Something is wrong with this DB file.',
'[UI_DBB-1]':'This file contains characters that are not understandable.'
}
def Logger(mtype, message):
if mtype =='fileslistimport':
Logger('info',"You have selected:")
for i in message:
UIWindow.Logger.appendPlainText(i)
else:
if mtype == 'warn':
message = '[Warning] - ' + message
elif mtype =='info':
message = '[Info] - ' + message
elif mtype =='imp':
message = '[Important] - ' + message
elif mtype =='error':
message = '[Error] - ' + message + ' ' + ERRORS[message]
UIWindow.Logger.appendPlainText(message)
class AES_SYSTEM():
def EncSystem(self):
self.cur_enc_system = UIWindow.Enc_system_label.text()
if UIWindow.Enc_system_label.text() == 'AES-EAX':
return AES.MODE_EAX
else:
self.cur_enc_system = 'AES-GCM'
return AES.MODE_GCM
def Encrypt(self):
for self.address in UIWindow.files_list:
if not path.exists(self.address):
Logger('error','[AES_E-3]')
continue
UIWindow.SetShortcuts('cur_file',self.address)
self.filesize = os.path.getsize(self.address)/pow(1024,3)
if self.filesize >= 64: # If the file is >= 64GB, it should be split into smaller parts because of encryption security reasons
Logger('error','[AES_E-2]')
continue
## ENCRYPTION KEY
if UIWindow.enc_key_input.text(): # KEY
if UIWindow.enc_key_label.text() == 'Key (B):': # Bytes format key
## Check key's length
if len(UIWindow.enc_key_input.text()) <= 31:
self.key = pad(UIWindow.enc_key_input.text().encode() ,32) # pad key in total of 32 bytes (256bit)
elif len(UIWindow.enc_key_input.text()) == 32:
self.key = UIWindow.enc_key_input.text().encode()
else:
Logger('error','[AES_E-1]')
continue
else: # Hex format key
try:
self.key = UIWindow.enc_key_input.text()
bytes.fromhex(self.key)
except ValueError:
Logger('error',"Please enter a key in Hex format.")
continue
if UIWindow.option_Check_for_dublicate_key_nonce_in_DB.isChecked():
if self.DoubleKeyNonceChecker('key'):
Logger('error','[AES_E-0]')
continue
else: # Generate key
self.key = get_random_bytes(UIWindow.key_gen_bits.value()//8)
if UIWindow.option_Check_for_dublicate_key_nonce_in_DB.isChecked():
while self.DoubleKeyNonceChecker('key'):
self.key = get_random_bytes(UIWindow.key_gen_bits.value()//8)
Logger('info',f"Generated Key: {self.key.hex()}")
## NONCE
cipher = AES.new(self.key, self.EncSystem()) # AES Encryption System
self.nonce = cipher.nonce # Generated Nonce
if UIWindow.option_Check_for_dublicate_key_nonce_in_DB.isChecked(): # Check if nonce already exists
while self.DoubleKeyNonceChecker('nonce'):
cipher = AES.new(self.key, self.EncSystem()) # AES Encryption System
self.nonce = cipher.nonce # Nonce Generate
Logger('info',f"Generated Nonce: {self.nonce.hex()}")
## Basic Actions
UIWindow.enc_button.setEnabled(False)
UIWindow.dec_button.setEnabled(False)
UIWindow.dec_progressBar.setFormat('')
UIWindow.enc_files_counter_progressBar.setFormat(f'{UIWindow.files_counter}/{UIWindow.enc_files_counter_progressBar.maximum()}') # files counter bar 0/$num_of_files
file_blocks = os.path.getsize(self.address)//UIWindow.USABLE_RAM # file's blocks calculation
counter = 0
## ENCRYPTION PROCESS
with open(self.address, 'rb') as file:
with open(self.address + UIWindow.FILE_EXT, 'wb') as enc_file:
fb = file.read(UIWindow.USABLE_RAM) # read first $UIWindow.USABLE_RAM bytes
while len(fb) > 0: # While there is still data being read from the file
if file_blocks != 0: # Print Encryption Progress
UIWindow.enc_progressBar.setValue(counter*100//file_blocks)
UIWindow.enc_progressBar.setFormat( str(counter*100//file_blocks) + '%' )
enc_file.write(cipher.encrypt(fb))
fb = file.read(UIWindow.USABLE_RAM) # Read the next block of the file
counter += 1
## Tag
self.tag = cipher.digest() # Calculate tag
UIWindow.enc_progressBar.setValue(100)
UIWindow.enc_progressBar.setFormat( '100%' )
Logger('info',f"File has been successfully encrypted: {self.address}")
## Files Counter
UIWindow.files_counter += 1
UIWindow.enc_files_counter_progressBar.setValue(UIWindow.files_counter)
UIWindow.enc_files_counter_progressBar.setFormat(f'{UIWindow.files_counter}/{UIWindow.enc_files_counter_progressBar.maximum()}')
if UIWindow.option_Delete_original_file.isChecked(): # Delete original file
self.DeleteOriginalFile()
if UIWindow.option_Store_key_nonce_in_DB.isChecked(): # Save key/nonce/tag
self.filehash = self.sha256Hash(self.address + UIWindow.FILE_EXT) # calculate encfile hash
self.SaveKeyNonceTag() # save hash,key,nonce to database
def Decrypt(self):
for self.address in UIWindow.files_list:
UIWindow.SetShortcuts('cur_file',self.address)
if self.ManyFilesSelected: # If the file is already encrypted, check whether its key/nonce exist in the DB
self.filehash = self.sha256Hash(self.address)
self.KeyNonceSearcher()
if not self.address.endswith(UIWindow.FILE_EXT):
Logger('error',"[AES_D-6]")
continue
## KEY & NONCE inputs
if not (UIWindow.dec_key_input.text() and UIWindow.dec_nonce_input.text()): # If key/nonce have not been filled, then stop
Logger('error','[AES_D-0]')
continue
if UIWindow.dec_key_label.text() == 'Key (H):': # Hex format key
try:
self.key = bytes.fromhex(UIWindow.dec_key_input.text())
except ValueError:
Logger('error','[AES_D-1]')
continue
else: # Bytes format key
if len(UIWindow.dec_key_input.text()) <= 31:
self.key = pad(UIWindow.dec_key_input.text().encode(),32) # pad key in total of 32 bytes (256bit)
elif len(UIWindow.dec_key_input.text()) == 32:
self.key = UIWindow.dec_key_input.text().encode()
else:
Logger('error','[AES_D-4]')
continue
try: # Check nonce's hex format
self.nonce = bytes.fromhex(UIWindow.dec_nonce_input.text())
except ValueError:
Logger('error','[AES_D-2]')
continue
try:
cipher = AES.new( self.key, self.EncSystem(), nonce=self.nonce ) # AES Encryption System
except ValueError:
Logger('error','[AES_D-3]')
continue
## Basic Actions
UIWindow.enc_button.setEnabled(False)
UIWindow.dec_button.setEnabled(False)
UIWindow.enc_progressBar.setFormat('')
UIWindow.dec_files_counter_progressBar.setFormat(f'{UIWindow.files_counter}/{UIWindow.dec_files_counter_progressBar.maximum()}') # files counter bar 0/$num_of_files
file_blocks = os.path.getsize(self.address)//UIWindow.USABLE_RAM # file blocks calculation
counter = 0
## DECRYPT PROCESS
with open(self.address, 'rb') as file:
with open(self.address[:-len(UIWindow.FILE_EXT)],'wb') as dec_file:
fb = file.read(UIWindow.USABLE_RAM) # read first $UIWindow.USABLE_RAM bytes
while len(fb) > 0: # While there is still data being read from the file
if file_blocks != 0:
UIWindow.dec_progressBar.setValue(counter*100//file_blocks)
UIWindow.dec_progressBar.setFormat(str(counter*100//file_blocks) + '%' )
dec_file.write(cipher.decrypt(fb))
fb = file.read(UIWindow.USABLE_RAM) # Read the next block from the file
counter += 1
## DECRYPTION VERIFICATION
try: # if tag exists
if self.tag:
try:
cipher.verify(self.tag)
Logger('info',f"File has been successfully decrypted and verified:\n{self.address}")
dec_verified = 1
except ValueError:
Logger('error',"[AES_D-5]")
UIWindow.dec_button.setEnabled(True)
UIWindow.dec_button.setEnabled(True)
UIWindow.dec_progressBar.setFormat('Ready To Decrypt')
UIWindow.enc_progressBar.setFormat('Ready To Encrypt')
try:
os.remove(self.address[:-len(UIWindow.FILE_EXT)])
except PermissionError:
Logger('warn',"FCS does not have permission to delete trash file.")
continue
except AttributeError:
Logger('info',f"File has been successfully decrypted but not verified.")
dec_verified = 0
## Decryption process Counter
UIWindow.dec_progressBar.setValue(100)
UIWindow.dec_progressBar.setFormat( '100%' )
## Files Counter
UIWindow.files_counter += 1
UIWindow.dec_files_counter_progressBar.setValue(UIWindow.files_counter)
UIWindow.dec_files_counter_progressBar.setFormat(f'{UIWindow.files_counter}/{UIWindow.dec_files_counter_progressBar.maximum()}')
if not dec_verified: # If Decryption not verified
if not UIWindow.option_not_decrypted_verified_keep_original_file.isChecked(): # Delete original file
if UIWindow.option_Delete_original_file.isChecked():
self.DeleteOriginalFile()
if not UIWindow.option_not_verified_keep_key_nonce_DB.isChecked(): # delete hash,key,nonce,tag from database
if UIWindow.option_Delete_key_nonce_after_decryption.isChecked():
self.DeleteKeyNonce()
else: # If Decryption verified
if UIWindow.option_Delete_original_file.isChecked(): # Delete original file
self.DeleteOriginalFile()
if UIWindow.option_Delete_key_nonce_after_decryption.isChecked() and self.KeyNonceSearcher(): # delete hash,key,nonce,tag from database
self.DeleteKeyNonce()
UIWindow.dec_key_input.setText('')
UIWindow.dec_nonce_input.setText('')
def DeleteOriginalFile(self):
try:
os.remove(self.address)
Logger('info',"Original file has been deleted.")
except PermissionError:
Logger('warn',"FCS does not have permission to delete original file.")
def sha256Hash(self, address):
file_hash = hashlib.sha256()
with open(address, 'rb') as f: # Open the file to read it's bytes
fb = f.read(UIWindow.USABLE_RAM) # Read from the file. Take in the amount declared above
while len(fb) > 0: # While there is still data being read from the file
file_hash.update(fb) # Update the hash
fb = f.read(UIWindow.USABLE_RAM) # Read the next block from the file
return file_hash.hexdigest()
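# --- Illustrative sketch (not used anywhere by FCS) -------------------------
# The Encrypt/Decrypt methods above stream a file through a single AES cipher
# object and then authenticate it with digest()/verify(). The helper below
# shows the same EAX round-trip on an in-memory payload; it relies on the AES
# and get_random_bytes imports at the top of this module and is never called
# by the application.
def _aes_eax_roundtrip_demo(payload: bytes = b"example payload") -> bytes:
    key = get_random_bytes(32)                          # 256-bit key
    enc = AES.new(key, AES.MODE_EAX)                    # nonce is generated here
    ciphertext = enc.encrypt(payload)
    tag = enc.digest()                                  # authentication tag
    dec = AES.new(key, AES.MODE_EAX, nonce=enc.nonce)
    plaintext = dec.decrypt(ciphertext)
    dec.verify(tag)                                     # raises ValueError on tampering
    return plaintext
# -----------------------------------------------------------------------------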
class DB():
def SaveKeyNonceTag(self): # { hash : [ key, nonce, tag, enc_system, file_name ] }
if self.DBFileChecker():
with open(UIWindow.DATABASE_FILE, 'r+') as DB_file:
try:
data = json.load(DB_file)
except json.decoder.JSONDecodeError:
data = {}
finally:
data[self.filehash] = [self.key.hex() , self.nonce.hex(), self.tag.hex(), UIWindow.Enc_system_label.text(), self.address]
DB_file.seek(0)
json.dump(data, DB_file)
DB_file.truncate()
Logger('info',"Key and Nonce have been saved in DB.")
def DeleteKeyNonce(self):
if self.DBFileChecker():
try:
with open(UIWindow.DATABASE_FILE, 'r+') as DB_file:
data = json.load(DB_file)
del data[self.filehash]
DB_file.seek(0)
json.dump(data, DB_file)
DB_file.truncate()
except FileNotFoundError:
Logger('warn','DB file could not be found to delete key and nonce.')
def KeyNonceSearcher(self): # { hash : [ key, nonce, tag, enc_system ] }
if self.DBFileChecker():
try:
with open(UIWindow.DATABASE_FILE, 'r') as DB_file:
data = json.load(DB_file)
if self.filehash in data:
Logger('info',"File's key/nonce have been found in the database.")
UIWindow.dec_key_label.setText('Key (H):')
UIWindow.dec_key_input.setText(data[self.filehash][0])
UIWindow.dec_nonce_input.setText(data[self.filehash][1])
self.tag = bytes.fromhex(data[self.filehash][2])
UIWindow.Enc_system_label.setText(data[self.filehash][3])
return True
else:
return False
except json.decoder.JSONDecodeError:
Logger('error','[DB_KNS-0]')
UIWindow.DATABASE_FILE = UIWindow.DATABASE_FILE + '_tempfile.txt'
with open(UIWindow.DATABASE_FILE, 'w') as DB_file:
data = {}
json.dump(data, DB_file)
Logger('info',f'New DB file has been created: {os.getcwd()}\\{UIWindow.DATABASE_FILE}')
UIWindow.SetShortcuts('DB')
else:
return False
def DoubleKeyNonceChecker(self,obj):
if self.DBFileChecker():
try:
with open(UIWindow.DATABASE_FILE, 'r') as DB_file:
data = DB_file.read()
if obj == 'key':
return (self.key.hex() in data)
else:
return (self.nonce.hex() in data)
except json.decoder.JSONDecodeError:
Logger('error','[DB_DKNC-0]')
Logger('info','Encryption continues without key/nonce check.')
return True
else:
return True
def DBFileChecker(self):
if UIWindow.DATABASE_FILE != None:
if path.exists(UIWindow.DATABASE_FILE):
try:
DB_file = open(UIWindow.DATABASE_FILE, 'r')
DB_file.close()
except Exception as e:
print(e)
UIWindow.DATABASE_FILE = 'FCS_DB_tempfile.txt'
Logger('error','[DB_DBFC-0]')
with open(UIWindow.DATABASE_FILE, 'w') as DB_file:
data = {}
json.dump(data, DB_file)
Logger('info',f'Created a temp DB file: {UIWindow.DATABASE_FILE}')
UIWindow.SetShortcuts('DB')
UIWindow.SaveOptions()
return True
else:
UIWindow.SetShortcuts('DB-clear')
Logger('error','[DB_KNS-1]')
return False
else:
UIWindow.SetShortcuts('DB-clear')
return False
def NewDBFile(self):
new_DB = QFileDialog.getSaveFileName(UIWindow, 'New Database')[0]
if new_DB:
with open(new_DB,'w') as New_DB_file:
New_DB_file.write('{}')
UIWindow.DATABASE_FILE = new_DB
UIWindow.SetShortcuts('DB')
UIWindow.SaveOptions()
class File(AES_SYSTEM,DB):
def __init__(self, address_list):
address_list = self.AddressFixer(address_list)
if all([path.exists(x) for x in address_list]): # Check if all files exist
if len(address_list) == 1: # If only one file has been selected
self.ManyFilesSelected = False
self.address = address_list[0]
self.filesize = os.path.getsize(self.address)/pow(1024,3) # convert filesize from bytes to gigabytes
if self.filesize < 64: # If the file is >= 64GB, it should be split into smaller parts because of encryption security reasons
# Enable key/nonce inputs
UIWindow.enc_key_input.setEnabled(True)
UIWindow.dec_key_input.setEnabled(True)
UIWindow.dec_nonce_input.setEnabled(True)
# Enable enc/dec buttons
UIWindow.enc_button.setEnabled(True)
UIWindow.dec_button.setEnabled(True)
UIWindow.enc_button.clicked.connect(lambda: self.Encrypt())
UIWindow.dec_button.clicked.connect(lambda: self.Decrypt())
Logger('fileslistimport', address_list)
if self.address.endswith(UIWindow.FILE_EXT): # If the file is already encrypted, check whether its key/nonce exist in the DB
self.filehash = self.sha256Hash(self.address)
self.KeyNonceSearcher()
else: # If file is bigger than 64GB
Logger('error','[F-0]')
else: # If many files are chosen
self.ManyFilesSelected = True
self.addresses = address_list
Logger('fileslistimport', address_list) # print selected files
if all([ os.path.getsize(x)/pow(1024,3) < 64 for x in self.addresses]): # Check if all files are <64GB
if all([ UIWindow.FILE_EXT in x for x in self.addresses]): # Check if all files are encrypted ones
Logger('info',"All selected files are already encrypted.")
elif any([ UIWindow.FILE_EXT in x for x in self.addresses]):
Logger('info',"Some selected files are already encrypted and some are not.")
else:
Logger('info',"All selected files are not encrypted.")
## Disable enc/dec key/nonce inputs
UIWindow.enc_key_input.setText('')
UIWindow.enc_key_input.setEnabled(False)
UIWindow.dec_key_input.setText('')
UIWindow.dec_nonce_input.setText('')
UIWindow.dec_key_input.setEnabled(False)
UIWindow.dec_nonce_input.setEnabled(False)
## Enable enc/dec buttons
UIWindow.enc_button.setEnabled(True)
UIWindow.dec_button.setEnabled(True)
UIWindow.enc_button.clicked.connect(lambda: self.Encrypt())
UIWindow.dec_button.clicked.connect(lambda: self.Decrypt())
else:
Logger('error','[F-1]')
else:
Logger('error','[F-2]')
def AddressFixer(self, address_list):
for i in range(len(address_list)):
address_list[i] = os.path.abspath(address_list[i])
return address_list
#### UI Area
from PyQt5.QtWidgets import QApplication, QMainWindow, QVBoxLayout, QWidget, QPushButton, QTextEdit, QFileDialog, QSlider, QHBoxLayout, QLabel
from PyQt5.QtCore import *
from PyQt5.QtGui import *
import sys
from PyQt5 import uic
def clickable(widget): # https://wiki.python.org/moin/PyQt/Making%20non-clickable%20widgets%20clickable
class Filter(QObject):
clicked = pyqtSignal()
def eventFilter(self, obj, event):
if obj == widget:
if event.type() == QEvent.MouseButtonRelease:
if obj.rect().contains(event.pos()):
self.clicked.emit()
# The developer can opt for .emit(obj) to get the object within the slot.
return True
return False
filter = Filter(widget)
widget.installEventFilter(filter)
return filter.clicked
class RamToUse(QWidget):
def RamToUseOpenWindow(self):
if UIWindow.RamToUseUIWindow is None: # If window is not opened
UIWindow.RamToUseUIWindow = RamToUse()
UIWindow.RamToUseUIWindow.show()
def __init__(self):
super().__init__()
self.initUI()
def initUI(self): # https://zetcode.com/pyqt/qslider/
import psutil
hbox = QHBoxLayout()
## Side Bar
self.slide_bar = QSlider(Qt.Horizontal, self)
self.free_mb = int( psutil.virtual_memory().available//(1024**2) ) # Convert Bytes to MB
        self.slide_bar.setRange( 1, (self.free_mb - self.free_mb//10) ) # Leave at least 10% of the free RAM unallocated
self.slide_bar.setValue(UIWindow.USABLE_RAM//(1024))
self.slide_bar.setFocusPolicy(Qt.NoFocus)
self.slide_bar.setPageStep(100)
self.slide_bar.valueChanged.connect(self.updateLabel)
## Side Bar Label
self.slide_bar_value_label = QLabel(f'{UIWindow.USABLE_RAM//(1024)} MB | 0 GB\nRECOMMENDED', self)
self.slide_bar_value_label.setStyleSheet("color: green;")
self.slide_bar_value_label.setAlignment(Qt.AlignCenter | Qt.AlignVCenter)
self.slide_bar_value_label.setMinimumWidth(80)
## Window characteristics
hbox.addWidget(self.slide_bar)
hbox.addSpacing(15)
hbox.addWidget(self.slide_bar_value_label)
self.setLayout(hbox)
self.setGeometry(300, 300, 350, 250)
self.setWindowTitle('Usable RAM')
self.show()
def updateLabel(self, value):
        if value < 500: # Up to 500 MB: show green
            self.slide_bar_value_label.setStyleSheet("color: green;")
            self.slide_bar_value_label.setText(f'{value} MB | {value//1024} GB\nRECOMMENDED')
        elif value < self.slide_bar.maximum()*0.75: # Up to 75% of the free RAM: show orange
            self.slide_bar_value_label.setStyleSheet("color: orange;")
            self.slide_bar_value_label.setText(f'{value} MB | {value//1024} GB')
        else: # More than 75% of the free RAM: show red
            self.slide_bar_value_label.setStyleSheet("color: red;")
            self.slide_bar_value_label.setText(f'{value} MB | {value//1024} GB\nWARNING!')
        UIWindow.USABLE_RAM = value*1024 # slider value is in MB; USABLE_RAM is stored in KiB
class UI(QMainWindow):
def __init__(self):
super(UI,self).__init__()
uic.loadUi("FCS_GUI.ui",self) # Load Main GUI
self.setWindowTitle(f'FilesCrypterSavior V{str(FCS_Version)}') # Window Title
self.setWindowIcon(QIcon('images/Small_Logo.png')) # Window icon
### Default variables ###
self.DATABASE_FILE = None # Default key/nonce database
self.FILE_EXT = '.encfcs'
self.FEEDBACKURL = 'https://forms.gle/yhaoef5rZesX5Mez9'
### Browse Button ###
self.load_file_folder_button.clicked.connect(self.BrowseFiles) # Load File(s) Button
### Progress Bars ###
self.enc_progressBar.setAlignment(Qt.AlignCenter)
self.dec_progressBar.setAlignment(Qt.AlignCenter)
self.enc_files_counter_progressBar.setAlignment(Qt.AlignCenter)
self.dec_files_counter_progressBar.setAlignment(Qt.AlignCenter)
### Key labels ###
clickable(self.enc_key_label).connect(lambda label_name="enc_key_label": self.LabelSwitcher(label_name) )
clickable(self.dec_key_label).connect(lambda label_name="dec_key_label": self.LabelSwitcher(label_name) )
### Encryption System label ###
clickable(self.Enc_system_label).connect(lambda label_name="enc_system_label": self.LabelSwitcher(label_name) )
### Options ###
self.option_Check_for_Updates.triggered.connect(self.UpdateChecker) # Update Check
self.RamToUseUIWindow = None ; self.option_Blocks_Size.triggered.connect(RamToUse.RamToUseOpenWindow) # Usable RAM
self.option_Import_DB_file.triggered.connect(self.DBBrowser) # Import DB
self.option_Save_Settings.triggered.connect(self.SaveOptions) # Save Options
self.option_New_DB.triggered.connect(DB.NewDBFile) # New DB
self.option_Feedback.triggered.connect(self.FeedBackRedirect) # Feedback
### Shortcuts ###
self.SetShortcuts('DB-clear')
self.show()
def UpdateChecker(self):
import requests
Logger('info',"Checking for new version...")
try:
try:
url_response = requests.get("https://raw.githubusercontent.com/JimChr-R4GN4R/FilesCrypterSavior/main/FCS.py").text.split('\n')
latest_version = float( url_response[0].split(' ')[2] )
if latest_version > FCS_Version:
Logger('info',"There is a newer version! Please update FCS.")
else:
Logger('info',"You are up to date.")
except ValueError:
Logger('error',"[F_UC-0]")
        except requests.exceptions.HTTPError:
            Logger('error',"[F_UC-2]")
        except requests.exceptions.ConnectionError:
            Logger('error',"[F_UC-3]")
        except requests.exceptions.Timeout:
            Logger('error',"[F_UC-4]")
        except requests.exceptions.RequestException: # base class last, so the specific handlers above can fire
            Logger('error',"[F_UC-1]")
def BrowseFiles(self):
self.files_list = QFileDialog.getOpenFileNames(self,'Single File','.','All Files (*.*)')[0]
if self.option_Store_key_nonce_in_DB.isChecked(): # Check if DB file is working right (If user wants to store key/nonce)
DB.DBFileChecker(self)
if len(self.files_list) > 0:
try: # Clear enc/dec buttons (Pyqt5 issues)
UIWindow.enc_button.clicked.disconnect()
UIWindow.dec_button.clicked.disconnect()
except TypeError:
pass
p1 = File(self.files_list) # Create file(s) object
self.files_counter = 0
UIWindow.enc_files_counter_progressBar.setMaximum(len(self.files_list))
UIWindow.dec_files_counter_progressBar.setMaximum(len(self.files_list))
self.enc_files_counter_progressBar.setValue(self.files_counter)
self.enc_files_counter_progressBar.setFormat('')
self.dec_files_counter_progressBar.setValue(self.files_counter)
self.dec_files_counter_progressBar.setFormat('')
## enc/dec ProgressBars ##
self.enc_progressBar.setFormat('Ready To Encrypt')
UIWindow.enc_progressBar.setValue(0)
self.dec_progressBar.setFormat('Ready To Decrypt')
UIWindow.dec_progressBar.setValue(0)
def LabelSwitcher(self, label_name):
if label_name == 'enc_key_label':
if self.enc_key_label.text() == 'Key (B):':
self.enc_key_label.setText('Key (H):')
self.enc_key_input.setPlaceholderText('Type your key in Hex format (example: 736563726574313233)')
else:
self.enc_key_label.setText('Key (B):')
self.enc_key_input.setPlaceholderText('Type your key in Bytes format (example: secret123)')
elif label_name == 'dec_key_label':
if self.dec_key_label.text() == 'Key (B):':
self.dec_key_label.setText('Key (H):')
self.dec_key_input.setPlaceholderText('Type your key in Hex format (example: 736563726574313233)')
else:
self.dec_key_label.setText('Key (B):')
self.dec_key_input.setPlaceholderText('Type your key in Bytes format (example: secret123)')
elif label_name == 'enc_system_label':
if self.Enc_system_label.text() == 'AES-EAX':
self.Enc_system_label.setText('AES-GCM')
else:
self.Enc_system_label.setText('AES-EAX')
def DBBrowser(self):
DB_file_address = QFileDialog.getOpenFileName(self,'Single File','.','All Files (*.*)')[0]
if DB_file_address and path.exists(DB_file_address):
try:
with open(DB_file_address, 'r') as DB_file:
data = json.load(DB_file)
self.DATABASE_FILE = DB_file_address
Logger('info',f"{self.DATABASE_FILE} is now the new DB.")
self.SetShortcuts('DB')
self.SaveOptions()
            except json.decoder.JSONDecodeError as e: # UnicodeDecodeError is handled separately below
Logger('error','[UI_DBB-0]')
except UnicodeDecodeError:
Logger('error','[UI_DBB-1]')
def SetDefaultOptions(self):
try: # If Options.txt exists and is okay
OPT_file = open('Options.txt', 'r')
options = json.load(OPT_file)
if len(options) < 7: raise ValueError()
except (json.decoder.JSONDecodeError, ValueError,FileNotFoundError): # If Options.txt has a problem, then remake it
Logger('warn',"There was something wrong with options file.\nOptions have been set to default.")
open('Options.txt', 'w').close() # Empty file
OPT_file = open('Options.txt', 'w') # Write default options
options = {
"DATABASE_FILE":None,
"USABLE_RAM":65536,
"option_not_verified_keep_key_nonce_DB":True,
"option_Check_Update_on_program_startup":True,
"option_Delete_original_file":True,
"option_Store_key_nonce_in_DB":True,
"option_Delete_key_nonce_after_decryption":True,
"option_Check_for_dublicate_key_nonce_in_DB":True,
"option_not_decrypted_verified_keep_original_file":True,
"Enc_system_label":'AES-EAX'
}
try: # Check if Database file exists
if not path.exists(options['DATABASE_FILE']):
options["DATABASE_FILE"] = None
self.DATABASE_FILE = None
except TypeError as e:
options["DATABASE_FILE"] = None
self.DATABASE_FILE = None
json.dump(options, OPT_file)
try: # Check if Database file exists
if not path.exists(options['DATABASE_FILE']):
Logger('warn',f"The database file {options['DATABASE_FILE']} has not been found.")
options["DATABASE_FILE"] = None
self.DATABASE_FILE = None
except TypeError as e:
options["DATABASE_FILE"] = None
self.DATABASE_FILE = None
self.DATABASE_FILE = options['DATABASE_FILE']
self.USABLE_RAM = options['USABLE_RAM']
self.option_not_verified_keep_key_nonce_DB.setChecked(options['option_not_verified_keep_key_nonce_DB'])
self.option_Check_Update_on_program_startup.setChecked(options['option_Check_Update_on_program_startup'])
self.option_Delete_original_file.setChecked(options['option_Delete_original_file'])
self.option_Store_key_nonce_in_DB.setChecked(options['option_Store_key_nonce_in_DB'])
self.option_Delete_key_nonce_after_decryption.setChecked(options['option_Delete_key_nonce_after_decryption'])
self.option_Check_for_dublicate_key_nonce_in_DB.setChecked(options['option_Check_for_dublicate_key_nonce_in_DB'])
self.option_not_decrypted_verified_keep_original_file.setChecked(options['option_not_decrypted_verified_keep_original_file'])
self.Enc_system_label.setText(options['Enc_system_label'])
OPT_file.close()
def SaveOptions(self):
open('Options.txt', 'w').close() # Empty file
with open('Options.txt', 'w') as OPT_file:
options = {
"DATABASE_FILE":self.DATABASE_FILE,
"USABLE_RAM":self.USABLE_RAM,
"option_not_verified_keep_key_nonce_DB":self.option_not_verified_keep_key_nonce_DB.isChecked(),
"option_Check_Update_on_program_startup":self.option_Check_Update_on_program_startup.isChecked(),
"option_Delete_original_file":self.option_Delete_original_file.isChecked(),
"option_Store_key_nonce_in_DB":self.option_Store_key_nonce_in_DB.isChecked(),
"option_Delete_key_nonce_after_decryption":self.option_Delete_key_nonce_after_decryption.isChecked(),
"option_Check_for_dublicate_key_nonce_in_DB":self.option_Check_for_dublicate_key_nonce_in_DB.isChecked(),
"option_not_decrypted_verified_keep_original_file":self.option_not_decrypted_verified_keep_original_file.isChecked(),
"Enc_system_label":self.Enc_system_label.text()
}
json.dump(options, OPT_file)
def SetShortcuts(self,obj=None,f=None):
if (obj == 'DB'):
if self.DATABASE_FILE:
self.DB_shortcut_value.setText(os.path.split(self.DATABASE_FILE)[1])
## Enable DB options
self.option_Store_key_nonce_in_DB.setEnabled(True)
self.option_Delete_key_nonce_after_decryption.setEnabled(True)
self.option_Check_for_dublicate_key_nonce_in_DB.setEnabled(True)
self.option_not_verified_keep_key_nonce_DB.setEnabled(True)
## Check DB options
self.option_Store_key_nonce_in_DB.setChecked(True)
self.option_Delete_key_nonce_after_decryption.setChecked(True)
self.option_Check_for_dublicate_key_nonce_in_DB.setChecked(True)
self.option_not_verified_keep_key_nonce_DB.setChecked(True)
return
elif (obj == 'cur_file'):
self.Cur_file_shortcut_value.setText(os.path.split(f)[1])
return
elif (obj == 'DB-clear'):
### Set default DB settings if no database file has been selected
self.DATABASE_FILE = None
self.DB_shortcut_value.setText('[None]')
self.Cur_file_shortcut_value.setText('[None]')
## Uncheck DB options
self.option_Store_key_nonce_in_DB.setChecked(False)
self.option_Delete_key_nonce_after_decryption.setChecked(False)
self.option_Check_for_dublicate_key_nonce_in_DB.setChecked(False)
self.option_not_verified_keep_key_nonce_DB.setChecked(False)
## Disable DB options
self.option_Store_key_nonce_in_DB.setEnabled(False)
self.option_Delete_key_nonce_after_decryption.setEnabled(False)
self.option_Check_for_dublicate_key_nonce_in_DB.setEnabled(False)
self.option_not_verified_keep_key_nonce_DB.setEnabled(False)
return
def FeedBackRedirect(self):
import webbrowser
webbrowser.open_new(UIWindow.FEEDBACKURL)
if __name__ == "__main__":
app = QApplication(sys.argv)
UIWindow = UI() # Main Window
UIWindow.SetDefaultOptions() # Set settings
if UIWindow.option_Check_Update_on_program_startup.isChecked(): # AutoCheck for updates
UIWindow.UpdateChecker()
    UIWindow.SetShortcuts('DB') # Set DB shortcut
app.exec_()
``` |
{
"source": "jimcinbrisbane/QUTEFS-emailscrapper",
"score": 3
} |
#### File: jimcinbrisbane/QUTEFS-emailscrapper/run.py
```python
YOUR_API_KEY = 'your google api key' # your google place api, (with billing api enabled)
where = 'QUT, Queensland' # Where would you be?
search = 'festival' # what you would put in a google map search bar
mongoadd = "mongodb://localhost:27017/" # your mongo address
#init email scraping
#! python3
import re, urllib.request, time
# regex to match email
emailRegex = re.compile(r'''
#example : <EMAIL>
(?:[a-z0-9!#$%&'*+/=?^_`{|}~-]+(?:\.[a-z0-9!#$%&'*+/=?^_`{|}~-]+)*|"(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21\x23-\x5b\x5d-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])*")@(?:(?:[a-z0-9](?:[a-z0-9-]*[a-z0-9])?\.)+[a-z0-9](?:[a-z0-9-]*[a-z0-9])?|\[(?:(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?)\.){3}(?:25[0-5]|2[0-4][0-9]|[01]?[0-9][0-9]?|[a-z0-9-]*[a-z0-9]:(?:[\x01-\x08\x0b\x0c\x0e-\x1f\x21-\x5a\x53-\x7f]|\\[\x01-\x09\x0b\x0c\x0e-\x7f])+)\])
''', re.VERBOSE)
# Extracting emails
def extractEmailsFromUrlText(urlText):
extractedEmail = emailRegex.findall(urlText.replace('%20',''))
return extractedEmail
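# Hedged example (not part of the original script): a small, self-contained check of the
# regex above; the sample address is an assumption used only for illustration and this
# helper is never called by the scraper itself.
def _demo_extract_emails():
    sample_text = "Contact us at info@example.com or via the web form."
    return extractEmailsFromUrlText(sample_text)  # expected: ['info@example.com']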
#HtmlPage Read Func
def htmlPageRead(url):
try:
headers = { 'User-Agent' : 'Mozilla/68.0' }
request = urllib.request.Request(url, None, headers)
response = urllib.request.urlopen(request,timeout=15)
urlHtmlPageRead = response.read()
urlText = urlHtmlPageRead.decode()
email = extractEmailsFromUrlText(urlText)
return email
except:
pass
#EmailsLeechFunction
def emailsLeechFunc(url):
try:
email = htmlPageRead(url)
return email
except urllib.error.HTTPError as err:
if err.code == 404:
try:
url = 'http://webcache.googleusercontent.com/search?q=cache:'+url
email = htmlPageRead(url)
return email
except:
pass
else:
pass
# init mongo
import pymongo
myclient = pymongo.MongoClient(mongoadd)
mydb = myclient["qutefs"]
mycol = mydb["scrape"]
#mongo sigma
def sigma(_id,name,local_phone_number,website,url,email,email0):
print(email,email0)
bson={"_id" : _id, "name": name,"local_phone_number":local_phone_number,"website":website, "url":url,"email":email, "email0":email0 }
mycol.insert_one(bson)
# init google places
from googleplaces import GooglePlaces, types, lang
google_places = GooglePlaces(YOUR_API_KEY)
# You may prefer to use the text_search API, instead.
query_result = google_places.nearby_search(
location=where, keyword=search,
radius=30000)
# If types param contains only 1 item the request to Google Places API
# will be sent as the type param to fulfill:
# http://googlegeodevelopers.blogspot.com.au/2016/02/changes-and-quality-improvements-in_16.html
# get data from api json list
if query_result.has_attributions:
print (query_result.html_attributions)
for place in query_result.places:
# Returned places from a query are place summaries.
name = place.name
# print (place.geo_location)
_id = place.place_id
# The following method has to make a further API call.
place.get_details()
# Referencing any of the attributes below, prior to making a call to
# get_details() will raise a googleplaces.GooglePlacesAttributeError.
# print (place.details) # A dict matching the JSON response from Google.
local_phone_number = place.local_phone_number
website = place.website
# try to scrape email from the website
# if wordpress, it might be in example.com/contact , sometimes both
con = str(website)+"contact"
url = place.url
email0 = emailsLeechFunc(website)
email = emailsLeechFunc(con)
try: # insert all data
sigma(_id,name,local_phone_number,website,url,email,email0)
except:
print(name, "in list")
print(name)
# # Getting place photos
# for photo in place.photos:
# # 'maxheight' or 'maxwidth' is required
# photo.get(maxheight=500, maxwidth=500)
# # MIME-type, e.g. 'image/jpeg'
# photo.mimetype
# # Image URL
# photo.url
# # Original filename (optional)
# photo.filename
# # Raw image data
# photo.data
# Are there any additional pages of results?
if query_result.has_next_page_token:
query_result_next_page = google_places.nearby_search(
pagetoken=query_result.next_page_token)
``` |
{
"source": "JimCircadian/dask",
"score": 2
} |
#### File: dask/array/wrap.py
```python
from functools import partial
from itertools import product
import numpy as np
from tlz import curry
from ..base import tokenize
from ..layers import BlockwiseCreateArray
from ..utils import funcname
from .core import Array, normalize_chunks
from .utils import (
empty_like_safe,
full_like_safe,
meta_from_array,
ones_like_safe,
zeros_like_safe,
)
def _parse_wrap_args(func, args, kwargs, shape):
if isinstance(shape, np.ndarray):
shape = shape.tolist()
if not isinstance(shape, (tuple, list)):
shape = (shape,)
name = kwargs.pop("name", None)
chunks = kwargs.pop("chunks", "auto")
dtype = kwargs.pop("dtype", None)
if dtype is None:
dtype = func(shape, *args, **kwargs).dtype
dtype = np.dtype(dtype)
chunks = normalize_chunks(chunks, shape, dtype=dtype)
name = name or funcname(func) + "-" + tokenize(
func, shape, chunks, dtype, args, kwargs
)
return {
"shape": shape,
"dtype": dtype,
"kwargs": kwargs,
"chunks": chunks,
"name": name,
}
def wrap_func_shape_as_first_arg(func, *args, **kwargs):
"""
Transform np creation function into blocked version
"""
if "shape" not in kwargs:
shape, args = args[0], args[1:]
else:
shape = kwargs.pop("shape")
if isinstance(shape, Array):
raise TypeError(
"Dask array input not supported. "
"Please use tuple, list, or a 1D numpy array instead."
)
parsed = _parse_wrap_args(func, args, kwargs, shape)
shape = parsed["shape"]
dtype = parsed["dtype"]
chunks = parsed["chunks"]
name = parsed["name"]
kwargs = parsed["kwargs"]
func = partial(func, dtype=dtype, **kwargs)
graph = BlockwiseCreateArray(
name,
func,
shape,
chunks,
)
return Array(graph, name, chunks, dtype=dtype, meta=kwargs.get("meta", None))
def wrap_func_like(func, *args, **kwargs):
"""
Transform np creation function into blocked version
"""
x = args[0]
meta = meta_from_array(x)
shape = kwargs.get("shape", x.shape)
parsed = _parse_wrap_args(func, args, kwargs, shape)
shape = parsed["shape"]
dtype = parsed["dtype"]
chunks = parsed["chunks"]
name = parsed["name"]
kwargs = parsed["kwargs"]
keys = product([name], *[range(len(bd)) for bd in chunks])
shapes = product(*chunks)
shapes = list(shapes)
kw = [kwargs for _ in shapes]
for i, s in enumerate(list(shapes)):
kw[i]["shape"] = s
vals = ((partial(func, dtype=dtype, **k),) + args for (k, s) in zip(kw, shapes))
dsk = dict(zip(keys, vals))
return Array(dsk, name, chunks, meta=meta.astype(dtype))
def wrap_func_like_safe(func, func_like, *args, **kwargs):
"""
    Safe implementation for wrap_func_like(): attempts to use func_like() and,
    if that call raises a TypeError (e.g. an unsupported shape keyword argument), falls back to func().
"""
try:
return func_like(*args, **kwargs)
except TypeError:
return func(*args, **kwargs)
@curry
def wrap(wrap_func, func, **kwargs):
func_like = kwargs.pop("func_like", None)
if func_like is None:
f = partial(wrap_func, func, **kwargs)
else:
f = partial(wrap_func, func_like, **kwargs)
template = """
Blocked variant of %(name)s
Follows the signature of %(name)s exactly except that it also features
optional keyword arguments ``chunks: int, tuple, or dict`` and ``name: str``.
Original signature follows below.
"""
if func.__doc__ is not None:
f.__doc__ = template % {"name": func.__name__} + func.__doc__
f.__name__ = "blocked_" + func.__name__
return f
w = wrap(wrap_func_shape_as_first_arg)
@curry
def _broadcast_trick_inner(func, shape, meta=(), *args, **kwargs):
if shape == ():
return np.broadcast_to(func(meta, shape=(), *args, **kwargs), shape)
else:
return np.broadcast_to(func(meta, shape=1, *args, **kwargs), shape)
def broadcast_trick(func):
"""
Provide a decorator to wrap common numpy function with a broadcast trick.
Dask arrays are currently immutable; thus when we know an array is uniform,
we can replace the actual data by a single value and have all elements point
to it, thus reducing the size.
>>> x = np.broadcast_to(1, (100,100,100))
>>> x.base.nbytes
8
    Those arrays are not only more efficient locally, but dask serialisation is
    aware of the _real_ size of those arrays and thus can send them around
    efficiently and schedule accordingly.
    Note that those arrays are read-only and numpy will refuse to assign to them,
    so this should be safe.
"""
inner = _broadcast_trick_inner(func)
if func.__doc__ is not None:
inner.__doc__ = func.__doc__
inner.__name__ = func.__name__
if inner.__name__.endswith("_like_safe"):
inner.__name__ = inner.__name__[:-10]
return inner
ones = w(broadcast_trick(ones_like_safe), dtype="f8")
zeros = w(broadcast_trick(zeros_like_safe), dtype="f8")
empty = w(broadcast_trick(empty_like_safe), dtype="f8")
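# Hedged usage sketch (not part of this module): the wrapped creators mirror their NumPy
# counterparts but return lazy dask arrays, e.g.
#   ones((4, 6), chunks=(2, 3))   # -> dask array of dtype f8 split into a 2x2 grid of blocks,
# where each block is backed by a broadcast scalar until it is computed.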
w_like = wrap(wrap_func_like_safe)
empty_like = w_like(np.empty, func_like=np.empty_like)
# full and full_like require special casing due to argument check on fill_value
# Generate wrapped functions only once
_full = w(broadcast_trick(full_like_safe))
_full_like = w_like(np.full, func_like=np.full_like)
# workaround for numpy doctest failure: https://github.com/numpy/numpy/pull/17472
_full.__doc__ = _full.__doc__.replace(
"array([0.1, 0.1, 0.1, 0.1, 0.1, 0.1])",
"array([0.1, 0.1, 0.1, 0.1, 0.1, 0.1])",
)
def full(shape, fill_value, *args, **kwargs):
# np.isscalar has somewhat strange behavior:
# https://docs.scipy.org/doc/numpy/reference/generated/numpy.isscalar.html
if np.ndim(fill_value) != 0:
raise ValueError(
f"fill_value must be scalar. Received {type(fill_value).__name__} instead."
)
if "dtype" not in kwargs:
kwargs["dtype"] = type(fill_value)
return _full(shape=shape, fill_value=fill_value, *args, **kwargs)
def full_like(a, fill_value, *args, **kwargs):
if np.ndim(fill_value) != 0:
raise ValueError(
f"fill_value must be scalar. Received {type(fill_value).__name__} instead."
)
return _full_like(
a=a,
fill_value=fill_value,
*args,
**kwargs,
)
full.__doc__ = _full.__doc__
full_like.__doc__ = _full_like.__doc__
``` |
{
"source": "JimCircadian/inconsistentrecords",
"score": 2
} |
#### File: scripts/utils/__init__.py
```python
import configparser
import logging
import os
from github import Github
def github_conn(filename="$HOME/.github_api"):
config = configparser.ConfigParser()
config.read(os.path.expandvars(filename))
token = config['api']['token']
return Github(token)
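# Expected layout of the token file read above (a sketch; the actual file may differ,
# but configparser needs an [api] section with a `token` key; the value below is a placeholder):
#
#   [api]
#   token = <your-github-personal-access-token>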
``` |
{
"source": "JimCircadian/provisioning",
"score": 2
} |
#### File: provisioning/tasks/ansible.py
```python
import os
import logging
import re
import subprocess
import sys
import yaml
from invoke import task
from pprint import pprint
from glob import glob
from yaml import Dumper
#import utils
from . import utils
@task(default=True, pre=[
utils.check_ssh_agent,
utils.ch_rundir,
])
def run_playbook(ctx, playbook,
environment = None,
tags = None,
user = None,
hosts = None,
prompt = False,
dry = False):
"""Run a single playbook"""
    if not re.search(r'\.yml$', playbook): # use search (not match) so an existing .yml suffix is detected and not duplicated
playbook = "{0}.yml".format(playbook)
if not environment and "environment" in ctx.vars:
environment = ctx.vars.environment
logging.debug("Set to default environment: {}".format(environment))
additional_arguments = ""
if not user and "user" in ctx.vars:
user = ctx.vars.user
logging.debug("Set to default user: {}".format(user))
else:
additional_arguments += "-u {} ".format(user)
if hosts:
additional_arguments += "-l '{}' ".format(hosts)
if prompt:
additional_arguments += "-K "
# TODO: There's a big gap in the parser for list type in invoke (issue #132)
if tags:
tag_list = tags.split(",")
tag_str = " -t \"" + "\" -t \"".join(tag_list) + "\""
additional_arguments += tag_str
vault_pass_file = os.path.expandvars("$HOME/.vault_{0}.password".format(environment))
vault_arg = ""
if os.path.exists(vault_pass_file):
vault_arg = "--vault-id {}".format(vault_pass_file)
command = os.path.expandvars("ansible-playbook -v -b \
-e env={0} \
{2} {3} \
-i inventory/{0} \
playbooks/{1}".format(
environment, playbook, additional_arguments, vault_arg
))
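    # Hedged example of the rendered command (environment, user and playbook names are
    # placeholders, not values taken from this repository):
    #   ansible-playbook -v -b -e env=staging -u deploy --vault-id ~/.vault_staging.password \
    #       -i inventory/staging playbooks/site.yml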
print(os.getcwd())
logging.info("COMMAND: {0}".format(command))
if not dry:
ctx.run(command)
@task(pre=[
utils.ch_rundir,
])
def server_facts(ctx, hostname, environment=None, user=None):
if not environment:
environment = ctx.vars.environment
logging.debug("Set to default environment: {}".format(environment))
if not user:
user = ctx.vars.user
logging.debug("Set to default user: {}".format(user))
cmd = "ansible -m setup -u {} -i inventory/{} {}".format(user, environment, hostname)
logging.debug("COMMAND: {}".format(cmd))
ctx.run(cmd)
@task(pre=[
    # TODO: Better way to do this within the invoke Collection?
utils.ch_rundir
])
def install_requirements(ctx):
cmd = "ansible-galaxy role install -r roles/requirements.yml"
logging.debug("COMMAND: {}".format(cmd))
ctx.run(cmd)
@task(pre=[
    # TODO: Better way to do this within the invoke Collection?
utils.ch_rundir
])
def gen_ssh_config(ctx):
#ansible -i inventory/staging --list-hosts libvirt_host | grep -v 'hosts' | awk '{ print $1 }'
# ssh root@staging-host 'virsh list --name' | grep -v '^$'
# Host staging-proxy
# User root
# Hostname proxy
# ProxyJump staging-host
# TODO: Key injection to kickstarts!
pass
```
#### File: provisioning/tasks/vault.py
```python
import getpass
import logging
import os
import sys
from invoke import task
@task
def write_pass(ctx, name):
try:
password = getpass.getpass("Please enter the vault password: ")
except getpass.GetPassWarning:
logging.exception("Something wrong whilst reading the password")
sys.exit(1)
password_file = os.path.expandvars("$HOME/.vault_{0}.password".format(name))
good_to_go = True
if os.path.exists(password_file):
good_to_go = False
response = input("Are you sure you want to overwrite (y/n)?")
if response.strip().lower() != 'y':
logging.warning("Abandoning as user did not confirm overwrite")
else:
good_to_go = True
if good_to_go:
try:
with open(password_file, "w") as fh:
fh.write(password)
os.chmod(password_file, 0o600)
except Exception:
logging.exception("Something went wrong, deleting file if required")
if os.path.exists(password_file):
os.unlink(password_file)
else:
logging.info("Vault password written...")
``` |
{
"source": "JimCircadian/smtpnodes",
"score": 3
} |
#### File: JimCircadian/smtpnodes/receiver.py
```python
import sys
import asyncore
import email
import smtpd
import time
from threading import Thread
DEBUG = True
class CustomSMTPServer(smtpd.SMTPServer):
num = 0
def process_message(self, peer, mailfrom, recvtos, data):
msg = email.message_from_string(data).get_payload().strip()
self.num += 1
if DEBUG:
print >>sys.stderr, "DEBUG: Received message {0}".format(msg)
def get_num(self):
return self.num
class Receiver(object):
def __init__(self, host, port):
self.host = host
self.port = port
def start(self):
self.smtp = CustomSMTPServer((self.host, self.port), None)
self.thread = Thread(target = asyncore.loop, kwargs = {'timeout': 1})
self.thread.daemon = True
self.thread.start()
def status(self):
return "running" if self.thread.is_alive() else "stopped"
def stop(self):
self.smtp.close()
self.thread.join()
def get_received(self):
return str(self.smtp.get_num())
if __name__ == "__main__":
recv = Receiver("0.0.0.0", 2255)
recv.start()
print "HELLO - going to sleep but can receive messages"
time.sleep(30)
print "All done"
recv.stop()
```
#### File: JimCircadian/smtpnodes/sender.py
```python
import sys
import pprint
import smtplib
import time
import uuid
from email.mime.text import MIMEText
from threading import Thread, Event
DEBUG = True
class Sender():
# TODO: Private, underscore
args = None
db = {}
dur = 10
emails = 10
def __init__(self, host, port):
self.init_db()
self.host = host
self.port = port
self.stopped = Event()
self.thread = Thread(target = self.send)
self.thread.daemon = True
def send(self):
delay = self.dur / self.emails
while not self.stopped.wait(delay):
avail = filter(lambda x: not x[1]['sent'], self.db.items())
if len(avail) > 0:
(ident, det) = avail.pop()
msg = det['msg']
if DEBUG:
print >>sys.stderr, "DEBUG: Sending email {0}".format(ident)
try:
sender = smtplib.SMTP(self.host, self.port)
sender.sendmail(msg['From'], msg['To'], msg.as_string())
sender.quit()
if DEBUG:
print >>sys.stderr, "SEND SUCCESS: {0}".format(ident)
self.db[ident]['sent'] = True
except:
if DEBUG:
print >>sys.stderr, "SEND FAILURE: {0}".format(ident)
def duration(self, d):
sent = len(filter(lambda x: x['sent'], self.db.values()))
if sent > 0:
return False
try:
self.dur = int(d)
except:
raise ValueError("What the hell is this: {0} of type {1}".format(d, type(d)))
return True
def get_db(self):
return self.db
def get_duration(self):
return self.dur
def get_from(self):
return '<EMAIL>'
def get_limit(self):
return str(len(self.db.items()))
def get_sent(self):
return str(len(filter(lambda x: x[1]['sent'], self.db.items())))
def get_subject(self, ident):
return "Generated test email {0}".format(ident)
def get_to(self):
return '<EMAIL>'
def init_db(self, num = 0):
while num < self.emails:
key = format(uuid.uuid4())
msg = MIMEText(key)
msg['From'] = self.get_from()
msg['To'] = self.get_to()
msg['Subject'] = self.get_subject(num)
value = {
'msg': msg,
'sent': False,
'received': False,
}
num += 1
self.db[key] = value
def limit(self, msgs):
sent = len(filter(lambda x: x['sent'], self.db.values()))
num = len(self.db.values())
if sent > 0:
return False
try:
if int(msgs) > self.emails:
self.emails = int(msgs)
self.init_db(num)
elif int(msgs) < self.emails:
newdb = { k: self.db[k] for k in self.db.keys()[0:int(msgs)] }
self.db = newdb
except:
raise ValueError("What the hell is this: {0} of type {1}".format(msgs, type(msgs)))
return True
# if msgs < len(lambda x: x['sent'], self.db):
# # TODO: Set message feedback
# return
#
#
# # TODO: stop sending and reset db to new limit
# self.emails = msgs
def running(self):
return not self.stopped.is_set()
def start(self):
self.thread.start()
def status(self):
return "running" if self.thread.is_alive() else "stopped"
def stop(self):
self.stopped.set()
if __name__ == "__main__":
s = Sender('localhost', 2255)
s.start()
while s.running():
print "DEBUG: Waiting for sends to finish {0}".format(len(filter(lambda e: not e['sent'], s.get_db().values())))
time.sleep(2)
if len(filter(lambda e: not e['sent'], s.get_db().values())) == 0:
s.stop()
``` |
{
"source": "jimclauwaert/prompred_goes_NN",
"score": 2
} |
#### File: src/data/data_utils.py
```python
import numpy as np
import pandas as pd
from statsmodels import robust
def GetDataLocations(sigma):
""" Helper function for quick access of ChIP-chip data
sigma: string
Sigma-factor for which data is loaded
    OUTPUT
    -------
    data_ip, data_mock_ip : lists of file paths for the IP and mock-IP ".extr" files
        of the given sigma factor (both empty if the sigma factor is unknown)
    """
experiments = {"RPOD":3, "RPOS":3, "RNAP":3, "SIGMA":2, "BETA":2}
if sigma in experiments:
        data_ip = ["../data/processed/{}_EXP_{}_635.extr".format(sigma, u+1) for u in range(experiments[sigma])]
        data_mock_ip = ["../data/processed/{}_EXP_{}_532.extr".format(sigma, u+1) for u in range(experiments[sigma])]
else:
return [], []
return data_ip, data_mock_ip
``` |
{
"source": "jimcoggeshall/web-infrastructure",
"score": 3
} |
#### File: usr/workspace/socket_server.py
```python
import socket
import socketserver
import datetime
import json
from json import JSONDecodeError
class MessageHandler(socketserver.BaseRequestHandler):
def handle(self):
streamer = iter(self._stream_packets(self._parse_message))
while True:
p = next(streamer)
for m in p:
self.request.sendall(
bytes(
json.dumps(m, separators=(",", ":")) + "\n",
"utf-8"
)
)
def _stream_packets(self, _parse):
with socket.socket(socket.AF_INET, socket.SOCK_DGRAM) as s:
s.bind(("0.0.0.0", 22055))
while True:
p, _ = s.recvfrom(65536)
parsed = ""
while parsed == "":
try:
parsed = _parse(p)
except JSONDecodeError:
parsed = ""
pn, _ = s.recvfrom(65536)
p += pn
yield parsed
def _parse_message(self, x):
s = x.decode("utf-8").rstrip()
return [json.loads(m) for m in s.split("\n")]
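# Hedged illustration (not part of the server): _parse_message splits a datagram on
# newlines and decodes each line as JSON, e.g. parsing b'{"a": 1}\n{"b": 2}\n'
# yields [{'a': 1}, {'b': 2}]; a truncated datagram raises JSONDecodeError, which the
# loop in _stream_packets handles by appending the next datagram and retrying.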
def main():
with socketserver.TCPServer(("127.0.0.1", 11111), MessageHandler) as server:
server.serve_forever()
if __name__ == "__main__":
main()
``` |
{
"source": "jimconner/digital_sky",
"score": 3
} |
#### File: digital_sky/core/led_control.py
```python
import sys,time, urllib.request, urllib.parse, urllib.error, traceback, random
from PIL import Image
from numpy import array, bitwise_xor, clip, greater, dstack, full, uint8, maximum
from neopixel import *
from filters.make_it_red import make_it_red
# LED strip configuration:
POWER_PIN = 15 # GPIO pin which controls the 36V power supply.
LED_PIN = 18 # GPIO pin connected to the pixels (must support PWM!).
LED_FREQ_HZ = 800000 # LED signal frequency in hertz (usually 800khz)
LED_DMA = 10 # DMA channel to use for generating signal (try 10)
LED_BRIGHTNESS = 255 # Set to 0 for darkest and 255 for brightest
LED_INVERT = False # True to invert the signal (when using NPN transistor level shift)
LED_CHANNEL = 0
LED_STRIP = ws.SK6812W_STRIP
class LED_Control():
def __init__(self, datastore):
self.datastore = datastore
self.strip = Adafruit_NeoPixel(self.datastore.LED_COUNT, LED_PIN, LED_FREQ_HZ, LED_DMA, LED_INVERT, LED_BRIGHTNESS, LED_CHANNEL, LED_STRIP)
self.strip.begin()
def service_leds(self):
try:
if self.datastore.power != 0:
# Set up an empty blank row of Strip pixels
self.datastore.strips=full((self.datastore.LED_COUNT,4),0)
for animation in self.datastore.strip_animations:
self.datastore.strips=maximum(self.datastore.strips, animation.emit_row())
# Set up an empty blank row of RGBW pixels
rowdata=full((self.datastore.LED_COUNT,4),0)
for animation in self.datastore.animations:
rowdata=maximum(rowdata, animation.emit_row())
# Scale RGBW elements individually (colour tint)
rowdata=rowdata*self.datastore.rgbw_brightness
# Then scale everything by master_brightness
rowdata=rowdata*float(self.datastore.master_brightness)
rowdata=uint8(rowdata)
# Update each LED color in the buffer.
for i in range(self.strip.numPixels()):
if i % self.datastore.LAMP_LENGTH < self.datastore.STRIP_LEDS:
#self.strip.setPixelColor(i, Color(ib,ww,nw,dw))
self.strip._led_data[i]=(int(self.datastore.strips[i][3]) << 24) | \
(int(self.datastore.strips[i][0]) << 16) | \
(int(self.datastore.strips[i][1]) << 8 ) | \
int(self.datastore.strips[i][2])
else:
# Set the LED color buffer value.
#self.strip.setPixelColor(i, Color(r,g,b,w))
self.strip._led_data[i]=(int(rowdata[i][3]) << 24) | \
(int(rowdata[i][0]) << 16) | \
(int(rowdata[i][1]) << 8 ) | \
int(rowdata[i][2])
# Send the LED color data to the hardware.
self.strip.show()
except Exception as err:
print((self.datastore.strips))
print(err)
traceback.print_exc(file=sys.stdout)
def Color(red, green, blue, white = 0):
"""Convert the provided red, green, blue color to a 24-bit color value.
Each color component should be a value 0-255 where 0 is the lowest intensity
and 255 is the highest intensity.
"""
return (white << 24) | (red << 16)| (green << 8) | blue
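# Worked examples of the bit packing above (illustration only):
#   Color(255, 0, 0)           -> 0x00FF0000 (pure red, white byte = 0)
#   Color(0, 0, 255, white=16) -> 0x100000FF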
```
#### File: digital_sky/core/ssh.py
```python
from twisted.cred import portal
from twisted.cred.checkers import InMemoryUsernamePasswordDatabaseDontUse
from twisted.conch import avatar
from twisted.conch.checkers import SSHPublicKeyChecker, InMemorySSHKeyDB
from twisted.conch.ssh import factory, userauth, connection, keys, session
from twisted.conch.ssh.transport import SSHServerTransport
from twisted.internet import reactor, protocol
from twisted.python import log
from zope.interface import implementer
from twisted.conch import recvline
from numpy import uint8
import sys
class ExampleAvatar(avatar.ConchUser):
"""
The avatar is used to configure SSH services/sessions/subsystems for
an account.
This account will use L{session.SSHSession} to handle a channel of
type I{session}.
"""
def __init__(self, username, datastore):
avatar.ConchUser.__init__(self)
self.datastore = datastore
self.username = username
self.channelLookup.update({b'session':session.SSHSession})
@implementer(portal.IRealm)
class ExampleRealm(object):
"""
When using Twisted Cred, the pluggable authentication framework, the
C{requestAvatar} method should return a L{avatar.ConchUser} instance
as required by the Conch SSH server.
"""
def __init__(self, datastore):
self.datastore = datastore
def requestAvatar(self, avatarId, mind, *interfaces):
"""
See: L{portal.IRealm.requestAvatar}
"""
return interfaces[0], ExampleAvatar(avatarId, self.datastore), lambda: None
class CLIProtocol(protocol.Protocol):
def __init__(self, datastore):
self.line=b''
self.datastore=datastore
def dataReceived(self, data):
if data == b'\r':
self.transport.write(b'\r\n')
self.lineReceived(self.line)
self.line=b''
elif data == b'\x03': #^C
self.transport.loseConnection()
return
self.line+=data
self.transport.write(data)
def sendLine(self, line):
self.transport.write(line+b'\r\n')
def lineReceived(self, line):
# Ignore blank lines
if not line: return
line = line.decode("ascii")
# Parse the command
commandParts = line.split()
command = commandParts[0].lower()
args = commandParts[1:]
# Dispatch the command to the appropriate method. Note that all you
# need to do to implement a new command is add another do_* method.
try:
method = getattr(self, 'do_' + command)
except AttributeError as e:
self.sendLine(b'Error: no such command.')
self.transport.write(b'$ ')
else:
try:
method(*args)
self.transport.write(b'$ ')
except Exception as e:
self.sendLine(b'Error: ' + str(e).encode("ascii"))
self.transport.write(b'$ ')
def do_help(self, command=None):
"""help [command]: List commands, or show help on the given command"""
if command:
doc = getattr(self, 'do_' + command).__doc__
self.sendLine(doc.encode("ascii"))
else:
commands = [cmd[3:].encode("ascii")
for cmd in dir(self)
if cmd.startswith('do_')]
self.sendLine(b"Valid commands: " + b" ".join(commands))
def do_quit(self):
"""quit: Quit this session"""
self.sendLine(b'Goodbye.')
self.transport.loseConnection()
def do_plugins(self):
"""List the available plugins"""
for plugin in self.datastore.plugins:
self.sendLine(str(plugin.__name__).encode()[8:])
def do_animations(self):
"""List running animations"""
self.sendLine(b'RGB Animations')
for animation in self.datastore.animations:
self.sendLine(str(animation.__module__[8:]).encode())
self.sendLine(b'Strip Animations')
for animation in self.datastore.strip_animations:
self.sendLine(str(animation.__module__[8:]).encode())
def do_add(self, pluginname, extra=None, extra2=None):
""" Add an instance of a plugin to the running animations list"""
self.datastore.add_animation(pluginname, extra, extra2)
def do_power(self, state):
""" Add an instance of a plugin to the running animations list"""
self.datastore.set_power(state)
def do_del(self, pluginname):
""" Add an instance of a plugin to the running animations list"""
self.datastore.del_animation(pluginname)
def do_nw(self, val):
"""Set level if Natural White strips (0-255)"""
self.datastore.strip_vals[0]=uint8(val)
def do_dw(self, val):
"""Set level if Daylight White strips (0-255)"""
self.datastore.strip_vals[1]=uint8(val)
def do_ib(self, val):
"""Set level if Ice Blue strips (0-255)"""
self.datastore.strip_vals[2]=uint8(val)
def do_ww(self, val):
"""Set level if Warm White strips (0-255)"""
self.datastore.strip_vals[3]=uint8(val)
def do_lightsout(self):
"""Stop all animations and turn all lights off"""
        self.datastore.strip_vals = [0,0,0,0] # reset the white-strip levels on the shared datastore
self.datastore.animations=[]
self.datastore.strip_animations=[]
self.datastore.add_animation("set_strips")
def do_brt(self, val):
"""Set the master brightness. Range: 0.00-1.00"""
self.datastore.master_brightness=float(val)
def do_brtr(self, val):
"""Set the brightness for the Red channel. Range: 0.00-1.00"""
self.datastore.rgbw_brightness[0]=float(val)
def do_brtg(self, val):
"""Set the brightness for the Green channel. Range: 0.00-1.00"""
self.datastore.rgbw_brightness[1]=float(val)
def do_brtb(self, val):
"""Set the brightness for the Blue channel. Range: 0.00-1.00"""
self.datastore.rgbw_brightness[2]=float(val)
def do_brtw(self, val):
"""Set the brightness for the White channel. Range: 0.00-1.00"""
self.datastore.rgbw_brightness[3]=float(val)
class ExampleSession(object):
def __init__(self, avatar):
"""
In this example the avatar argument is not used for session selection,
but for example you can use it to limit I{shell} or I{exec} access
only to specific accounts.
"""
self.datastore = avatar.datastore
def getPty(self, term, windowSize, attrs):
"""
We don't support pseudo-terminal sessions.
"""
def execCommand(self, proto, cmd):
"""
We don't support command execution sessions.
"""
raise Exception("not executing commands")
def openShell(self, transport):
"""
Use our protocol as shell session.
"""
protocol = CLIProtocol(self.datastore)
# Connect the new protocol to the transport and the transport
# to the new protocol so they can communicate in both directions.
protocol.makeConnection(transport)
transport.makeConnection(session.wrapProtocol(protocol))
protocol.transport.write(b'Welcome to Digital Sky\r\nType "help" for help.\r\n$ ')
def eofReceived(self):
pass
def closed(self):
pass
```
#### File: digital_sky/plugins/crumbling_in.py
```python
import sys, traceback, random
from numpy import array,full
class animation():
def __init__(self,datastore):
self.max_led = datastore.LED_COUNT
self.pos = 0
self.direction=0
self.cols = [ \
[255,0,0,0], \
[0,255,0,0], \
[0,0,255,0], \
[0,0,0,255], \
[255,255,0,0], \
[255,0,255,0], \
[0,255,255,0], \
[0,0,255,64], \
]
self.row=full((self.max_led,4),0)
def emit_row(self):
try:
if self.pos >= self.max_led/2:
self.direction=1
if self.pos <= 0:
self.direction=0
col=self.cols[random.randint(0,7)]
if self.direction==1:
col=[0,0,0,0]
self.row[self.pos]=col
self.row[(self.max_led-1)-self.pos]=col
if self.direction==0:
self.pos+=1
else:
self.pos-=1
return self.row
except Exception as err:
print(err)
traceback.print_exc(file=sys.stdout)
```
#### File: digital_sky/plugins/strip_sweep.py
```python
import sys, traceback, random
from numpy import array,full
class strip_animation():
def __init__(self,datastore):
self.sweep_pos=0
self.colpos=0
self.max_led=datastore.LED_COUNT
self.lamp_length=datastore.LAMP_LENGTH
self.brightness_scaling=255/self.lamp_length
def emit_row(self):
try:
if self.sweep_pos == 0:
self.colpos=(self.colpos+1) %4
self.sweep_pos = (self.sweep_pos +1) % (self.max_led-1)
row_arr=full((self.max_led,4),0)
brt=int((self.sweep_pos % self.lamp_length)*self.brightness_scaling)
row_arr[(((int(self.sweep_pos/self.lamp_length)-1)*self.lamp_length)) % self.max_led][self.colpos]=255-brt
row_arr[int(self.sweep_pos/self.lamp_length)*self.lamp_length][self.colpos]=255
row_arr[(((int(self.sweep_pos/self.lamp_length)+1)*self.lamp_length)) % self.max_led][self.colpos]=brt
return row_arr
except Exception as err:
print(err)
traceback.print_exc(file=sys.stdout)
``` |
{
"source": "jimcortez/spotipy_twisted",
"score": 2
} |
#### File: spotipy_twisted/tests/client_credentials_tests.py
```python
import spotipy_twisted
from spotipy_twisted.oauth2 import SpotifyClientCredentials
from twisted.internet import defer
from twisted.trial import unittest
'''
Client Credentials Requests Tests
'''
class ClientCredentialsTestSpotipy(unittest.TestCase):
'''
    These tests use the client credentials flow and do not require user authorization
'''
muse_urn = 'spotify:artist:12Chz98pHFMPJEknJQMWvI'
@defer.inlineCallbacks
def test_request_with_token(self):
artist = yield spotify.artist(self.muse_urn)
self.assertTrue(artist['name'] == u'Muse')
if __name__ == '__main__':
spotify_cc = SpotifyClientCredentials()
spotify = spotipy_twisted.Spotify(client_credentials_manager=spotify_cc)
spotify.trace = False
import sys
from twisted.scripts import trial
sys.argv.extend([__name__])
trial.run()
``` |
{
"source": "jimcs1/CS50-1",
"score": 4
} |
#### File: PSET6/sentiments/analyzer.py
```python
import nltk
class Analyzer:
"""Implements sentiment analysis."""
def __init__(self, positives, negatives):
self.positives = []
with open(positives) as positives_file:
for line in positives_file:
if line[0].isalpha():
self.positives.append(line.rstrip("\n"))
else:
continue
self.negatives = []
with open(negatives) as negatives_file:
for line in negatives_file:
if line[0].isalpha():
self.negatives.append(line.rstrip("\n"))
else:
continue
def analyze(self, text):
"""Analyze text for sentiment, returning its score."""
score = 0
text = text.lower()
tokenizer = nltk.tokenize.TweetTokenizer()
for word in tokenizer.tokenize(text):
if word in self.positives:
score += 1
elif word in self.negatives:
score -= 1
return score
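# Hedged usage sketch (not part of the original solution; the word-list file names below
# are assumptions based on CS50's distribution code):
#   analyzer = Analyzer("positive-words.txt", "negative-words.txt")
#   analyzer.analyze("I love CS50")  # -> a positive score if "love" appears in the positives list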
``` |
{
"source": "Jimcumming/Super-SloMo",
"score": 2
} |
#### File: Jimcumming/Super-SloMo/script.py
```python
import boto3
import os
import numpy as np
import os
import subprocess
from botocore.exceptions import ClientError
def getFramerate(video):
con = 'ffprobe -v error -select_streams v:0 -show_entries stream=avg_frame_rate -of default=noprint_wrappers=1:nokey=1 "' + video + '"'
proc = subprocess.Popen(con, stdout=subprocess.PIPE, shell=True)
framerateString = str(proc.stdout.read())[2:-3]
a = int(framerateString.split('/')[int(0)])
b = int(framerateString.split('/')[int(1)])
return int(np.round(np.divide(a,b)))
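# Worked example (illustration only): ffprobe typically reports a rational such as
# "30000/1001"; the parsing above rounds 30000/1001 ~= 29.97 to a framerate of 30.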
def processVideo(args):
con = 'python video_to_slomo.py --ffmpeg ' + args["ffmpeg"] + ' --video ' + args["video"] + ' --sf ' + args["sf"] + \
' --checkpoint ' + args["checkpoint"] + ' --fps ' + args["fps"] + ' --output ' + args["output"]
print(con)
proc = subprocess.Popen(con, stdout=subprocess.PIPE, shell=True)
print(proc.stdout.read())
def create_presigned_url(bucket_name, object_name, expiration=36000):
# Generate a presigned URL for the S3 object
s3_client = boto3.client('s3', config=boto3.session.Config(signature_version='s3v4'), region_name='eu-west-2')
try:
response = s3_client.generate_presigned_url('get_object',
Params={'Bucket': bucket_name,
'Key': object_name},
ExpiresIn=expiration)
except ClientError as e:
logging.error(e)
return None
# The response contains the presigned URL
return response
def sendEmail(email, downloadbucket, video):
# Replace <EMAIL> with your "From" address.
# This address must be verified with Amazon SES.
SENDER = "Slomotatron <<EMAIL>>"
download_link = create_presigned_url(downloadbucket, video)
# Replace <EMAIL> with a "To" address. If your account
# is still in the sandbox, this address must be verified.
RECIPIENT = email
# If necessary, replace us-west-2 with the AWS Region you're using for Amazon SES.
AWS_REGION = "eu-west-1"
# The subject line for the email.
SUBJECT = "Your super slomo video"
# The email body for recipients with non-HTML email clients.
BODY_TEXT = ("Slomotatron has processed your video\r\n" +
"donwload your video from the link below\r\n" + download_link
)
# The HTML body of the email.
BODY_HTML = """<html>
<head></head>
<body>
<h1>Slomotatron</h1>
<p>Slomotatron has processed your video, download it using the following link
<a href='"""+ download_link +"""'>"""+ video[video.rfind("/")+1:] + """</a></p>
</body>
</html>
"""
# The character encoding for the email.
CHARSET = "UTF-8"
# Create a new SES resource and specify a region.
client = boto3.client('ses',region_name=AWS_REGION)
# Try to send the email.
try:
#Provide the contents of the email.
response = client.send_email(
Destination={
'ToAddresses': [
RECIPIENT,
],
},
Message={
'Body': {
'Html': {
'Charset': CHARSET,
'Data': BODY_HTML,
},
'Text': {
'Charset': CHARSET,
'Data': BODY_TEXT,
},
},
'Subject': {
'Charset': CHARSET,
'Data': SUBJECT,
},
},
Source=SENDER,
# If you are not using a configuration set, comment or delete the
# following line
# ConfigurationSetName=CONFIGURATION_SET,
)
# Display an error if something goes wrong.
except ClientError as e:
print(e.response['Error']['Message'])
else:
print("Email sent! Message ID:"),
print(response['MessageId'])
# Create SQS client
sqs = boto3.client('sqs', region_name='eu-west-2')
s3 = boto3.client('s3', region_name='eu-west-2')
queue_url = 'https://sqs.eu-west-2.amazonaws.com/839229338431/dev-slomo.fifo'
s3attachmentsBucket = 'slomo-app-api-dev-uploadsbucket-ko2jzrics82r'
s3downloadBucket = 'slomo-app-api-dev-downloadsbucket-t4wypnf2t36r'
videoInputFolder = os.path.join('..', 'input')
while 1:
# Receive message from SQS queue
response = sqs.receive_message(
QueueUrl=queue_url,
AttributeNames=[
'SentTimestamp'
],
MaxNumberOfMessages=1,
MessageAttributeNames=[
'All'
],
VisibilityTimeout=3600,
WaitTimeSeconds=10
)
if 'Messages' in response:
message = response['Messages'][0]
messageAttrbibutes = message['MessageAttributes']
uploadAttachment = messageAttrbibutes['uploadAttachment']['StringValue']
savePath = os.path.join(videoInputFolder, uploadAttachment)
userId = messageAttrbibutes['userId']['StringValue']
email = messageAttrbibutes['email']['StringValue']
model = messageAttrbibutes['aiModel']['StringValue']
slomoFactor = messageAttrbibutes['slomoFactor']['StringValue']
receipt_handle = message['ReceiptHandle']
#download attachment
attachmentPath = "private/" + userId + "/" + uploadAttachment
s3.download_file(s3attachmentsBucket, attachmentPath, savePath)
#get fps of file using ffmpeg
framerate = getFramerate(savePath)
#process file to slow motion
outputFile = os.path.join("..", "output", uploadAttachment)
args = {
"ffmpeg": "/usr/bin",
"video": '"' + savePath + '"',
"sf": str(slomoFactor),
"checkpoint": "checkpoints/" + model + ".ckpt",
"fps": str(framerate),
"output": '"' + outputFile + '"',
}
result = processVideo(args)
#copy file to download folder
s3.upload_file(outputFile, s3downloadBucket, attachmentPath)
#generate email to client
sendEmail(email, s3downloadBucket, attachmentPath)
# Delete received message from queue
sqs.delete_message(
QueueUrl=queue_url,
ReceiptHandle=receipt_handle
)
print('Received and deleted message: %s' % message)
else:
print('no messages')
``` |
{
"source": "JimCurryWang/Deep-Learning-Jot",
"score": 2
} |
#### File: CNN/ResNet/test.py
```python
import torch
from ResNet import Block
from ResNet import ResNet_test
from ResNet import ResNet50, ResNet101, ResNet152
# test for residual Block
block_src = Block(in_channels=256, intermediate_channels=64)
print(block_src)
# test for mock ResNet
net = ResNet_test(img_channel=3, num_classes=1000)
print(net)
# test for ResNet-101
def test():
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
net = ResNet101(img_channel=3, num_classes=1000)
y = net(torch.randn(4, 3, 224, 224)).to(device)
print(y.size())
test()
```
#### File: RNN/Embedding/Embedding_RNN.py
```python
import torch
import torchvision
import torch.nn.functional as F
import torchvision.datasets as datasets
import torchvision.transforms as transforms
from torch import optim
from torch import nn
from torch.utils.data import DataLoader
from tqdm import tqdm
# Set device
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
# Hyperparameters
sequence_length = 28
input_size = 28
hidden_size = 256
num_layers = 2
num_classes = 10
learning_rate = 0.005
batch_size = 64
num_epochs = 3
class LSTM(nn.Module):
'''Recurrent neural network with LSTM (many-to-one)
'''
def __init__(self, input_size, hidden_size, num_layers, num_classes):
super(LSTM, self).__init__()
self.hidden_size = hidden_size
self.num_layers = num_layers
self.lstm = nn.LSTM(input_size, hidden_size, num_layers, batch_first=True)
# Using the last rnn output with fc to obtain the final classificaiton result
self.fc = nn.Linear(hidden_size, num_classes)
def forward(self, x):
'''
'''
out, _ = self.lstm(x) # x=[64, 28, 28], out=[64, 28, 256]=(batch, seq_len, 1 * hidden_size)
# Decode the hidden state of the last time step
# only take the last hidden state and send it into fc
out = out[:, -1, :] # out = [64, 256]
out = self.fc(out)
return out
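# Hedged sanity check (not in the original script): verifies the many-to-one shapes
# described in the comments above, using the hyperparameters defined at the top of the file.
# The function is defined for illustration only and is never called below.
def _lstm_shape_check():
    model = LSTM(input_size, hidden_size, num_layers, num_classes)
    x = torch.randn(batch_size, sequence_length, input_size)  # (64, 28, 28)
    return model(x).shape  # expected: torch.Size([64, 10])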
def check_accuracy(loader, model):
'''Check accuracy on training & test to see how good our model
'''
num_correct = 0
num_samples = 0
# Set model to eval
model.eval()
with torch.no_grad():
for x, y in loader:
x = x.to(device=device).squeeze(1)
y = y.to(device=device)
scores = model(x)
_, predictions = scores.max(1)
num_correct += (predictions == y).sum()
num_samples += predictions.size(0)
# Toggle model back to train
model.train()
return num_correct / num_samples
# Load Data
train_dataset = datasets.MNIST(root="mnist/MNIST", train=True,
transform=transforms.ToTensor(), download=True)
test_dataset = datasets.MNIST(root="mnist/MNIST", train=False,
transform=transforms.ToTensor(), download=True)
train_loader = DataLoader(dataset=train_dataset, batch_size=batch_size, shuffle=True)
test_loader = DataLoader(dataset=test_dataset, batch_size=batch_size, shuffle=True)
# Initialize network (try out just using simple RNN, or GRU, and then compare with LSTM)
model = LSTM(input_size, hidden_size, num_layers, num_classes).to(device)
# model = BLSTM(input_size, hidden_size, num_layers, num_classes).to(device)
# Loss and optimizer
criterion = nn.CrossEntropyLoss()
optimizer = optim.Adam(model.parameters(), lr=learning_rate)
# Train Network
for epoch in range(num_epochs):
for batch_idx, (data, targets) in enumerate(tqdm(train_loader)):
# (torch.Size([64, 1, 28, 28]), torch.Size([64]))
# Get data to cuda if possible
data = data.to(device=device).squeeze(1) # [64, 1, 28, 28] -> [64, 28, 28]
targets = targets.to(device=device)
# forward
scores = model(data)
loss = criterion(scores, targets)
# backward
optimizer.zero_grad()
loss.backward()
# gradient descent update step/adam step
optimizer.step()
print(f"Accuracy on training set: {check_accuracy(train_loader, model)*100:2f}")
print(f"Accuracy on test set: {check_accuracy(test_loader, model)*100:.2f}")
```
#### File: Deep-Learning-Jot/RNN/RNN_dynamic_seq.py
```python
import torch
from torch import nn
import torch.nn.utils.rnn as rnn_utils
from torch.utils.data import Dataset
from torch.utils.data import DataLoader
EPOCH = 2
batchsize = 3
hiddensize = 4
num_layers = 2
learning_rate = 0.001
class ExampleDataset(Dataset):
def __init__(self, data, label):
self.data = data
self.label = label
def __len__(self):
return len(self.data)
def __getitem__(self, index):
tuples = (self.data[index], self.label[index])
return tuples
def collate_fn(data_tuple):
    '''data_tuple is a list containing batchsize tuples; each tuple holds a data sequence and its label.
    '''
data_tuple.sort(key=lambda x: len(x[0]), reverse=True)
data = [sq[0] for sq in data_tuple]
label = [sq[1] for sq in data_tuple]
data_length = [len(sq) for sq in data]
    data = rnn_utils.pad_sequence(data, batch_first=True, padding_value=0.0) # zero-pad so all sequences are aligned to the same length
    label = rnn_utils.pad_sequence(label, batch_first=True, padding_value=0.0) # this line only turns the list of labels into a padded tensor
return data.unsqueeze(-1), label, data_length
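# Hedged illustration (not in the original script): with batchsize=3 and raw sequence
# lengths of, say, [7, 5, 2], collate_fn sorts by length, zero-pads to the longest
# sequence and returns a data tensor of shape (3, 7, 1) together with data_length == [7, 5, 2].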
if __name__ == '__main__':
# data
train_x = [
torch.FloatTensor([1, 1, 1, 1, 1, 1, 1]),
torch.FloatTensor([2, 2, 2, 2, 2, 2]),
torch.FloatTensor([3, 3, 3, 3, 3]),
torch.FloatTensor([4, 4, 4, 4]),
torch.FloatTensor([5, 5, 5]),
torch.FloatTensor([6, 6]),
torch.FloatTensor([7])
]
# label
train_y = [
torch.rand(7, hiddensize),
torch.rand(6, hiddensize),
torch.rand(5, hiddensize),
torch.rand(4, hiddensize),
torch.rand(3, hiddensize),
torch.rand(2, hiddensize),
torch.rand(1, hiddensize)
]
example = ExampleDataset(data=train_x, label=train_y)
data_loader = DataLoader(example, batch_size=batchsize, shuffle=True, collate_fn=collate_fn)
net = nn.LSTM(input_size=1, hidden_size=hiddensize, num_layers=num_layers, batch_first=True)
criteria = nn.MSELoss()
optimizer = torch.optim.Adam(net.parameters(), lr=learning_rate)
    # Training method 1: compute the loss on the padded (unpacked) outputs
for epoch in range(EPOCH):
for batch_id, (batch_x, batch_y, batch_x_len) in enumerate(data_loader):
batch_x_pack = rnn_utils.pack_padded_sequence(batch_x, batch_x_len, batch_first=True)
            out, _ = net(batch_x_pack) # out.data's shape: (total length of all sequences, hiddensize)
out_pad, out_len = rnn_utils.pad_packed_sequence(out, batch_first=True)
loss = criteria(out_pad, batch_y)
optimizer.zero_grad()
loss.backward()
optimizer.step()
print('epoch:{:2d}, batch_id:{:2d}, loss:{:6.4f}'.format(epoch, batch_id, loss))
    # Training method 2: compute the loss directly on the packed data
for epoch in range(EPOCH):
for batch_id, (batch_x, batch_y, batch_x_len) in enumerate(data_loader):
batch_x_pack = rnn_utils.pack_padded_sequence(batch_x, batch_x_len, batch_first=True)
batch_y_pack = rnn_utils.pack_padded_sequence(batch_y, batch_x_len, batch_first=True)
            out, _ = net(batch_x_pack) # out.data's shape: (total length of all sequences, hiddensize)
loss = criteria(out.data, batch_y_pack.data)
optimizer.zero_grad()
loss.backward()
optimizer.step()
print('epoch:{:2d}, batch_id:{:2d}, loss:{:6.4f}'.format(epoch, batch_id, loss))
print('Training done!')
```
#### File: Segmentation/UNet/model.py
```python
import torch
import torch.nn as nn
import torchvision.transforms.functional as TF
class DoubleConv(nn.Module):
def __init__(self, in_channels, out_channels):
'''
::Using "same" convolution
::Add BatchNorm2d which is not used in original papers
(the BatchNorm2d concept is launched after Unet...)
'''
super(DoubleConv, self).__init__()
self.conv = nn.Sequential(
# Conv1
nn.Conv2d(in_channels, out_channels,
kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(num_features=out_channels),
nn.ReLU(inplace=True),
# Conv2
nn.Conv2d(out_channels, out_channels,
kernel_size=3, stride=1, padding=1, bias=False),
nn.BatchNorm2d(num_features=out_channels),
nn.ReLU(inplace=True),
)
def forward(self, x):
return self.conv(x)
class UNet(nn.Module):
    '''U-Net typically takes a 3-channel (RGB) input and produces a 1-channel output
    ::in_channels=3
        image input with 3 channels (R-G-B)
    ::out_channels=1
        the output is a binary mask, so out_channels=1 by default
    ::features=[64, 128, 256, 512]
        the number of feature maps in each block
::torch.cat((x1, x2), dim=1)
https://pytorch.org/docs/stable/generated/torch.cat.html
::TORCHVISION.TRANSFORMS.FUNCTIONAL.resize(img, size, interpolation)
https://pytorch.org/vision/master/_modules/torchvision/transforms/functional.html
'''
def __init__(self, in_channels=3, out_channels=1, features=[64, 128, 256, 512]):
super(UNet, self).__init__()
self.ups = nn.ModuleList()
self.downs = nn.ModuleList()
self.pool = nn.MaxPool2d(kernel_size=2, stride=2)
# Down part (2*conv)
for feature in features:
self.downs.append(
DoubleConv(in_channels=in_channels, out_channels=feature)
)
# update the next in_channels size
in_channels = feature
# Up part (Up + 2*conv)
for feature in reversed(features):
self.ups.append(
nn.ConvTranspose2d(
in_channels=feature*2, out_channels=feature,
kernel_size=2, stride=2,
)
)
self.ups.append(
DoubleConv(in_channels=feature*2, out_channels=feature)
)
# Bottleneck
        # Use the last feature size for the bottleneck's in/out channels
        # (e.g. 512 -> 1024)
self.bottleneck = DoubleConv(in_channels=features[-1], out_channels=features[-1]*2)
# Final output part
self.final_conv = nn.Conv2d(
in_channels=features[0], out_channels=out_channels,
kernel_size=1
)
def forward(self, x):
# --- downsampling ---
# forward through the downsample part and save each output for skip_connections
skip_connections = []
for down in self.downs:
x = down(x)
skip_connections.append(x)
x = self.pool(x)
# --- bottleneck ---
x = self.bottleneck(x)
        # reverse the skip_connections list; x[::-1] == list(reversed(x))
skip_connections = skip_connections[::-1]
# --- upsampling ---
        # step through self.ups in pairs: range(0, 8, 2) -> 0, 2, 4, 6 (ConvTranspose2d, then DoubleConv)
for idx in range(0, len(self.ups), 2):
# --- ConvTranspose2d ---
# fetch ConvTranspose2d layers
x = self.ups[idx](x)
            # do floor division to get the corresponding skip_connection
            # 0,2,4,6 -> 0,1,2,3
skip_connection = skip_connections[idx//2]
# In the original paper, the authors use cropping to solve the size issues
# But resize, add padding, or consider the input be even is all suitable in here
if x.shape != skip_connection.shape:
                # resize x to the skip connection's spatial size (by interpolation)
                # shape[2:] keeps only H, W: torch.Size([3, 512, 20, 20]) -> torch.Size([20, 20])
x = TF.resize(x, size=skip_connection.shape[2:])
# --- Concatenate ---
# channel-wise dimension feature concat
# dim=1 -> along the channel dimension, not on dim=0 which will increase dimension
concat_skip = torch.cat((skip_connection, x), dim=1)
# print(skip_connection.shape)
# print(x.shape)
# print(concat_skip.shape)
# --- DoubleConv ---
# throw the concat layer into DoubleConv
x = self.ups[idx+1](concat_skip)
# --- final output part ---
x = self.final_conv(x)
return x
def unittest():
'''
batch_size = 1
channel_size = 3
    input_size = 572x572 (height x width)
'''
x = torch.randn((1, 3, 572, 572))
model = UNet(in_channels=3, out_channels=1)
# print(model.ups)
preds = model(x)
# print(preds.shape)
assert preds.shape[2:] == x.shape[2:] , (preds.shape, x.shape)
if __name__ == "__main__":
unittest()
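# Illustrative shape trace for unittest() (input 1x3x572x572, features=[64, 128, 256, 512]):
#   down path:  spatial 572 -> 286 -> 143 -> 71 -> 35, channels 64 -> 128 -> 256 -> 512
#   bottleneck: 35x35 with 1024 channels
#   up path:    ConvTranspose2d doubles the spatial size (70, 142, 286, 572); when it
#               misses an odd skip size (e.g. 70 vs 71) TF.resize snaps it back before concat
#   final_conv: 1x1 conv maps 64 channels to out_channels=1, so preds is 1x1x572x572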
```
#### File: Deep-Learning-Jot/Transformer/Seq2Seq.py
```python
import torch
import torch.nn as nn
import torch.optim as optim
from torchtext.legacy.datasets import Multi30k
from torchtext.legacy.data import Field, BucketIterator
import torchtext
import spacy
import numpy as np
import random
from torch.utils.tensorboard import SummaryWriter # to print to tensorboard
from utils import translate_sentence, bleu, save_checkpoint, load_checkpoint  # helpers used in the training loop below
'''
Multi30K is a dataset to stimulate multilingual multimodal research for English-German.
It is based on the Flickr30k dataset, which contains 31,014 images sourced from online photo-sharing websites.
Each image is paired with five English descriptions, which were collected from Amazon Mechanical Turk.
The dataset contains 145,000 training, 5,070 development, and 5,000 test descriptions.
The Multi30K dataset extends the Flickr30K dataset with translated and independent German sentences.
BucketIterator
train_iter, val_iter = BucketIterator.splits(
(trn, vld), # we pass in the datasets we want the iterator to draw data from
batch_sizes=(64, 64),
device=-1, # if you want to use the GPU, specify the GPU number here
sort_key=lambda x: len(x.comment_text), # the BucketIterator needs to be told what function it should use to group the data.
sort_within_batch=False,
repeat=False # we pass repeat=False because we want to wrap this Iterator layer.
)
test_iter = Iterator(tst, batch_size=64, device=-1, sort=False, sort_within_batch=False, repeat=False)
Field
+ the "unk_token" for out-of-vocabulary words
+ the "pad_token" for padding
+ the "eos_token" for the end of a sentence
+ an optional "init_token" for the start of the sentence
'''
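# Illustrative (assumed) effect of the Field settings below: a German sentence such as
# "zwei Hunde spielen" is tokenized by spaCy, lowercased, wrapped as
# ['<sos>', 'zwei', 'hunde', 'spielen', '<eos>'], and numericalized against the
# vocabulary built from the training split (words below min_freq map to '<unk>').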
def tokenize_ger(text):
return [tok.text for tok in spacy_ger.tokenizer(text)]
def tokenize_eng(text):
return [tok.text for tok in spacy_eng.tokenizer(text)]
spacy_ger = spacy.load("de_core_news_sm")
spacy_eng = spacy.load("en_core_web_sm")
german = Field(tokenize=tokenize_ger, lower=True,
init_token="<sos>", eos_token="<eos>")
english = Field(tokenize=tokenize_eng, lower=True,
init_token="<sos>", eos_token="<eos>")
train_data, valid_data, test_data = Multi30k.splits( exts=(".de", ".en"),
fields=(german, english))
german.build_vocab(train_data, max_size=10000, min_freq=2)
english.build_vocab(train_data, max_size=10000, min_freq=2)
class Encoder(nn.Module):
def __init__(self, input_size, embedding_size, hidden_size, num_layers, dropout_rate):
super(Encoder, self).__init__()
self.hidden_size = hidden_size
self.num_layers = num_layers
self.dropout = nn.Dropout(dropout_rate)
self.embedding = nn.Embedding(
num_embeddings=input_size, embedding_dim=embedding_size
)
self.rnn = nn.LSTM(
input_size=embedding_size, hidden_size=hidden_size,
num_layers=num_layers, dropout=dropout_rate
)
def forward(self, x):
'''
x shape: (seq_length, N) where N is batch size
embedding shape: (seq_length, N, embedding_size)
outputs shape: (seq_length, N, hidden_size)
'''
embedding = self.embedding(x)
embedding = self.dropout(embedding)
outputs, (hidden, cell) = self.rnn(embedding)
return hidden, cell
class Decoder(nn.Module):
def __init__(self, input_size, embedding_size, hidden_size, num_layers, dropout_rate, output_size):
super(Decoder, self).__init__()
self.hidden_size = hidden_size
self.num_layers = num_layers
self.dropout = nn.Dropout(dropout_rate)
self.embedding = nn.Embedding(
num_embeddings=input_size, embedding_dim=embedding_size
)
self.rnn = nn.LSTM(
input_size=embedding_size, hidden_size=hidden_size,
num_layers=num_layers, dropout=dropout_rate
)
self.fc = nn.Linear(in_features=hidden_size, out_features=output_size)
def forward(self, x, hidden, cell):
'''
x shape: (N) -> (1, N)
(N) where N is for batch size, but we want it to be (1, N),
seq_length is 1 here because we are sending in a single word and not a sentence
embedding shape: (1, N, embedding_size)
outputs shape: (1, N, hidden_size)
predictions shape: (1, N, length_of_target_vocabulary) -> (N, length_of_target_vocabulary)
(1, N, length_of_target_vocabulary) to send it to loss function
we want it to be (N, length_of_target_vocabulary)
so we're just gonna remove the first dim
'''
x = x.unsqueeze(0)
embedding = self.embedding(x)
embedding = self.dropout(embedding)
outputs, (hidden, cell) = self.rnn(embedding, (hidden, cell))
predictions = self.fc(outputs)
predictions = predictions.squeeze(0)
return predictions, hidden, cell
class Seq2Seq(nn.Module):
def __init__(self, encoder, decoder):
super(Seq2Seq, self).__init__()
self.encoder = encoder
self.decoder = decoder
def forward(self, source, target, teacher_force_ratio=0.5):
''' source shape: (target_len, batch_size)
'''
batch_size = source.shape[1]
target_len = target.shape[0]
target_vocab_size = len(english.vocab)
outputs = torch.zeros(target_len, batch_size, target_vocab_size).to(device)
hidden, cell = self.encoder(source)
# Grab the first input to the Decoder which will be <SOS> token
x = target[0]
for t in range(1, target_len):
# Use previous hidden, cell as context from encoder at start
output, hidden, cell = self.decoder(x, hidden, cell)
# Store next output prediction
outputs[t] = output
# Get the best word the Decoder predicted (index in the vocabulary)
# argmax for second dimension
best_guess = output.argmax(1)
# With probability of teacher_force_ratio we take the actual next word
# otherwise we take the word that the Decoder predicted it to be.
# Teacher Forcing is used so that the model gets used to seeing
# similar inputs at training and testing time, if teacher forcing is 1
# then inputs at test time might be completely different than what the
# network is used to. This was a long comment.
x = target[t] if random.random() < teacher_force_ratio else best_guess
return outputs
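# Illustrative shape walk-through of one forward pass (with the hyperparameters set below:
# embedding 200, hidden 1024, 2 layers, batch size N):
#   source (src_len, N) -> encoder embedding (src_len, N, 200) -> hidden/cell (2, N, 1024)
#   each decoder step takes x of shape (N,) and returns logits of shape (N, len(english.vocab))
#   outputs is filled from t=1 onward into (target_len, N, len(english.vocab)); outputs[0]
#   stays zero, which is why the training loop below slices output[1:] before the loss.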
### We're ready to define everything we need for training our Seq2Seq model ###
# Training hyperparameters
num_epochs = 10
learning_rate = 0.001
batch_size = 64
# Model hyperparameters
load_model = False
device = torch.device("cuda" if torch.cuda.is_available() else "cpu")
input_size_encoder = len(german.vocab)
input_size_decoder = len(english.vocab)
output_size = len(english.vocab)
encoder_embedding_size = 200
decoder_embedding_size = 200
hidden_size = 1024 # Needs to be the same for both RNN's
num_layers = 2
encoder_dropout = 0.5
decoder_dropout = 0.5
# Tensorboard to get nice loss plot
writer = SummaryWriter(f"runs/loss_plot")
step = 0
train_iterator, valid_iterator, test_iterator = BucketIterator.splits(
(train_data, valid_data, test_data), # we pass in the datasets we want the iterator to draw data from
batch_size=batch_size,
sort_within_batch=True,
sort_key=lambda x: len(x.src), # the BucketIterator needs to be told what function it should use to group the data.
device=device,
)
encoder_net = Encoder(
input_size=input_size_encoder,
embedding_size=encoder_embedding_size,
hidden_size=hidden_size,
    num_layers=num_layers,
dropout_rate=encoder_dropout
).to(device)
decoder_net = Decoder(
input_size=input_size_decoder,
embedding_size=decoder_embedding_size,
hidden_size=hidden_size,
    num_layers=num_layers,
dropout_rate=decoder_dropout,
output_size=output_size,
).to(device)
model = Seq2Seq(encoder=encoder_net, decoder=decoder_net).to(device)
optimizer = optim.Adam(model.parameters(), lr=learning_rate)
pad_idx = english.vocab.stoi["<pad>"]
criterion = nn.CrossEntropyLoss(ignore_index=pad_idx)
if load_model:
load_checkpoint(filename="seq2seq.pth.tar", model=model, optimizer=optimizer)
sentence = "ein boot mit mehreren männern darauf wird von einem großen pferdegespann ans ufer gezogen."
for epoch in range(num_epochs):
print(f"[Epoch {epoch} / {num_epochs}]")
state_of_checkpoint = {
"state_dict": model.state_dict(),
"optimizer": optimizer.state_dict()
}
save_checkpoint(state=state_of_checkpoint, filename="seq2seq.pth.tar")
model.eval()
translated_sentence = translate_sentence(
model, sentence, german, english, device, max_length=50
)
print(f"Translated example sentence: \n {translated_sentence}")
model.train()
for batch_idx, batch in enumerate(train_iterator):
# Get input and targets and get to cuda
inp_data = batch.src.to(device)
target = batch.trg.to(device)
# Forward prop
output = model(inp_data, target)
        # Output is of shape (trg_len, batch_size, output_dim) but CrossEntropyLoss
        # doesn't take input in that form. For example, with MNIST we want the
        # output to be (N, 10) and the targets just (N). Here we can view it the
        # same way: we have output_words * batch_size predictions to feed into
        # the cost function, so we need to do some reshaping, and we also drop
        # the start token at the same time.
output = output[1:].reshape(-1, output.shape[2])
target = target[1:].reshape(-1)
optimizer.zero_grad()
loss = criterion(output, target)
# Back prop
loss.backward()
# Clip to avoid exploding gradient issues, makes sure grads are
# within a healthy range
torch.nn.utils.clip_grad_norm_(model.parameters(), max_norm=1)
# Gradient descent step
optimizer.step()
# Plot to tensorboard
writer.add_scalar("Training loss", loss, global_step=step)
step += 1
score = bleu(test_data[1:100], model, german, english, device)
print(f"Bleu score {score*100:.2f}")
``` |
{
"source": "JimDeanSpivey/ATF-for-Vista-FOIA",
"score": 2
} |
#### File: Packages/Scheduling/SCMain01_suite.py
```python
import sys
sys.path = ['./FunctionalTest/RAS/lib'] + ['./dataFiles'] + ['./lib/vista'] + sys.path
from SCActions import SCActions
import datetime
import TestHelper
def sc_test001(test_suite_details):
    '''Basic appointment management options
Make an Appointment, Check in, Check Out'''
testname = sys._getframe().f_code.co_name
test_driver = TestHelper.TestDriver(testname)
test_driver.pre_test_run(test_suite_details)
try:
VistA = test_driver.connect_VistA(test_suite_details)
SC = SCActions(VistA, scheduling='Scheduling')
time = SC.schtime()
SC.signon()
SC.makeapp(patient='333224444', datetime=time)
time = SC.schtime(plushour=1)
now = datetime.datetime.now()
hour = now.hour + 1
SC.signon()
SC.checkin(vlist=['Three', str(hour), 'CHECKED-IN:'])
SC.signon()
SC.checkout(vlist1=['Three', str(hour), 'Checked In'], vlist2=['305.91', 'OTHER DRUG', 'RESULTING'], icd='305.91')
SC.signoff()
test_driver.post_test_run(test_suite_details)
except TestHelper.TestError, e:
test_driver.exception_handling(test_suite_details, e)
else:
test_driver.try_else_handling(test_suite_details)
finally:
test_driver.finally_handling(test_suite_details)
test_driver.end_method_handling(test_suite_details)
def sc_test002(test_suite_details):
    '''Basic appointment management options
Make an Appointment (Scheduled and Unscheduled),
record a No-Show, Cancel an appointment and change patients'''
testname = sys._getframe().f_code.co_name
test_driver = TestHelper.TestDriver(testname)
test_driver.pre_test_run(test_suite_details)
try:
VistA = test_driver.connect_VistA(test_suite_details)
SC = SCActions(VistA, scheduling='Scheduling')
time = SC.schtime()
SC.signon()
SC.makeapp(patient='655447777', datetime=time)
time = SC.schtime(plushour=1)
SC.signon()
SC.unschvisit(patient='345678233', patientname='Twelve')
SC.signon()
SC.noshow(appnum='3')
SC.signon()
SC.canapp(mult='1')
SC.signon()
SC.chgpatient(patient1='345678233', patient2='345238901', patientname1='Twelve', patientname2='Ten')
SC.signoff()
test_driver.post_test_run(test_suite_details)
except TestHelper.TestError, e:
test_driver.exception_handling(test_suite_details, e)
else:
test_driver.try_else_handling(test_suite_details)
finally:
test_driver.finally_handling(test_suite_details)
test_driver.end_method_handling(test_suite_details)
def sc_test003(test_suite_details):
'''This tests clinic features such as change clinic, change daterange,
expand the entry, add and edit, and Patient demographics'''
testname = sys._getframe().f_code.co_name
test_driver = TestHelper.TestDriver(testname)
test_driver.pre_test_run(test_suite_details)
try:
VistA = test_driver.connect_VistA(test_suite_details)
SC = SCActions(VistA, scheduling='Scheduling')
SC.signon()
SC.chgclinic()
SC.signon()
SC.chgdaterange()
SC.signon()
SC.teaminfo()
SC.signoff()
test_driver.post_test_run(test_suite_details)
except TestHelper.TestError, e:
test_driver.exception_handling(test_suite_details, e)
else:
test_driver.try_else_handling(test_suite_details)
finally:
test_driver.finally_handling(test_suite_details)
test_driver.end_method_handling(test_suite_details)
def sc_test004(test_suite_details):
'''This tests clinic features such as expand the entry, add and edit, and Patient demographics'''
testname = sys._getframe().f_code.co_name
test_driver = TestHelper.TestDriver(testname)
test_driver.pre_test_run(test_suite_details)
try:
VistA = test_driver.connect_VistA(test_suite_details)
SC = SCActions(VistA, scheduling='Scheduling')
time = SC.schtime(plushour=1)
SC.signon()
SC.makeapp(patient='345238901', datetime=time)
SC.signon()
SC.patdem(name='Ten', mult='2')
SC.signon()
SC.expandentry(vlist1=['TEN', 'SCHEDULED', '30'], vlist2=['Event', 'Date', 'User', 'TESTMASTER'],
vlist3=['NEXT AVAILABLE', 'NO', '0'], vlist4=['1933', 'MALE', 'UNANSWERED'],
vlist5=['Combat Veteran:', 'No check out information'], mult='2')
SC.signon()
SC.addedit(name='345623902', icd='305.91')
SC.signoff()
test_driver.post_test_run(test_suite_details)
except TestHelper.TestError, e:
test_driver.exception_handling(test_suite_details, e)
else:
test_driver.try_else_handling(test_suite_details)
finally:
test_driver.finally_handling(test_suite_details)
test_driver.end_method_handling(test_suite_details)
def sc_test005(test_suite_details):
'''This test checks a patient into a clinic, then discharges him, then deletes his checkout'''
testname = sys._getframe().f_code.co_name
test_driver = TestHelper.TestDriver(testname)
test_driver.pre_test_run(test_suite_details)
try:
VistA = test_driver.connect_VistA(test_suite_details)
SC = SCActions(VistA)
SC.signon()
SC.enroll(patient='543236666')
SC = SCActions(VistA, scheduling='Scheduling')
time = SC.schtime(plushour=1)
SC.signon()
SC.makeapp(patient='543236666', datetime=time)
SC.signon()
SC.discharge(patient='543236666', appnum='3')
SC.signon()
SC.checkout(vlist1=['One', 'No Action'], vlist2=['305.91', 'RESULTING'], icd='305.91', mult='3')
SC = SCActions(VistA, user='fakedoc1', code='1Doc!@#$')
SC.signon()
SC.deletecheckout(appnum='3')
SC.signoff()
test_driver.post_test_run(test_suite_details)
except TestHelper.TestError, e:
test_driver.exception_handling(test_suite_details, e)
else:
test_driver.try_else_handling(test_suite_details)
finally:
test_driver.finally_handling(test_suite_details)
test_driver.end_method_handling(test_suite_details)
def sc_test006(test_suite_details):
'''This test will exercise the wait list functionality'''
testname = sys._getframe().f_code.co_name
test_driver = TestHelper.TestDriver(testname)
test_driver.pre_test_run(test_suite_details)
try:
VistA = test_driver.connect_VistA(test_suite_details)
SC = SCActions(VistA, user='fakedoc1', code='1Doc!@#$')
SC.signon()
SC.waitlistentry(patient='323554545')
SC.waitlistdisposition(patient='323554545')
SC.signoff()
test_driver.post_test_run(test_suite_details)
except TestHelper.TestError, e:
test_driver.exception_handling(test_suite_details, e)
else:
test_driver.try_else_handling(test_suite_details)
finally:
test_driver.finally_handling(test_suite_details)
test_driver.end_method_handling(test_suite_details)
def startmon(test_suite_details):
'''Starts Coverage Monitor'''
testname = sys._getframe().f_code.co_name
test_driver = TestHelper.TestDriver(testname)
test_driver.pre_test_run(test_suite_details)
try:
VistA1 = test_driver.connect_VistA(test_suite_details)
VistA1.startCoverage(routines=['SC*', 'SD*'])
test_driver.post_test_run(test_suite_details)
'''
Close Vista
'''
VistA1.write('^\r^\r^\r')
VistA1.write('h\r')
except TestHelper.TestError, e:
test_driver.exception_handling(test_suite_details, e)
else:
test_driver.try_else_handling(test_suite_details)
finally:
test_driver.finally_handling(test_suite_details)
test_driver.end_method_handling(test_suite_details)
def stopmon (test_suite_details):
''' STOP MONITOR'''
testname = sys._getframe().f_code.co_name
test_driver = TestHelper.TestDriver(testname)
test_driver.pre_test_run(test_suite_details)
try:
# Connect to VistA
VistA1 = test_driver.connect_VistA(test_suite_details)
VistA1.stopCoverage(path=(test_suite_details.result_dir + '/' + 'Scheduling_coverage.txt'))
test_driver.post_test_run(test_suite_details)
'''
Close Vista
'''
VistA1.write('^\r^\r^\r')
VistA1.write('h\r')
except TestHelper.TestError, e:
test_driver.exception_handling(test_suite_details, e)
else:
test_driver.try_else_handling(test_suite_details)
finally:
test_driver.finally_handling(test_suite_details)
test_driver.end_method_handling(test_suite_details)
'''
def connect_VistA(testname, result_dir):
# Connect to VistA
from OSEHRAHelper import ConnectToMUMPS, PROMPT
VistA = ConnectToMUMPS(logfile=result_dir + '/' + testname + '.txt', instance='', namespace='')
if VistA.type == 'cache':
try:
VistA.ZN('VISTA')
except IndexError, no_namechange:
pass
VistA.wait(PROMPT)
return VistA
'''
```
#### File: Packages/SSH Demo/ssh_connect_demo_test.py
```python
import os
import sys
#apparently these are not needed... at least not on windows. Will need to retest this on linux
#sys.path = ['./FunctionalTest/RAS/lib'] + ['./lib/vista'] + sys.path
#sys.path = ['./'] + ['../lib/vista'] + sys.path
import ssh_connect_demo_suite
import TestHelper
def main():
test_suite_driver = TestHelper.TestSuiteDriver(__file__)
test_suite_details = test_suite_driver.generate_test_suite_details()
try:
test_suite_driver.pre_test_suite_run(test_suite_details)
#Begin Tests
ssh_connect_demo_suite.dive_into_menus(test_suite_details)
ssh_connect_demo_suite.demo_screen_man(test_suite_details)
#End Tests
test_suite_driver.post_test_suite_run(test_suite_details)
except Exception, e:
test_suite_driver.exception_handling(test_suite_details, e)
else:
test_suite_driver.try_else_handling(test_suite_details)
finally:
test_suite_driver.finally_handling(test_suite_details)
test_suite_driver.end_method_handling(test_suite_details)
if __name__ == '__main__':
main()
``` |
{
"source": "Jim-Dev/BlenderAddons",
"score": 2
} |
#### File: Jim-Dev/BlenderAddons/monogusatools.py
```python
import bpy
import random
from bpy.types import Menu, Panel
bl_info = {
"name": "Monogusa Tools",
"author": "isidourou",
"version": (1, 0),
"blender": (2, 65, 0),
"location": "View3D > Toolbar",
"description": "MonogusaTools",
"warning": "",
"wiki_url": "",
"tracker_url": "",
"category": 'CTNAME'}
atobj = None
def mode_interpret(emode):
if emode == 'PAINT_TEXTURE':
return 'TEXTURE_PAINT'
if emode == 'SCULPT':
return 'SCULPT'
if emode == 'PAINT_VERTEX':
return 'VERTEX_PAINT'
if emode == 'PAINT_WEIGHT':
return 'WEIGHT_PAINT'
if emode == 'OBJECT':
return 'OBJECT'
if emode == 'POSE':
return 'POSE'
if emode=='EDIT_MESH' or emode=='EDIT_ARMATURE' or emode=='EDIT_CURVE' or emode=='EDIT_TEXT' or emode=='EDIT_METABALL' or emode=='EDIT_SURFACE':
return 'EDIT'
def check_active():
    # count the currently selected objects
    return len(bpy.context.selected_objects)
def check_mode():
emode = bpy.context.mode
if emode != 'OBJECT':
bpy.ops.object.mode_set(mode='OBJECT')
return emode
# Menu in tools region
class MonogusaToolsPanel(bpy.types.Panel):
bl_label = "Monogusa Tools"
bl_space_type = "VIEW_3D"
bl_region_type = "TOOLS"
def draw(self, context):
layout = self.layout
#3D Cursor
col = layout.column(align=True)
col.label(text="3d cursor:")
row = col.row(align=True)
row.operator("to.selected", text="to Selected")
row.operator("to.cursor", text="to Cursor")
#select
col = layout.column(align=True)
col.label(text="Select:")
row = col.row(align=True)
row.operator("select.type", text="Type")
row.operator("select.group", text="Group")
row.operator("select.obdata", text="OBData")
row.operator("select.mat", text="Mat")
row = col.row(align=True)
row.operator("select.invert", text="Invert")
row.operator("select.all", text=" All")
row.operator("deselect.all", text="Deselect")
#execute
#col = layout.column(align=True)
col.label(text="Execute:")
row = col.row(align=True)
row.operator("hide.selected", text="Hide")
row.operator("unhide.all", text="Unhide")
row.operator("execute.delete", text="Delete")
#sendlayer layer
col.label(text="Move to Layer:")
row = col.row(align=True)
row.operator("sendlayer.l00",text=' ')
row.operator("sendlayer.l01",text=' ')
row.operator("sendlayer.l02",text=' ')
row.operator("sendlayer.l03",text=' ')
row.operator("sendlayer.l04",text=' ')
row.operator("sendlayer.l05",text=' ')
row.operator("sendlayer.l06",text=' ')
row.operator("sendlayer.l07",text=' ')
row.operator("sendlayer.l08",text=' ')
row.operator("sendlayer.l09",text=' ')
row = col.row(align=True)
row.operator("sendlayer.l10",text=' ')
row.operator("sendlayer.l11",text=' ')
row.operator("sendlayer.l12",text=' ')
row.operator("sendlayer.l13",text=' ')
row.operator("sendlayer.l14",text=' ')
row.operator("sendlayer.l15",text=' ')
row.operator("sendlayer.l16",text=' ')
row.operator("sendlayer.l17",text=' ')
row.operator("sendlayer.l18",text=' ')
row.operator("sendlayer.l19",text=' ')
#convert
col = layout.column(align=True)
col.label(text="Convert:")
row = col.row(align=True)
row.operator("convert.tomesh", text="to Mesh")
row.operator("convert.tocurve", text="to Curve")
#subdivide
col = layout.column(align=True)
col.label(text="Sub Divide:")
row = col.row(align=True)
row.operator("div.simple", text="Simple Divide")
row = col.row(align=True)
row.operator("div.smooth", text="Smooth Div")
row.operator("div.rand", text="Random Div")
row = col.row(align=False)
row.operator("ver.smooth", text="Smoothing Vertex / Points")
        #add mirror modifier
        col = layout.column(align=True)
col.label(text="Add Mirror Modifier:")
row = col.row(align=True)
row.operator("add.mmx", text="X")
row.operator("add.mmy", text="Y")
row.operator("add.mmz", text="Z")
row = col.row(align=True)
row.operator("add.mmmx", text="-X")
row.operator("add.mmmy", text="-Y")
row.operator("add.mmmz", text="-Z")
        #set template empty
col = layout.column(align=True)
col.label(text="Set Template Empty:")
row = col.row(align=True)
row.operator("temp.single", text="Single")
row.operator("temp.separate", text="3D Separate")
row.operator("temp.contact", text="3D Contact")
#---- main ------
#select
class SelectType(bpy.types.Operator):
bl_idname = "select.type"
bl_label = "SelectType"
def execute(self, context):
check_mode()
if check_active() == 0:
return{'FINISHED'}
bpy.ops.object.select_grouped(type='TYPE')
return{'FINISHED'}
class SelectGroup(bpy.types.Operator):
bl_idname = "select.group"
bl_label = "SelectGroup"
def execute(self, context):
check_mode()
if check_active() == 0:
return{'FINISHED'}
bpy.ops.object.select_grouped(type='GROUP')
return{'FINISHED'}
class SelectObjdata(bpy.types.Operator):
bl_idname = "select.obdata"
bl_label = "SelectObjdata"
def execute(self, context):
check_mode()
if check_active() == 0:
return{'FINISHED'}
bpy.ops.object.select_linked(type='OBDATA')
return{'FINISHED'}
class SelectMat(bpy.types.Operator):
bl_idname = "select.mat"
bl_label = "SelectMat"
def execute(self, context):
check_mode()
if check_active() == 0:
return{'FINISHED'}
bpy.ops.object.select_linked(type='MATERIAL')
return{'FINISHED'}
class SelectInvert(bpy.types.Operator):
bl_idname = "select.invert"
bl_label = "SelectInvert"
def execute(self, context):
cobj = bpy.context.object
if cobj == None:
return{'FINISHED'}
objtype = cobj.type
emode = bpy.context.mode
emode = mode_interpret(emode)
if objtype == 'MESH':
if emode == 'EDIT':
bpy.ops.mesh.select_all(action='INVERT')
if objtype == 'CURVE' or objtype == 'SURFACE':
if emode == 'EDIT':
bpy.ops.curve.select_all(action='INVERT')
if objtype == 'ARMATURE':
if emode == 'POSE':
bpy.ops.pose.select_all(action='INVERT')
if emode == 'EDIT':
bpy.ops.armature.select_all(action='INVERT')
if objtype == 'META':
if emode == 'EDIT':
bpy.ops.mball.select_all(action='INVERT')
if emode == 'OBJECT':
bpy.ops.object.select_all(action='INVERT')
return{'FINISHED'}
class SelectAll(bpy.types.Operator):
bl_idname = "select.all"
bl_label = "SelectAll"
def execute(self, context):
cobj = bpy.context.object
if cobj == None:
return{'FINISHED'}
objtype = cobj.type
emode = bpy.context.mode
emode = mode_interpret(emode)
if objtype == 'MESH':
if emode == 'EDIT':
bpy.ops.mesh.select_all(action='SELECT')
if objtype == 'CURVE' or objtype == 'SURFACE':
if emode == 'EDIT':
bpy.ops.curve.select_all(action='SELECT')
if objtype == 'ARMATURE':
if emode == 'POSE':
bpy.ops.pose.select_all(action='SELECT')
if emode == 'EDIT':
bpy.ops.armature.select_all(action='SELECT')
if objtype == 'META':
if emode == 'EDIT':
bpy.ops.mball.select_all(action='SELECT')
if emode == 'OBJECT':
bpy.ops.object.select_all(action='SELECT')
return{'FINISHED'}
class DeselectAll(bpy.types.Operator):
bl_idname = "deselect.all"
bl_label = "DeselectAll"
def execute(self, context):
cobj = bpy.context.object
if cobj == None:
return{'FINISHED'}
objtype = cobj.type
emode = bpy.context.mode
emode = mode_interpret(emode)
if objtype == 'MESH':
if emode == 'EDIT':
bpy.ops.mesh.select_all(action='DESELECT')
if objtype == 'CURVE' or objtype == 'SURFACE':
if emode == 'EDIT':
bpy.ops.curve.select_all(action='DESELECT')
if objtype == 'ARMATURE':
if emode == 'POSE':
bpy.ops.pose.select_all(action='DESELECT')
if emode == 'EDIT':
bpy.ops.armature.select_all(action='DESELECT')
if objtype == 'META':
if emode == 'EDIT':
bpy.ops.mball.select_all(action='DESELECT')
if emode == 'OBJECT':
bpy.ops.object.select_all(action='DESELECT')
return{'FINISHED'}
#execute
class HideSelected(bpy.types.Operator):
bl_idname = "hide.selected"
bl_label = "HideSelected"
def execute(self, context):
global atobj
cobj = bpy.context.object
if cobj == None:
return{'FINISHED'}
objtype = cobj.type
emode = bpy.context.mode
emode = mode_interpret(emode)
if objtype == 'MESH':
if emode == 'EDIT':
bpy.ops.mesh.hide(unselected=False)
if objtype == 'CURVE' or objtype == 'SURFACE':
if emode == 'EDIT':
bpy.ops.curve.hide(unselected=False)
if objtype == 'ARMATURE':
if emode == 'POSE':
bpy.ops.pose.hide(unselected=False)
if emode == 'EDIT':
bpy.ops.armature.hide(unselected=False)
if objtype == 'META':
if emode == 'EDIT':
bpy.ops.mball.hide_metaelems(unselected=False)
if emode == 'OBJECT':
bpy.ops.object.hide_view_set(unselected=False)
atobj = cobj
return{'FINISHED'}
class UnhideAll(bpy.types.Operator):
bl_idname = "unhide.all"
bl_label = "UnhideAll"
def execute(self, context):
global atobj
cobj = bpy.context.object
if cobj == None:
bpy.context.scene.objects.active = atobj
obj=bpy.context.object
obj.select = True
emode = bpy.context.mode
emode = mode_interpret(emode)
if emode == 'OBJECT':
#bpy.ops.object.select_all(action='DESELECT')
bpy.ops.object.hide_view_clear()
return{'FINISHED'}
objtype = bpy.context.object.type
if objtype == 'MESH':
if emode == 'EDIT':
bpy.ops.mesh.reveal()
if objtype == 'CURVE' or objtype == 'SURFACE':
if emode == 'EDIT':
bpy.ops.curve.reveal()
if objtype == 'ARMATURE':
if emode == 'POSE':
bpy.ops.pose.reveal()
if emode == 'EDIT':
bpy.ops.armature.reveal()
if objtype == 'META':
if emode == 'EDIT':
bpy.ops.mball.reveal_metaelems()
return{'FINISHED'}
class ExecuteDelete(bpy.types.Operator):
bl_idname = "execute.delete"
bl_label = "ExecuteDelete"
def execute(self, context):
emode = bpy.context.mode
emode = mode_interpret(emode)
if emode == 'OBJECT':
bpy.ops.object.delete(use_global=False)
return{'FINISHED'}
objtype = bpy.context.object.type
if objtype == 'MESH':
if emode == 'EDIT':
bpy.ops.mesh.delete()
if objtype == 'CURVE' or objtype == 'SURFACE':
if emode == 'EDIT':
bpy.ops.curve.delete()
if objtype == 'ARMATURE':
if emode == 'POSE':
bpy.ops.object.editmode_toggle()
bpy.ops.armature.delete()
bpy.ops.object.posemode_toggle()
if emode == 'EDIT':
bpy.ops.armature.delete()
if objtype == 'META':
if emode == 'EDIT':
bpy.ops.mball.delete_metaelems()
return{'FINISHED'}
#move to Layer
class Send00(bpy.types.Operator):
bl_idname = "sendlayer.l00"
bl_label = "Send00"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(True,False,False,False,False,False,False,False,False,False,
False,False,False,False,False,False,False,False,False,False))
return{'FINISHED'}
class Send01(bpy.types.Operator):
bl_idname = "sendlayer.l01"
bl_label = "Send01"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,True,False,False,False,False,False,False,False,False,
False,False,False,False,False,False,False,False,False,False))
return{'FINISHED'}
class Send02(bpy.types.Operator):
bl_idname = "sendlayer.l02"
bl_label = "Send02"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,True,False,False,False,False,False,False,False,
False,False,False,False,False,False,False,False,False,False))
return{'FINISHED'}
class Send03(bpy.types.Operator):
bl_idname = "sendlayer.l03"
bl_label = "Send03"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,True,False,False,False,False,False,False,
False,False,False,False,False,False,False,False,False,False))
return{'FINISHED'}
class Send04(bpy.types.Operator):
bl_idname = "sendlayer.l04"
bl_label = "Send04"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,True,False,False,False,False,False,
False,False,False,False,False,False,False,False,False,False))
return{'FINISHED'}
class Send05(bpy.types.Operator):
bl_idname = "sendlayer.l05"
bl_label = "Send05"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,True,False,False,False,False,
False,False,False,False,False,False,False,False,False,False))
return{'FINISHED'}
class Send06(bpy.types.Operator):
bl_idname = "sendlayer.l06"
bl_label = "Send06"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,False,True,False,False,False,
False,False,False,False,False,False,False,False,False,False))
return{'FINISHED'}
class Send07(bpy.types.Operator):
bl_idname = "sendlayer.l07"
bl_label = "Send07"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,False,False,True,False,False,
False,False,False,False,False,False,False,False,False,False))
return{'FINISHED'}
class Send08(bpy.types.Operator):
bl_idname = "sendlayer.l08"
bl_label = "Send08"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,False,False,False,True,False,
False,False,False,False,False,False,False,False,False,False))
return{'FINISHED'}
class Send09(bpy.types.Operator):
bl_idname = "sendlayer.l09"
bl_label = "Send09"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,False,False,False,False,True,
False,False,False,False,False,False,False,False,False,False))
return{'FINISHED'}
class Send10(bpy.types.Operator):
bl_idname = "sendlayer.l10"
bl_label = "Send10"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,False,False,False,False,False,
True,False,False,False,False,False,False,False,False,False))
return{'FINISHED'}
class Send11(bpy.types.Operator):
bl_idname = "sendlayer.l11"
bl_label = "Send11"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,False,False,False,False,False,
False,True,False,False,False,False,False,False,False,False))
return{'FINISHED'}
class Send12(bpy.types.Operator):
bl_idname = "sendlayer.l12"
bl_label = "Send12"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,False,False,False,False,False,
False,False,True,False,False,False,False,False,False,False))
return{'FINISHED'}
class Send13(bpy.types.Operator):
bl_idname = "sendlayer.l13"
bl_label = "Send13"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,False,False,False,False,False,
False,False,False,True,False,False,False,False,False,False))
return{'FINISHED'}
class Send14(bpy.types.Operator):
bl_idname = "sendlayer.l14"
bl_label = "Send14"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,False,False,False,False,False,
False,False,False,False,True,False,False,False,False,False))
return{'FINISHED'}
class Send15(bpy.types.Operator):
bl_idname = "sendlayer.l15"
bl_label = "Send15"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,False,False,False,False,False,
False,False,False,False,False,True,False,False,False,False))
return{'FINISHED'}
class Send16(bpy.types.Operator):
bl_idname = "sendlayer.l16"
bl_label = "Send16"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,False,False,False,False,False,
False,False,False,False,False,False,True,False,False,False))
return{'FINISHED'}
class Send17(bpy.types.Operator):
bl_idname = "sendlayer.l17"
bl_label = "Send17"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,False,False,False,False,False,
False,False,False,False,False,False,False,True,False,False))
return{'FINISHED'}
class Send18(bpy.types.Operator):
bl_idname = "sendlayer.l18"
bl_label = "Send18"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,False,False,False,False,False,
False,False,False,False,False,False,False,False,True,False))
return{'FINISHED'}
class Send19(bpy.types.Operator):
bl_idname = "sendlayer.l19"
bl_label = "Send19"
def execute(self, context):
check_mode()
bpy.ops.object.move_to_layer(
layers=(False,False,False,False,False,False,False,False,False,False,
False,False,False,False,False,False,False,False,False,True))
return{'FINISHED'}
#3D cursor
class ToSelected(bpy.types.Operator):
bl_idname = "to.selected"
bl_label = "ToSelected"
def execute(self, context):
bpy.ops.view3d.snap_cursor_to_selected()
return{'FINISHED'}
class ToCursor(bpy.types.Operator):
bl_idname = "to.cursor"
bl_label = "ToCursor"
def execute(self, context):
bpy.ops.view3d.snap_selected_to_cursor()
return{'FINISHED'}
#subdivide
class DivSimple(bpy.types.Operator):
bl_idname = "div.simple"
bl_label = "DivSimple"
def execute(self, context):
objtype = bpy.context.object.type
emode = bpy.context.mode
emode = mode_interpret(emode)
if objtype == 'MESH':
if emode != 'EDIT':
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.subdivide(smoothness=0)
if emode != 'EDIT':
bpy.ops.object.mode_set(mode=emode)
if objtype == 'ARMATURE':
if emode != 'EDIT':
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.armature.subdivide()
if emode != 'EDIT':
bpy.ops.object.mode_set(mode=emode)
if objtype == 'CURVE':
if emode != 'EDIT':
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.curve.subdivide()
if emode != 'EDIT':
bpy.ops.object.mode_set(mode=emode)
return{'FINISHED'}
class DivSmooth(bpy.types.Operator):
bl_idname = "div.smooth"
bl_label = "DivSmooth"
def execute(self, context):
objtype = bpy.context.object.type
emode = bpy.context.mode
emode = mode_interpret(emode)
if bpy.context.object.type == 'MESH':
if emode != 'EDIT':
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.subdivide(smoothness=1)
if emode != 'EDIT':
bpy.ops.object.mode_set(mode=emode)
return{'FINISHED'}
class DivRand(bpy.types.Operator):
bl_idname = "div.rand"
bl_label = "DivRand"
def execute(self, context):
objtype = bpy.context.object.type
emode = bpy.context.mode
emode = mode_interpret(emode)
if bpy.context.object.type == 'MESH':
if emode != 'EDIT':
bpy.ops.object.mode_set(mode='EDIT')
frc = random.random()*6
sed = int(random.random()*10)
bpy.ops.mesh.subdivide(smoothness=0, fractal=frc, seed=sed)
if emode != 'EDIT':
bpy.ops.object.mode_set(mode=emode)
return{'FINISHED'}
class VerSmooth(bpy.types.Operator):
bl_idname = "ver.smooth"
bl_label = "DivSmooth"
def execute(self, context):
objtype = bpy.context.object.type
emode = bpy.context.mode
emode = mode_interpret(emode)
if bpy.context.object.type == 'MESH':
if emode != 'EDIT':
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.mesh.vertices_smooth()
if emode != 'EDIT':
bpy.ops.object.mode_set(mode=emode)
if objtype == 'CURVE':
if emode != 'EDIT':
bpy.ops.object.mode_set(mode='EDIT')
bpy.ops.curve.smooth()
if emode != 'EDIT':
bpy.ops.object.mode_set(mode=emode)
return{'FINISHED'}
#convert
class ConverttoMesh(bpy.types.Operator):
bl_idname = "convert.tomesh"
bl_label = "ConverttoMesh"
def execute(self, context):
objtype = bpy.context.object.type
emode = bpy.context.mode
if emode == 'SCULPT' or emode.find('PAINT') != -1:
return{'FINISHED'}
emode = mode_interpret(emode)
if objtype == 'CURVE' or objtype == 'FONT' or objtype == 'META' or objtype == 'SURFACE':
if emode != 'OBJECT':
bpy.ops.object.mode_set(mode='OBJECT')
bpy.ops.object.convert(target='MESH')
bpy.ops.object.editmode_toggle()
bpy.ops.mesh.select_all(action='SELECT')
bpy.ops.object.editmode_toggle()
if emode != 'OBJECT':
bpy.ops.object.mode_set(mode=emode)
return{'FINISHED'}
class ConverttoCurve(bpy.types.Operator):
bl_idname = "convert.tocurve"
bl_label = "ConverttoCurve"
def execute(self, context):
objtype = bpy.context.object.type
emode = bpy.context.mode
if emode == 'SCULPT' or emode.find('PAINT') != -1:
return{'FINISHED'}
emode = mode_interpret(emode)
if objtype == 'MESH' or objtype == 'FONT':
if emode != 'OBJECT':
bpy.ops.object.mode_set(mode='OBJECT')
bpy.ops.object.convert(target='CURVE')
if emode != 'OBJECT':
bpy.ops.object.mode_set(mode=emode)
return{'FINISHED'}
#add mirror modifier
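# Sketch of the add_mm(direction) workflow implemented below:
#   1. deselect everything, then select the vertices lying on the side to be discarded
#   2. add a Mirror modifier (or reuse an existing one) limited to the chosen axis, with clipping on
#   3. delete the selected vertices so the modifier regenerates that half symmetrically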
def add_mm(direction):
emode = bpy.context.mode
emode = mode_interpret(emode)
obj = bpy.ops.object
cobj = bpy.context.object
mesh = cobj.data
obj.mode_set(mode='EDIT')
bpy.ops.mesh.select_all(action='DESELECT')
obj.mode_set(mode='OBJECT')
    exist = False
    # check whether a Mirror modifier already exists on the object
    for mod in cobj.modifiers:
        if mod.name.find('Mirror') != -1:
            exist = True
            break
if exist == False:
obj.modifier_add(type='MIRROR')
if direction == 'X':
for vertex in mesh.vertices:
if (vertex.co.x < -0.000001):
vertex.select = True
cobj.modifiers["Mirror"].use_x = True
if exist == False:
cobj.modifiers["Mirror"].use_y = False
cobj.modifiers["Mirror"].use_z = False
if direction == '-X':
for vertex in mesh.vertices:
if (vertex.co.x > 0.000001):
vertex.select = True
cobj.modifiers["Mirror"].use_x = True
if exist == False:
cobj.modifiers["Mirror"].use_y = False
cobj.modifiers["Mirror"].use_z = False
if direction == 'Y':
for vertex in mesh.vertices:
if (vertex.co.y < -0.000001):
vertex.select = True
cobj.modifiers["Mirror"].use_y = True
if exist == False:
cobj.modifiers["Mirror"].use_x = False
cobj.modifiers["Mirror"].use_z = False
if direction == '-Y':
for vertex in mesh.vertices:
if (vertex.co.y > 0.000001):
vertex.select = True
cobj.modifiers["Mirror"].use_y = True
if exist == False:
cobj.modifiers["Mirror"].use_x = False
cobj.modifiers["Mirror"].use_z = False
if direction == 'Z':
for vertex in mesh.vertices:
if (vertex.co.z < -0.000001):
vertex.select = True
cobj.modifiers["Mirror"].use_z = True
if exist == False:
cobj.modifiers["Mirror"].use_x = False
cobj.modifiers["Mirror"].use_y = False
if direction == '-Z':
for vertex in mesh.vertices:
if (vertex.co.z > 0.000001):
vertex.select = True
cobj.modifiers["Mirror"].use_z = True
if exist == False:
cobj.modifiers["Mirror"].use_x = False
cobj.modifiers["Mirror"].use_y = False
cobj.modifiers["Mirror"].use_clip = True
obj.mode_set(mode='EDIT')
bpy.ops.mesh.delete(type='VERT')
bpy.ops.mesh.select_all(action='SELECT')
obj.mode_set(mode='OBJECT')
if emode != 'OBJECT':
bpy.ops.object.mode_set(mode=emode)
class AddMmx(bpy.types.Operator):
bl_idname = "add.mmx"
bl_label = "AddMmx"
def execute(self, context):
if bpy.context.object.type == 'MESH':
add_mm('X')
return{'FINISHED'}
class AddMm_x(bpy.types.Operator):
bl_idname = "add.mmmx"
bl_label = "AddMmx"
def execute(self, context):
if bpy.context.object.type == 'MESH':
add_mm('-X')
return{'FINISHED'}
class AddMmy(bpy.types.Operator):
bl_idname = "add.mmy"
bl_label = "AddMmx"
def execute(self, context):
if bpy.context.object.type == 'MESH':
add_mm('Y')
return{'FINISHED'}
class AddMm_y(bpy.types.Operator):
bl_idname = "add.mmmy"
bl_label = "AddMmx"
def execute(self, context):
if bpy.context.object.type == 'MESH':
add_mm('-Y')
return{'FINISHED'}
class AddMmz(bpy.types.Operator):
bl_idname = "add.mmz"
bl_label = "AddMmx"
def execute(self, context):
if bpy.context.object.type == 'MESH':
add_mm('Z')
return{'FINISHED'}
class AddMm_z(bpy.types.Operator):
bl_idname = "add.mmmz"
bl_label = "AddMmx"
def execute(self, context):
if bpy.context.object.type == 'MESH':
add_mm('-Z')
return{'FINISHED'}
#set template empty
def objselect(objct,selection):
if (selection == 'ONLY'):
bpy.ops.object.select_all(action='DESELECT')
bpy.context.scene.objects.active = objct
objct.select = True
def makecenterempty():
bpy.ops.object.empty_add(type='PLAIN_AXES',
view_align=False,
location=(0, 0, 0))
centerempty = bpy.context.object
centerempty.name = 'CenterEmpty'
return centerempty
def makeempty(loc,rot):
bpy.ops.object.empty_add(type='PLAIN_AXES',
view_align=False,
location= loc,
rotation= rot
)
empty = bpy.context.object
empty.empty_draw_type = 'IMAGE'
empty.empty_draw_size = 10
empty.name = 'Template Empty'
empty.color[3] = 0.3 #Transparency
empty.show_x_ray = True
return empty
class TempSingle(bpy.types.Operator):
bl_idname = "temp.single"
bl_label = "TempSingle"
def execute(self, context):
        pi = 3.1415926
pq = pi/2
#sn = bpy.context.scene
erot = [(pq, 0, 0),(pq, 0, pq),(0, 0, 0)]
eloc = [(-5, 0, -5),(0, -5, -5),(-5, -5, 0)]
cempty = makecenterempty()
bpy.ops.group.create(name="TemplateEmpty")
empty = makeempty(eloc[0],erot[0])
bpy.ops.object.group_link(group='TemplateEmpty')
objselect(cempty,'ADD')
bpy.ops.object.parent_set(type='OBJECT')
objselect(cempty,'ONLY')
bpy.ops.view3d.snap_selected_to_cursor()
return{'FINISHED'}
class TempSeparate(bpy.types.Operator):
bl_idname = "temp.separate"
bl_label = "TempSeparate"
def execute(self, context):
        pi = 3.1415926
pq = pi/2
#sn = bpy.context.scene
erot = [(pq, 0, 0),(pq, 0, pq),(0, 0, 0)]
eloc = [(-5, 5, -5),(-5, -5, -5),(-5, -5, -5)]
cempty = makecenterempty()
bpy.ops.group.create(name="TemplateEmpty")
for i in range(3):
empty = makeempty(eloc[i],erot[i])
bpy.ops.object.group_link(group='TemplateEmpty')
objselect(cempty,'ADD')
bpy.ops.object.parent_set(type='OBJECT')
objselect(cempty,'ONLY')
bpy.ops.view3d.snap_selected_to_cursor()
return{'FINISHED'}
class TempContact(bpy.types.Operator):
bl_idname = "temp.contact"
bl_label = "TempContact"
def execute(self, context):
        pi = 3.1415926
pq = pi/2
#sn = bpy.context.scene
erot = [(pq, 0, 0),(pq, 0, pq),(0, 0, 0)]
eloc = [(-5, 0, -5),(0, -5, -5),(-5, -5, 0)]
cempty = makecenterempty()
bpy.ops.group.create(name="TemplateEmpty")
for i in range(3):
empty = makeempty(eloc[i],erot[i])
bpy.ops.object.group_link(group='TemplateEmpty')
objselect(cempty,'ADD')
bpy.ops.object.parent_set(type='OBJECT')
objselect(cempty,'ONLY')
bpy.ops.view3d.snap_selected_to_cursor()
return{'FINISHED'}
# Registration
def register():
bpy.utils.register_class(MonogusaToolsPanel)
#select
bpy.utils.register_class(SelectType)
bpy.utils.register_class(SelectGroup)
bpy.utils.register_class(SelectObjdata)
bpy.utils.register_class(SelectMat)
bpy.utils.register_class(SelectInvert)
bpy.utils.register_class(SelectAll)
bpy.utils.register_class(DeselectAll)
#execute
bpy.utils.register_class(HideSelected)
bpy.utils.register_class(UnhideAll)
bpy.utils.register_class(ExecuteDelete)
#move to layer
bpy.utils.register_class(Send00)
bpy.utils.register_class(Send01)
bpy.utils.register_class(Send02)
bpy.utils.register_class(Send03)
bpy.utils.register_class(Send04)
bpy.utils.register_class(Send05)
bpy.utils.register_class(Send06)
bpy.utils.register_class(Send07)
bpy.utils.register_class(Send08)
bpy.utils.register_class(Send09)
bpy.utils.register_class(Send10)
bpy.utils.register_class(Send11)
bpy.utils.register_class(Send12)
bpy.utils.register_class(Send13)
bpy.utils.register_class(Send14)
bpy.utils.register_class(Send15)
bpy.utils.register_class(Send16)
bpy.utils.register_class(Send17)
bpy.utils.register_class(Send18)
bpy.utils.register_class(Send19)
#3d cursor
bpy.utils.register_class(ToSelected)
bpy.utils.register_class(ToCursor)
    #subdivide
bpy.utils.register_class(DivSimple)
bpy.utils.register_class(DivSmooth)
bpy.utils.register_class(DivRand)
bpy.utils.register_class(VerSmooth)
bpy.utils.register_class(ConverttoMesh)
bpy.utils.register_class(ConverttoCurve)
bpy.utils.register_class(AddMmx)
bpy.utils.register_class(AddMm_x)
bpy.utils.register_class(AddMmy)
bpy.utils.register_class(AddMm_y)
bpy.utils.register_class(AddMmz)
bpy.utils.register_class(AddMm_z)
#set template empty
bpy.utils.register_class(TempSingle)
bpy.utils.register_class(TempSeparate)
bpy.utils.register_class(TempContact)
def unregister():
bpy.utils.unregister_class(MonogusaToolsPanel)
#select
bpy.utils.unregister_class(SelectType)
bpy.utils.unregister_class(SelectGroup)
bpy.utils.unregister_class(SelectObjdata)
bpy.utils.unregister_class(SelectMat)
bpy.utils.unregister_class(SelectInvert)
bpy.utils.unregister_class(SelectAll)
bpy.utils.unregister_class(DeselectAll)
#execute
bpy.utils.unregister_class(HideSelected)
bpy.utils.unregister_class(UnhideAll)
bpy.utils.unregister_class(ExecuteDelete)
#move to layer
bpy.utils.unregister_class(Send00)
bpy.utils.unregister_class(Send01)
bpy.utils.unregister_class(Send02)
bpy.utils.unregister_class(Send03)
bpy.utils.unregister_class(Send04)
bpy.utils.unregister_class(Send05)
bpy.utils.unregister_class(Send06)
bpy.utils.unregister_class(Send07)
bpy.utils.unregister_class(Send08)
bpy.utils.unregister_class(Send09)
bpy.utils.unregister_class(Send10)
bpy.utils.unregister_class(Send11)
bpy.utils.unregister_class(Send12)
bpy.utils.unregister_class(Send13)
bpy.utils.unregister_class(Send14)
bpy.utils.unregister_class(Send15)
bpy.utils.unregister_class(Send16)
bpy.utils.unregister_class(Send17)
bpy.utils.unregister_class(Send18)
bpy.utils.unregister_class(Send19)
#3d cursor
bpy.utils.unregister_class(ToSelected)
bpy.utils.unregister_class(ToCursor)
    #subdivide
bpy.utils.unregister_class(DivSimple)
bpy.utils.unregister_class(DivSmooth)
bpy.utils.unregister_class(DivRand)
bpy.utils.unregister_class(VerSmooth)
bpy.utils.unregister_class(ConverttoMesh)
bpy.utils.unregister_class(ConverttoCurve)
bpy.utils.unregister_class(AddMmx)
bpy.utils.unregister_class(AddMm_x)
bpy.utils.unregister_class(AddMmy)
bpy.utils.unregister_class(AddMm_y)
bpy.utils.unregister_class(AddMmz)
bpy.utils.unregister_class(AddMm_z)
#set template empty
bpy.utils.unregister_class(TempSingle)
bpy.utils.unregister_class(TempSeparate)
bpy.utils.unregister_class(TempContact)
if __name__ == "__main__":
register()
``` |
{
"source": "jimdickinson/astrapy",
"score": 2
} |
#### File: astrapy/astrapy/collections.py
```python
from astrapy.rest import http_methods
from astrapy.rest import create_client as create_astra_client
import logging
import json
logger = logging.getLogger(__name__)
DEFAULT_PAGE_SIZE = 20
DEFAULT_BASE_PATH = "/api/rest/v2/namespaces"
class AstraCollection():
def __init__(self, astra_client=None, namespace_name=None, collection_name=None):
self.astra_client = astra_client
self.namespace_name = namespace_name
self.collection_name = collection_name
self.base_path = f"{DEFAULT_BASE_PATH}/{namespace_name}/collections/{collection_name}"
def _get(self, path=None, options=None):
full_path = f"{self.base_path}/{path}" if path else self.base_path
response = self.astra_client.request(method=http_methods.GET,
path=full_path,
url_params=options)
if isinstance(response, dict):
return response["data"]
return None
def _put(self, path=None, document=None):
return self.astra_client.request(method=http_methods.PUT,
path=f"{self.base_path}/{path}",
json_data=document)
def upgrade(self):
return self.astra_client.request(method=http_methods.POST,
path=f"{self.base_path}/upgrade")
def get_schema(self):
return self.astra_client.request(method=http_methods.GET,
path=f"{self.base_path}/json-schema")
def create_schema(self, schema=None):
return self.astra_client.request(method=http_methods.PUT,
path=f"{self.base_path}/json-schema",
json_data=schema)
def update_schema(self, schema=None):
return self.astra_client.request(method=http_methods.PUT,
path=f"{self.base_path}/json-schema",
json_data=schema)
def get(self, path=None):
return self._get(path=path)
def find(self, query=None, options=None):
options = {} if options is None else options
request_params = {"where": json.dumps(
query), "page-size": DEFAULT_PAGE_SIZE}
request_params.update(options)
return self._get(path=None, options=request_params)
def find_one(self, query=None, options=None):
options = {} if options is None else options
request_params = {"where": json.dumps(query), "page-size": 1}
request_params.update(options)
response = self._get(path=None, options=request_params)
if response is not None:
keys = list(response.keys())
            if len(keys) == 0:
return None
return response[keys[0]]
return None
def create(self, path=None, document=None):
if path is not None:
return self._put(path=path, document=document)
return self.astra_client.request(method=http_methods.POST,
path=self.base_path,
json_data=document)
def update(self, path, document):
return self.astra_client.request(method=http_methods.PATCH,
path=f"{self.base_path}/{path}",
json_data=document)
def replace(self, path, document):
return self._put(path=path, document=document)
def delete(self, path):
return self.astra_client.request(method=http_methods.DELETE,
path=f"{self.base_path}/{path}")
def batch(self, documents=None, id_path=""):
if id_path == "":
id_path = "documentId"
return self.astra_client.request(method=http_methods.POST,
path=f"{self.base_path}/batch",
json_data=documents,
url_params={"id-path": id_path})
def push(self, path=None, value=None):
json_data = {"operation": "$push", "value": value}
res = self.astra_client.request(method=http_methods.POST,
path=f"{self.base_path}/{path}/function",
json_data=json_data)
return res.get("data")
def pop(self, path=None):
json_data = {"operation": "$pop"}
res = self.astra_client.request(method=http_methods.POST,
path=f"{self.base_path}/{path}/function",
json_data=json_data)
return res.get("data")
class AstraNamespace():
def __init__(self, astra_client=None, namespace_name=None):
self.astra_client = astra_client
self.namespace_name = namespace_name
self.base_path = f"{DEFAULT_BASE_PATH}/{namespace_name}"
def collection(self, collection_name):
return AstraCollection(astra_client=self.astra_client,
namespace_name=self.namespace_name,
collection_name=collection_name)
def get_collections(self):
res = self.astra_client.request(method=http_methods.GET,
path=f"{self.base_path}/collections")
return res.get("data")
def create_collection(self, name=""):
return self.astra_client.request(method=http_methods.POST,
path=f"{self.base_path}/collections",
json_data={"name": name})
def delete_collection(self, name=""):
return self.astra_client.request(method=http_methods.DELETE,
path=f"{self.base_path}/collections/{name}")
class AstraDocumentClient():
def __init__(self, astra_client=None):
self.astra_client = astra_client
def namespace(self, namespace_name):
return AstraNamespace(astra_client=self.astra_client, namespace_name=namespace_name)
def create_client(astra_database_id=None,
astra_database_region=None,
astra_application_token=None,
base_url=None,
debug=False):
astra_client = create_astra_client(astra_database_id=astra_database_id,
astra_database_region=astra_database_region,
astra_application_token=astra_application_token,
base_url=base_url)
return AstraDocumentClient(astra_client=astra_client)
```
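The document API above composes as client -> namespace -> collection. The following is a minimal usage sketch, not part of the repository; the database id, region, token, namespace and collection names are placeholder values.
```python
from astrapy.collections import create_client

# Placeholder credentials -- substitute real Astra values.
client = create_client(astra_database_id="my-db-id",
                       astra_database_region="us-east1",
                       astra_application_token="AstraCS:xxxx")

users = client.namespace("my_namespace").collection("users")

# create() without a path POSTs a new document; find_one() wraps a where-query.
users.create(document={"name": "Ada", "city": "London"})
first_londoner = users.find_one(query={"city": {"$eq": "London"}})
print(first_londoner)
```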
#### File: astrapy/astrapy/rest.py
```python
import logging
import requests
logger = logging.getLogger(__name__)
REQUESTED_WITH = "AstraPy"
DEFAULT_AUTH_PATH = "/api/rest/v1/auth"
DEFAULT_TIMEOUT = 30000
DEFAULT_AUTH_HEADER = "X-Cassandra-Token"
class http_methods():
GET = "GET"
POST = "POST"
PUT = "PUT"
PATCH = "PATCH"
DELETE = "DELETE"
class AstraClient():
def __init__(self, astra_database_id=None,
astra_database_region=None,
astra_application_token=None,
base_url=None,
auth_header=None):
self.astra_database_id = astra_database_id
self.astra_database_region = astra_database_region
self.astra_application_token = astra_application_token
self.base_url = base_url
        self.auth_header = auth_header if auth_header is not None else DEFAULT_AUTH_HEADER
def request(self, method=http_methods.GET, path=None, json_data=None, url_params=None):
r = requests.request(method=method, url=f"{self.base_url}{path}",
params=url_params, json=json_data, timeout=DEFAULT_TIMEOUT,
headers={self.auth_header: self.astra_application_token})
try:
return r.json()
        except ValueError:  # response body is not valid JSON
return None
def create_client(astra_database_id=None,
astra_database_region=None,
astra_application_token=None,
base_url=None,
debug=False):
if base_url is None:
base_url = f"https://{astra_database_id}-{astra_database_region}.apps.astra.datastax.com"
return AstraClient(astra_database_id=astra_database_id,
astra_database_region=astra_database_region,
astra_application_token=astra_application_token,
base_url=base_url)
``` |
{
"source": "jimdickinson/stardog",
"score": 2
} |
#### File: jimdickinson/stardog/stardog_server.py
```python
import os
import cherrypy
import requests
import uuid
import json
from urllib.parse import urlparse
from cherrypy.lib.static import serve_file
from jinja2 import Environment, PackageLoader, select_autoescape
# Environment variables to connect to Astra
ASTRA_BASE_URL_ENV = "BASE_URL" # e.g. https://asdfasdfadsf-us-east1.apps.astra.datastax.com/
ASTRA_USERNAME = "ASTRA_USERNAME"
ASTRA_PASSWORD = "<PASSWORD>"
env = Environment(
loader=PackageLoader('stardog.endpoints', 'resources'),
autoescape=select_autoescape(['html', 'xml'])
)
CURRENT_DIR = os.path.dirname(os.path.abspath(__file__))
class StardogServer:
@cherrypy.expose
def static(self, name):
if name.endswith('css'):
return serve_file(os.path.join(CURRENT_DIR, 'stardog', 'endpoints', 'resources', 'static', name), content_type='text/css')
if name.endswith('svg'):
return serve_file(os.path.join(CURRENT_DIR, 'stardog', 'endpoints', 'resources', 'static', name), content_type='image/svg+xml')
@cherrypy.expose
def query(self, name):
if name == 'index.html':
template = env.get_template(name)
return template.render({'pods': [{'name': 'pods'}, {'name': 'namespaces'}, {'name': 'deployments'}]})
raise cherrypy.HTTPError(404, message="Resource Not Found")
@cherrypy.expose
def item(self, name):
if name.endswith('js'):
return serve_file(os.path.join(CURRENT_DIR, 'stardog', 'endpoints', 'resources', name), content_type='text/javascript')
template = env.get_template('item_explorer.html')
return template.render({'name': name})
class Api:
pass
def api_url(path):
baseurl = os.getenv(ASTRA_BASE_URL_ENV)
o = urlparse(baseurl)
o = o._replace(path=path)
return o.geturl()
def authenticate():
username = os.getenv(ASTRA_USERNAME)
password = os.getenv(ASTRA_PASSWORD)
url = api_url('/api/rest/v1/auth')
payload = {"username": username, "password": password}
headers = {'accept': '*/*',
'content-type': 'application/json'}
# make auth request to Astra
r = requests.post(url,
data=json.dumps(payload),
headers=headers)
# extract and return the auth token
data = json.loads(r.text)
return data["authToken"]
class Proxy:
def __init__(self):
self.token = None
@cherrypy.expose
def default(self, *args, **kwargs):
# get the correct request uri for astra
request_uri = cherrypy.request.request_line.split()[1]
request_uri_parsed = urlparse(request_uri)
api_base_parsed = urlparse(os.getenv(ASTRA_BASE_URL_ENV))
# The path of the request should be the same as the one we proxy to,
# but we need to fix the scheme and hostname/port.
apiuri_parsed = request_uri_parsed._replace(
scheme=api_base_parsed.scheme, netloc=api_base_parsed.netloc
)
apiuri = apiuri_parsed.geturl()
content = None
try:
content = cherrypy.request.body.read()
except Exception as e:
pass
api_request_headers = {}
for (header, header_value) in cherrypy.request.header_list:
if not ( header.lower() in ('remote-addr', 'host', 'user-agent', 'content-length') ):
api_request_headers[header] = header_value
# Get the appropriate requests function for this http method
req_func = getattr(requests, cherrypy.request.method.lower())
def do_api_request():
# if we have a token, use it
if self.token:
api_request_headers['x-cassandra-token'] = self.token
return req_func(apiuri, data=content, headers=api_request_headers, verify=False)
api_resp = do_api_request()
if api_resp.status_code in (401, 403, 500):
# hmmm.... didn't work... maybe we have no token or an expired one?
self.token = authenticate()
api_resp = do_api_request()
cherrypy.response.status = api_resp.status_code
for (header, header_value) in api_resp.headers.items():
if not ( header.lower() in ('content-length', 'server', 'content-encoding', 'transfer-encoding') ):
cherrypy.response.headers[header] = header_value
return api_resp.content
CHERRY_TREE_CONFIG = {
}
def setup_cherry_tree(port=8080):
# Don't show traceback as HTML to the client on error
# Run as if we're in production (so no 'debug' mode)
cherrypy.config.update({
'server.socket_host': '0.0.0.0',
'server.socket_port': port,
'environment': 'production',
'log.screen': True,
'show_tracebacks': True,
})
service = StardogServer()
service.api = Proxy()
return service
def startup_server():
# Three endpoints, defined here:
# /query (returns HTML)
# /api/executeQuery
# /api/newDocument
service = setup_cherry_tree()
print('Stardog server running on port 8080')
cherrypy.config.update({'server.socket_host': '0.0.0.0'})
cherrypy.quickstart(service, '/', CHERRY_TREE_CONFIG)
if __name__ == '__main__':
startup_server()
``` |
{
"source": "jimdn/monitor-toolkits",
"score": 3
} |
#### File: bin/qcloudsms_py/util.py
```python
from __future__ import absolute_import, division, print_function
import random
import time
import hashlib
from qcloudsms_py.httpclient import HTTPError, http_fetch, utf8
def get_random():
"""Get a random number"""
return random.randint(100000, 999999)
def get_current_time():
"""Get current time"""
return int(time.time())
def calculate_signature(appkey, rand, time, phone_numbers=None):
"""Calculate a request signature according to parameters.
:param appkey: sdk appkey
    :param rand: random number
:param time: unix timestamp time
:param phone_numbers: phone number array
"""
raw_text = "appkey={}&random={}&time={}".format(appkey, rand, time)
if phone_numbers:
raw_text += "&mobile={}".format(
",".join(map(str, phone_numbers)))
return hashlib.sha256(utf8(raw_text)).hexdigest()
def api_request(req):
"""Make a API request and return response.
:param req: `qcloudsms_py.httpclient.HTTPRequest` instance
"""
res = http_fetch(req)
if not res.ok():
raise HTTPError(res.code, res.reason)
return res.json()
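# Illustrative sketch (not part of the original module): how the helpers above
# are typically combined when signing a request. The appkey and phone number
# below are placeholders.
if __name__ == "__main__":
    rand = get_random()
    now = get_current_time()
    sig = calculate_signature("your-appkey", rand, now, ["13712345678"])
    # sig is the hex sha256 of "appkey=...&random=...&time=...&mobile=..."
    print(rand, now, sig)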
```
#### File: monitor-toolkits/moni-exporter/svrmonitor.py
```python
from __future__ import division
import sys
import re
import os
import math
import glob
import platform
import traceback
from time import time, sleep
g_attr = {
# /proc/net/dev statistics
'eth0_in_pkg': {'type': 'gauge', 'id': 10001, 'desc': 'pkg/s'},
'eth0_out_pkg': {'type': 'gauge', 'id': 10002, 'desc': 'pkg/s'},
'eth0_in_traff': {'type': 'gauge', 'id': 10003, 'desc': 'bits/s'},
'eth0_out_traff': {'type': 'gauge', 'id': 10004, 'desc': 'bits/s'},
'eth1_in_pkg': {'type': 'gauge', 'id': 10011, 'desc': 'pkg/s'},
'eth1_out_pkg': {'type': 'gauge', 'id': 10012, 'desc': 'pkg/s'},
'eth1_in_traff': {'type': 'gauge', 'id': 10013, 'desc': 'bits/s'},
'eth1_out_traff': {'type': 'gauge', 'id': 10014, 'desc': 'bits/s'},
'bond0_in_pkg': {'type': 'gauge', 'id': 10021, 'desc': 'pkg/s'},
'bond0_out_pkg': {'type': 'gauge', 'id': 10022, 'desc': 'pkg/s'},
'bond0_in_traff': {'type': 'gauge', 'id': 10023, 'desc': 'bits/s'},
'bond0_out_traff': {'type': 'gauge', 'id': 10024, 'desc': 'bits/s'},
'bond1_in_pkg': {'type': 'gauge', 'id': 10031, 'desc': 'pkg/s'},
'bond1_out_pkg': {'type': 'gauge', 'id': 10032, 'desc': 'pkg/s'},
'bond1_in_traff': {'type': 'gauge', 'id': 10033, 'desc': 'bits/s'},
'bond1_out_traff': {'type': 'gauge', 'id': 10034, 'desc': 'bits/s'},
# CPU usage
'cpu': {'type': 'gauge', 'id': 20000, 'desc': '%'},
'cpu0': {'type': 'gauge', 'id': 20001, 'desc': '%'},
'cpu1': {'type': 'gauge', 'id': 20002, 'desc': '%'},
'cpu2': {'type': 'gauge', 'id': 20003, 'desc': '%'},
'cpu3': {'type': 'gauge', 'id': 20004, 'desc': '%'},
'cpu4': {'type': 'gauge', 'id': 20005, 'desc': '%'},
'cpu5': {'type': 'gauge', 'id': 20006, 'desc': '%'},
'cpu6': {'type': 'gauge', 'id': 20007, 'desc': '%'},
'cpu7': {'type': 'gauge', 'id': 20008, 'desc': '%'},
'cpu8': {'type': 'gauge', 'id': 20009, 'desc': '%'},
'cpu9': {'type': 'gauge', 'id': 20010, 'desc': '%'},
'cpu10': {'type': 'gauge', 'id': 20011, 'desc': '%'},
'cpu11': {'type': 'gauge', 'id': 20012, 'desc': '%'},
'cpu12': {'type': 'gauge', 'id': 20013, 'desc': '%'},
'cpu13': {'type': 'gauge', 'id': 20014, 'desc': '%'},
'cpu14': {'type': 'gauge', 'id': 20015, 'desc': '%'},
'cpu15': {'type': 'gauge', 'id': 20016, 'desc': '%'},
'cpu16': {'type': 'gauge', 'id': 20017, 'desc': '%'},
'cpu17': {'type': 'gauge', 'id': 20018, 'desc': '%'},
'cpu18': {'type': 'gauge', 'id': 20019, 'desc': '%'},
'cpu19': {'type': 'gauge', 'id': 20020, 'desc': '%'},
'cpu20': {'type': 'gauge', 'id': 20021, 'desc': '%'},
'cpu21': {'type': 'gauge', 'id': 20022, 'desc': '%'},
'cpu22': {'type': 'gauge', 'id': 20023, 'desc': '%'},
'cpu23': {'type': 'gauge', 'id': 20024, 'desc': '%'},
'cpu24': {'type': 'gauge', 'id': 20025, 'desc': '%'},
'cpu25': {'type': 'gauge', 'id': 20026, 'desc': '%'},
'cpu26': {'type': 'gauge', 'id': 20027, 'desc': '%'},
'cpu27': {'type': 'gauge', 'id': 20028, 'desc': '%'},
'cpu28': {'type': 'gauge', 'id': 20029, 'desc': '%'},
'cpu29': {'type': 'gauge', 'id': 20030, 'desc': '%'},
'cpu30': {'type': 'gauge', 'id': 20031, 'desc': '%'},
'cpu31': {'type': 'gauge', 'id': 20032, 'desc': '%'},
'cpu32': {'type': 'gauge', 'id': 20033, 'desc': '%'},
'cpu33': {'type': 'gauge', 'id': 20034, 'desc': '%'},
'cpu34': {'type': 'gauge', 'id': 20035, 'desc': '%'},
'cpu35': {'type': 'gauge', 'id': 20036, 'desc': '%'},
'cpu36': {'type': 'gauge', 'id': 20037, 'desc': '%'},
'cpu37': {'type': 'gauge', 'id': 20038, 'desc': '%'},
'cpu38': {'type': 'gauge', 'id': 20039, 'desc': '%'},
'cpu39': {'type': 'gauge', 'id': 20040, 'desc': '%'},
'cpu40': {'type': 'gauge', 'id': 20041, 'desc': '%'},
'cpu41': {'type': 'gauge', 'id': 20042, 'desc': '%'},
'cpu42': {'type': 'gauge', 'id': 20043, 'desc': '%'},
'cpu43': {'type': 'gauge', 'id': 20044, 'desc': '%'},
'cpu44': {'type': 'gauge', 'id': 20045, 'desc': '%'},
'cpu45': {'type': 'gauge', 'id': 20046, 'desc': '%'},
'cpu46': {'type': 'gauge', 'id': 20047, 'desc': '%'},
'cpu47': {'type': 'gauge', 'id': 20048, 'desc': '%'},
# memory usage
'mem_total': {'type': 'gauge', 'id': 30000, 'desc': 'Bytes'},
'mem_used': {'type': 'gauge', 'id': 30001, 'desc': 'Bytes'},
'mem_free': {'type': 'gauge', 'id': 30002, 'desc': 'Bytes'},
'shm_num': {'type': 'gauge', 'id': 30003, 'desc': '-'},
'shm_use': {'type': 'gauge', 'id': 30004, 'desc': '-'},
'dev_shm_size': {'type': 'gauge', 'id': 30005, 'desc': 'Bytes'},
'dev_shm_use': {'type': 'gauge', 'id': 30006, 'desc': 'Bytes'},
# swap
'swap_total': {'type': 'gauge', 'id': 31001, 'desc': 'Bytes'},
'swap_free': {'type': 'gauge', 'id': 31002, 'desc': 'Bytes'},
'swap_in': {'type': 'gauge', 'id': 31003, 'desc': 'Bytes/s'},
'swap_out': {'type': 'gauge', 'id': 31004, 'desc': 'Bytes/s'},
# hard disk usage
'/': {'type': 'gauge', 'id': 40001, 'desc': '%'},
'/usr/local': {'type': 'gauge', 'id': 40002, 'desc': '%'},
'/data': {'type': 'gauge', 'id': 40003, 'desc': '%'},
'/data1': {'type': 'gauge', 'id': 40004, 'desc': '%'},
'/data2': {'type': 'gauge', 'id': 40005, 'desc': '%'},
'/data3': {'type': 'gauge', 'id': 40006, 'desc': '%'},
'/data4': {'type': 'gauge', 'id': 40007, 'desc': '%'},
'/data5': {'type': 'gauge', 'id': 40008, 'desc': '%'},
'/data6': {'type': 'gauge', 'id': 40009, 'desc': '%'},
'/data7': {'type': 'gauge', 'id': 40010, 'desc': '%'},
'/data8': {'type': 'gauge', 'id': 40011, 'desc': '%'},
'/ssd/data': {'type': 'gauge', 'id': 40012, 'desc': '%'},
'/ssd/data1': {'type': 'gauge', 'id': 40013, 'desc': '%'},
'/ssd/data2': {'type': 'gauge', 'id': 40014, 'desc': '%'},
'/ssd/data3': {'type': 'gauge', 'id': 40015, 'desc': '%'},
'/ssd/data4': {'type': 'gauge', 'id': 40016, 'desc': '%'},
'/ssd/data5': {'type': 'gauge', 'id': 40017, 'desc': '%'},
'/ssd/data6': {'type': 'gauge', 'id': 40018, 'desc': '%'},
'/ssd/data7': {'type': 'gauge', 'id': 40019, 'desc': '%'},
'/ssd/data8': {'type': 'gauge', 'id': 40020, 'desc': '%'},
# hard disk io usage
    'sda_rio': {'type': 'gauge', 'id': 41001, 'desc': 'sda read requests per second (ops/s)'},
    'sda_wio': {'type': 'gauge', 'id': 41002, 'desc': 'sda write requests per second (ops/s)'},
    'sda_rsect': {'type': 'gauge', 'id': 41003, 'desc': 'sda disk read throughput (KB/s)'},
    'sda_wsect': {'type': 'gauge', 'id': 41004, 'desc': 'sda disk write throughput (KB/s)'},
    'sda_await': {'type': 'gauge', 'id': 41005, 'desc': 'sda average wait time per I/O operation (microseconds)'},
    'sda_svctm': {'type': 'gauge', 'id': 41006, 'desc': 'sda average service time per I/O operation (microseconds)'},
    'sdb_rio': {'type': 'gauge', 'id': 41011, 'desc': 'sdb read requests per second (ops/s)'},
    'sdb_wio': {'type': 'gauge', 'id': 41012, 'desc': 'sdb write requests per second (ops/s)'},
    'sdb_rsect': {'type': 'gauge', 'id': 41013, 'desc': 'sdb disk read throughput (KB/s)'},
    'sdb_wsect': {'type': 'gauge', 'id': 41014, 'desc': 'sdb disk write throughput (KB/s)'},
    'sdb_await': {'type': 'gauge', 'id': 41015, 'desc': 'sdb average wait time per I/O operation (microseconds)'},
    'sdb_svctm': {'type': 'gauge', 'id': 41016, 'desc': 'sdb average service time per I/O operation (microseconds)'},
    'sdc_rio': {'type': 'gauge', 'id': 41021, 'desc': 'sdc read requests per second (ops/s)'},
    'sdc_wio': {'type': 'gauge', 'id': 41022, 'desc': 'sdc write requests per second (ops/s)'},
    'sdc_rsect': {'type': 'gauge', 'id': 41023, 'desc': 'sdc disk read throughput (KB/s)'},
    'sdc_wsect': {'type': 'gauge', 'id': 41024, 'desc': 'sdc disk write throughput (KB/s)'},
    'sdc_await': {'type': 'gauge', 'id': 41025, 'desc': 'sdc average wait time per I/O operation (microseconds)'},
    'sdc_svctm': {'type': 'gauge', 'id': 41026, 'desc': 'sdc average service time per I/O operation (microseconds)'},
    'sdd_rio': {'type': 'gauge', 'id': 41031, 'desc': 'sdd read requests per second (ops/s)'},
    'sdd_wio': {'type': 'gauge', 'id': 41032, 'desc': 'sdd write requests per second (ops/s)'},
    'sdd_rsect': {'type': 'gauge', 'id': 41033, 'desc': 'sdd disk read throughput (KB/s)'},
    'sdd_wsect': {'type': 'gauge', 'id': 41034, 'desc': 'sdd disk write throughput (KB/s)'},
    'sdd_await': {'type': 'gauge', 'id': 41035, 'desc': 'sdd average wait time per I/O operation (microseconds)'},
    'sdd_svctm': {'type': 'gauge', 'id': 41036, 'desc': 'sdd average service time per I/O operation (microseconds)'},
    'sde_rio': {'type': 'gauge', 'id': 41041, 'desc': 'sde read requests per second (ops/s)'},
    'sde_wio': {'type': 'gauge', 'id': 41042, 'desc': 'sde write requests per second (ops/s)'},
    'sde_rsect': {'type': 'gauge', 'id': 41043, 'desc': 'sde disk read throughput (KB/s)'},
    'sde_wsect': {'type': 'gauge', 'id': 41044, 'desc': 'sde disk write throughput (KB/s)'},
    'sde_await': {'type': 'gauge', 'id': 41045, 'desc': 'sde average wait time per I/O operation (microseconds)'},
    'sde_svctm': {'type': 'gauge', 'id': 41046, 'desc': 'sde average service time per I/O operation (microseconds)'},
    'sdf_rio': {'type': 'gauge', 'id': 41051, 'desc': 'sdf read requests per second (ops/s)'},
    'sdf_wio': {'type': 'gauge', 'id': 41052, 'desc': 'sdf write requests per second (ops/s)'},
    'sdf_rsect': {'type': 'gauge', 'id': 41053, 'desc': 'sdf disk read throughput (KB/s)'},
    'sdf_wsect': {'type': 'gauge', 'id': 41054, 'desc': 'sdf disk write throughput (KB/s)'},
    'sdf_await': {'type': 'gauge', 'id': 41055, 'desc': 'sdf average wait time per I/O operation (microseconds)'},
    'sdf_svctm': {'type': 'gauge', 'id': 41056, 'desc': 'sdf average service time per I/O operation (microseconds)'},
    'sdg_rio': {'type': 'gauge', 'id': 41061, 'desc': 'sdg read requests per second (ops/s)'},
    'sdg_wio': {'type': 'gauge', 'id': 41062, 'desc': 'sdg write requests per second (ops/s)'},
    'sdg_rsect': {'type': 'gauge', 'id': 41063, 'desc': 'sdg disk read throughput (KB/s)'},
    'sdg_wsect': {'type': 'gauge', 'id': 41064, 'desc': 'sdg disk write throughput (KB/s)'},
    'sdg_await': {'type': 'gauge', 'id': 41065, 'desc': 'sdg average wait time per I/O operation (microseconds)'},
    'sdg_svctm': {'type': 'gauge', 'id': 41066, 'desc': 'sdg average service time per I/O operation (microseconds)'},
    'sdh_rio': {'type': 'gauge', 'id': 41071, 'desc': 'sdh read requests per second (ops/s)'},
    'sdh_wio': {'type': 'gauge', 'id': 41072, 'desc': 'sdh write requests per second (ops/s)'},
    'sdh_rsect': {'type': 'gauge', 'id': 41073, 'desc': 'sdh disk read throughput (KB/s)'},
    'sdh_wsect': {'type': 'gauge', 'id': 41074, 'desc': 'sdh disk write throughput (KB/s)'},
    'sdh_await': {'type': 'gauge', 'id': 41075, 'desc': 'sdh average wait time per I/O operation (microseconds)'},
    'sdh_svctm': {'type': 'gauge', 'id': 41076, 'desc': 'sdh average service time per I/O operation (microseconds)'},
# Ip statistics
    'Ip_InReceives': {'type': 'gauge', 'id': 50001, 'desc': 'IP stats - total packets received (pkg/m)'},
    'Ip_InHdrErrors': {'type': 'gauge', 'id': 50002, 'desc': 'IP stats - input header errors (pkg/m)'},
    'Ip_InDiscards': {'type': 'gauge', 'id': 50003, 'desc': 'IP stats - input packets discarded (pkg/m)'},
    'Ip_InDelivers': {'type': 'gauge', 'id': 50004, 'desc': 'IP stats - packets delivered to upper protocols (pkg/m)'},
    'Ip_OutRequests': {'type': 'gauge', 'id': 50005, 'desc': 'IP stats - total packets sent (pkg/m)'},
    'Ip_OutDiscards': {'type': 'gauge', 'id': 50006, 'desc': 'IP stats - output packets discarded (pkg/m)'},
    'Ip_ReasmTimeout': {'type': 'gauge', 'id': 50007, 'desc': 'IP stats - reassembly timeouts (per minute)'},
    'Ip_ReasmReqds': {'type': 'gauge', 'id': 50008, 'desc': 'IP stats - incoming fragments needing reassembly (per minute)'},
    'Ip_ReasmOKs': {'type': 'gauge', 'id': 50009, 'desc': 'IP stats - successful reassemblies (per minute)'},
    'Ip_ReasmFails': {'type': 'gauge', 'id': 50010, 'desc': 'IP stats - failed reassemblies (per minute)'},
    'Ip_FragOKs': {'type': 'gauge', 'id': 50011, 'desc': 'IP stats - successful fragmentations (per minute)'},
    'Ip_FragFails': {'type': 'gauge', 'id': 50012, 'desc': 'IP stats - failed fragmentations (per minute)'},
    'Ip_FragCreates': {'type': 'gauge', 'id': 50013, 'desc': 'IP stats - fragments created (per minute)'},
# Tcp statistics
    'Tcp_InSegs': {'type': 'gauge', 'id': 51001, 'desc': 'TCP stats - segments received (pkg/s)'},
    'Tcp_OutSegs': {'type': 'gauge', 'id': 51002, 'desc': 'TCP stats - segments sent (pkg/s)'},
    'Tcp_CurrEstab': {'type': 'gauge', 'id': 51003, 'desc': 'TCP stats - current established connections'},
    'Tcp_NewEstab': {'type': 'gauge', 'id': 51004, 'desc': 'TCP stats - change in established connections (increase or decrease)'},
    'Tcp_ActiveOpens': {'type': 'gauge', 'id': 51005, 'desc': 'TCP stats - connections actively opened by this server (per minute)'},
    'Tcp_PassiveOpens': {'type': 'gauge', 'id': 51006, 'desc': 'TCP stats - connections accepted by this server (per minute)'},
    'Tcp_AttemptFails': {'type': 'gauge', 'id': 51007, 'desc': 'TCP stats - connection attempts reset by the peer (per minute)'},
    'Tcp_RetransSegs': {'type': 'gauge', 'id': 51008, 'desc': 'TCP stats - segments retransmitted (pkg/m)'},
    'Tcp_RetransRatio': {'type': 'gauge', 'id': 51009, 'desc': 'TCP stats - retransmission ratio (%, current minute)'},
    'Tcp_InErrs': {'type': 'gauge', 'id': 51010, 'desc': 'TCP stats - incoming segment errors (pkg/m, usually checksum errors)'},
    'Tcp_TcpInCsumErrors': {'type': 'gauge', 'id': 51011, 'desc': 'TCP stats - incoming checksum errors (pkg/m)'},
    'Tcp_OutRsts': {'type': 'gauge', 'id': 51012, 'desc': 'TCP stats - RST segments sent (pkg/m)'},
    'Tcp_EstabResets': {'type': 'gauge', 'id': 51013, 'desc': 'TCP stats - established connections reset (per minute)'},
    'TcpExt_ListenOverflows': {'type': 'gauge', 'id': 51014, 'desc': 'TCP stats - listen queue overflows (per minute)'},
    'TcpExt_TCPTimeouts': {'type': 'gauge', 'id': 51015, 'desc': 'TCP stats - TCP timeouts (per minute)'},
# Udp statistics
    'Udp_InDatagrams': {'type': 'gauge', 'id': 52001, 'desc': 'UDP stats - datagrams received (pkg/s)'},
    'Udp_OutDatagrams': {'type': 'gauge', 'id': 52002, 'desc': 'UDP stats - datagrams sent (pkg/s)'},
    'Udp_InErrors': {'type': 'gauge', 'id': 52003, 'desc': 'UDP stats - InErrors (pkg/m)'},
    'Udp_NoPorts': {'type': 'gauge', 'id': 52004, 'desc': 'UDP stats - NoPorts (pkg/m)'},
    'Udp_InCsumErrors': {'type': 'gauge', 'id': 52005, 'desc': 'UDP stats - InCsumErrors (pkg/m)'},
    'Udp_RcvbufErrors': {'type': 'gauge', 'id': 52006, 'desc': 'UDP stats - RcvbufErrors (pkg/m)'},
    'Udp_SndbufErrors': {'type': 'gauge', 'id': 52007, 'desc': 'UDP stats - SndbufErrors (pkg/m)'},
    # Icmp statistics
    'Icmp_InDestUnreachs': {'type': 'gauge', 'id': 53001, 'desc': 'ICMP stats - destination unreachable messages received (pkg/m)'},
    'Icmp_OutDestUnreachs': {'type': 'gauge', 'id': 53002, 'desc': 'ICMP stats - destination unreachable messages sent (pkg/m)'},
# File descriptor statistics
    'fd_used': {'type': 'gauge', 'id': 60001, 'desc': 'File descriptors - allocated and in use'},
    'fd_unuse': {'type': 'gauge', 'id': 60002, 'desc': 'File descriptors - allocated but unused'},
    'fd_max': {'type': 'gauge', 'id': 60003, 'desc': 'File descriptors - system-wide maximum'},
# Process information
    'total_process': {'type': 'gauge', 'id': 70001, 'desc': 'Process stats - total processes'},
    'procs_running': {'type': 'gauge', 'id': 70002, 'desc': 'Process stats - runnable processes'},
    'procs_blocked': {'type': 'gauge', 'id': 70003, 'desc': 'Process stats - blocked processes'},
    'new_process': {'type': 'gauge', 'id': 70004, 'desc': 'Process stats - new processes created (per minute)'},
}
class BaseProcessor(object):
"""
Base Processor for collecting and reporting data.
All specific instance should inherit from this class.
"""
def __init__(self, interval):
# time to sleep (seconds)
# result1 is the result before sleep
# result2 is the result after sleep
self.interval = interval
self.result1 = {}
self.result2 = {}
def collect(self):
"""
Implemented by subclasses
There are 2 kinds of return value:
result = {'key1': v1, 'key2': v2, ...}
result = {'key1': [v1, v2, ...], 'key2': [v1, v2, ...], ...}
"""
return {}
def process(self):
"""
If interval is zero, return instantaneous value
If interval is not zero, return increasing value
There are 2 kinds of return value:
result = {'key1': v1, 'key2': v2, ...}
result = {'key1': [v1, v2, ...], 'key2': [v1, v2, ...], ...}
"""
result = self.collect()
self.result1 = result.copy()
if self.interval > 0:
sleep(self.interval)
self.result2 = self.collect()
if type(self.result1) == dict and type(self.result2) == dict and len(self.result1) > 0 and len(self.result2) > 0:
for key in self.result2.keys():
if type(self.result2[key]) == list:
for i in range(len(self.result2[key])):
try:
tmp = int(self.result2[key][i]) - int(self.result1[key][i])
except Exception:
print(traceback.format_exc())
tmp = 0
if tmp < 0:
result[key][i] = tmp + 4294967296
else:
result[key][i] = tmp
else:
try:
tmp = int(self.result2[key]) - int(self.result1[key])
except Exception:
print(traceback.format_exc())
tmp = 0
if tmp < 0:
result[key] = tmp + 4294967296
else:
result[key] = tmp
return result
def report(self):
"""
Report to shm
"""
global g_attr
result = self.process()
if len(result) > 0:
for key in result.keys():
if key not in g_attr:
continue
typ = g_attr[key]["type"]
attrid = g_attr[key]["id"]
value = result[key]
if typ == "counter":
tool = "%s/%s" % (os.path.split(os.path.realpath(__file__))[0], 'rpt-counter')
cmd = "%s %s %d" % (tool, str(attrid).strip(), value)
os.system(cmd)
# print("(%s) %s" % (key, cmd))
elif typ == "gauge":
tool = "%s/%s" % (os.path.split(os.path.realpath(__file__))[0], 'rpt-gauge')
cmd = "%s %s %d" % (tool, str(attrid).strip(), value)
os.system(cmd)
# print("(%s) %s" % (key, cmd))
class NetTraffic(BaseProcessor):
"""
Get network traffic information
Calculate by: /proc/net/dev
"""
def collect(self):
fd = open("/proc/net/dev")
sep = re.compile(r'[:\s]+')
traffic_dict = {}
for line in fd:
# skip header line
if ":" not in line: continue
fields = sep.split(line.strip())
intf = fields.pop(0)
traffic_dict[intf + "_in_traff"] = int(fields[0])
traffic_dict[intf + "_in_pkg"] = int(fields[1])
traffic_dict[intf + "_out_traff"] = int(fields[8+0])
traffic_dict[intf + "_out_pkg"] = int(fields[8+1])
fd.close()
return traffic_dict
def process(self):
result = {}
base_result = super(NetTraffic, self).process()
for key in base_result.keys():
try:
if "traff" in key:
# traffic ( bits/s ) = bytes * 8 / 60
result[key] = int(math.ceil(int(base_result[key]) * 8 / self.interval))
elif "pkg" in key:
# traffic ( pkg/s ) = pkg / 60
result[key] = int(math.ceil(int(base_result[key]) / self.interval))
except Exception:
print(traceback.format_exc())
result[key] = 0
return result
class CpuUsage(BaseProcessor):
"""
Get each CPU usage information
Calculate by: /proc/stat
"""
def collect(self):
fd = open('/proc/stat')
cpus_info_list = [ l for l in fd.readlines() if l.startswith('cpu') ]
fd.close()
cpus_use_dict = {}
for line in cpus_info_list:
cpu_list = line.split()
cpus_use_dict[cpu_list[0]] = cpu_list[1:]
return cpus_use_dict
def process(self):
result = {}
base_result = super(CpuUsage, self).process()
for key in base_result.keys():
try:
total = 0.0
for item in base_result[key]:
total += float(item)
                # CPU usage = 100 * (total - idle - iowait) / total
result[key] = int(math.ceil(100 * (total - base_result[key][3] - base_result[key][4]) / total))
except Exception:
print(traceback.format_exc())
result[key] = 0
return result
class MemUsage(BaseProcessor):
'''
Get memory usage information
Calculate by: /proc/meminfo
'''
def collect(self):
fd = open("/proc/meminfo")
mem_info_list = fd.readlines()
fd.close()
mem_use_dict = {}
for line in mem_info_list:
tmp = line.split(":")
try:
mem_use_dict[tmp[0]] = int(tmp[1].split()[0])
except Exception:
print(traceback.format_exc())
mem_use_dict[tmp[0]] = 0
return mem_use_dict
def process(self):
result = {}
base_result = super(MemUsage, self).process()
if 'Mapped' in base_result:
result['mem_free'] = int((base_result['MemFree'] + base_result['Cached'] - base_result['Dirty'] - base_result['Mapped'])) * 1024
else:
result['mem_free'] = int(base_result['MemFree']) * 1024
result['mem_total'] = int(base_result['MemTotal']) * 1024
result['mem_used'] = int(base_result['MemTotal']) * 1024 - result['mem_free']
result['swap_total'] = int(base_result['SwapTotal']) * 1024
result['swap_free'] = int(base_result['SwapFree']) * 1024
return result
class ShmUsage(BaseProcessor):
"""
Get shm usage information
"""
def collect(self):
cmd = "ipcs -mb | grep -E '^0x'"
fd = os.popen(cmd)
shm_list = fd.readlines()
fd.close()
shm_use_dict = {}
shm_use_dict["shm_num"] = int(len(shm_list))
use_bytes = 0
for line in shm_list:
tmp = line.split()
use_bytes += int(tmp[4])
shm_use_dict["shm_use"] = use_bytes
return shm_use_dict
class PosixShmUsage(BaseProcessor):
"""
Get posix shm usage information
Calculate by: df -k
"""
def collect(self):
cmd = "df -k | grep /dev/shm"
fd = os.popen(cmd)
shm_list = fd.read().strip().split()
fd.close()
posix_shm_dict = {}
posix_shm_dict['dev_shm_size'] = int(shm_list[1]) * 1024
posix_shm_dict['dev_shm_use'] = int(shm_list[2]) * 1024
return posix_shm_dict
class SwapUsage(BaseProcessor):
"""
Get swap in and out amount
Calculate by: /proc/vmstat
"""
def collect(self):
cmd = "grep -E '^(pswpin|pswpout)' /proc/vmstat"
fd = os.popen(cmd)
swap_list = fd.readlines()
fd.close()
swap_dict = {}
for line in swap_list:
tmp = line.split()
try:
swap_dict[tmp[0]] = int(tmp[1])
except Exception:
print(traceback.format_exc())
swap_dict[tmp[0]] = 0
return swap_dict
def process(self):
result = {}
base_result = super(SwapUsage, self).process()
result['swap_in'] = int(base_result['pswpin'] * 1024 / self.interval)
result['swap_out'] = int(base_result['pswpout'] * 1024 / self.interval)
return result
class HdUsage(BaseProcessor):
"""
Get Hard disk use percentage
"""
    def collect(self):
cmd = 'df -k'
fd = os.popen(cmd)
re_obj = re.compile(r'^/dev/.+\s+(?P<used>\d+)%\s+(?P<mount>.+)')
hd_use_dict = {}
for line in fd:
match = re_obj.search(line)
if match is not None:
hd_use_dict[match.groupdict()['mount']] = int(match.groupdict()['used'])
fd.close()
return hd_use_dict
class HdIoRatio(BaseProcessor):
"""
Get hard disk IO usage
"""
def collect(self):
fd = open('/proc/diskstats')
disk_io_list = fd.readlines()
fd.close()
disk_io_dict = {}
if len(disk_io_list) > 0:
for line in disk_io_list:
io_list = line.split()
disk_io_dict[io_list[2]] = io_list[3:]
return disk_io_dict
def process(self):
hd_io_ratio_dict = {}
base_result = super(HdIoRatio, self).process()
for key in base_result.keys():
hd_io_ratio_dict[key + '_rio'] = int(base_result[key][0] / self.interval)
hd_io_ratio_dict[key + '_wio'] = int(base_result[key][4] / self.interval)
# each sector is 512 bytes
hd_io_ratio_dict[key + '_rsect'] = int(base_result[key][2] / self.interval / 2)
hd_io_ratio_dict[key + '_wsect'] = int(base_result[key][6] / self.interval / 2)
rw_num = base_result[key][0] + base_result[key][4]
if rw_num == 0:
hd_io_ratio_dict[key + '_await'] = 0
hd_io_ratio_dict[key + '_svctm'] = 0
else:
hd_io_ratio_dict[key + '_await'] = int((base_result[key][3] + base_result[key][7]) * 1000 / rw_num)
hd_io_ratio_dict[key + '_svctm'] = int(base_result[key][9] * 1000 / rw_num)
return hd_io_ratio_dict
class NetSnmpIpTcpUdp(BaseProcessor):
"""
IP statistics
Calculate by: /proc/net/snmp & /proc/net/netstat
"""
def collect(self):
fd1 = open("/proc/net/snmp")
fd2 = open("/proc/net/netstat")
lines = fd1.readlines()
lines.extend(fd2.readlines())
fd1.close()
fd2.close()
snmp_dict = {}
sep = re.compile(r'[:\s]+')
n = 0
for line in lines:
n += 1
fields = sep.split(line.strip())
proto = fields.pop(0)
if n % 2 == 1:
# header line
keys = []
for field in fields:
key = "%s_%s" % (proto, field)
keys.append(key)
else:
# value line
try:
values = [int(f) for f in fields]
                except Exception as e:
                    print(e)
                    # skip malformed lines instead of reusing values from a previous line
                    continue
kv = dict(zip(keys, values))
snmp_dict.update(kv)
return snmp_dict
def process(self):
result = super(NetSnmpIpTcpUdp, self).process()
# RetransRatio during this interval time
if result['Tcp_OutSegs'] == 0:
result['Tcp_RetransRatio'] = 0
else:
result['Tcp_RetransRatio'] = int(result['Tcp_RetransSegs'] * 100 / result['Tcp_OutSegs'])
result['Tcp_NewEstab'] = result.pop('Tcp_CurrEstab', 0)
if result['Tcp_NewEstab'] > 2147483648:
result['Tcp_NewEstab'] = abs(result['Tcp_NewEstab'] - 4294967296)
# CurrEstab is a tmp value, not inc/dec value
result['Tcp_CurrEstab'] = self.result1['Tcp_CurrEstab']
result['Tcp_InSegs'] = int(result['Tcp_InSegs'] / self.interval)
result['Tcp_OutSegs'] = int(result['Tcp_OutSegs'] / self.interval)
result['Udp_InDatagrams'] = int(result['Udp_InDatagrams'] / self.interval)
result['Udp_OutDatagrams'] = int(result['Udp_OutDatagrams'] / self.interval)
return result
class FdUsage(BaseProcessor):
"""
Get file descriptor amount
"""
def collect(self):
tmp = open("/proc/sys/fs/file-nr").read().strip().split()
fd_dict = {}
fd_dict['fd_used'] = int(tmp[0])
fd_dict['fd_unuse'] = int(tmp[1])
fd_dict['fd_max'] = int(tmp[2])
return fd_dict
class ProcessInfo(BaseProcessor):
"""
Get process information
"""
def collect(self):
stat_dict = {}
proc_dict = {}
fd = open('/proc/stat')
for line in fd:
key, value = line.strip().split(None, 1)
stat_dict[key] = value
fd.close()
keys = ('processes', 'procs_running','procs_blocked')
for k in keys:
proc_dict[k] = int(stat_dict[k])
return proc_dict
def process(self):
result = super(ProcessInfo, self).process()
result['total_process'] = len(glob.glob('/proc/*/stat'))
result['procs_running'] = self.result2['procs_running']
result['procs_blocked'] = self.result2['procs_blocked']
result['new_process'] = result.pop('processes', 0)
return result
if __name__ == "__main__":
print(platform.python_version())
start_time = time()
print("Start Time: %s" % start_time)
# key: function class
# value: sleep time (seconds)
jobs = {
NetTraffic: 60,
CpuUsage: 5,
MemUsage: 0,
ShmUsage: 0,
PosixShmUsage: 0,
SwapUsage: 60,
HdUsage: 0,
HdIoRatio: 60,
NetSnmpIpTcpUdp: 60,
FdUsage: 0,
ProcessInfo: 60,
}
child_pid_list = []
for key in jobs.keys():
try:
pid = os.fork()
except OSError:
sys.exit("Unable to create child process!")
if pid == 0:
instance = key(jobs[key])
instance.report()
sys.exit(0)
else:
child_pid_list.append(pid)
for pid in child_pid_list:
os.wait()
# calculate run time
end_time = time()
run_time = (int(end_time * 10) - int(start_time * 10)) / 10
print("End Time: %s" % end_time)
print("Cost Time: %ss" % run_time)
``` |
{
"source": "Jimdo/ansible-role-fastly",
"score": 2
} |
#### File: ansible-role-fastly/tests/test_fastly_logging_s3.py
```python
import os
import unittest
import sys
from test_common import TestCommon
sys.path.append(os.path.join(os.path.dirname(__file__), '..', 'library'))
from fastly_service import FastlyConfiguration
class TestFastlyLoggingS3(TestCommon):
@TestCommon.vcr.use_cassette()
def test_fastly_s3s(self):
s3s_configuration = self.minimal_configuration.copy()
s3s_configuration.update({
's3s': [{
'name' : 'test_s3',
'domain' : self.FASTLY_TEST_DOMAIN,
'secret_key' : 'SECRET',
'period' : 60,
'bucket_name' : 'prod-fastly-logs',
'timestamp_format' : '%Y-%m-%dT%H:%M:%S.000',
'redundancy' : 'standard',
'access_key' : 'ACCESS_KEY',
'format' : '%{%Y-%m-%dT%H:%S.000}t %h "%r" %>s %b',
}],
})
configuration = FastlyConfiguration(s3s_configuration)
service = self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, configuration).service
svc_conf = service.active_version.configuration
self.assertEqual(svc_conf.s3s[0].name, 'test_s3')
self.assertEqual(svc_conf.s3s[0].domain, self.FASTLY_TEST_DOMAIN)
self.assertEqual(svc_conf.s3s[0].secret_key, 'SECRET')
self.assertEqual(svc_conf.s3s[0].period, 60)
self.assertEqual(svc_conf.s3s[0].bucket_name, 'prod-fastly-logs')
self.assertEqual(svc_conf.s3s[0].timestamp_format, '%Y-%m-%dT%H:%M:%S.000')
self.assertEqual(svc_conf.s3s[0].redundancy, 'standard')
self.assertEqual(svc_conf.s3s[0].access_key, 'ACCESS_KEY')
self.assertEqual(svc_conf.s3s[0].format, '%{%Y-%m-%dT%H:%S.000}t %h "%r" %>s %b')
self.assertEqual(svc_conf, configuration)
active_version_number = service.active_version.number
service = self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, configuration).service
self.assertEqual(service.active_version.number, active_version_number)
@TestCommon.vcr.use_cassette()
def test_fastly_s3s_remove(self):
s3s_configuration = self.minimal_configuration.copy()
s3s_configuration.update({
's3': [{
'name' : 'test_s3',
}],
})
configuration = FastlyConfiguration(s3s_configuration)
# Configure S3 logging
self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, configuration).service
# Now apply a configuration without S3 logging
service = self.enforcer.apply_configuration(self.FASTLY_TEST_SERVICE, FastlyConfiguration(self.minimal_configuration.copy())).service
svc_conf = service.active_version.configuration
self.assertEqual(svc_conf.s3s, [])
if __name__ == '__main__':
unittest.main()
``` |
{
"source": "jim/documenters-aggregator",
"score": 3
} |
#### File: city_scrapers/spiders/wayne_cow.py
```python
from datetime import datetime
from urllib.parse import urljoin
from city_scrapers.spider import Spider
class Wayne_cowSpider(Spider):
name = 'wayne_cow'
agency_id = 'Detroit Committee of the Whole'
timezone = 'America/Detroit'
long_name = 'Wayne County Committee of the whole'
allowed_domains = ['www.waynecounty.com']
start_urls = ['https://www.waynecounty.com/elected/commission/committee-of-the-whole.aspx']
# Calendar shows only meetings in current year.
yearStr = datetime.now().year
def parse(self, response):
"""
`parse` should always `yield` a dict that follows the Event Schema
<https://city-bureau.github.io/city-scrapers/06_event_schema.html>.
Change the `_parse_id`, `_parse_name`, etc methods to fit your scraping
needs.
"""
entries = response.xpath('//tbody/tr')
for item in entries:
data = {
'_type': 'event',
'name': 'Committee of the Whole',
'event_description': self._parse_description(item),
'classification': 'Committee',
'start': self._parse_start(item),
'end': {'date': None, 'time': None, 'note': ''},
'all_day': False,
'location': self._parse_location(),
'documents': self._parse_documents(item, response.url),
'sources': [{'url': response.url, 'note': ''}]
}
data['id'] = self._generate_id(data)
data['status'] = self._generate_status(data, '')
yield data
@staticmethod
def _parse_documents(item, base_url):
url = item.xpath('td/a/@href').extract_first()
url = urljoin(base_url, url) if url is not None else ''
if url != '':
note = item.xpath('td/a/text()').extract_first()
note = note.lower() if note is not None else ''
return [{
'url': url,
'note': note
}]
return []
@staticmethod
def _parse_description(response):
"""
Event description taken from static text at top of page.
"""
desc_xpath = '//h2[contains(text(), "Committee of the Whole")]/following-sibling::div/section/text()'
desc = response.xpath(desc_xpath).extract_first()
return desc
def _parse_start(self, item):
"""
Parse start date and time.
"""
md_str = item.xpath('.//td[2]/text()').extract_first()
time_str = item.xpath('.//td[3]/text()').extract_first()
dt_str = '{0}, {1} - {2}'.format(md_str, self.yearStr, time_str)
try:
dt = datetime.strptime(dt_str, '%B %d, %Y - %I:%M %p')
except ValueError:
return {
'date': None,
'time': None,
'note': '',
}
else:
return {
'date': dt.date(),
'time': dt.time(),
'note': '',
}
@staticmethod
def _parse_location():
"""
        Location is hardcoded; the page states that all meetings are held at
        the same location.
"""
return {
'name': '7th floor meeting room, Guardian Building',
'address': '500 Griswold St, Detroit, MI 48226',
'neighborhood': '',
}
```
#### File: documenters-aggregator/deploy/aws_setup.py
```python
import os
import json
import boto3
from os import listdir, environ, path
from os.path import isfile, join
from zipfile import ZipFile
DEPLOY_TAG = 'latest' # datetime.now().strftime("%Y%m%d%H%M")
ECS_URI = environ.get('ECS_REPOSITORY_URI')
BATCH_JOB_ROLE = 'city-scrapers-batch-job-role'
SPIDER_PATH = join(
path.dirname(path.dirname(path.abspath(__file__))),
'city_scrapers',
'spiders'
)
ENV_VARS = [
'SCRAPY_SETTINGS_MODULE',
'AIRTABLE_API_KEY',
'CITY_SCRAPERS_AIRTABLE_BASE_KEY',
'CITY_SCRAPERS_AIRTABLE_DATA_TABLE',
'CITY_SCRAPERS_AIRTABLE_GEOCODE_TABLE',
'CITY_SCRAPERS_GOOGLE_API_KEY',
'SENTRY_DSN',
'MAPZEN_API_KEY'
]
batch = boto3.client('batch')
iam = boto3.resource('iam')
lambda_client = boto3.client('lambda')
spider_names = [
path.splitext(f)[0]
for f in listdir(SPIDER_PATH)
if isfile(join(SPIDER_PATH, f)) and f != '__init__.py'
]
def create_job_definitions():
"""
Register all job definitions.
"""
active_job_defs = batch.describe_job_definitions(status='ACTIVE')['jobDefinitions']
print('deregistering all current job definitions')
for job in active_job_defs:
batch.deregister_job_definition(jobDefinition=job['jobDefinitionArn'])
future_job_defs = spider_names
job_role_arn = iam.Role(BATCH_JOB_ROLE).arn
for job_def in future_job_defs:
print('creating job def {}'.format(job_def))
batch.register_job_definition(
jobDefinitionName=job_def,
type='container',
containerProperties={
'image': '{0}:{1}'.format(ECS_URI, DEPLOY_TAG),
'vcpus': 1,
'memory': 768,
'command': ['scrapy', 'crawl', job_def],
'jobRoleArn': job_role_arn,
'environment': [{'name': v, 'value': environ.get(v)} for v in ENV_VARS],
'readonlyRootFilesystem': False,
'privileged': False,
},
retryStrategy={'attempts': 3}
)
def update_lambda_function(name):
with ZipFile('{}.zip'.format(name), 'w') as zf:
for f in listdir(join(path.dirname(__file__), name)):
zf.write(join(path.dirname(__file__), name, f), path.basename(f))
with open('{}.zip'.format(name), 'rb') as zf:
zip_buffer = zf.read()
os.remove('{}.zip'.format(name))
lambda_client.update_function_code(FunctionName=name, ZipFile=zip_buffer)
create_job_definitions()
update_lambda_function('city-scrapers-status')
```
#### File: documenters-aggregator/tests/test_chi_localschoolcouncil.py
```python
import pytest
from datetime import date
from datetime import datetime
from pytz import timezone
from tests.utils import file_response
from city_scrapers.spiders.chi_localschoolcouncil import chi_LSCMeetingSpider
from textwrap import dedent
test_response = file_response('files/tests_chilocal_events.json')
spider = chi_LSCMeetingSpider(start_date=datetime(2018, 1, 1))
parsed_items = [item for item in spider.parse(test_response) if isinstance(item, dict)]
# def test_id():
# assert parsed_items[0]['id'] == 'chi_localschoolcouncil/201801081600/x/local_school_council_fort_dearborn_es'
def test_name():
assert parsed_items[0]['name'] == 'Local School Council: Fort Dearborn ES'
def test_start_time():
assert parsed_items[0]['start_time'].isoformat() == '2018-01-08T16:00:00-06:00'
def test_end_time():
    assert parsed_items[0]['end_time'] is None
def test_location():
assert parsed_items[0]['location'] == {
'address': '9025 S Throop St 60620',
'coordinates': {
'latitude': '41.72967267',
'longitude': '-87.65548116',
}
}
@pytest.mark.parametrize('item', parsed_items)
def test_timezone(item):
assert item['timezone'] == 'America/Chicago'
@pytest.mark.parametrize('item', parsed_items)
def test_all_day(item):
assert item['all_day'] is False
@pytest.mark.parametrize('item', parsed_items)
def test_classification(item):
    assert item['classification'] == 'meeting'
@pytest.mark.parametrize('item', parsed_items)
def test_status(item):
assert item['status'] == 'tentative'
@pytest.mark.parametrize('item', parsed_items)
def test__type(item):
assert item['_type'] == 'event'
```
#### File: documenters-aggregator/tests/test_chi_school_community_action_council.py
```python
from datetime import datetime
import pytest
from freezegun import freeze_time
from tests.utils import file_response
from city_scrapers.spiders.chi_school_community_action_council import Chi_school_community_action_councilSpider
freezer = freeze_time('2018-06-01 12:00:01')
freezer.start()
test_response = file_response('files/chi_school_community_action_council_CAC.html', url='http://cps.edu/FACE/Pages/CAC.aspx')
spider = Chi_school_community_action_councilSpider()
parsed_items = [item for item in spider.parse(test_response) if isinstance(item, dict)]
current_month_number = datetime.today().month
freezer.stop()
def test_num_items():
assert len(parsed_items) == (13 - current_month_number)*8
def test_name():
assert parsed_items[0]['name'] == 'Austin Community Action Council'
def test_start_time():
assert parsed_items[0]['start_time'].isoformat() == '2018-06-12T17:30:00'
def test_end_time():
assert parsed_items[0]['end_time'].isoformat() == '2018-06-12T20:30:00'
# def test_id():
# assert parsed_items[0]['id'] == \
# 'chi_school_community_action_council/201805081730/x/austin_community_action_council'
def test_location():
assert parsed_items[0]['location'] == {
'url': None,
'name': ' <NAME> ',
'address': '5101 W Harrison St.',
'coordinates': {
'latitude': None,
'longitude': None,
},
}
@pytest.mark.parametrize('item', parsed_items)
def test_description(item):
assert item['description'] == "Community Action Councils, or CACs, consist of 25-30 voting members who are " \
"directly involved in developing a strategic plan for educational success within " \
"their communities. CAC members include parents; elected officials; faith-based " \
"institutions, health care and community-based organizations; Local School" \
" Council (LSC) members; business leaders; educators and school administrators; " \
"staff members from Chicago's Sister Agencies; community residents; " \
"and students. There are nine CACs across Chicago. Each works to empower the " \
"community they serve to lead the improvement of local quality education."
@pytest.mark.parametrize('item', parsed_items)
def test_sources(item):
assert item['sources'] == [{'url': 'http://cps.edu/FACE/Pages/CAC.aspx',
'note': ''}]
@pytest.mark.parametrize('item', parsed_items)
def test_timezone(item):
assert item['timezone'] == 'America/Chicago'
@pytest.mark.parametrize('item', parsed_items)
def test_all_day(item):
assert item['all_day'] is False
@pytest.mark.parametrize('item', parsed_items)
def test_classification(item):
assert item['classification'] == 'Education'
@pytest.mark.parametrize('item', parsed_items)
def test__type(item):
    assert item['_type'] == 'event'
```
#### File: documenters-aggregator/tests/test_il_labor.py
```python
import pytest
from tests.utils import file_response
from city_scrapers.spiders.il_labor import Il_laborSpider
test_response = file_response('files/il_labor.html', url='https://www.illinois.gov/ilrb/meetings/Pages/default.aspx')
spider = Il_laborSpider()
parsed_items = [item for item in spider.parse(test_response) if isinstance(item, dict)]
def test_name():
assert parsed_items[0]['name'] == 'Local panel meeting'
@pytest.mark.parametrize('item', parsed_items)
def test_description(item):
EXPECTED_DESCRIPTION = ("The Illinois Public Labor Relations Act (Act) governs labor relations "
"between most public employers in Illinois and their employees. Throughout "
"the State, the Illinois Labor Relations Board regulates the designation of "
"employee representatives; the negotiation of wages, hours, and other conditions "
"of employment; and resolves, or if necessary, adjudicates labor disputes.")
assert item['description'] == EXPECTED_DESCRIPTION
@pytest.mark.parametrize('item', parsed_items)
def test_timezone(item):
assert item['timezone'] == 'America/Chicago'
def test_start_time():
assert parsed_items[1]['start_time'].isoformat() == '2018-06-12T13:00:00-05:00'
@pytest.mark.parametrize('item', parsed_items)
def test_end_time(item):
assert item['end_time'] is None
# def test_id():
# assert parsed_items[1]['id'] == 'il_labor/201806121300/x/state_panel_meeting'
@pytest.mark.parametrize('item', parsed_items)
def test_all_day(item):
assert item['all_day'] is False
@pytest.mark.parametrize('item', parsed_items)
def test_classification(item):
assert item['classification'] == 'committee-meeting'
@pytest.mark.parametrize('item', parsed_items)
def test_status(item):
assert item['status'] == 'tentative'
def test_location():
assert parsed_items[0]['location'] == {
'url': None,
'name': None,
'address': 'Room S-401, 160 N. LaSalle Street, Chicago, IL',
'coordinates': {
'latitude': None,
'longitude': None,
},
}
assert parsed_items[1]['location'] == {
'url': None,
'address': 'Room N-703, 160 N. LaSalle Street, Chicago, IL Or Room 5A, 801 S. 7th Street, Springfield, IL',
'name': None,
'coordinates': {
'latitude': None,
'longitude': None,
},
}
@pytest.mark.parametrize('item', parsed_items)
def test__type(item):
assert item['_type'] == 'event'
@pytest.mark.parametrize('item', parsed_items)
def test_sources(item):
assert item['sources'] == [{'url': 'https://www.illinois.gov/ilrb/meetings/Pages/default.aspx', 'note': ''}]
```
#### File: documenters-aggregator/tests/test_il_metra_board.py
```python
import pytest
from tests.utils import file_response
from city_scrapers.spiders.il_metra_board import Il_metra_boardSpider
test_response = file_response('files/il_metra_board.html')
spider = Il_metra_boardSpider()
parsed_items = [item for item in spider.parse(test_response) if isinstance(item, dict)]
def test_name():
assert parsed_items[0]['name'] == 'Metra February 2018 Board Meeting'
def test_start_time():
assert parsed_items[0]['start_time'].isoformat() == '2018-02-21T10:30:00-06:00'
# def test_id():
# assert parsed_items[0]['id'] == 'il_metra_board/201802211030/x/metra_february_2018_board_meeting'
def test_location():
assert parsed_items[0]['location'] == {
'url': '',
'name': '',
'address': '547 West Jackson Boulevard, Chicago, IL',
'coordinates': {
'latitude': '',
'longitude': '',
},
}
def test_sources():
assert parsed_items[0]['sources'][0] == {
'url': 'http://www.example.com',
'note': ''
}
@pytest.mark.parametrize('item', parsed_items)
def test_end_time(item):
assert item['end_time'] is None
@pytest.mark.parametrize('item', parsed_items)
def test_timezone(item):
assert item['timezone'] == 'America/Chicago'
@pytest.mark.parametrize('item', parsed_items)
def test_all_day(item):
assert item['all_day'] is False
@pytest.mark.parametrize('item', parsed_items)
def test_classification(item):
    assert item['classification'] == 'transit'
@pytest.mark.parametrize('item', parsed_items)
def test__type(item):
    assert item['_type'] == 'event'
``` |
{
"source": "Jimdo/django-prometheus-metrics",
"score": 2
} |
#### File: django-prometheus-metrics/django_prometheus_metrics/models.py
```python
from prometheus_client import Counter
model_inserts_total = Counter(
'django_model_inserts_total', 'Number of inserts on a certain model', ['model']
)
model_updates_total = Counter(
'django_model_updates_total', 'Number of updates on a certain model', ['model']
)
model_deletes_total = Counter(
'django_model_deletes_total', 'Number of deletes on a certain model', ['model']
)
def MetricsModelMixin(name):
class Mixin(object):
def _do_insert(self, *args, **kwargs):
model_inserts_total.labels(name).inc()
return super(Mixin, self)._do_insert(*args, **kwargs)
def _do_update(self, *args, **kwargs):
model_updates_total.labels(name).inc()
return super(Mixin, self)._do_update(*args, **kwargs)
        def delete(self, *args, **kwargs):
            # Django has no _do_delete hook, so override delete() directly
model_deletes_total.labels(name).inc()
return super(Mixin, self).delete(*args, **kwargs)
return Mixin
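# Illustrative usage sketch (not part of the original module): the mixin factory
# above is meant to be mixed into a Django model so that inserts, updates and
# deletes increment the matching Prometheus counters. "Article" and its field
# are hypothetical; running this requires a configured Django project, so the
# example is shown as comments only.
#
# from django.db import models
#
# class Article(MetricsModelMixin('article'), models.Model):
#     title = models.CharField(max_length=200)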
``` |
{
"source": "Jimdo/supervisor",
"score": 2
} |
#### File: supervisor/tests/test_options.py
```python
import os
import sys
import tempfile
import socket
import unittest
import signal
import shutil
import errno
try:
# Python < 3
from StringIO import StringIO
except ImportError:
# Python >= 3
from io import StringIO
from mock import Mock, patch, sentinel
from supervisor.tests.base import DummySupervisor
from supervisor.tests.base import DummyLogger
from supervisor.tests.base import DummyOptions
from supervisor.tests.base import DummyPConfig
from supervisor.tests.base import DummyProcess
from supervisor.tests.base import DummySocketConfig
from supervisor.tests.base import lstrip
class OptionTests(unittest.TestCase):
def _makeOptions(self, read_error=False):
from cStringIO import StringIO
from supervisor.options import Options
from supervisor.datatypes import integer
class MyOptions(Options):
master = {
'other': 41 }
def __init__(self, read_error=read_error):
self.read_error = read_error
Options.__init__(self)
class Foo(object): pass
self.configroot = Foo()
def read_config(self, fp):
if self.read_error:
raise ValueError(self.read_error)
# Pretend we read it from file:
self.configroot.__dict__.update(self.default_map)
self.configroot.__dict__.update(self.master)
options = MyOptions()
options.configfile = StringIO()
options.add(name='anoption', confname='anoption',
short='o', long='option', default='default')
options.add(name='other', confname='other', env='OTHER',
short='p:', long='other=', handler=integer)
return options
def test_searchpaths(self):
options = self._makeOptions()
self.assertEquals(len(options.searchpaths), 5)
self.assertTrue('supervisord.conf' in options.searchpaths)
self.assertTrue('etc/supervisord.conf' in options.searchpaths)
self.assertTrue('/etc/supervisord.conf' in options.searchpaths)
def test_options_and_args_order(self):
# Only config file exists
options = self._makeOptions()
options.realize([])
self.assertEquals(options.anoption, 'default')
self.assertEquals(options.other, 41)
# Env should trump config
options = self._makeOptions()
os.environ['OTHER'] = '42'
options.realize([])
self.assertEquals(options.other, 42)
# Opt should trump both env (still set) and config
options = self._makeOptions()
options.realize(['-p', '43'])
self.assertEquals(options.other, 43)
del os.environ['OTHER']
def test_config_reload(self):
options = self._makeOptions()
options.realize([])
self.assertEquals(options.other, 41)
options.master['other'] = 42
options.process_config()
self.assertEquals(options.other, 42)
def test_config_reload_do_usage_false(self):
options = self._makeOptions(read_error='error')
self.assertRaises(ValueError, options.process_config,
False)
def test_config_reload_do_usage_true(self):
options = self._makeOptions(read_error='error')
from StringIO import StringIO
L = []
def exit(num):
L.append(num)
options.stderr = options.stdout = StringIO()
options.exit = exit
options.configroot.anoption = 1
options.configroot.other = 1
options.process_config(True)
self.assertEqual(L, [2])
def test__set(self):
from supervisor.options import Options
options = Options()
options._set('foo', 'bar', 0)
self.assertEquals(options.foo, 'bar')
self.assertEquals(options.attr_priorities['foo'], 0)
options._set('foo', 'baz', 1)
self.assertEquals(options.foo, 'baz')
self.assertEquals(options.attr_priorities['foo'], 1)
options._set('foo', 'gazonk', 0)
self.assertEquals(options.foo, 'baz')
self.assertEquals(options.attr_priorities['foo'], 1)
options._set('foo', 'gazonk', 1)
self.assertEquals(options.foo, 'gazonk')
class ClientOptionsTests(unittest.TestCase):
def _getTargetClass(self):
from supervisor.options import ClientOptions
return ClientOptions
def _makeOne(self):
return self._getTargetClass()()
def test_no_config_file(self):
"""Making sure config file is not required."""
instance = self._makeOne()
# No default config file search in case they would exist
self.assertTrue(len(instance.searchpaths) > 0)
instance.searchpaths = []
class DummyException(Exception):
pass
def dummy_exit(self, _exitcode=0):
raise DummyException()
instance.exit = dummy_exit
instance.realize(args=['-s', 'http://localhost:9001', '-u', 'chris',
'-p', '123'])
self.assertEquals(instance.interactive, 1)
self.assertEqual(instance.serverurl, 'http://localhost:9001')
self.assertEqual(instance.username, 'chris')
self.assertEqual(instance.password, '<PASSWORD>')
def test_options(self):
tempdir = tempfile.gettempdir()
s = lstrip("""[supervisorctl]
serverurl=http://localhost:9001
username=chris
password=<PASSWORD>
prompt=mysupervisor
history_file=%s/sc_history
""" % tempdir)
from StringIO import StringIO
fp = StringIO(s)
instance = self._makeOne()
instance.configfile = fp
instance.realize(args=[])
self.assertEqual(instance.interactive, True)
history_file = os.path.join(tempdir, 'sc_history')
self.assertEqual(instance.history_file, history_file)
options = instance.configroot.supervisorctl
self.assertEqual(options.prompt, 'mysupervisor')
self.assertEqual(options.serverurl, 'http://localhost:9001')
self.assertEqual(options.username, 'chris')
self.assertEqual(options.password, '<PASSWORD>')
self.assertEqual(options.history_file, history_file)
def test_unreadable_config_file(self):
# Quick and dirty way of coming up with a decent filename
tempf = tempfile.NamedTemporaryFile()
fname = tempf.name
tempf.close()
self.assertFalse(os.path.exists(fname))
instance = self._makeOne()
instance.stderr = StringIO()
class DummyException(Exception):
def __init__(self, exitcode):
self.exitcode = exitcode
def dummy_exit(self, exitcode=2):
# Important default exitcode=2 like sys.exit.
raise DummyException(exitcode)
instance.exit = dummy_exit
try:
instance.realize(args=['-c', fname])
except DummyException, e:
self.assertEquals(e.exitcode, 2)
else:
self.fail("expected exception")
try:
instance.read_config(fname)
except ValueError, e:
self.assertTrue("could not find config file" in str(e))
else:
self.fail("expected exception")
tempf = tempfile.NamedTemporaryFile()
os.chmod(tempf.name, 0) # Removing read perms
try:
instance.read_config(tempf.name)
except ValueError, e:
self.assertTrue("could not read config file" in str(e))
else:
self.fail("expected exception")
tempf.close()
def test_options_unixsocket_cli(self):
from StringIO import StringIO
fp = StringIO('[supervisorctl]')
instance = self._makeOne()
instance.configfile = fp
instance.realize(args=['--serverurl', 'unix:///dev/null'])
self.assertEqual(instance.serverurl, 'unix:///dev/null')
class ServerOptionsTests(unittest.TestCase):
def _getTargetClass(self):
from supervisor.options import ServerOptions
return ServerOptions
def _makeOne(self):
return self._getTargetClass()()
def test_version(self):
from supervisor.options import VERSION
options = self._makeOne()
from StringIO import StringIO
options.stdout = StringIO()
self.assertRaises(SystemExit, options.version, None)
self.assertEqual(options.stdout.getvalue(), VERSION + '\n')
def test_options(self):
s = lstrip("""[inet_http_server]
port=127.0.0.1:8999
username=chrism
password=<PASSWORD>
[supervisord]
directory=%(tempdir)s
backofflimit=10
user=root
umask=022
logfile=supervisord.log
logfile_maxbytes=1000MB
logfile_backups=5
loglevel=error
pidfile=supervisord.pid
nodaemon=true
identifier=fleeb
childlogdir=%(tempdir)s
nocleanup=true
minfds=2048
minprocs=300
environment=FAKE_ENV_VAR=/some/path
[program:cat1]
command=/bin/cat
priority=1
autostart=true
user=root
stdout_logfile=/tmp/cat.log
stopsignal=KILL
stopwaitsecs=5
startsecs=5
startretries=10
directory=/tmp
umask=002
[program:cat2]
priority=2
command=/bin/cat
autostart=true
autorestart=false
stdout_logfile_maxbytes = 1024
stdout_logfile_backups = 2
stdout_logfile = /tmp/cat2.log
[program:cat3]
priority=3
process_name = replaced
command=/bin/cat
autorestart=true
exitcodes=0,1,127
stopasgroup=true
killasgroup=true
[program:cat4]
priority=4
process_name = fleeb_%%(process_num)s
numprocs = 2
command = /bin/cat
autorestart=unexpected
""" % {'tempdir':tempfile.gettempdir()})
from supervisor import datatypes
from StringIO import StringIO
fp = StringIO(s)
instance = self._makeOne()
instance.configfile = fp
instance.realize(args=[])
options = instance.configroot.supervisord
self.assertEqual(options.directory, tempfile.gettempdir())
self.assertEqual(options.umask, 022)
self.assertEqual(options.logfile, 'supervisord.log')
self.assertEqual(options.logfile_maxbytes, 1000 * 1024 * 1024)
self.assertEqual(options.logfile_backups, 5)
self.assertEqual(options.loglevel, 40)
self.assertEqual(options.pidfile, 'supervisord.pid')
self.assertEqual(options.nodaemon, True)
self.assertEqual(options.identifier, 'fleeb')
self.assertEqual(options.childlogdir, tempfile.gettempdir())
self.assertEqual(len(options.server_configs), 1)
self.assertEqual(options.server_configs[0]['family'], socket.AF_INET)
self.assertEqual(options.server_configs[0]['host'], '127.0.0.1')
self.assertEqual(options.server_configs[0]['port'], 8999)
self.assertEqual(options.server_configs[0]['username'], 'chrism')
self.assertEqual(options.server_configs[0]['password'], '<PASSWORD>')
self.assertEqual(options.nocleanup, True)
self.assertEqual(options.minfds, 2048)
self.assertEqual(options.minprocs, 300)
self.assertEqual(options.nocleanup, True)
self.assertEqual(len(options.process_group_configs), 4)
self.assertEqual(options.environment, dict(FAKE_ENV_VAR='/some/path'))
cat1 = options.process_group_configs[0]
self.assertEqual(cat1.name, 'cat1')
self.assertEqual(cat1.priority, 1)
self.assertEqual(len(cat1.process_configs), 1)
proc1 = cat1.process_configs[0]
self.assertEqual(proc1.name, 'cat1')
self.assertEqual(proc1.command, '/bin/cat')
self.assertEqual(proc1.priority, 1)
self.assertEqual(proc1.autostart, True)
self.assertEqual(proc1.autorestart, datatypes.RestartWhenExitUnexpected)
self.assertEqual(proc1.startsecs, 5)
self.assertEqual(proc1.startretries, 10)
self.assertEqual(proc1.uid, 0)
self.assertEqual(proc1.stdout_logfile, '/tmp/cat.log')
self.assertEqual(proc1.stopsignal, signal.SIGKILL)
self.assertEqual(proc1.stopwaitsecs, 5)
self.assertEqual(proc1.stopasgroup, False)
self.assertEqual(proc1.killasgroup, False)
self.assertEqual(proc1.stdout_logfile_maxbytes,
datatypes.byte_size('50MB'))
self.assertEqual(proc1.stdout_logfile_backups, 10)
self.assertEqual(proc1.exitcodes, [0,2])
self.assertEqual(proc1.directory, '/tmp')
self.assertEqual(proc1.umask, 002)
self.assertEqual(proc1.environment, dict(FAKE_ENV_VAR='/some/path'))
cat2 = options.process_group_configs[1]
self.assertEqual(cat2.name, 'cat2')
self.assertEqual(cat2.priority, 2)
self.assertEqual(len(cat2.process_configs), 1)
proc2 = cat2.process_configs[0]
self.assertEqual(proc2.name, 'cat2')
self.assertEqual(proc2.command, '/bin/cat')
self.assertEqual(proc2.priority, 2)
self.assertEqual(proc2.autostart, True)
self.assertEqual(proc2.autorestart, False)
self.assertEqual(proc2.uid, None)
self.assertEqual(proc2.stdout_logfile, '/tmp/cat2.log')
self.assertEqual(proc2.stopsignal, signal.SIGTERM)
self.assertEqual(proc2.stopasgroup, False)
self.assertEqual(proc2.killasgroup, False)
self.assertEqual(proc2.stdout_logfile_maxbytes, 1024)
self.assertEqual(proc2.stdout_logfile_backups, 2)
self.assertEqual(proc2.exitcodes, [0,2])
self.assertEqual(proc2.directory, None)
cat3 = options.process_group_configs[2]
self.assertEqual(cat3.name, 'cat3')
self.assertEqual(cat3.priority, 3)
self.assertEqual(len(cat3.process_configs), 1)
proc3 = cat3.process_configs[0]
self.assertEqual(proc3.name, 'replaced')
self.assertEqual(proc3.command, '/bin/cat')
self.assertEqual(proc3.priority, 3)
self.assertEqual(proc3.autostart, True)
self.assertEqual(proc3.autorestart, datatypes.RestartUnconditionally)
self.assertEqual(proc3.uid, None)
self.assertEqual(proc3.stdout_logfile, datatypes.Automatic)
self.assertEqual(proc3.stdout_logfile_maxbytes,
datatypes.byte_size('50MB'))
self.assertEqual(proc3.stdout_logfile_backups, 10)
self.assertEqual(proc3.exitcodes, [0,1,127])
self.assertEqual(proc3.stopsignal, signal.SIGTERM)
self.assertEqual(proc3.stopasgroup, True)
self.assertEqual(proc3.killasgroup, True)
cat4 = options.process_group_configs[3]
self.assertEqual(cat4.name, 'cat4')
self.assertEqual(cat4.priority, 4)
self.assertEqual(len(cat4.process_configs), 2)
proc4_a = cat4.process_configs[0]
self.assertEqual(proc4_a.name, 'fleeb_0')
self.assertEqual(proc4_a.command, '/bin/cat')
self.assertEqual(proc4_a.priority, 4)
self.assertEqual(proc4_a.autostart, True)
self.assertEqual(proc4_a.autorestart,
datatypes.RestartWhenExitUnexpected)
self.assertEqual(proc4_a.uid, None)
self.assertEqual(proc4_a.stdout_logfile, datatypes.Automatic)
self.assertEqual(proc4_a.stdout_logfile_maxbytes,
datatypes.byte_size('50MB'))
self.assertEqual(proc4_a.stdout_logfile_backups, 10)
self.assertEqual(proc4_a.exitcodes, [0,2])
self.assertEqual(proc4_a.stopsignal, signal.SIGTERM)
self.assertEqual(proc4_a.stopasgroup, False)
self.assertEqual(proc4_a.killasgroup, False)
proc4_b = cat4.process_configs[1]
self.assertEqual(proc4_b.name, 'fleeb_1')
self.assertEqual(proc4_b.command, '/bin/cat')
self.assertEqual(proc4_b.priority, 4)
self.assertEqual(proc4_b.autostart, True)
self.assertEqual(proc4_b.autorestart,
datatypes.RestartWhenExitUnexpected)
self.assertEqual(proc4_b.uid, None)
self.assertEqual(proc4_b.stdout_logfile, datatypes.Automatic)
self.assertEqual(proc4_b.stdout_logfile_maxbytes,
datatypes.byte_size('50MB'))
self.assertEqual(proc4_b.stdout_logfile_backups, 10)
self.assertEqual(proc4_b.exitcodes, [0,2])
self.assertEqual(proc4_b.stopsignal, signal.SIGTERM)
self.assertEqual(proc4_b.stopasgroup, False)
self.assertEqual(proc4_b.killasgroup, False)
here = os.path.abspath(os.getcwd())
self.assertEqual(instance.uid, 0)
self.assertEqual(instance.gid, 0)
self.assertEqual(instance.directory, tempfile.gettempdir())
self.assertEqual(instance.umask, 022)
self.assertEqual(instance.logfile, os.path.join(here,'supervisord.log'))
self.assertEqual(instance.logfile_maxbytes, 1000 * 1024 * 1024)
self.assertEqual(instance.logfile_backups, 5)
self.assertEqual(instance.loglevel, 40)
self.assertEqual(instance.pidfile, os.path.join(here,'supervisord.pid'))
self.assertEqual(instance.nodaemon, True)
self.assertEqual(instance.passwdfile, None)
self.assertEqual(instance.identifier, 'fleeb')
self.assertEqual(instance.childlogdir, tempfile.gettempdir())
self.assertEqual(len(instance.server_configs), 1)
self.assertEqual(instance.server_configs[0]['family'], socket.AF_INET)
self.assertEqual(instance.server_configs[0]['host'], '127.0.0.1')
self.assertEqual(instance.server_configs[0]['port'], 8999)
self.assertEqual(instance.server_configs[0]['username'], 'chrism')
self.assertEqual(instance.server_configs[0]['password'], '<PASSWORD>')
self.assertEqual(instance.nocleanup, True)
self.assertEqual(instance.minfds, 2048)
self.assertEqual(instance.minprocs, 300)
def test_no_config_file_exits(self):
instance = self._makeOne()
        # Remove the default config file search paths in case any of them exist
self.assertTrue(len(instance.searchpaths) > 0)
instance.searchpaths = []
class DummyException(Exception):
def __init__(self, exitcode):
self.exitcode = exitcode
def dummy_exit(exitcode=2):
# Important default exitcode=2 like sys.exit.
raise DummyException(exitcode)
instance.exit = dummy_exit
# Making sure we capture stdout and stderr
instance.stderr = StringIO()
try:
instance.realize()
except DummyException, e:
# Caught expected exception
import traceback
self.assertEquals(e.exitcode, 2,
"Wrong exitcode for: %s" % traceback.format_exc(e))
else:
self.fail("Did not get a DummyException.")
def test_reload(self):
from cStringIO import StringIO
text = lstrip("""\
[supervisord]
user=root
[program:one]
command = /bin/cat
[program:two]
command = /bin/dog
[program:four]
command = /bin/sheep
[group:thegroup]
programs = one,two
""")
instance = self._makeOne()
instance.configfile = StringIO(text)
instance.realize(args=[])
section = instance.configroot.supervisord
self.assertEqual(len(section.process_group_configs), 2)
cat = section.process_group_configs[0]
self.assertEqual(len(cat.process_configs), 1)
cat = section.process_group_configs[1]
self.assertEqual(len(cat.process_configs), 2)
self.assertTrue(section.process_group_configs is
instance.process_group_configs)
text = lstrip("""\
[supervisord]
user=root
[program:one]
command = /bin/cat
[program:three]
command = /bin/pig
[group:thegroup]
programs = three
""")
instance.configfile = StringIO(text)
instance.process_config()
section = instance.configroot.supervisord
self.assertEqual(len(section.process_group_configs), 2)
cat = section.process_group_configs[0]
self.assertEqual(len(cat.process_configs), 1)
proc = cat.process_configs[0]
self.assertEqual(proc.name, 'one')
self.assertEqual(proc.command, '/bin/cat')
self.assertTrue(section.process_group_configs is
instance.process_group_configs)
cat = section.process_group_configs[1]
self.assertEqual(len(cat.process_configs), 1)
proc = cat.process_configs[0]
self.assertEqual(proc.name, 'three')
self.assertEqual(proc.command, '/bin/pig')
def test_reload_clears_parse_warnings(self):
instance = self._makeOne()
old_warning = "Warning from a prior config read"
instance.parse_warnings = [old_warning]
from cStringIO import StringIO
text = lstrip("""\
[supervisord]
user=root
[program:cat]
command = /bin/cat
""")
instance.configfile = StringIO(text)
instance.realize(args=[])
self.assertFalse(old_warning in instance.parse_warnings)
def test_unreadable_config_file(self):
# Quick and dirty way of coming up with a decent filename
tempf = tempfile.NamedTemporaryFile()
fname = tempf.name
tempf.close()
self.assertFalse(os.path.exists(fname))
instance = self._makeOne()
instance.stderr = StringIO()
class DummyException(Exception):
def __init__(self, exitcode):
self.exitcode = exitcode
        def dummy_exit(exitcode=2):
# Important default exitcode=2 like sys.exit.
raise DummyException(exitcode)
instance.exit = dummy_exit
try:
instance.realize(args=['-c', fname])
except DummyException, e:
self.assertEquals(e.exitcode, 2)
else:
self.fail("expected exception")
try:
instance.read_config(fname)
except ValueError, e:
self.assertTrue("could not find config file" in str(e))
else:
self.fail("expected exception")
tempf = tempfile.NamedTemporaryFile()
os.chmod(tempf.name, 0) # Removing read perms
try:
instance.read_config(tempf.name)
except ValueError, e:
self.assertTrue("could not read config file" in str(e))
else:
self.fail("expected exception")
tempf.close()
def test_readFile_failed(self):
from supervisor.options import readFile
try:
readFile('/notthere', 0, 10)
except ValueError, inst:
self.assertEqual(inst.args[0], 'FAILED')
else:
raise AssertionError("Didn't raise")
def test_get_pid(self):
instance = self._makeOne()
self.assertEqual(os.getpid(), instance.get_pid())
def test_get_signal_delegates_to_signal_receiver(self):
instance = self._makeOne()
instance.signal_receiver.receive(signal.SIGTERM, None)
instance.signal_receiver.receive(signal.SIGCHLD, None)
self.assertEqual(instance.get_signal(), signal.SIGTERM)
self.assertEqual(instance.get_signal(), signal.SIGCHLD)
self.assertEqual(instance.get_signal(), None)
def test_check_execv_args_cant_find_command(self):
instance = self._makeOne()
from supervisor.options import NotFound
self.assertRaises(NotFound, instance.check_execv_args,
'/not/there', None, None)
def test_check_execv_args_notexecutable(self):
instance = self._makeOne()
from supervisor.options import NotExecutable
self.assertRaises(NotExecutable,
instance.check_execv_args, '/etc/passwd',
['etc/passwd'], os.stat('/etc/passwd'))
def test_check_execv_args_isdir(self):
instance = self._makeOne()
from supervisor.options import NotExecutable
self.assertRaises(NotExecutable,
instance.check_execv_args, '/',
['/'], os.stat('/'))
def test_cleanup_afunix_unlink(self):
fn = tempfile.mktemp()
f = open(fn, 'w')
f.write('foo')
f.close()
instance = self._makeOne()
class Port:
family = socket.AF_UNIX
address = fn
class Server:
pass
instance.httpservers = [({'family':socket.AF_UNIX, 'file':fn},
Server())]
instance.pidfile = ''
instance.cleanup()
self.failIf(os.path.exists(fn))
def test_cleanup_afunix_nounlink(self):
fn = tempfile.mktemp()
try:
f = open(fn, 'w')
f.write('foo')
f.close()
instance = self._makeOne()
class Port:
family = socket.AF_UNIX
address = fn
class Server:
pass
instance.httpservers = [({'family':socket.AF_UNIX, 'file':fn},
Server())]
instance.pidfile = ''
instance.unlink_socketfiles = False
instance.cleanup()
self.failUnless(os.path.exists(fn))
finally:
try:
os.unlink(fn)
except OSError:
pass
def test_close_httpservers(self):
instance = self._makeOne()
class Server:
closed = False
def close(self):
self.closed = True
server = Server()
instance.httpservers = [({}, server)]
instance.close_httpservers()
self.assertEqual(server.closed, True)
def test_close_logger(self):
instance = self._makeOne()
logger = DummyLogger()
instance.logger = logger
instance.close_logger()
self.assertEqual(logger.closed, True)
def test_write_pidfile_ok(self):
fn = tempfile.mktemp()
try:
instance = self._makeOne()
instance.logger = DummyLogger()
instance.pidfile = fn
instance.write_pidfile()
self.failUnless(os.path.exists(fn))
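            # the pidfile content ends with a newline; [:-1] strips it before int()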
pid = int(open(fn, 'r').read()[:-1])
self.assertEqual(pid, os.getpid())
msg = instance.logger.data[0]
self.failUnless(msg.startswith('supervisord started with pid'))
finally:
try:
os.unlink(fn)
except OSError:
pass
def test_write_pidfile_fail(self):
fn = '/cannot/possibly/exist'
instance = self._makeOne()
instance.logger = DummyLogger()
instance.pidfile = fn
instance.write_pidfile()
msg = instance.logger.data[0]
self.failUnless(msg.startswith('could not write pidfile'))
def test_close_fd(self):
instance = self._makeOne()
innie, outie = os.pipe()
        os.read(innie, 0) # we can read it while it's open
        os.write(outie, 'foo') # we can write to it while it's open
instance.close_fd(innie)
self.assertRaises(OSError, os.read, innie, 0)
instance.close_fd(outie)
self.assertRaises(OSError, os.write, outie, 'foo')
def test_processes_from_section(self):
instance = self._makeOne()
text = lstrip("""\
[program:foo]
command = /bin/cat
priority = 1
autostart = false
autorestart = false
startsecs = 100
startretries = 100
user = root
stdout_logfile = NONE
stdout_logfile_backups = 1
stdout_logfile_maxbytes = 100MB
stdout_events_enabled = true
stopsignal = KILL
stopwaitsecs = 100
killasgroup = true
exitcodes = 1,4
redirect_stderr = false
environment = KEY1=val1,KEY2=val2,KEY3=%(process_num)s
numprocs = 2
process_name = %(group_name)s_%(program_name)s_%(process_num)02d
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
pconfigs = instance.processes_from_section(config, 'program:foo', 'bar')
self.assertEqual(len(pconfigs), 2)
pconfig = pconfigs[0]
self.assertEqual(pconfig.name, 'bar_foo_00')
self.assertEqual(pconfig.command, '/bin/cat')
self.assertEqual(pconfig.autostart, False)
self.assertEqual(pconfig.autorestart, False)
self.assertEqual(pconfig.startsecs, 100)
self.assertEqual(pconfig.startretries, 100)
self.assertEqual(pconfig.uid, 0)
self.assertEqual(pconfig.stdout_logfile, None)
self.assertEqual(pconfig.stdout_capture_maxbytes, 0)
self.assertEqual(pconfig.stdout_logfile_maxbytes, 104857600)
self.assertEqual(pconfig.stdout_events_enabled, True)
self.assertEqual(pconfig.stopsignal, signal.SIGKILL)
self.assertEqual(pconfig.stopasgroup, False)
self.assertEqual(pconfig.killasgroup, True)
self.assertEqual(pconfig.stopwaitsecs, 100)
self.assertEqual(pconfig.exitcodes, [1,4])
self.assertEqual(pconfig.redirect_stderr, False)
self.assertEqual(pconfig.environment,
{'KEY1':'val1', 'KEY2':'val2', 'KEY3':'0'})
def test_processes_from_section_host_node_name_expansion(self):
instance = self._makeOne()
text = lstrip("""\
[program:foo]
command = /bin/foo --host=%(host_node_name)s
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
pconfigs = instance.processes_from_section(config, 'program:foo', 'bar')
import platform
expected = "/bin/foo --host=" + platform.node()
self.assertEqual(pconfigs[0].command, expected)
def test_processes_from_section_environment_variables_expansion(self):
instance = self._makeOne()
text = lstrip("""\
[program:foo]
command = /bin/foo --path='%(ENV_PATH)s'
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
pconfigs = instance.processes_from_section(config, 'program:foo', 'bar')
expected = "/bin/foo --path='%s'" % os.environ['PATH']
self.assertEqual(pconfigs[0].command, expected)
def test_processes_from_section_bad_program_name_spaces(self):
instance = self._makeOne()
text = lstrip("""\
[program:spaces are bad]
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
self.assertRaises(ValueError, instance.processes_from_section,
config, 'program:spaces are bad', None)
def test_processes_from_section_bad_program_name_colons(self):
instance = self._makeOne()
text = lstrip("""\
[program:colons:are:bad]
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
self.assertRaises(ValueError, instance.processes_from_section,
config, 'program:colons:are:bad', None)
def test_processes_from_section_no_procnum_in_processname(self):
instance = self._makeOne()
text = lstrip("""\
[program:foo]
command = /bin/cat
numprocs = 2
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
self.assertRaises(ValueError, instance.processes_from_section,
config, 'program:foo', None)
def test_processes_from_section_no_command(self):
instance = self._makeOne()
text = lstrip("""\
[program:foo]
numprocs = 2
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
self.assertRaises(ValueError, instance.processes_from_section,
config, 'program:foo', None)
def test_processes_from_section_missing_replacement_in_process_name(self):
instance = self._makeOne()
text = lstrip("""\
[program:foo]
command = /bin/cat
process_name = %(not_there)s
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
self.assertRaises(ValueError, instance.processes_from_section,
config, 'program:foo', None)
def test_processes_from_section_bad_expression_in_process_name(self):
instance = self._makeOne()
text = lstrip("""\
[program:foo]
command = /bin/cat
process_name = %(program_name)
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
self.assertRaises(ValueError, instance.processes_from_section,
config, 'program:foo', None)
def test_processes_from_section_bad_chars_in_process_name(self):
instance = self._makeOne()
text = lstrip("""\
[program:foo]
command = /bin/cat
process_name = colons:are:bad
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
self.assertRaises(ValueError, instance.processes_from_section,
config, 'program:foo', None)
def test_processes_from_section_stopasgroup_implies_killasgroup(self):
instance = self._makeOne()
text = lstrip("""\
[program:foo]
command = /bin/cat
process_name = %(program_name)s
stopasgroup = true
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
pconfigs = instance.processes_from_section(config, 'program:foo', 'bar')
self.assertEqual(len(pconfigs), 1)
pconfig = pconfigs[0]
self.assertEqual(pconfig.stopasgroup, True)
self.assertEqual(pconfig.killasgroup, True)
def test_processes_from_section_killasgroup_mismatch_w_stopasgroup(self):
instance = self._makeOne()
text = lstrip("""\
[program:foo]
command = /bin/cat
process_name = %(program_name)s
stopasgroup = true
killasgroup = false
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
self.assertRaises(ValueError, instance.processes_from_section,
config, 'program:foo', None)
def test_processes_from_autolog_without_rollover(self):
instance = self._makeOne()
text = lstrip("""\
[program:foo]
command = /bin/foo
stdout_logfile = AUTO
stdout_logfile_maxbytes = 0
stderr_logfile = AUTO
stderr_logfile_maxbytes = 0
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
instance.logger = DummyLogger()
config.read_string(text)
instance.processes_from_section(config, 'program:foo', None)
self.assertEqual(instance.parse_warnings[0],
'For [program:foo], AUTO logging used for stdout_logfile '
'without rollover, set maxbytes > 0 to avoid filling up '
'filesystem unintentionally')
self.assertEqual(instance.parse_warnings[1],
'For [program:foo], AUTO logging used for stderr_logfile '
'without rollover, set maxbytes > 0 to avoid filling up '
'filesystem unintentionally')
def test_homogeneous_process_groups_from_parser(self):
text = lstrip("""\
[program:many]
process_name = %(program_name)s_%(process_num)s
command = /bin/cat
numprocs = 2
priority = 1
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
gconfigs = instance.process_groups_from_parser(config)
self.assertEqual(len(gconfigs), 1)
gconfig = gconfigs[0]
self.assertEqual(gconfig.name, 'many')
self.assertEqual(gconfig.priority, 1)
self.assertEqual(len(gconfig.process_configs), 2)
def test_event_listener_pools_from_parser(self):
text = lstrip("""\
[eventlistener:dog]
events=PROCESS_COMMUNICATION
process_name = %(program_name)s_%(process_num)s
command = /bin/dog
numprocs = 2
priority = 1
[eventlistener:cat]
events=PROCESS_COMMUNICATION
process_name = %(program_name)s_%(process_num)s
command = /bin/cat
numprocs = 3
[eventlistener:biz]
events=PROCESS_COMMUNICATION
process_name = %(program_name)s_%(process_num)s
command = /bin/biz
numprocs = 2
""")
from supervisor.options import UnhosedConfigParser
from supervisor.dispatchers import default_handler
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
gconfigs = instance.process_groups_from_parser(config)
self.assertEqual(len(gconfigs), 3)
gconfig1 = gconfigs[0]
self.assertEqual(gconfig1.name, 'biz')
self.assertEqual(gconfig1.result_handler, default_handler)
self.assertEqual(len(gconfig1.process_configs), 2)
gconfig1 = gconfigs[1]
self.assertEqual(gconfig1.name, 'cat')
self.assertEqual(gconfig1.priority, -1)
self.assertEqual(gconfig1.result_handler, default_handler)
self.assertEqual(len(gconfig1.process_configs), 3)
gconfig1 = gconfigs[2]
self.assertEqual(gconfig1.name, 'dog')
self.assertEqual(gconfig1.priority, 1)
self.assertEqual(gconfig1.result_handler, default_handler)
self.assertEqual(len(gconfig1.process_configs), 2)
def test_event_listener_pool_with_event_results_handler(self):
text = lstrip("""\
[eventlistener:dog]
events=PROCESS_COMMUNICATION
command = /bin/dog
result_handler = supervisor.tests.base:dummy_handler
""")
from supervisor.options import UnhosedConfigParser
from supervisor.tests.base import dummy_handler
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
gconfigs = instance.process_groups_from_parser(config)
self.assertEqual(len(gconfigs), 1)
gconfig1 = gconfigs[0]
self.assertEqual(gconfig1.result_handler, dummy_handler)
def test_event_listener_pool_noeventsline(self):
text = lstrip("""\
[eventlistener:dog]
process_name = %(program_name)s_%(process_num)s
command = /bin/dog
numprocs = 2
priority = 1
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
self.assertRaises(ValueError,instance.process_groups_from_parser,config)
def test_event_listener_pool_unknown_eventtype(self):
text = lstrip("""\
[eventlistener:dog]
events=PROCESS_COMMUNICATION,THIS_EVENT_TYPE_DOESNT_EXIST
process_name = %(program_name)s_%(process_num)s
command = /bin/dog
numprocs = 2
priority = 1
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
self.assertRaises(ValueError,instance.process_groups_from_parser,config)
def test_fcgi_programs_from_parser(self):
from supervisor.options import FastCGIGroupConfig
from supervisor.options import FastCGIProcessConfig
text = lstrip("""\
[fcgi-program:foo]
socket = unix:///tmp/%(program_name)s.sock
socket_owner = testuser:testgroup
socket_mode = 0666
process_name = %(program_name)s_%(process_num)s
command = /bin/foo
numprocs = 2
priority = 1
[fcgi-program:bar]
socket = unix:///tmp/%(program_name)s.sock
process_name = %(program_name)s_%(process_num)s
command = /bin/bar
user = testuser
numprocs = 3
[fcgi-program:flub]
socket = unix:///tmp/%(program_name)s.sock
command = /bin/flub
[fcgi-program:cub]
socket = tcp://localhost:6000
command = /bin/cub
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
        # Patch pwd and grp module functions to give us sentinel
        # uid/gid values so that the test does not depend on
        # any specific system users
pwd_mock = Mock()
pwd_mock.return_value = (None, None, sentinel.uid, sentinel.gid)
grp_mock = Mock()
grp_mock.return_value = (None, None, sentinel.gid)
@patch('pwd.getpwuid', pwd_mock)
@patch('pwd.getpwnam', pwd_mock)
@patch('grp.getgrnam', grp_mock)
def get_process_groups(instance, config):
return instance.process_groups_from_parser(config)
gconfigs = get_process_groups(instance, config)
exp_owner = (sentinel.uid, sentinel.gid)
self.assertEqual(len(gconfigs), 4)
gconf_foo = gconfigs[0]
self.assertEqual(gconf_foo.__class__, FastCGIGroupConfig)
self.assertEqual(gconf_foo.name, 'foo')
self.assertEqual(gconf_foo.priority, 1)
self.assertEqual(gconf_foo.socket_config.url,
'unix:///tmp/foo.sock')
self.assertEqual(exp_owner, gconf_foo.socket_config.get_owner())
self.assertEqual(0666, gconf_foo.socket_config.get_mode())
self.assertEqual(len(gconf_foo.process_configs), 2)
pconfig_foo = gconf_foo.process_configs[0]
self.assertEqual(pconfig_foo.__class__, FastCGIProcessConfig)
gconf_bar = gconfigs[1]
self.assertEqual(gconf_bar.name, 'bar')
self.assertEqual(gconf_bar.priority, 999)
self.assertEqual(gconf_bar.socket_config.url,
'unix:///tmp/bar.sock')
self.assertEqual(exp_owner, gconf_bar.socket_config.get_owner())
self.assertEqual(0700, gconf_bar.socket_config.get_mode())
self.assertEqual(len(gconf_bar.process_configs), 3)
gconf_cub = gconfigs[2]
self.assertEqual(gconf_cub.name, 'cub')
self.assertEqual(gconf_cub.socket_config.url,
'tcp://localhost:6000')
self.assertEqual(len(gconf_cub.process_configs), 1)
gconf_flub = gconfigs[3]
self.assertEqual(gconf_flub.name, 'flub')
self.assertEqual(gconf_flub.socket_config.url,
'unix:///tmp/flub.sock')
self.assertEqual(None, gconf_flub.socket_config.get_owner())
self.assertEqual(0700, gconf_flub.socket_config.get_mode())
self.assertEqual(len(gconf_flub.process_configs), 1)
def test_fcgi_program_no_socket(self):
text = lstrip("""\
[fcgi-program:foo]
process_name = %(program_name)s_%(process_num)s
command = /bin/foo
numprocs = 2
priority = 1
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
self.assertRaises(ValueError,instance.process_groups_from_parser,config)
def test_fcgi_program_unknown_socket_protocol(self):
text = lstrip("""\
[fcgi-program:foo]
socket=junk://blah
process_name = %(program_name)s_%(process_num)s
command = /bin/foo
numprocs = 2
priority = 1
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
self.assertRaises(ValueError,instance.process_groups_from_parser,config)
def test_fcgi_program_rel_unix_sock_path(self):
text = lstrip("""\
[fcgi-program:foo]
socket=unix://relative/path
process_name = %(program_name)s_%(process_num)s
command = /bin/foo
numprocs = 2
priority = 1
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
self.assertRaises(ValueError,instance.process_groups_from_parser,config)
def test_fcgi_program_bad_tcp_sock_format(self):
text = lstrip("""\
[fcgi-program:foo]
socket=tcp://missingport
process_name = %(program_name)s_%(process_num)s
command = /bin/foo
numprocs = 2
priority = 1
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
self.assertRaises(ValueError,instance.process_groups_from_parser,config)
def test_fcgi_program_bad_expansion_proc_num(self):
text = lstrip("""\
[fcgi-program:foo]
socket=unix:///tmp/%(process_num)s.sock
process_name = %(program_name)s_%(process_num)s
command = /bin/foo
numprocs = 2
priority = 1
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
self.assertRaises(ValueError,instance.process_groups_from_parser,config)
def test_fcgi_program_socket_owner_set_for_tcp(self):
text = lstrip("""\
[fcgi-program:foo]
socket=tcp://localhost:8000
socket_owner=nobody:nobody
command = /bin/foo
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
self.assertRaises(ValueError,instance.process_groups_from_parser,config)
def test_fcgi_program_socket_mode_set_for_tcp(self):
text = lstrip("""\
[fcgi-program:foo]
socket = tcp://localhost:8000
socket_mode = 0777
command = /bin/foo
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
self.assertRaises(ValueError,instance.process_groups_from_parser,config)
def test_fcgi_program_bad_socket_owner(self):
text = lstrip("""\
[fcgi-program:foo]
socket = unix:///tmp/foo.sock
socket_owner = sometotaljunkuserthatshouldnobethere
command = /bin/foo
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
self.assertRaises(ValueError,instance.process_groups_from_parser,config)
def test_fcgi_program_bad_socket_mode(self):
text = lstrip("""\
[fcgi-program:foo]
socket = unix:///tmp/foo.sock
socket_mode = junk
command = /bin/foo
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
self.assertRaises(ValueError,instance.process_groups_from_parser,config)
def test_heterogeneous_process_groups_from_parser(self):
text = lstrip("""\
[program:one]
command = /bin/cat
[program:two]
command = /bin/cat
[group:thegroup]
programs = one,two
priority = 5
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
gconfigs = instance.process_groups_from_parser(config)
self.assertEqual(len(gconfigs), 1)
gconfig = gconfigs[0]
self.assertEqual(gconfig.name, 'thegroup')
self.assertEqual(gconfig.priority, 5)
self.assertEqual(len(gconfig.process_configs), 2)
def test_mixed_process_groups_from_parser1(self):
text = lstrip("""\
[program:one]
command = /bin/cat
[program:two]
command = /bin/cat
[program:many]
process_name = %(program_name)s_%(process_num)s
command = /bin/cat
numprocs = 2
priority = 1
[group:thegroup]
programs = one,two
priority = 5
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
gconfigs = instance.process_groups_from_parser(config)
self.assertEqual(len(gconfigs), 2)
manyconfig = gconfigs[0]
self.assertEqual(manyconfig.name, 'many')
self.assertEqual(manyconfig.priority, 1)
self.assertEqual(len(manyconfig.process_configs), 2)
gconfig = gconfigs[1]
self.assertEqual(gconfig.name, 'thegroup')
self.assertEqual(gconfig.priority, 5)
self.assertEqual(len(gconfig.process_configs), 2)
def test_mixed_process_groups_from_parser2(self):
text = lstrip("""\
[program:one]
command = /bin/cat
[program:two]
command = /bin/cat
[program:many]
process_name = %(program_name)s_%(process_num)s
command = /bin/cat
numprocs = 2
priority = 1
[group:thegroup]
programs = one,two, many
priority = 5
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
gconfigs = instance.process_groups_from_parser(config)
self.assertEqual(len(gconfigs), 1)
gconfig = gconfigs[0]
self.assertEqual(gconfig.name, 'thegroup')
self.assertEqual(gconfig.priority, 5)
self.assertEqual(len(gconfig.process_configs), 4)
def test_unknown_program_in_heterogeneous_group(self):
text = lstrip("""\
[program:one]
command = /bin/cat
[group:foo]
programs = notthere
""")
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
self.assertRaises(ValueError, instance.process_groups_from_parser,
config)
def test_rpcinterfaces_from_parser(self):
text = lstrip("""\
[rpcinterface:dummy]
supervisor.rpcinterface_factory = %s
foo = bar
""" % __name__)
from supervisor.options import UnhosedConfigParser
config = UnhosedConfigParser()
config.read_string(text)
instance = self._makeOne()
factories = instance.get_plugins(config,
'supervisor.rpcinterface_factory',
'rpcinterface:')
self.assertEqual(len(factories), 1)
factory = factories[0]
self.assertEqual(factory[0], 'dummy')
self.assertEqual(factory[1], sys.modules[__name__])
self.assertEqual(factory[2], {'foo':'bar'})
def test_clear_autochildlogdir(self):
dn = tempfile.mkdtemp()
try:
instance = self._makeOne()
instance.childlogdir = dn
sid = 'supervisor'
instance.identifier = sid
logfn = instance.get_autochildlog_name('foo', sid,'stdout')
first = logfn + '.1'
second = logfn + '.2'
open(first, 'w')
open(second, 'w')
instance.clear_autochildlogdir()
self.failIf(os.path.exists(logfn))
self.failIf(os.path.exists(first))
self.failIf(os.path.exists(second))
finally:
shutil.rmtree(dn)
def test_clear_autochildlog_oserror(self):
instance = self._makeOne()
instance.childlogdir = '/tmp/this/cant/possibly/existjjjj'
instance.logger = DummyLogger()
instance.clear_autochildlogdir()
self.assertEqual(instance.logger.data, ['Could not clear childlog dir'])
def test_openhttpservers_reports_friendly_usage_when_eaddrinuse(self):
supervisord = DummySupervisor()
instance = self._makeOne()
def raise_eaddrinuse(supervisord):
raise socket.error(errno.EADDRINUSE)
instance.make_http_servers = raise_eaddrinuse
recorder = []
def record_usage(message):
recorder.append(message)
instance.usage = record_usage
instance.openhttpservers(supervisord)
self.assertEqual(len(recorder), 1)
expected = 'Another program is already listening'
self.assertTrue(recorder[0].startswith(expected))
def test_openhttpservers_reports_socket_error_with_errno(self):
supervisord = DummySupervisor()
instance = self._makeOne()
def make_http_servers(supervisord):
raise socket.error(errno.EPERM)
instance.make_http_servers = make_http_servers
recorder = []
def record_usage(message):
recorder.append(message)
instance.usage = record_usage
instance.openhttpservers(supervisord)
self.assertEqual(len(recorder), 1)
expected = ('Cannot open an HTTP server: socket.error '
'reported errno.EPERM (%d)' % errno.EPERM)
self.assertEqual(recorder[0], expected)
def test_openhttpservers_reports_other_socket_errors(self):
supervisord = DummySupervisor()
instance = self._makeOne()
def make_http_servers(supervisord):
raise socket.error('uh oh')
instance.make_http_servers = make_http_servers
recorder = []
def record_usage(message):
recorder.append(message)
instance.usage = record_usage
instance.openhttpservers(supervisord)
self.assertEqual(len(recorder), 1)
expected = ('Cannot open an HTTP server: socket.error '
'reported uh oh')
self.assertEqual(recorder[0], expected)
def test_openhttpservers_reports_value_errors(self):
supervisord = DummySupervisor()
instance = self._makeOne()
def make_http_servers(supervisord):
raise ValueError('not prefixed with help')
instance.make_http_servers = make_http_servers
recorder = []
def record_usage(message):
recorder.append(message)
instance.usage = record_usage
instance.openhttpservers(supervisord)
self.assertEqual(len(recorder), 1)
expected = 'not prefixed with help'
self.assertEqual(recorder[0], expected)
def test_openhttpservers_does_not_catch_other_exception_types(self):
supervisord = DummySupervisor()
instance = self._makeOne()
def make_http_servers(supervisord):
raise OverflowError
instance.make_http_servers = make_http_servers
# this scenario probably means a bug in supervisor. we dump
# all the gory details on the poor user for troubleshooting
self.assertRaises(OverflowError,
instance.openhttpservers, supervisord)
def test_dropPrivileges_user_none(self):
instance = self._makeOne()
msg = instance.dropPrivileges(None)
self.assertEqual(msg, "No user specified to setuid to!")
@patch('pwd.getpwuid', Mock(return_value=["foo", None, 12, 34]))
@patch('os.getuid', Mock(return_value=12))
def test_dropPrivileges_nonroot_same_user(self):
instance = self._makeOne()
msg = instance.dropPrivileges(os.getuid())
self.assertEqual(msg, None) # no error if same user
@patch('pwd.getpwuid', Mock(return_value=["foo", None, 55, 34]))
@patch('os.getuid', Mock(return_value=12))
def test_dropPrivileges_nonroot_different_user(self):
instance = self._makeOne()
msg = instance.dropPrivileges(42)
self.assertEqual(msg, "Can't drop privilege as nonroot user")
class TestProcessConfig(unittest.TestCase):
def _getTargetClass(self):
from supervisor.options import ProcessConfig
return ProcessConfig
def _makeOne(self, *arg, **kw):
defaults = {}
for name in ('name', 'command', 'directory', 'umask',
'priority', 'autostart', 'autorestart',
'startsecs', 'startretries', 'uid',
'stdout_logfile', 'stdout_capture_maxbytes',
'stdout_events_enabled',
'stdout_logfile_backups', 'stdout_logfile_maxbytes',
'stderr_logfile', 'stderr_capture_maxbytes',
'stderr_events_enabled',
'stderr_logfile_backups', 'stderr_logfile_maxbytes',
'stopsignal', 'stopwaitsecs', 'stopasgroup', 'killasgroup', 'exitcodes',
'redirect_stderr', 'environment'):
defaults[name] = name
defaults.update(kw)
return self._getTargetClass()(*arg, **defaults)
def test_create_autochildlogs(self):
options = DummyOptions()
instance = self._makeOne(options)
from supervisor.datatypes import Automatic
instance.stdout_logfile = Automatic
instance.stderr_logfile = Automatic
instance.create_autochildlogs()
self.assertEqual(instance.stdout_logfile, options.tempfile_name)
self.assertEqual(instance.stderr_logfile, options.tempfile_name)
def test_make_process(self):
options = DummyOptions()
instance = self._makeOne(options)
process = instance.make_process()
from supervisor.process import Subprocess
self.assertEqual(process.__class__, Subprocess)
self.assertEqual(process.group, None)
def test_make_process_with_group(self):
options = DummyOptions()
instance = self._makeOne(options)
process = instance.make_process('abc')
from supervisor.process import Subprocess
self.assertEqual(process.__class__, Subprocess)
self.assertEqual(process.group, 'abc')
def test_make_dispatchers_stderr_not_redirected(self):
options = DummyOptions()
instance = self._makeOne(options)
instance.redirect_stderr = False
process1 = DummyProcess(instance)
dispatchers, pipes = instance.make_dispatchers(process1)
self.assertEqual(dispatchers[5].channel, 'stdout')
from supervisor.events import ProcessCommunicationStdoutEvent
self.assertEqual(dispatchers[5].event_type,
ProcessCommunicationStdoutEvent)
self.assertEqual(pipes['stdout'], 5)
self.assertEqual(dispatchers[7].channel, 'stderr')
from supervisor.events import ProcessCommunicationStderrEvent
self.assertEqual(dispatchers[7].event_type,
ProcessCommunicationStderrEvent)
self.assertEqual(pipes['stderr'], 7)
def test_make_dispatchers_stderr_redirected(self):
options = DummyOptions()
instance = self._makeOne(options)
process1 = DummyProcess(instance)
dispatchers, pipes = instance.make_dispatchers(process1)
self.assertEqual(dispatchers[5].channel, 'stdout')
self.assertEqual(pipes['stdout'], 5)
self.assertEqual(pipes['stderr'], None)
class FastCGIProcessConfigTest(unittest.TestCase):
def _getTargetClass(self):
from supervisor.options import FastCGIProcessConfig
return FastCGIProcessConfig
def _makeOne(self, *arg, **kw):
defaults = {}
for name in ('name', 'command', 'directory', 'umask',
'priority', 'autostart', 'autorestart',
'startsecs', 'startretries', 'uid',
'stdout_logfile', 'stdout_capture_maxbytes',
'stdout_events_enabled',
'stdout_logfile_backups', 'stdout_logfile_maxbytes',
'stderr_logfile', 'stderr_capture_maxbytes',
'stderr_events_enabled',
'stderr_logfile_backups', 'stderr_logfile_maxbytes',
'stopsignal', 'stopwaitsecs', 'stopasgroup', 'killasgroup', 'exitcodes',
'redirect_stderr', 'environment'):
defaults[name] = name
defaults.update(kw)
return self._getTargetClass()(*arg, **defaults)
def test_make_process(self):
options = DummyOptions()
instance = self._makeOne(options)
self.assertRaises(NotImplementedError, instance.make_process)
def test_make_process_with_group(self):
options = DummyOptions()
instance = self._makeOne(options)
process = instance.make_process('abc')
from supervisor.process import FastCGISubprocess
self.assertEqual(process.__class__, FastCGISubprocess)
self.assertEqual(process.group, 'abc')
def test_make_dispatchers(self):
options = DummyOptions()
instance = self._makeOne(options)
instance.redirect_stderr = False
process1 = DummyProcess(instance)
dispatchers, pipes = instance.make_dispatchers(process1)
self.assertEqual(dispatchers[4].channel, 'stdin')
self.assertEqual(dispatchers[4].closed, True)
self.assertEqual(dispatchers[5].channel, 'stdout')
from supervisor.events import ProcessCommunicationStdoutEvent
self.assertEqual(dispatchers[5].event_type,
ProcessCommunicationStdoutEvent)
self.assertEqual(pipes['stdout'], 5)
self.assertEqual(dispatchers[7].channel, 'stderr')
from supervisor.events import ProcessCommunicationStderrEvent
self.assertEqual(dispatchers[7].event_type,
ProcessCommunicationStderrEvent)
self.assertEqual(pipes['stderr'], 7)
class ProcessGroupConfigTests(unittest.TestCase):
def _getTargetClass(self):
from supervisor.options import ProcessGroupConfig
return ProcessGroupConfig
def _makeOne(self, options, name, priority, pconfigs):
return self._getTargetClass()(options, name, priority, pconfigs)
def test_ctor(self):
options = DummyOptions()
instance = self._makeOne(options, 'whatever', 999, [])
self.assertEqual(instance.options, options)
self.assertEqual(instance.name, 'whatever')
self.assertEqual(instance.priority, 999)
self.assertEqual(instance.process_configs, [])
def test_after_setuid(self):
options = DummyOptions()
pconfigs = [DummyPConfig(options, 'process1', '/bin/process1')]
instance = self._makeOne(options, 'whatever', 999, pconfigs)
instance.after_setuid()
self.assertEqual(pconfigs[0].autochildlogs_created, True)
def test_make_group(self):
options = DummyOptions()
pconfigs = [DummyPConfig(options, 'process1', '/bin/process1')]
instance = self._makeOne(options, 'whatever', 999, pconfigs)
group = instance.make_group()
from supervisor.process import ProcessGroup
self.assertEqual(group.__class__, ProcessGroup)
class FastCGIGroupConfigTests(unittest.TestCase):
def _getTargetClass(self):
from supervisor.options import FastCGIGroupConfig
return FastCGIGroupConfig
def _makeOne(self, *args, **kw):
return self._getTargetClass()(*args, **kw)
def test_ctor(self):
options = DummyOptions()
sock_config = DummySocketConfig(6)
instance = self._makeOne(options, 'whatever', 999, [], sock_config)
self.assertEqual(instance.options, options)
self.assertEqual(instance.name, 'whatever')
self.assertEqual(instance.priority, 999)
self.assertEqual(instance.process_configs, [])
self.assertEqual(instance.socket_config, sock_config)
def test_same_sockets_are_equal(self):
options = DummyOptions()
sock_config1 = DummySocketConfig(6)
instance1 = self._makeOne(options, 'whatever', 999, [], sock_config1)
sock_config2 = DummySocketConfig(6)
instance2 = self._makeOne(options, 'whatever', 999, [], sock_config2)
self.assertTrue(instance1 == instance2)
self.assertFalse(instance1 != instance2)
def test_diff_sockets_are_not_equal(self):
options = DummyOptions()
sock_config1 = DummySocketConfig(6)
instance1 = self._makeOne(options, 'whatever', 999, [], sock_config1)
sock_config2 = DummySocketConfig(7)
instance2 = self._makeOne(options, 'whatever', 999, [], sock_config2)
self.assertTrue(instance1 != instance2)
self.assertFalse(instance1 == instance2)
class SignalReceiverTests(unittest.TestCase):
def test_returns_None_initially(self):
from supervisor.options import SignalReceiver
sr = SignalReceiver()
self.assertEquals(sr.get_signal(), None)
def test_returns_signals_in_order_received(self):
from supervisor.options import SignalReceiver
sr = SignalReceiver()
sr.receive(signal.SIGTERM, 'frame')
sr.receive(signal.SIGCHLD, 'frame')
self.assertEquals(sr.get_signal(), signal.SIGTERM)
self.assertEquals(sr.get_signal(), signal.SIGCHLD)
self.assertEquals(sr.get_signal(), None)
def test_does_not_queue_duplicate_signals(self):
from supervisor.options import SignalReceiver
sr = SignalReceiver()
sr.receive(signal.SIGTERM, 'frame')
sr.receive(signal.SIGTERM, 'frame')
self.assertEquals(sr.get_signal(), signal.SIGTERM)
self.assertEquals(sr.get_signal(), None)
def test_queues_again_after_being_emptied(self):
from supervisor.options import SignalReceiver
sr = SignalReceiver()
sr.receive(signal.SIGTERM, 'frame')
self.assertEquals(sr.get_signal(), signal.SIGTERM)
self.assertEquals(sr.get_signal(), None)
sr.receive(signal.SIGCHLD, 'frame')
self.assertEquals(sr.get_signal(), signal.SIGCHLD)
self.assertEquals(sr.get_signal(), None)
class UtilFunctionsTests(unittest.TestCase):
def test_make_namespec(self):
from supervisor.options import make_namespec
self.assertEquals(make_namespec('group', 'process'), 'group:process')
self.assertEquals(make_namespec('process', 'process'), 'process')
def test_split_namespec(self):
from supervisor.options import split_namespec
s = split_namespec
self.assertEquals(s('process:group'), ('process', 'group'))
self.assertEquals(s('process'), ('process', 'process'))
self.assertEquals(s('group:'), ('group', None))
self.assertEquals(s('group:*'), ('group', None))
def test_suite():
return unittest.findTestCases(sys.modules[__name__])
if __name__ == '__main__':
unittest.main(defaultTest='test_suite')
``` |
{
"source": "Jimdo/thumbor",
"score": 2
} |
#### File: thumbor/vows/base_handler_vows.py
```python
from pyvows import Vows, expect
from thumbor.handlers import BaseHandler
from thumbor.context import Context
from thumbor.config import Config
from thumbor.app import ThumborServiceApp
from mock import MagicMock
@Vows.batch
class BaseHandlerVows(Vows.Context):
class ShouldStoreHeaderOnContext(Vows.Context):
def topic(self):
ctx = Context(None, Config(), None)
application = ThumborServiceApp(ctx)
handler = BaseHandler(application, MagicMock())
handler._transforms = []
return handler
        def should_write_expected_headers(self, topic):
mocked_context = MagicMock(**{'config.MAX_AGE_TEMP_IMAGE': 30})
topic._write_results_to_client(mocked_context, '', 'image/jpeg')
expect(mocked_context.headers).to_include('Expires')
expect(mocked_context.headers).to_include('Server')
expect(mocked_context.headers).to_include('Cache-Control')
expect(mocked_context.headers['Content-Type']).to_equal('image/jpeg')
``` |
{
"source": "jimdowling/airflow-chef",
"score": 2
} |
#### File: hopsworks_plugin/operators/hopsworks_operator.py
```python
import os
import time
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
from airflow.exceptions import AirflowException
from hopsworks_plugin.hooks.hopsworks_hook import HopsworksHook
class HopsworksAbstractOperator(BaseOperator):
"""
Abstract Hopsworks operator for some common functionalities across all operators
:param hopsworks_conn_id: HTTP connection identifier for Hopsworks
:type hopsworks_conn_id: str
:param project_id: Hopsworks Project ID this job is associated with. Either this or project_name.
:type project_id: int
:param project_name: Hopsworks Project name this job is associated with. Either this or project_id.
:type project_name: str
"""
def __init__(
self,
hopsworks_conn_id = 'hopsworks_default',
project_id = None,
project_name = None,
*args,
**kwargs):
super(HopsworksAbstractOperator, self).__init__(*args, **kwargs)
self.hopsworks_conn_id = hopsworks_conn_id
self.project_id = project_id
self.project_name = project_name
if 'hw_api_key' in self.params:
self.hw_api_key = self.params['hw_api_key']
else:
self.hw_api_key = None
def _get_hook(self):
return HopsworksHook(self.hopsworks_conn_id, self.project_id, self.project_name, self.owner, self.hw_api_key)
class HopsworksFeatureValidationResult(HopsworksAbstractOperator):
"""
Operator to fetch data validation result of a Feature Group.
Data Validation job is launched externally either manually or
by HopsworksLaunchOperator. By default the task will fail if the
validation result is not Success.
:param hopsworks_conn_id: HTTP connection identifier for Hopsworks
:type hopsworks_conn_id: str
:param project_id: Hopsworks Project ID this job is associated with. Either this or project_name.
:type project_id: int
:param project_name: Hopsworks Project name this job is associated with. Either this or project_id.
:type project_name: str
:param feature_store_name: Optional name of the Feature Store the feature group belongs to
:type feature_store_name: str
:param feature_group_name: Name of the Feature Group the validation has performed
:type feature_group_name: str
    :param ignore_result: If True, a failed validation does not fail the task; default is False (a failed validation fails the task)
:type ignore_result: boolean
"""
FEATURE_STORE_SUFFIX = "_featurestore"
SUCCESS_STATUS = "Success"
@apply_defaults
def __init__(
self,
hopsworks_conn_id = 'hopsworks_default',
project_id = None,
project_name = None,
feature_store_name = None,
feature_group_name = None,
ignore_result = False,
*args,
**kwargs):
super(HopsworksFeatureValidationResult, self).__init__(hopsworks_conn_id,
project_id,
project_name,
*args,
**kwargs)
if feature_store_name:
self.feature_store_name = feature_store_name
else:
self.feature_store_name = project_name.lower() + HopsworksFeatureValidationResult.FEATURE_STORE_SUFFIX
self.feature_group_name = feature_group_name
self.ignore_result = ignore_result
def execute(self, context):
hook = self._get_hook()
feature_store_id = hook.get_feature_store_id_by_name(self.feature_store_name)
feature_group_id = hook.get_feature_group_id_by_name(feature_store_id, self.feature_group_name)
validation_result = hook.get_feature_group_validation_result(feature_store_id, feature_group_id)
        if 'status' not in validation_result:
raise AirflowException("Data validation result for {0}/{1} does NOT have status".format(self.feature_store_name, self.feature_group_name))
validation_status = validation_result['status']
if not self.ignore_result and HopsworksFeatureValidationResult.SUCCESS_STATUS != validation_status:
raise AirflowException("Feature validation has failed with status: {0} for {0}/{1}"
.format(validation_status, self.feature_store_name, self.feature_group_name))
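# Usage sketch (illustrative, not part of the original module): wiring this operator
# into a DAG. The task id, DAG object and names below are placeholders/assumptions.
#   validate_fg = HopsworksFeatureValidationResult(
#       task_id='validate_sales_fg',
#       hopsworks_conn_id='hopsworks_default',
#       project_name='demo_project',
#       feature_group_name='sales_fg',
#       dag=dag)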
class HopsworksLaunchOperator(HopsworksAbstractOperator):
"""
Basic operator to launch jobs on Hadoop through Hopsworks
Jobs should have already been created in Hopsworks
:param hopsworks_conn_id: HTTP connection identifier for Hopsworks
:type hopsworks_conn_id: str
:param project_id: Hopsworks Project ID this job is associated with. Either this or project_name.
:type project_id: int
:param project_name: Hopsworks Project name this job is associated with. Either this or project_id.
:type project_name: str
:param job_name: Name of the job in Hopsworks
:type job_name: str
:param wait_for_completion: Operator will wait until the job finishes
:type wait_for_completion: boolean
:param poke_interval_s: Interval in seconds to poke for job status
:type poke_interval_s: int
:param wait_timeout_s: Throw an exception if timeout has reached and job hasn't finished yet
:type wait_timeout_s: int
:param wait_for_state: Set of final states to wait for {'FINISHED', 'FAILED', 'KILLED', 'FRAMEWORK_FAILURE',
'APP_MASTER_START_FAILED', 'INITIALIZATION_FAILED'}
:type wait_for_state: set
:param ignore_failure: Do not fail the task if Job has failed
:type ignore_failure: boolean
"""
SUCCESS_APP_STATE = {'FINISHED'}
FAILED_APP_STATE = {'FAILED', 'KILLED', 'FRAMEWORK_FAILURE', 'APP_MASTER_START_FAILED', 'INITIALIZATION_FAILED'}
FINAL_APP_STATE = SUCCESS_APP_STATE.union(FAILED_APP_STATE)
FAILED_AM_STATUS = {'FAILED', 'KILLED'}
@apply_defaults
def __init__(
self,
hopsworks_conn_id = 'hopsworks_default',
job_name = None,
project_id = None,
project_name = None,
wait_for_completion = True,
poke_interval_s = 1,
wait_timeout_s = -1,
wait_for_state = FINAL_APP_STATE,
ignore_failure = False,
job_arguments = None,
*args,
**kwargs):
super(HopsworksLaunchOperator, self).__init__(hopsworks_conn_id,
project_id,
project_name,
*args,
**kwargs)
self.job_name = job_name
self.wait_for_completion = wait_for_completion
self.poke_interval_s = poke_interval_s if poke_interval_s > 0 else 1
self.wait_timeout_s = wait_timeout_s
self.wait_for_state = wait_for_state
self.ignore_failure = ignore_failure
self.job_arguments = job_arguments
def execute(self, context):
hook = self._get_hook()
self.log.debug("Launching job %s", self.job_name)
hook.launch_job(self.job_name, self.job_arguments)
if self.wait_for_completion:
self.log.debug("Waiting for job completion")
time.sleep(5)
wait_timeout = self.wait_timeout_s
while True:
time.sleep(self.poke_interval_s)
app_state, am_status = hook.get_job_state(self.job_name)
if not self.ignore_failure and self._has_failed(app_state, am_status):
raise AirflowException(("Task failed because Job {0} failed with application state {1} " +
"and application master status {2}")
.format(self.job_name, app_state, am_status))
if self._has_finished(app_state):
self.log.debug("Job %s finished", self.job_name)
return
self.log.debug("Job %s has not finished yet, waiting...", self.job_name)
if self.wait_timeout_s > -1:
wait_timeout -= self.poke_interval_s
if wait_timeout < 0:
raise AirflowException("Timeout has been reached while waiting for job {0} to finish"
.format(self.job_name))
def _has_finished(self, app_state):
self.log.debug("Job state is %s", app_state)
return app_state.upper() in self.wait_for_state
def _has_failed(self, app_state, am_status):
return app_state.upper() in HopsworksLaunchOperator.FAILED_APP_STATE or \
(app_state.upper() in HopsworksLaunchOperator.SUCCESS_APP_STATE and \
am_status.upper() in HopsworksLaunchOperator.FAILED_AM_STATUS)
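# Usage sketch (illustrative, not part of the original module): launching an existing
# Hopsworks job from a DAG. The task id, DAG object and job name are placeholders.
#   run_job = HopsworksLaunchOperator(
#       task_id='run_feature_engineering',
#       hopsworks_conn_id='hopsworks_default',
#       project_name='demo_project',
#       job_name='feature_engineering_job',
#       wait_for_completion=True,
#       dag=dag)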
class HopsworksModelServingInstance(HopsworksAbstractOperator):
"""
Hopsworks operator to administer model serving instances in Hopsworks.
You can create a new model serving instance, update an existing one or
stop an instance.
:param hopsworks_conn_id: HTTP connection identifier for Hopsworks
:type hopsworks_conn_id: str
    :param project_id: Hopsworks Project ID this job is associated with. Either this or project_name.
    :type project_id: int
    :param project_name: Hopsworks Project name this job is associated with. Either this or project_id.
:type project_name: str
:param model_name: Name of the model to be served
:type model_name: str
:param artifact_path: Path in HDFS where the model is saved
:type artifact_path: str
:param model_version: Model version to serve, defaults to 1
:type model_version: int
:param action: Action to perform after creating or updating an instance
Available actions are START and STOP
:type action: str
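    :param serving_type: Type of serving server to use, defaults to TENSORFLOW
    :type serving_type: str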
:param batching_enabled: Enable batch for model serving
:type batching_enabled: boolean
:param serving_instances: Relevant only when Kubernetes is deployed. Number of
serving instances to be created for serving the model.
:param kafka_topic_name: Kafka topic name to publish serving requests. Possible values are:
NONE: Do not create a Kafka topic. Serving requests will not be published
CREATE: Create a new unique Kafka topic
KAFKA_TOPIC_NAME: Name of an existing Kafka topic
:type kafka_topic_name: str
:param kafka_num_partitions: Number of partitions when creating a new Kafka topic. Cannot be updated.
:type kafka_num_partitions: int
:param kafka_num_replicas: Number of replicas when creating a new kafka topic. Cannot be updated.
:type kafka_num_replicas: int
"""
serving_actions = ["START", "STOP"]
@apply_defaults
def __init__(
self,
hopsworks_conn_id = 'hopsworks_default',
project_id = None,
project_name = None,
model_name = None,
artifact_path = None,
model_version = 1,
action = "START",
serving_type = "TENSORFLOW",
batching_enabled = False,
serving_instances = 1,
kafka_topic_name = None,
kafka_num_partitions = 1,
kafka_num_replicas = 1,
*args,
**kwargs):
super(HopsworksModelServingInstance, self).__init__(hopsworks_conn_id,
project_id,
project_name,
*args,
**kwargs)
self.model_name = model_name
self.artifact_path = artifact_path
self.model_version = model_version
self.action = action
self.serving_type = serving_type.upper()
self.batching_enabled = batching_enabled
self.serving_instances = serving_instances
self.kafka_topic_name = kafka_topic_name
self.kafka_num_partitions = kafka_num_partitions
self.kafka_num_replicas = kafka_num_replicas
def execute(self, context):
        if self.action.upper() not in HopsworksModelServingInstance.serving_actions:
            raise AirflowException("Unknown model serving action {0}. Valid actions are: START, STOP"
.format(self.action))
if self.model_name is None:
raise AirflowException("Model name cannot be empty")
hook = self._get_hook()
serving_instance = self._find_model_serving_instance_by_model_name(hook, self.model_name)
if self.action.upper() == "START":
self._start_model_serving(hook, serving_instance)
elif self.action.upper() == "STOP":
self._stop_model_serving(hook, serving_instance)
def _start_model_serving(self, hook, serving_instance):
serving_params = {}
kafka_topic_params = {}
if serving_instance:
# If serving instance with the same name exists,
# update it instead of creating a new one
serving_params['id'] = serving_instance['id']
if self.kafka_topic_name:
# If user provided a new Kafka topic name, update it
kafka_topic_params['name'] = self.kafka_topic_name
kafka_topic_params['numOfPartitions'] = self.kafka_num_partitions
kafka_topic_params['numOfReplicas'] = self.kafka_num_replicas
else:
# Otherwise use the previous if it had any
stored_kafka_params = serving_instance.get('kafkaTopicDTO', None)
if stored_kafka_params:
kafka_topic_params['name'] = stored_kafka_params['name']
else:
kafka_topic_params['name'] = self.kafka_topic_name if self.kafka_topic_name else "NONE"
kafka_topic_params['numOfPartitions'] = self.kafka_num_partitions
kafka_topic_params['numOfReplicas'] = self.kafka_num_replicas
serving_params['kafkaTopicDTO'] = kafka_topic_params
serving_params['batchingEnabled'] = self.batching_enabled
serving_params['name'] = self.model_name
serving_params['artifactPath'] = self.artifact_path
serving_params['modelVersion'] = self.model_version
serving_params['requestedInstances'] = self.serving_instances
serving_params['servingType'] = self.serving_type
self.log.debug("Create model serving parameters: %s", serving_params)
hook.create_update_serving_instance(serving_params)
# If instance does not exist, start it
# If instance exists, it is an update and Hopsworks
# will handle restarting the serving instance
if not serving_instance:
# Get all model serving instances to get the ID of the newly created instance
serving_instance = self._find_model_serving_instance_by_model_name(hook, self.model_name)
self.log.debug("Starting model serving instance %s", self.model_name)
hook.start_model_serving_instance(serving_instance['id'])
def _stop_model_serving(self, hook, serving_instance):
if not serving_instance:
raise AirflowException("Trying to stop model serving instance, but instance does not exist!")
self.log.debug("Stopping model serving instance %s", serving_instance['modelName'])
hook.stop_model_serving_instance(serving_instance['id'])
def _find_model_serving_instance_by_model_name(self, hook, model_name):
serving_instances = hook.get_model_serving_instances()
for si in serving_instances:
if model_name == si['name']:
return si
return None
``` |
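A minimal usage sketch for the serving operator above, wired into an Airflow DAG. The import path, connection id, project, model and artifact values below are assumptions for illustration, not taken from the source:
```python
from datetime import datetime

from airflow import DAG
# Assumed import path for the operator defined above.
from hopsworks_plugin.operators.hopsworks_operator import HopsworksModelServingInstance

with DAG("deploy_mnist_serving",
         start_date=datetime(2021, 1, 1),
         schedule_interval=None) as dag:

    start_serving = HopsworksModelServingInstance(
        task_id="start_mnist_serving",
        hopsworks_conn_id="hopsworks_default",   # assumed Airflow connection
        project_name="demo_ml",                  # illustrative project
        model_name="mnist",                      # illustrative model
        artifact_path="Models/mnist",            # illustrative artifact path
        model_version=1,
        action="START",
        serving_type="TENSORFLOW",
        kafka_topic_name="CREATE",               # per the docstring: create a unique topic
        kafka_num_partitions=1,
        kafka_num_replicas=1,
    )
```
A matching task with action="STOP" and the same model_name would tear the instance down.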
{
"source": "jimdowling/Names-To-Nationality-Predicter",
"score": 3
} |
#### File: ML Model/src/logs-visualizer.py
```python
import matplotlib.pyplot as plt
file_name = "results/Testing.csv"
def main():
epoches = []
test_avg_errors = []
test_accuracies = []
train_avg_errors = []
train_accuracies = []
with open(file_name, 'r') as file_reader:
line = file_reader.readline() # Skip the csv header
line = file_reader.readline()
while line:
tokenized_line = line.split(',')
epoche = int(tokenized_line[0])
test_avg_error = float(tokenized_line[1])
test_accuracy = float(tokenized_line[2])
train_avg_error = float(tokenized_line[3])
train_accuracy = float(tokenized_line[4])
epoches.append(epoche)
test_avg_errors.append(test_avg_error)
test_accuracies.append(test_accuracy)
train_avg_errors.append(train_avg_error)
train_accuracies.append(train_accuracy)
line = file_reader.readline()
# Plot the test_avg_error vs epoche
'''
cross_entropies_plt.title.set_text('Cross entropies vs Epoche')
cross_entropies_plt.plot(iterations, cross_entropies_train, label="Cross Entropies Train")
cross_entropies_plt.plot(iterations, cross_entropies_valid, label="Cross Entropies Valid")
cross_entropies_plt.legend()
'''
fig, (errors_plt, accuracy_plt) = plt.subplots(2)
errors_plt.title.set_text('Errors vs Epoche')
    errors_plt.plot(epoches, train_avg_errors, label='Train Avg. Error')
errors_plt.plot(epoches, test_avg_errors, label='Test Avg. Error')
errors_plt.legend()
accuracy_plt.title.set_text('Accuracy vs Epoche')
accuracy_plt.plot(epoches, train_accuracies, label='Train Accuracy')
accuracy_plt.plot(epoches, test_accuracies, label='Test Accuracy')
accuracy_plt.legend()
plt.show()
main()
```
#### File: ML Model/src/ml_utils.py
```python
import numpy as np
'''
This contains useful activation functions
'''
class ActivationFunctions:
@staticmethod
def sigmoid(x):
return 1 / (1 + np.exp(-x))
@staticmethod
def sigmoid_derivative_given_sigmoid_val(sigmoid_value):
return sigmoid_value * (1 - sigmoid_value)
@staticmethod
def tanh(x):
return np.tanh(x)
@staticmethod
def tanh_derivative_given_tanh_val(tanh_value):
return 1.0 - (tanh_value ** 2)
@staticmethod
def softmax(x):
e_x = np.exp(x - np.max(x))
return e_x / np.sum(e_x, axis=0)
@staticmethod
def softmax_derivative(val):
softmax_val = ActivationFunctions.softmax(val)
reshaped_softmax_val = softmax_val.reshape(-1,1)
return np.diagflat(reshaped_softmax_val) - np.dot(reshaped_softmax_val, reshaped_softmax_val.T)
'''
This contains useful loss functions
'''
class LossFunctions:
@staticmethod
def cross_entropy(hypothesis, expected_result, epsilon=1e-12):
return -np.sum(np.multiply(expected_result, np.log(hypothesis + epsilon)))
```
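The max-subtraction inside `softmax` above is the standard numerical-stability trick: `exp` overflows for large inputs, but softmax is shift-invariant, so shifting by `max(x)` changes nothing mathematically. A small sketch (assuming `ml_utils.py` above is importable from the same directory):
```python
import numpy as np
from ml_utils import ActivationFunctions  # the module defined above

x = np.array([1000.0, 1001.0, 1002.0])
print(np.exp(x))                       # naive exponentials overflow to [inf, inf, inf]
print(ActivationFunctions.softmax(x))  # stable: roughly [0.090, 0.245, 0.665]
```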
#### File: ML Model/src/ml_utils_test.py
```python
import unittest
import math
import numpy as np
from ml_utils import ActivationFunctions, LossFunctions
class MLUtilsTest(unittest.TestCase):
def test_tanh_should_return_correct_value_when_given_single_negative_number(self):
expected_value = math.tanh(-9)
actual_value = ActivationFunctions.tanh(-9)
self.assertTrue(abs(actual_value - expected_value) < 0.0000000001)
def test_tanh_should_return_correct_values_when_given_negative_numbers_in_array(self):
expected_values = [math.tanh(i) for i in range(-100, -1)]
actual_values = ActivationFunctions.tanh(np.array([i for i in range(-100, -1)]))
self.assertEquals(len(expected_values), len(actual_values))
for i in range(0, len(expected_values)):
self.assertTrue(abs(actual_values[i] - expected_values[i]) < 0.0000000001)
def test_tanh_should_return_correct_values_when_given_negative_numbers_in_2D_array(self):
expected_values = [ [math.tanh(i) for i in range(-100, -1)] for j in range(0, 10) ]
actual_values = ActivationFunctions.tanh(np.array([ [i for i in range(-100, -1)] for j in range(0, 10)]))
self.assertEquals(len(expected_values), len(actual_values))
for i in range(0, 10):
self.assertEquals(len(expected_values[i]), len(actual_values[i]))
for j in range(0, len(expected_values[i])):
self.assertTrue(abs(actual_values[i][j] - expected_values[i][j]) < 0.0000000001)
def test_tanh_should_return_correct_value_when_given_zero(self):
expected_value = math.tanh(0)
actual_value = ActivationFunctions.tanh(0)
self.assertTrue(abs(actual_value - expected_value) < 0.0000000001)
def test_tanh_should_return_correct_values_when_given_zeros_in_array(self):
expected_values = [math.tanh(0) for i in range(-100, -1)]
actual_values = ActivationFunctions.tanh(np.array([0 for i in range(-100, -1)]))
self.assertEquals(len(expected_values), len(actual_values))
for i in range(0, len(expected_values)):
self.assertTrue(abs(actual_values[i] - expected_values[i]) < 0.0000000001)
def test_tanh_should_return_correct_values_when_given_zeros_in_2D_array(self):
expected_values = [ [math.tanh(0) for i in range(-100, -1)] for j in range(0, 10) ]
actual_values = ActivationFunctions.tanh(np.array([ [0 for i in range(-100, -1)] for j in range(0, 10)]))
self.assertEquals(len(expected_values), len(actual_values))
for i in range(0, 10):
self.assertEquals(len(expected_values[i]), len(actual_values[i]))
for j in range(0, len(expected_values[i])):
self.assertTrue(abs(actual_values[i][j] - expected_values[i][j]) < 0.0000000001)
def test_tanh_should_return_correct_value_when_given_single_positive_number(self):
expected_value = math.tanh(9)
actual_value = ActivationFunctions.tanh(9)
self.assertTrue(abs(actual_value - expected_value) < 0.0000000001)
def test_tanh_should_return_correct_values_when_given_positive_numbers_in_array(self):
expected_values = [math.tanh(i) for i in range(1, 100)]
actual_values = ActivationFunctions.tanh(np.array([i for i in range(1, 100)]))
self.assertEquals(len(expected_values), len(actual_values))
for i in range(0, len(expected_values)):
self.assertTrue(abs(actual_values[i] - expected_values[i]) < 0.0000000001)
def test_tanh_should_return_correct_values_when_given_positive_numbers_in_2D_array(self):
expected_values = [ [math.tanh(i) for i in range(1, 200)] for j in range(0, 10) ]
actual_values = ActivationFunctions.tanh(np.array([ [i for i in range(1, 200)] for j in range(0, 10)]))
self.assertEquals(len(expected_values), len(actual_values))
for i in range(0, 10):
self.assertEquals(len(expected_values[i]), len(actual_values[i]))
for j in range(0, len(expected_values[i])):
self.assertTrue(abs(actual_values[i][j] - expected_values[i][j]) < 0.0000000001)
def test_softmax(self):
input_values = [2.0, 1.0, 0.1]
expected_values = np.array([0.7, 0.2, 0.1])
actual_values = ActivationFunctions.softmax(input_values)
for i in range(3):
self.assertTrue(abs(actual_values[i] - expected_values[i]) < 0.1)
if __name__ == '__main__':
unittest.main()
```
#### File: ML Model/src/names_to_nationality_classifier.py
```python
import copy
import numpy as np
import random
from sklearn.utils import shuffle
from ml_utils import ActivationFunctions, LossFunctions
import time
from serializer import Serializer
class NamesToNationalityClassifier:
def __init__(self, possible_labels, alpha=0.0001, hidden_dimensions=500, l2_lambda = 0.02, momentum=0.9, num_epoche=30):
self.serializer = Serializer(possible_labels)
self.alpha = alpha
self.input_dimensions = self.serializer.input_dimensions
self.hidden_dimensions = hidden_dimensions
self.output_dimensions = self.serializer.target_dimensions
self.training_to_validation_ratio = 0.7 # This means 70% of the dataset will be used for training, and 30% is for validation
# Weight Initialization
# We are using the Xavier initialization
# Reference: https://medium.com/usf-msds/deep-learning-best-practices-1-weight-initialization-14e5c0295b94
self.weight_init_type = 'X1'
self.W0 = np.random.randn(self.hidden_dimensions, self.hidden_dimensions) * np.sqrt(1 / self.hidden_dimensions)
self.W1 = np.random.randn(self.hidden_dimensions, self.input_dimensions + 1) * np.sqrt(1 / (self.input_dimensions + 1))
self.W2 = np.random.randn(self.output_dimensions, self.hidden_dimensions + 1) * np.sqrt(1 / (self.hidden_dimensions + 1))
# Momentum and regularization
self.l2_lambda = l2_lambda # The lambda for L2 regularization
self.momentum = momentum
self.W0_velocity = np.zeros((self.hidden_dimensions, self.hidden_dimensions))
self.W1_velocity = np.zeros((self.hidden_dimensions, self.input_dimensions + 1))
self.W2_velocity = np.zeros((self.output_dimensions, self.hidden_dimensions + 1))
# Bias values
self.layer_1_bias = 1
self.layer_2_bias = 1
# Num epoche
self.num_epoche = num_epoche
self.serialized_training_examples = []
self.serialized_training_labels = []
self.serialized_testing_examples = []
self.serialized_testing_labels = []
def add_training_examples(self, examples, labels):
serialized_examples, serialized_labels = self.serializer.serialize_examples_and_labels(examples, labels) #self.__serialize_examples_and_labels__(examples, labels)
num_training_data = int(len(serialized_examples) * self.training_to_validation_ratio)
self.serialized_training_examples = serialized_examples[:num_training_data]
self.serialized_training_labels = serialized_labels[:num_training_data]
self.serialized_testing_examples = serialized_examples[num_training_data:]
self.serialized_testing_labels = serialized_labels[num_training_data:]
'''
Trains the model based on the training data provided.
It will output a dictionary with the following keys:
{
'epoche_to_train_avg_error': the train avg error per epoche,
'epoche_to_test_avg_error': the test avg error per epoche,
'epoche_to_train_accuracy': the train accuracy per epoche,
'epoche_to_test_accuracy': the test accuracy per epoche
}
'''
def train(self):
print("Training...")
print(self)
epoche_to_train_avg_error = np.zeros((self.num_epoche, ))
epoche_to_test_avg_error = np.zeros((self.num_epoche, ))
epoche_to_train_accuracy = np.zeros((self.num_epoche, ))
epoche_to_test_accuracy = np.zeros((self.num_epoche, ))
for epoche in range(self.num_epoche):
train_avg_error = 0
train_accuracy = 0
# Reshuffle the data
self.serialized_training_examples, self.serialized_training_labels = shuffle(
self.serialized_training_examples, self.serialized_training_labels)
for i in range(len(self.serialized_training_examples)):
# It is a "num_char" x "self.input_dimensions" matrix
example = self.serialized_training_examples[i]
# It is a 1D array with "self.output_dimensions" elements
label = self.serialized_training_labels[i]
# Perform forward propagation
forward_propagation_results = self.__perform_forward_propagation__(example, label)
letter_pos_to_hypothesis = forward_propagation_results['letter_pos_to_hypothesis']
letter_pos_to_loss = forward_propagation_results['letter_pos_to_loss']
# Calculate the train avg error and the train accuracy
train_avg_error += np.sum(letter_pos_to_loss)
train_accuracy += 1 if self.__is_hypothesis_correct__(letter_pos_to_hypothesis[-1], label) else 0
# Perform back propagation
self.__perform_back_propagation__(example, label, forward_propagation_results)
epoche_to_train_avg_error[epoche] = train_avg_error / len(self.serialized_training_examples)
epoche_to_train_accuracy[epoche] = train_accuracy / len(self.serialized_training_examples)
test_avg_error, test_accuracy, test_runnable_ratio = self.__validate__()
epoche_to_test_accuracy[epoche] = test_accuracy
epoche_to_test_avg_error[epoche] = test_avg_error
print(epoche, epoche_to_train_avg_error[epoche], epoche_to_test_avg_error[epoche], epoche_to_train_accuracy[epoche], epoche_to_test_accuracy[epoche], test_runnable_ratio, time.time())
return {
'epoche_to_train_avg_error': epoche_to_train_avg_error,
'epoche_to_test_avg_error': epoche_to_test_avg_error,
'epoche_to_train_accuracy': epoche_to_train_accuracy,
'epoche_to_test_accuracy': epoche_to_test_accuracy
}
'''
Trains an example with a label.
The example is a name (like "<NAME>") and its label is a country name (ex: "Canada")
'''
def train_example(self, example, label):
serialized_example = self.serializer.serialize_example(example)
serialized_label = self.serializer.serialize_label(label)
# Perform forward propagation
forward_propagation_results = self.__perform_forward_propagation__(serialized_example, serialized_label)
# Perform back propagation
self.__perform_back_propagation__(serialized_example, serialized_label, forward_propagation_results)
'''
It computes how well the model runs based on the validation data
It returns the avg. error and accuracy rate
'''
def __validate__(self):
total_cost = 0
num_correct = 0
num_examples_ran = 0
for i in range(len(self.serialized_testing_examples)):
# It is a num_char x 27 matrix
example = self.serialized_testing_examples[i]
# It is a 1D 124 element array
label = self.serialized_testing_labels[i]
forward_propagation_results = self.__perform_forward_propagation__(example, label)
letter_pos_to_loss = forward_propagation_results['letter_pos_to_loss']
letter_pos_to_hypothesis = forward_propagation_results['letter_pos_to_hypothesis']
if len(letter_pos_to_hypothesis) > 0:
final_hypothesis = letter_pos_to_hypothesis[-1]
# Seeing whether the hypothesis is correct
if self.__is_hypothesis_correct__(final_hypothesis, label):
num_correct += 1
total_cost += np.sum(letter_pos_to_loss)
num_examples_ran += 1
avg_cost = total_cost / num_examples_ran
accuracy = num_correct / num_examples_ran
runnable_examples_ratio = num_examples_ran / len(self.serialized_testing_examples)
return avg_cost, accuracy, runnable_examples_ratio
def __is_hypothesis_correct__(self, hypothesis, label):
return np.argmax(hypothesis, axis=0) == np.argmax(label, axis=0)
'''
This function will perform a forward propagation with the serialized version of the example
and the serialized version of the label.
The serialized_example needs to be a 2D matrix with size num_char x self.input_dimensions.
    The serialized_label needs to be a 1D array with size self.output_dimensions.
So this function will return:
- the loss at each timestep (called 'letter_pos_to_loss')
- the hidden states at each timestep (called 'letter_pos_to_hidden_state')
- the layer 2 values at each timestep (called 'letter_pos_to_layer_2_values')
- the hypothesis at each timestep (called 'letter_pos_to_hypothesis')
'''
def __perform_forward_propagation__(self, serialized_example, serialized_label):
num_chars = len(serialized_example)
# Stores the hidden state for each letter position.
letter_pos_to_h0 = np.zeros((num_chars + 1, self.hidden_dimensions))
# Stores the layer 2 values for each letter position
letter_pos_to_h1 = np.zeros((num_chars, self.hidden_dimensions))
# Stores the hypothesis for each letter position
letter_pos_to_h2 = np.zeros((num_chars, self.output_dimensions))
# The hidden state for the first letter position is all 0s.
letter_pos_to_h0[0] = np.zeros(self.hidden_dimensions)
# The loss for each letter position
letter_pos_to_loss = np.zeros((num_chars, ))
for j in range(num_chars):
# The inputs
X = serialized_example[j]
X_with_bias = np.r_[[self.layer_1_bias], X] # <- We add a bias to the input. It is now a 28 element array
h0 = letter_pos_to_h0[j]
y1 = np.dot(self.W1, X_with_bias) + np.dot(self.W0, h0)
h1 = ActivationFunctions.tanh(y1)
# Adding the bias
h1_with_bias = np.r_[[self.layer_2_bias], h1]
y2 = np.dot(self.W2, h1_with_bias)
h2 = ActivationFunctions.softmax(y2)
# Update the dictionaries
letter_pos_to_h1[j] = h1
letter_pos_to_h2[j] = h2
letter_pos_to_h0[j + 1] = h1
letter_pos_to_loss[j] = LossFunctions.cross_entropy(h2, serialized_label)
return {
'letter_pos_to_loss': letter_pos_to_loss,
'letter_pos_to_hidden_state': letter_pos_to_h0,
'letter_pos_to_layer_2_values': letter_pos_to_h1,
'letter_pos_to_hypothesis': letter_pos_to_h2
}
'''
Performs back propagation.
Note that it requires the results from self.__perform_forward_propagation__() on the same example
Note that the example needs to be a serialized example, and the label needs to be a serialized label
'''
def __perform_back_propagation__(self, serialized_example, serialized_label, forward_propagation_results):
letter_pos_to_h0 = forward_propagation_results['letter_pos_to_hidden_state']
letter_pos_to_h1 = forward_propagation_results['letter_pos_to_layer_2_values']
letter_pos_to_h2 = forward_propagation_results['letter_pos_to_hypothesis']
letter_pos_to_loss = forward_propagation_results['letter_pos_to_loss']
# The loss gradients w.r.t W0, W1, W2
dL_dW0 = np.zeros((self.hidden_dimensions, self.hidden_dimensions))
dL_dW1 = np.zeros((self.hidden_dimensions, self.input_dimensions + 1))
dL_dW2 = np.zeros((self.output_dimensions, self.hidden_dimensions + 1))
num_chars = len(serialized_example)
for j in range(num_chars - 1, -1, -1):
X = serialized_example[j]
X_with_bias = np.r_[[self.layer_1_bias], X]
# This is a 1D array with "self.hidden_dimensions" elements
h0 = letter_pos_to_h0[j]
# This is a 1D array with "self.hidden_dimensions" elements
h1 = letter_pos_to_h1[j]
# Adding the bias
# This is a 1D array with "self.hidden_dimensions + 1" elements
h1_with_bias = np.r_[[self.layer_2_bias], h1]
# This is a 1D array with "self.output_dimensions" elements
h2 = letter_pos_to_h2[j]
            # This is a 1D array with "self.output_dimensions" elements
# This is the derivative of y with respect to the cross entropy score
dL_dY2 = h2 - serialized_label
# This is a 1D array with "self.hidden_dimensions + 1" elements
dL_dH1 = np.dot(dL_dY2.T, self.W2)
dL_dY1 = np.multiply(dL_dH1, ActivationFunctions.tanh_derivative_given_tanh_val(h1_with_bias))
# We are removing the bias value
# So now it is a "self.hidden_dimensions" elements
dL_dY1 = dL_dY1[1:]
# We are not updating the weights of the bias value, so we are setting the changes for the bias weights to 0
# We are going to update the weights of the bias value later
dL_dW0 += np.dot(np.array([dL_dY1]).T, np.array([h0]))
dL_dW1 += np.dot(np.array([dL_dY1]).T, np.array([X_with_bias]))
dL_dW2 += np.dot(np.array([dL_dY2]).T, np.array([h1_with_bias]))
# Add regularization
dL_dW0 += self.l2_lambda * self.W0
dL_dW1 += self.l2_lambda * self.W1
dL_dW2 += self.l2_lambda * self.W2
# Add the velocity
self.W0_velocity = (self.momentum * self.W0_velocity) + (self.alpha * dL_dW0)
self.W1_velocity = (self.momentum * self.W1_velocity) + (self.alpha * dL_dW1)
self.W2_velocity = (self.momentum * self.W2_velocity) + (self.alpha * dL_dW2)
# Update weights
self.W0 -= self.W0_velocity
self.W1 -= self.W1_velocity
self.W2 -= self.W2_velocity
def predict(self, name):
# Serialize the name to a num_char x 27 matrix
example = self.serializer.serialize_example(name)
# num_chars = len(example)
label = np.zeros((self.output_dimensions, ))
forward_propagation_results = self.__perform_forward_propagation__(example, label)
letter_pos_to_y2 = forward_propagation_results['letter_pos_to_hypothesis']
if len(letter_pos_to_y2) > 0:
            # The forward pass already applies softmax, so the final hypothesis can be used directly
            hypothesis = letter_pos_to_y2[-1]
formatted_hypothesis = []
for k in range(self.output_dimensions):
formatted_hypothesis.append((hypothesis[k], self.serializer.index_to_label[k]))
formatted_hypothesis.sort(reverse=True)
return formatted_hypothesis
else:
raise Exception('Hypothesis cannot be obtained')
def save_model(self, filename):
np.savez_compressed(filename,
layer_1_weights=self.W1,
layer_2_weights=self.W2,
hidden_state_weights=self.W0)
def load_model_from_file(self, filename):
data = np.load(filename)
self.W1 = data['layer_1_weights']
self.W2 = data['layer_2_weights']
self.W0 = data['hidden_state_weights']
def __str__(self):
description = "RNN with learning rate: {}, momentum: {}, L2 reg. rate: {}, Weight Init. Type: {}, Num. Epoche: {}"
return description.format(self.alpha,
self.momentum,
self.l2_lambda,
self.weight_init_type,
self.num_epoche)
``` |
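A short, hedged sketch of how the classifier above might be driven end to end. The names, labels and file path are invented for illustration, and it assumes the `serializer` module used by the class is importable alongside it:
```python
from names_to_nationality_classifier import NamesToNationalityClassifier

possible_labels = ["Canada", "Japan", "Nigeria"]        # hypothetical label set
examples = ["Tremblay", "Suzuki", "Okafor", "Gagnon"]   # hypothetical names
labels = ["Canada", "Japan", "Nigeria", "Canada"]

classifier = NamesToNationalityClassifier(possible_labels, num_epoche=5)
classifier.add_training_examples(examples, labels)
history = classifier.train()                  # per-epoch errors and accuracies

print(history['epoche_to_test_accuracy'])
print(classifier.predict("Suzuki")[:3])       # (probability, country) pairs, best first

classifier.save_model("trained_model")        # written via np.savez_compressed
```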
{
"source": "jimdowling/tfspark",
"score": 2
} |
#### File: tfspark/tensorflowonspark/util.py
```python
from __future__ import absolute_import
from __future__ import division
from __future__ import nested_scopes
from __future__ import print_function
import socket
def get_ip_address():
s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
s.connect(("8.8.8.8", 80))
return s.getsockname()[0]
``` |
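A note on the helper above: calling connect() on a UDP socket sends no packets; it only selects the local interface that would route to 8.8.8.8, so getsockname() returns that interface's address. A hedged variant that also releases the socket:
```python
import socket

def get_ip_address():
    # connect() on SOCK_DGRAM does not transmit anything; it just picks the
    # outbound interface, so getsockname() yields a routable local IP.
    s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    try:
        s.connect(("8.8.8.8", 80))
        return s.getsockname()[0]
    finally:
        s.close()
```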
{
"source": "jimdrake55x/NotionTools",
"score": 2
} |
#### File: JiraToNotion/jira/jira_builder.py
```python
from models.jira_config import Jira_Config
from constants.constants import JIRA_API_JQL_URL, JIRA_API_EVAL_URL, JIRA_API_BASE_REPLACE, FOLDER_DATA, FOLDER_DATA_FILE, FOLDER_DATA_FILE_REPLACE, FILE_SPRINT_DATA, FILE_TICKETS_DATA, FILE_JIRA_CONFIG
from requests.auth import HTTPBasicAuth
import json
import os
import requests
# Public Methods
def query_active_sprints():
jira_config = Jira_Config()
query = json.dumps({
"expression": "{active_sprints: board.activeSprints}",
"context": {
"board": jira_config.board_number
}
})
return __make_jira_query_request(query)
def query_issues_for_sprint(sprintNumber):
jql_sprint = "sprint = " + str(sprintNumber)
jql = json.dumps({
"jql": jql_sprint,
"fields": [
"summary",
"created",
"assignee",
"reporter",
"subtasks",
"labels",
"status",
"issuetype"
],
"maxResults": 150
})
return __make_jira_jql_request(jql)
def query_issues_for_sprint_ordered(sprintNumber, orderByProperty):
jql_sprint = "sprint = " + str(sprintNumber) + " order by cf[" + str(orderByProperty) +"]"
jql = json.dumps({
"jql": jql_sprint,
"fields": [
"summary",
"created",
"assignee",
"reporter",
"subtasks",
"labels",
"status",
"issuetype"
],
"maxResults": 150
})
return __make_jira_jql_request(jql)
def query_specific_issue(issue_name):
jql_issue = "issue = " + issue_name
jql = json.dumps({
"jql": jql_issue,
"fields": [
"summary",
"created",
"assignee",
"reporter",
"subtasks",
"labels",
"status",
"issuetype"
]
})
return __make_jira_jql_request(jql)
# Private methods
def __make_jira_jql_request(jql):
jira_config = Jira_Config()
api = JIRA_API_JQL_URL.replace(
JIRA_API_BASE_REPLACE, jira_config.jira_cloud_base)
auth = HTTPBasicAuth(jira_config.username, jira_config.token)
headers = {
"Accept": "application/json",
"Content-Type": "application/json"
}
payload = jql
try:
response = requests.request(
"POST",
api,
data=payload,
headers=headers,
auth=auth
)
if not os.path.exists(FOLDER_DATA):
os.makedirs(FOLDER_DATA)
# Write the sprint data to the data directory
write_file = FOLDER_DATA_FILE.replace(
FOLDER_DATA_FILE_REPLACE, FILE_TICKETS_DATA)
with open(write_file, 'w') as outfile:
json.dump(response.text, outfile)
except Exception as e:
        print('Something went wrong with requesting ticket information. Is your JQL query correct?')
        print(e)
        raise
return json.loads(response.text)
def __make_jira_query_request(query):
jira_config = Jira_Config()
api = JIRA_API_EVAL_URL.replace(
JIRA_API_BASE_REPLACE, jira_config.jira_cloud_base)
auth = HTTPBasicAuth(jira_config.username, jira_config.token)
headers = {
"Accept": "application/json",
"Content-Type": "application/json"
}
payload = query
try:
response = requests.request(
"POST",
api,
data=payload,
headers=headers,
auth=auth
)
# Create a data directory if it doesnt exist
if not os.path.exists(FOLDER_DATA):
os.makedirs(FOLDER_DATA)
# Write the sprint data to the data directory
write_file = FOLDER_DATA_FILE.replace(
FOLDER_DATA_FILE_REPLACE, FILE_SPRINT_DATA)
with open(write_file, 'w') as outfile:
json.dump(response.text, outfile)
except Exception as e:
        print('Something went wrong with requesting sprint information. Is your board # correct?')
        print(e)
        raise
return json.loads(response.text)
```
#### File: JiraToNotion/models/notion_config.py
```python
from constants.constants import FOLDER_CONFIG_FILE_REPLACE, FILE_NOTION_CONFIG, CONFIG_FOLDER
from config_builder.config_builder import get_file_data
import os
class Notion_config():
def __init__(self):
try:
config_path = os.path.split(os.path.dirname(__file__))[0] + CONFIG_FOLDER
notion_config = config_path.replace(
FOLDER_CONFIG_FILE_REPLACE, FILE_NOTION_CONFIG)
config_path = os.path.abspath(notion_config)
notion_data = get_file_data(config_path)
        except Exception:
            print("Unable to locate the notion_config.json file. Do you have one created in the root of the directory?")
            raise
self.token = notion_data['token']
self.base_page = notion_data['base_page']
self.theme = notion_data['theme']
```
#### File: JiraToNotion/models/sprint.py
```python
class Sprint:
def __init__(self, name, start_date, end_date, number, tickets=[]):
self.name = name
self.start_date = start_date
self.end_date = end_date
self.number = number
self.tickets = tickets
```
#### File: JiraToNotion/notion_builder/schema_builder.py
```python
from typing import List
from models.ticket import Ticket
import random
def ticket_collection_scheme(tickets: List[Ticket]):
labelOptions = __labels_schema(tickets)
statusOptions = __status_schema(tickets)
ticketTypeOptions = __ticket_type_schema(tickets)
return {
'BrbY': {'name': 'Assignee', 'type': 'text'},
'M<X_': {'name': 'Ticket Link', 'type': 'url'},
'T~rI': {'name': 'Reporter', 'type': 'text'},
'dcdI': {'name': 'Created Date', 'type': 'date'},
'jpRx': {'name': 'Pull Request', 'type': 'url'},
'lHq{': {
'name': 'Ticket Type',
'type': 'select',
'options': ticketTypeOptions,
},
'oJ{W': {
'name': 'Ticket Labels',
'type': 'multi_select',
'options': labelOptions
},
'AnKr': {
'name': 'Status',
'type': 'select',
'options': statusOptions
},
'title': {'name': 'Ticket Name', 'type': 'title'}
}
def __labels_schema(tickets: List[Ticket]):
options = []
labels = []
for ticket in tickets:
for label in ticket.labels:
if label not in labels:
labels.append(label)
options.append({
"color": __get_random_color(),
"id": "this-is-the-id-for-{0}".format(label),
"value": label,
})
return options
def __status_schema(tickets: List[Ticket]):
options = []
status = []
for ticket in tickets:
if ticket.status.status not in status:
status.append(ticket.status.status)
options.append({
"color": ticket.status.color,
"id": "this-is-the-id-select-{0}".format(ticket.status.status),
"value": ticket.status.status,
})
return options
def __ticket_type_schema(tickets: List[Ticket]):
options = []
types = []
for ticket in tickets:
if ticket.ticket_type not in types:
types.append(ticket.ticket_type)
options.append(
{
"color": __get_random_color(),
"id": "this-is-id-for-type-{0}".format(ticket.ticket_type),
"value": ticket.ticket_type
})
return options
def __get_random_color():
colors = ["default", "gray", "brown", "orange",
"yellow", "green", "blue", "purple", "pink", "red"]
randomColor = random.randint(0, (len(colors) - 1))
return colors[randomColor]
``` |
{
"source": "jim-easterbrook/guild",
"score": 3
} |
#### File: examples/blog/accounts-3.py
```python
import random
from guild.actor import *
class InsufficientFunds(ActorException):
pass
class Account(Actor):
def __init__(self, balance=10):
super(Account, self).__init__()
self.balance = balance
@actor_function
def deposit(self, amount):
# This is a function to allow the deposit to be confirmed
print "DEPOSIT", "\t", amount, "\t", self.balance
self.balance = self.balance + amount
return self.balance
@actor_function
def withdraw(self, amount):
if self.balance < amount:
raise InsufficientFunds("Insufficient Funds in your account",
requested=amount,
balance=self.balance)
self.balance = self.balance - amount
print "WITHDRAW", "\t", amount, "\t", self.balance
return amount
def transfer(amount, payer, payee):
funds = payer.withdraw(amount)
payee.deposit(funds)
class MischiefMaker(Actor):
def __init__(self, myaccount, friendsaccount):
super(MischiefMaker, self).__init__()
self.myaccount = myaccount
self.friendsaccount = friendsaccount
self.grabbed = 0
@process_method
def process(self):
try:
grab = random.randint(1, 10) * 10
transfer(grab, self.friendsaccount, self.myaccount)
except InsufficientFunds as e:
print "Awww, Tapped out", e.balance, "<", e.requested
self.stop()
return
self.grabbed = self.grabbed + grab
account1 = Account(1000).go()
account2 = Account(1000).go()
fred = MischiefMaker(account1, account2).go()
barney = MischiefMaker(account2, account1).go()
wait_for(fred, barney)
account1.stop()
account2.stop()
account1.join()
account2.join()
print "GAME OVER"
print "Fred grabbed", fred.grabbed
print "Barney grabbed", barney.grabbed
print "Total grabbed", fred.grabbed + barney.grabbed
print "Since they stopped grabbing..."
print "Money left", account1.balance, account2.balance
print "Ending money", account1.balance + account2.balance
```
#### File: examples/blog/log_watcher.py
```python
import re
import sys
import time
from guild.actor import *
class Follow(Actor):
def __init__(self, filename):
super(Follow, self).__init__()
self.filename = filename
self.f = None
def gen_process(self):
self.f = f = file(self.filename)
f.seek(0, 2) # seek to end
while True:
yield 1
line = f.readline()
if not line: # no data, so wait
time.sleep(0.1)
else:
self.output(line)
def onStop(self):
if self.f:
self.f.close()
class Grep(Actor):
def __init__(self, pattern):
super(Grep, self).__init__()
self.regex = re.compile(pattern)
@actor_method
def input(self, line):
if self.regex.search(line):
self.output(line)
class Printer(Actor):
@actor_method
def input(self, line):
sys.stdout.write(line)
sys.stdout.flush()
follow1 = Follow("x.log").go()
follow2 = Follow("y.log").go()
grep = Grep("pants").go()
printer = Printer().go()
pipeline(follow1, grep, printer)
pipeline(follow2, grep)
wait_KeyboardInterrupt()
stop(follow1, follow2, grep, printer)
wait_for(follow1, follow2, grep, printer)
```
#### File: guild/examples/qt_video_player.py
```python
from __future__ import print_function
import re
import subprocess
import sys
import time
from guild.actor import *
from guild.qtactor import ActorSignal, QtActorMixin
from PyQt4 import QtGui, QtCore
def VideoFileReader(file_name):
# get video dims
proc_pipe = subprocess.Popen([
'ffmpeg', '-loglevel', 'info', '-i', file_name,
], stdout=subprocess.PIPE, stderr=subprocess.PIPE, bufsize=10**8)
stdout, stderr = proc_pipe.communicate()
pattern = re.compile('Stream.*Video.* ([0-9]{2,})x([0-9]+)')
for line in str(stderr).split('\n'):
match = pattern.search(line)
if match:
xlen, ylen = map(int, match.groups())
break
else:
print('Could not get video dimensions of', file_name)
return
try:
bytes_per_frame = xlen * ylen * 3
proc_pipe = subprocess.Popen([
'ffmpeg', '-loglevel', 'warning', '-i', file_name,
'-f', 'image2pipe', '-pix_fmt', 'rgb24', '-vcodec', 'rawvideo', '-'
], stdout=subprocess.PIPE, bufsize=bytes_per_frame)
while True:
raw_image = proc_pipe.stdout.read(bytes_per_frame)
if len(raw_image) < bytes_per_frame:
break
yield xlen, ylen, raw_image
finally:
proc_pipe.terminate()
proc_pipe.stdout.close()
class Player(Actor):
def __init__(self, video_file):
self.video_file = video_file
self.paused = False
super(Player, self).__init__()
def gen_process(self):
self.reader = VideoFileReader(self.video_file)
raw_image = None
while True:
yield 1
if not (self.paused and raw_image):
try:
xlen, ylen, raw_image = next(self.reader)
except StopIteration:
break
image = QtGui.QImage(
raw_image, xlen, ylen, QtGui.QImage.Format_RGB888)
self.output(image)
time.sleep(1.0/25)
@actor_method
def set_paused(self, paused):
self.paused = paused
def onStop(self):
self.reader.close()
class PlayerQt(QtActorMixin, QtCore.QObject):
signal = QtCore.pyqtSignal(QtGui.QImage)
def __init__(self, video_file):
self.video_file = video_file
self.paused = False
super(PlayerQt, self).__init__()
def gen_process(self):
self.reader = VideoFileReader(self.video_file)
raw_image = None
while True:
yield 1
if not (self.paused and raw_image):
try:
xlen, ylen, raw_image = next(self.reader)
except StopIteration:
break
image = QtGui.QImage(
raw_image, xlen, ylen, QtGui.QImage.Format_RGB888)
self.signal.emit(image)
time.sleep(1.0/25)
@actor_method
@QtCore.pyqtSlot(bool)
def set_paused(self, paused):
self.paused = paused
def onStop(self):
self.reader.close()
class Display(QtGui.QLabel):
@QtCore.pyqtSlot(QtGui.QImage)
def show_frame(self, frame):
pixmap = QtGui.QPixmap.fromImage(frame)
self.setPixmap(pixmap)
class DisplayActor(QtActorMixin, QtGui.QLabel):
@actor_method
def show_frame(self, frame):
pixmap = QtGui.QPixmap.fromImage(frame)
self.setPixmap(pixmap)
input = show_frame
class MainWindow(QtGui.QMainWindow):
def __init__(self, video_file):
super(MainWindow, self).__init__()
self.setWindowTitle("Guild video player")
# create guild pipeline
# version 1: guild player -> hybrid display
self.player = Player(video_file).go()
display = DisplayActor().go()
pipeline(self.player, display)
self.actors = [self.player, display]
# version 2: hybrid player -> Qt display
## self.player = PlayerQt(video_file).go()
## display = Display()
## self.player.signal.connect(display.show_frame)
## self.actors = [self.player]
# version 3: guild player -> hybrid bridge -> Qt display
## self.player = Player(video_file).go()
## bridge = ActorSignal().go()
## display = Display()
## pipeline(self.player, bridge)
## bridge.signal.connect(display.show_frame)
## self.actors = [self.player, bridge]
# central widget
widget = QtGui.QWidget()
grid = QtGui.QGridLayout()
grid.setColumnStretch(4, 1)
widget.setLayout(grid)
self.setCentralWidget(widget)
grid.addWidget(display, 0, 0, 1, 6)
# pause button
pause_button = QtGui.QCheckBox('pause')
pause_button.clicked.connect(self.player.set_paused)
pause_button.setShortcut('Space')
grid.addWidget(pause_button, 1, 0)
# quit button
quit_button = QtGui.QPushButton('quit')
quit_button.clicked.connect(self.shutdown)
quit_button.setShortcut('Ctrl+Q')
grid.addWidget(quit_button, 1, 5)
self.show()
def shutdown(self):
stop(*self.actors)
wait_for(*self.actors)
QtGui.QApplication.instance().quit()
if len(sys.argv) != 2:
print('usage: %s video_file' % sys.argv[0])
sys.exit(1)
app = QtGui.QApplication([])
main = MainWindow(sys.argv[1])
app.exec_()
```
#### File: guild/guild/qtactor.py
```python
import six
from PyQt4 import QtCore, QtGui
from .actor import ActorMixin, ActorMetaclass, actor_method
class _QtActorMixinMetaclass(QtCore.pyqtWrapperType, ActorMetaclass):
pass
@six.add_metaclass(_QtActorMixinMetaclass)
class QtActorMixin(ActorMixin):
# create unique event types
_qtactor_queue_event = QtCore.QEvent.registerEventType()
_qtactor_step_event = QtCore.QEvent.registerEventType()
_qtactor_stop_event = QtCore.QEvent.registerEventType()
def __init__(self, *argv, **argd):
super(QtActorMixin, self).__init__(*argv, **argd)
self._qtactor_dispatch = {
self._qtactor_queue_event : self._actor_do_queued,
self._qtactor_step_event : self._qtactor_step,
self._qtactor_stop_event : self._qtactor_stop,
}
self._qtactor_gen = None
# if not a Widget, move to a Qt thread
if isinstance(self, QtGui.QWidget):
# widgets can't be moved to another thread
self._qtactor_thread = None
else:
# create a thread and move to it
self._qtactor_thread = QtCore.QThread()
self.moveToThread(self._qtactor_thread)
self._qtactor_thread.started.connect(self._qtactor_run)
def start(self):
if self._qtactor_thread:
self._qtactor_thread.start()
else:
self._qtactor_run()
def _qtactor_run(self):
self.process_start()
self.process()
# get gen_process generator
try:
self._qtactor_gen = self.gen_process()
except AttributeError:
self._qtactor_gen = None
# do first step
if self._qtactor_gen:
self._qtactor_step()
def _qtactor_step(self):
try:
            next(self._qtactor_gen)
except StopIteration:
self._qtactor_gen = None
return
# trigger next step
QtCore.QCoreApplication.postEvent(
self, QtCore.QEvent(self._qtactor_step_event),
QtCore.Qt.LowEventPriority)
def _qtactor_stop(self):
self._qtactor_dispatch = {}
if self._qtactor_gen:
self._qtactor_gen.close()
self.onStop()
if self._qtactor_thread:
self._qtactor_thread.quit()
def _actor_notify(self):
QtCore.QCoreApplication.postEvent(
self, QtCore.QEvent(self._qtactor_queue_event),
QtCore.Qt.LowEventPriority)
def event(self, event):
event_type = event.type()
if event_type in self._qtactor_dispatch:
event.accept()
self._qtactor_dispatch[event_type]()
return True
return super(QtActorMixin, self).event(event)
def stop(self):
QtCore.QCoreApplication.postEvent(
self, QtCore.QEvent(self._qtactor_stop_event),
QtCore.Qt.HighEventPriority)
def join(self):
if self._qtactor_thread:
self._qtactor_thread.wait()
class ActorSignal(QtActorMixin, QtCore.QObject):
signal = QtCore.pyqtSignal(object)
@actor_method
def input(self, msg):
self.signal.emit(msg)
```
#### File: guild/guild/stm.py
```python
from __future__ import print_function
from functools import wraps as _wraps
from contextlib import contextmanager
import copy
import threading
from guild.actor import Actor, ActorException
import time
class MaxRetriesExceeded(ActorException):
pass
class RetryTimeoutExceeded(ActorException):
pass
"""
===
STM
===
Support for basic in-process software transactional memory.
What IS it?
-----------
Software Transactional Memory (STM) is a technique for allowing multiple
threads to share data in such a way that they know when something has gone
wrong. It's been used in databases (just called transactions there really) for
some time and is also very similar to version control. Indeed, you can think of
STM as being like variable level version control.
Why is it useful?
-----------------
Why do you need it? Well, in normal code, Global variables are generally
shunned because it can make your code a pain to work with and a pain to be
certain if it works properly. Even with linear code, you can have 2 bits of
code manipulating a structure in surprising ways - but the results are
repeatable. Not-properly-managed-shared-data is to threaded systems as
not-properly-managed-globals are to normal code. (This code is one way of
helping manage shared data)
Well, with code where you have multiple threads active, having shared data is
like an even nastier version of globals. Why? Well, when you have 2 (or more)
running in parallel, the results of breakage can become hard to repeat as two
pieces of code "race" to update values.
With STM you make it explicit what the values are you want to update, and only
once you're happy with the updates do you publish them back to the shared
storage. The neat thing is, if someone else changed things since you last
looked, you get told (your commit fails), and you have to redo the work. This
may sound like extra work (you have to be prepared to redo the work), but it's
nicer than your code breaking :-)
The way you get that message is the .commit raises a ConcurrentUpdate
exception.
Also, it's designed to work happily in code that requires non-blocking usage -
which means you may also get a BusyRetry exception under load. If you do, you
should as the exception suggests retry the action that you just tried. (With or
without restarting the transaction)
Apologies if that sounds too noddy :)
Using It
--------
Accessing/Updating a single shared value in the store
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
You can have many single vars in a store of course... If they're related though
or updated as a group, see the next section::
from Axon.STM import Store
S = Store()
greeting = S.usevar("hello")
print(repr(greeting.value))
greeting.set("Hello World")
greeting.commit()
Accessing/Updating a collection of shared values in the store
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
Likewise you can use as many collections of values from the store as you like::
from Axon.STM import Store
S = Store()
D = S.using("account_one", "account_two", "myaccount")
D["account_one"].set(50)
D["account_two"].set(100)
D.commit()
S.dump()
D = S.using("account_one", "account_two", "myaccount")
D["myaccount"].set(D["account_one"].value+D["account_two"].value)
D["account_one"].set(0)
D["account_two"].set(0)
D.commit()
S.dump()
What can (possibly) go wrong?
-----------------------------
You can have 2 people trying to update the same values at once. An example of
this would be - suppose you have the following commands being executed by 2
threads with this mix of commands::
S = Store()
D = S.using("account_one", "account_two", "myaccount")
D["myaccount"].set(0)
D["account_one"].set(50)
D["account_two"].set(100)
D.commit() # 1
S.dump()
D = S.using("account_one", "account_two", "myaccount")
D["myaccount"].set(D["account_one"].value+D["account_two"].value)
E = S.using("account_one", "myaccount")
E["myaccount"].set(E["myaccount"].value-100)
E["account_one"].set(100)
E.commit() # 2
D["account_one"].set(0)
D["account_two"].set(0)
D.commit() # 3 - should fail
S.dump()
You do actually want this to fail because you have concurrent updates. This
will fail on the third commit, and fail by throwing a ConcurrentUpdate
exception. If you get this, you should redo the transaction.
The other is where there's lots of updates happening at once. Rather than the
code waiting until it acquires a lock, it is possible for either the .using,
.usevar or .commit methods to fail with a BusyRetry exception. This means
exactly what it says on the tin - the system was busy & you need to retry. In
this case you do not have to redo the transaction. This is hard to replicate
except under load. The reason we do this however is because most Kamaelia
components are implemented as generators, which makes blocking operation ( as a
.acquire() rather than .acquire(0) would be) an expensive operation.
"""
import time
class ConcurrentUpdate(Exception):
pass
class BusyRetry(Exception):
pass
class FAIL(Exception):
pass
class MAXFAIL(Exception):
pass
class Value(object):
"""
Value(version, value, store, key) -> new Value object
A simple versioned key-value pair which belongs to a thread-safe store
Arguments:
- version -- the initial version of the value
- value -- the object's initial value
    - store -- a Store object to hold the value and its history
- key -- a key to refer to the value
Note: You do not instantiate these - the Store does that
"""
def __init__(self, version, value, store, key):
"""
x.__init__(...) initializes x; see x.__class__.__doc__ for signature
"""
self.version = version
self.value = value
self.store = store
self.key = key
def __repr__(self):
return "Value" + repr((self.version, self.value))
def set(self, value):
""" Set the value without storing """
self.value = value
def commit(self):
""" Commit a new version of the value to the store """
self.store.set(self.key, self)
def clone(self):
""" Returns a clone of the value """
if isinstance(self.value, Actor):
return Value(self.version, self.value, self.store, self.key)
# otherwise...
return Value(self.version,
copy.deepcopy(self.value),
self.store,
self.key)
class Collection(dict):
"""
Collection() -> new Collection dict
A dictionary which belongs to a thread-safe store
Again, you do not instantiate these yourself
"""
def set_store(self, store):
""" Set the store to associate the collection with """
self.store = store
def commit(self):
""" Commit new versions of the collection's items to the store """
self.store.set_values(self)
def __getattribute__(self, key):
keys = super(Collection, self).keys()
if key in keys:
value = self[key].value
return value
return super(Collection, self).__getattribute__(key)
def __setattr__(self, key, value):
keys = super(Collection, self).keys()
if key in keys:
self[key].set(value)
else:
super(Collection, self).__setattr__(key, value)
class Store(object):
"""
Store() -> new Store object
A thread-safe versioning store for key-value pairs
You instantiate this as per the documentation for this module
"""
def __init__(self):
self.store = {} # Threadsafe
self.lock = threading.Lock()
self.last_update = time.time() # Readonly
# ////---------------------- Direct access -----------------------\\\\
# Let's make this lock free, and force the assumption that to do
# this the store must be locked.
#
# Let's make this clear by marking these private
#
# Reads Store Value - need to protect during clone
def __get(self, key):
"""
        Retrieve a value. Returns a clone of the Value. Not thread-safe.
"""
return self.store[key].clone()
# Writes Store Value - need to prevent multiple concurrent write
def __make(self, key):
""" Create a new key-value pair. Not thread-safe """
self.store[key] = Value(0, None, self, key)
# Writes Store Value - need to prevent multiple concurrent write
def __do_update(self, key, value):
"""
Update a key-value pair and increment the version. Not thread-safe
"""
if isinstance(value.value, Actor):
self.store[key] = Value(value.version + 1, value.value, self, key)
else:
self.store[key] = Value(value.version + 1,
copy.deepcopy(value.value),
self, key)
value.version = value.version + 1
# Reads Store Value - possibly thread safe, depending on VM implementation
def __can_update(self, key, value):
"""
Returns true if a value can be safely updated. Potentially not
thread-safe
"""
return not (self.store[key].version > value.version)
# \\\\---------------------- Direct access -----------------------////
# ////----------------- Single Value Mediation ------------------\\\\
# Both of these are read-write
# Reads and Writes Values (since value may not exist)
def usevar(self, key, islocked=False):
"""
Tries to get an item from the store. Returns the requested
Value object. If the store is already in use a BusyRetry
error is raised.
"""
locked = islocked
if not locked:
locked = self.lock.acquire(0)
result = None
if locked:
try:
try:
result = self.__get(key)
except KeyError:
self.__make(key)
result = self.__get(key)
finally:
if not islocked:
self.lock.release() # only release if we acquire
else:
raise BusyRetry
return result
# Reads and Writes Values (has to check store contents)
def set(self, key, value):
"""
Tries to update a value in the store. If the store is already
in use a BusyRetry error is raised. If the value has been
updated by another thread a ConcurrentUpdate error is raised
"""
locked = self.lock.acquire(0)
HasBeenSet = False
if locked:
try:
if self.__can_update(key, value):
self.__do_update(key, value)
HasBeenSet = True
finally:
self.lock.release()
else:
raise BusyRetry
if not HasBeenSet:
raise ConcurrentUpdate
self.last_update = time.time()
# \\\\----------------- Single Value Mediation ------------------////
# ////----------------- Multi-Value Mediation ------------------\\\\
# Both of these are read-write
# Reads and Writes Values (since values may not exist)
def using(self, *keys):
"""
Tries to get a selection of items from the store. Returns a
Collection dictionary containing the requested values. If the
store is already in use a BusyRetry error is raised.
"""
locked = self.lock.acquire(0)
if locked:
try:
D = Collection()
for key in keys:
D[key] = self.usevar(key, islocked=True)
D.set_store(self)
finally:
self.lock.release()
else:
raise BusyRetry
return D
# Reads and Writes Values (has to check store contents)
def set_values(self, D):
"""
Tries to update a selection of values in the store. If the store is
already in use a BusyRetry error is raised. If one of the values has
been updated by another thread a ConcurrentUpdate error is raised.
"""
CanUpdateAll = True # Hope for the best :-)
locked = self.lock.acquire(0)
if locked:
try:
for key in D:
# Let experience teach us otherwise :-)
CanUpdateAll = CanUpdateAll and self.__can_update(key, D[key]) # Reading Store
if CanUpdateAll:
for key in D:
self.__do_update(key, D[key]) # Writing Store
finally:
self.lock.release()
else:
raise BusyRetry
if not CanUpdateAll:
raise ConcurrentUpdate
# \\\\----------------- Multi-Value Mediation ------------------////
def names(self):
keys = self.store.keys()
return keys
def snapshot(self):
D = self.using(*self.names())
return D
def export(self, names = None):
result = {}
        if names is None:
names = self.names()
locked = self.lock.acquire(0)
for name in names:
value_clone = self.store[name].clone()
value = value_clone.value
result[name] = value
self.lock.release()
return result
def dump(self):
# Who cares really? This is a debug :-)
print("DEBUG: Store dump ------------------------------")
for k in self.store:
print(" ", k, ":", self.store[k])
print ()
# def usevar(self, key, islocked=False):
def checkout(self, key=None, islocked=False):
if key is not None:
return self.usevar(key, islocked)
return STMCheckout(self)
class Bunch(object):
def __init__(self, keys):
i = 0
for key in keys:
self.__dict__[key] = i
i += 1
class STMCheckout(object):
def __init__(self, store, max_tries=10):
self.notcheckedin = True
self.num_tries = 0
self.max_tries = max_tries
self.store = store
@contextmanager
def changeset(self, *args, **argd):
autocheckin = argd.get("autocheckin", True)
# print("WOMBAT", args, self.store)
D = self.store.using(*args)
self.num_tries += 1
try:
yield D
if autocheckin:
D.commit()
self.notcheckedin = False
except ConcurrentUpdate as f:
if self.max_tries:
if self.max_tries == self.num_tries:
raise MAXFAIL(f)
def retry(max_tries=None, timeout=None):
if callable(max_tries):
return retry(None)(max_tries)
def mk_transaction(function):
@_wraps(function)
def as_transaction(*argv, **argd):
count = 0
ts = time.time()
succeeded = False
while not succeeded:
if max_tries is not None:
if count > max_tries:
raise MaxRetriesExceeded()
count += 1
if timeout is not None:
now = time.time()
if now-ts > timeout:
raise RetryTimeoutExceeded(now-ts , timeout)
try:
result = function(*argv, **argd)
succeeded = True
except ConcurrentUpdate:
pass
except BusyRetry:
pass
return result
return as_transaction
return mk_transaction
if __name__ == "__main__":
if 0:
S = Store()
D = S.using("account_one", "account_two", "myaccount")
D["myaccount"].set(0)
D["account_one"].set(50)
D["account_two"].set(100)
D.commit() # 1
S.dump()
D = S.using("account_one", "account_two", "myaccount")
D["myaccount"].set(D["account_one"].value + D["account_two"].value)
E = S.using("account_one", "myaccount")
E["myaccount"].set(E["myaccount"].value - 100)
E["account_one"].set(100)
E.commit() # 2
D["account_one"].set(0)
D["account_two"].set(0)
D.commit() # 3 - should fail
S.dump()
if 0:
S = Store()
D = S.using("account_one", "account_two", "myaccount")
D["account_one"].set(50)
D["account_two"].set(100)
D.commit()
S.dump()
D = S.using("account_one", "account_two", "myaccount")
D["myaccount"].set(D["account_one"].value + D["account_two"].value)
D["account_one"].set(0)
D["account_two"].set(0)
D.commit()
S.dump()
if 0:
S = Store()
D = S.usevar("accounts")
D.set({"account_one": 50, "account_two": 100, "myaccount": 0})
D.commit() # First
S.dump()
X = D.value
X["myaccount"] = X["account_one"] + X["account_two"]
X["account_one"] = 0
E = S.usevar("accounts")
Y = E.value
Y["myaccount"] = Y["myaccount"] - 100
Y["account_one"] = 100
E.set(Y)
E.commit() # Second
S.dump()
X["account_two"] = 0
D.set(X)
D.commit() # Third - This Should fail
S.dump()
print ("Committed", D.value["myaccount"])
if 1:
S = Store()
greeting = S.usevar("hello")
print (repr(greeting.value))
greeting.set("Hello World")
greeting.commit()
# ------------------------------------------------------
print (greeting)
S.dump()
# ------------------------------------------------------
par = S.usevar("hello")
par.set("Woo")
par.commit()
# ------------------------------------------------------
print (greeting)
S.dump()
# ------------------------------------------------------
greeting.set("Woo")
greeting.commit() # Should fail
print (repr(greeting), repr(greeting.value))
S.dump()
```
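The `retry` decorator defined in stm.py is not exercised by the `__main__` demos above; here is a small hedged sketch of how it could wrap a transfer-style transaction (account names and amounts are illustrative, and `guild.stm` is assumed to be importable as in this repo):
```python
from guild.stm import Store, retry

S = Store()
D = S.using("account_one", "account_two")
D["account_one"].set(100)
D["account_two"].set(0)
D.commit()

@retry(max_tries=10, timeout=1.0)
def transfer(store, amount):
    # Re-read inside the transaction so a retry sees fresh values after a
    # ConcurrentUpdate or BusyRetry raised by commit().
    accounts = store.using("account_one", "account_two")
    accounts["account_one"].set(accounts["account_one"].value - amount)
    accounts["account_two"].set(accounts["account_two"].value + amount)
    accounts.commit()

transfer(S, 25)
S.dump()
```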
#### File: guild/site/build_site.py
```python
import markdown
import os
import pprint
def get_meta(source):
meta = {}
if source.startswith("---\n"):
source = source[4:]
pos = source.find("---\n")
meta_lines = source[:pos].split("\n")
source = source[pos+4:]
if meta_lines[-1]=="":
meta_lines = meta_lines[:-1]
for line in meta_lines:
pos = line.find(":")
key = line[:pos]
value = line[pos+1:].strip()
meta[key] = value
return meta, source
def render_markup(source, source_form):
if source_form == "None":
return "<pre>\n%s\n</pre>\n" % source
if source_form == "markdown":
return markdown.markdown(source)
def build_sidebar(site_meta, divider):
sidebar_parts = []
filenames = site_meta.keys()
filenames.sort()
for filename in filenames:
page_meta = site_meta[filename]
if page_meta.get("skip", False):
continue
stripped = filename[:filename.rfind(".")]
result_file = stripped.lower() + ".html"
link = '<a href="%s">%s</a>' % (result_file, page_meta.get("name", "None"))
sidebar_parts.append(link)
return divider.join(sidebar_parts)
def files(some_dir):
for filename in os.listdir("src"):
if os.path.isfile(os.path.join("src", filename)):
yield filename
def parse_source_data(page):
parts = []
process = page
while True:
index = process.find("{%")
if index == -1:
if len(process) > 0:
parts.append(("passthrough", process))
process = ""
break # No more parts to process
if index > 0:
parts.append(("passthrough", process[:index]))
process = process[index+2:]
index = process.find("%}")
if index == -1:
print "UNMATCHED {%, aborting unprocessed"
if len(process) > 0:
parts.append(("passthrough", process))
process = ""
break # No more parts to process
if index > 0:
parts.append(("process", process[:index]))
process = process[index+2:]
return parts
def process_directive(raw_directive):
if "panel(" in raw_directive:
raw_id = raw_directive[:raw_directive.find("=")]
raw_id = raw_id.strip()
part_args = raw_directive[raw_directive.find("panel(")+7:]
filename = part_args[:part_args.find('"')]
with open("src/"+filename) as f:
raw_contents = f.read()
meta, source_data = get_meta(raw_contents)
return source_data
return "??"+raw_directive ## DIRECTIVE PASS IN, BUT UNKNOWN
def process_directives(source_data):
result = []
for page_part_type, page_part in parse_source_data(source_data):
if page_part_type == "passthrough":
result.append(page_part)
elif page_part_type == "process":
part_result = process_directive(page_part)
print page_part
print "PRE LEN", len(part_result)
while "{%" in part_result: # panels may contain sub panels after all.
print "RECURSING!"
part_result = process_directives(part_result)
print "POST LEN", len(part_result)
result.append(part_result)
else:
result.append(page_part)
return "".join(result)
site_meta = {}
for filename in files("src"):
print filename
stripped = filename[:filename.rfind(".")]
result_file = stripped.lower() + ".html"
source_data = open("src/"+ filename ).read()
meta, source_data = get_meta(source_data)
meta["_stripped"] = stripped
meta["_source_data"] = source_data
meta["_result_file"] = result_file
site_meta[filename] = meta
sidebar = "<br>\n" + build_sidebar(site_meta, " <br> ")
count = 0
for filename in files("src"):
meta, source_data = site_meta[filename],site_meta[filename]["_source_data"]
if meta.get("skip", False):
print "Skipping", filename
continue
print "PROCESSING", filename
stripped = meta["_stripped"]
result_file = meta["_result_file"]
tmpl_name = meta.get("template", "None")
source_form = meta.get("source_form", "None")
source_data = process_directives(source_data)
processed = render_markup(source_data, source_form)
tmpl = open("templates/%s.tmpl" % tmpl_name, "rb").read()
result_html = tmpl
result_html = result_html.replace("{% page.body %}", processed)
result_html = result_html.replace(u"{% site.sidebar %}", str(sidebar))
result_html = result_html.replace(u"{% page.updated %}", meta.get("updated", "na"))
try:
result_html = result_html.replace(u"{% page.title %}",meta.get("title", meta["_stripped"]))
except KeyError as e:
print "KEYERROR meta",meta
raise
out = open("site/"+ result_file,"w")
out.write(result_html)
out.close()
count += 1
try:
import build_site_local
from build_site_local import run_local
run_local(site_meta, process_directives)
print "LOCAL RUN"
except ImportError:
print "No build_site_local customisations"
print "Success!", count, "files published."
``` |
{
"source": "jimeffry/MTCNN-TF",
"score": 3
} |
#### File: MTCNN-TF/caffe/tools_matrix.py
```python
import sys
from operator import itemgetter
import numpy as np
import cv2
'''
Function:
change rectangles into squares (matrix version)
Input:
rectangles: rectangles[i][0:3] is the position, rectangles[i][4] is score
Output:
squares: same as input
'''
def rect2square(rectangles):
w = rectangles[:,2] - rectangles[:,0]
h = rectangles[:,3] - rectangles[:,1]
l = np.maximum(w,h).T
rectangles[:,0] = rectangles[:,0] + w*0.5 - l*0.5
rectangles[:,1] = rectangles[:,1] + h*0.5 - l*0.5
rectangles[:,2:4] = rectangles[:,0:2] + np.repeat([l], 2, axis = 0).T
return rectangles
'''
Function:
apply NMS(non-maximum suppression) on ROIs in same scale(matrix version)
Input:
rectangles: rectangles[i][0:3] is the position, rectangles[i][4] is score
Output:
rectangles: same as input
'''
def NMS(rectangles,threshold,mode):
if len(rectangles)==0:
return rectangles
boxes = np.array(rectangles)
x1 = boxes[:,0]
y1 = boxes[:,1]
x2 = boxes[:,2]
y2 = boxes[:,3]
s = boxes[:,4]
area = np.multiply(x2-x1+1, y2-y1+1)
I = np.array(s.argsort())
pick = []
while len(I)>0:
        xx1 = np.maximum(x1[I[-1]], x1[I[0:-1]]) #I[-1] has the highest prob score, I[0:-1]->others
yy1 = np.maximum(y1[I[-1]], y1[I[0:-1]])
xx2 = np.minimum(x2[I[-1]], x2[I[0:-1]])
yy2 = np.minimum(y2[I[-1]], y2[I[0:-1]])
w = np.maximum(0.0, xx2 - xx1 + 1)
h = np.maximum(0.0, yy2 - yy1 + 1)
inter = w * h
        if mode == 'iom':  # intersection-over-minimum; the original compared the builtin 'type' by mistake
o = inter / np.minimum(area[I[-1]], area[I[0:-1]])
else:
o = inter / (area[I[-1]] + area[I[0:-1]] - inter)
pick.append(I[-1])
I = I[np.where(o<=threshold)[0]]
result_rectangle = boxes[pick].tolist()
return result_rectangle
'''
Function:
Detect face position and calibrate bounding box on 12net feature map(matrix version)
Input:
cls_prob : softmax feature map for face classify
roi : feature map for regression
out_side : feature map's largest size
scale : current input image scale in multi-scales
width : image's origin width
height : image's origin height
threshold: 0.6 can have 99% recall rate
'''
def detect_face_12net(cls_prob,roi,out_side,scale,width,height,threshold):
in_side = 2*out_side+11
stride = 0
if out_side != 1:
stride = float(in_side-12)/(out_side-1)
(x,y) = np.where(cls_prob>=threshold)
boundingbox = np.array([x,y]).T
bb1 = np.fix((stride * (boundingbox) + 0 ) * scale)
bb2 = np.fix((stride * (boundingbox) + 11) * scale)
boundingbox = np.concatenate((bb1,bb2),axis = 1)
dx1 = roi[0][x,y]
dx2 = roi[1][x,y]
dx3 = roi[2][x,y]
dx4 = roi[3][x,y]
score = np.array([cls_prob[x,y]]).T
offset = np.array([dx1,dx2,dx3,dx4]).T
boundingbox = boundingbox + offset*12.0*scale
rectangles = np.concatenate((boundingbox,score),axis=1)
rectangles = rect2square(rectangles)
pick = []
for i in range(len(rectangles)):
x1 = int(max(0 ,rectangles[i][0]))
y1 = int(max(0 ,rectangles[i][1]))
x2 = int(min(width ,rectangles[i][2]))
y2 = int(min(height,rectangles[i][3]))
sc = rectangles[i][4]
if x2>x1 and y2>y1:
pick.append([x1,y1,x2,y2,sc])
return NMS(pick,0.5,'iou')
'''
Function:
Filter face position and calibrate bounding box on 12net's output
Input:
cls_prob : softmax feature map for face classify
roi_prob : feature map for regression
rectangles: 12net's predict
width : image's origin width
height : image's origin height
threshold : 0.6 can have 97% recall rate
Output:
rectangles: possible face positions
'''
def filter_face_24net(cls_prob,roi,rectangles,width,height,threshold):
prob = cls_prob[:,1]
pick = np.where(prob>=threshold)
rectangles = np.array(rectangles)
x1 = rectangles[pick,0]
y1 = rectangles[pick,1]
x2 = rectangles[pick,2]
y2 = rectangles[pick,3]
sc = np.array([prob[pick]]).T
dx1 = roi[pick,0]
dx2 = roi[pick,1]
dx3 = roi[pick,2]
dx4 = roi[pick,3]
w = x2-x1
h = y2-y1
x1 = np.array([(x1+dx1*w)[0]]).T
y1 = np.array([(y1+dx2*h)[0]]).T
x2 = np.array([(x2+dx3*w)[0]]).T
y2 = np.array([(y2+dx4*h)[0]]).T
rectangles = np.concatenate((x1,y1,x2,y2,sc),axis=1)
rectangles = rect2square(rectangles)
pick = []
for i in range(len(rectangles)):
x1 = int(max(0 ,rectangles[i][0]))
y1 = int(max(0 ,rectangles[i][1]))
x2 = int(min(width ,rectangles[i][2]))
y2 = int(min(height,rectangles[i][3]))
sc = rectangles[i][4]
if x2>x1 and y2>y1:
pick.append([x1,y1,x2,y2,sc])
return NMS(pick,0.7,'iou')
'''
Function:
    Filter face position and calibrate bounding box on 24net's output
Input:
cls_prob : cls_prob[1] is face possibility
roi : roi offset
pts : 5 landmark
    rectangles: 24net's predict, rectangles[i][0:3] is the position, rectangles[i][4] is score
width : image's origin width
height : image's origin height
threshold : 0.7 can have 94% recall rate on CelebA-database
Output:
rectangles: face positions and landmarks
'''
def filter_face_48net(cls_prob,roi,pts,rectangles,width,height,threshold):
prob = cls_prob[:,1]
pick = np.where(prob>=threshold)
rectangles = np.array(rectangles)
x1 = rectangles[pick,0]
y1 = rectangles[pick,1]
x2 = rectangles[pick,2]
y2 = rectangles[pick,3]
sc = np.array([prob[pick]]).T
dx1 = roi[pick,0]
dx2 = roi[pick,1]
dx3 = roi[pick,2]
dx4 = roi[pick,3]
w = x2-x1
h = y2-y1
pts0= np.array([(w*pts[pick,0]+x1)[0]]).T
pts1= np.array([(h*pts[pick,5]+y1)[0]]).T
pts2= np.array([(w*pts[pick,1]+x1)[0]]).T
pts3= np.array([(h*pts[pick,6]+y1)[0]]).T
pts4= np.array([(w*pts[pick,2]+x1)[0]]).T
pts5= np.array([(h*pts[pick,7]+y1)[0]]).T
pts6= np.array([(w*pts[pick,3]+x1)[0]]).T
pts7= np.array([(h*pts[pick,8]+y1)[0]]).T
pts8= np.array([(w*pts[pick,4]+x1)[0]]).T
pts9= np.array([(h*pts[pick,9]+y1)[0]]).T
x1 = np.array([(x1+dx1*w)[0]]).T
y1 = np.array([(y1+dx2*h)[0]]).T
x2 = np.array([(x2+dx3*w)[0]]).T
y2 = np.array([(y2+dx4*h)[0]]).T
rectangles=np.concatenate((x1,y1,x2,y2,sc,pts0,pts1,pts2,pts3,pts4,pts5,pts6,pts7,pts8,pts9),axis=1)
pick = []
for i in range(len(rectangles)):
x1 = int(max(0 ,rectangles[i][0]))
y1 = int(max(0 ,rectangles[i][1]))
x2 = int(min(width ,rectangles[i][2]))
y2 = int(min(height,rectangles[i][3]))
if x2>x1 and y2>y1:
pick.append([x1,y1,x2,y2,rectangles[i][4],
rectangles[i][5],rectangles[i][6],rectangles[i][7],rectangles[i][8],rectangles[i][9],rectangles[i][10],rectangles[i][11],rectangles[i][12],rectangles[i][13],rectangles[i][14]])
return NMS(pick,0.7,'iom')
'''
Function:
    calculate multi-scale and limit the maximum side to 1000
Input:
img: original image
Output:
    pr_scale: limit the maximum side to 1000, < 1.0
scales : Multi-scale
'''
def calculateScales_org(img):
caffe_img = img.copy()
pr_scale = 1.0
h,w,ch = caffe_img.shape
if min(w,h)>1000:
pr_scale = 1000.0/min(h,w)
w = int(w*pr_scale)
h = int(h*pr_scale)
elif max(w,h)<1000:
pr_scale = 1000.0/max(h,w)
w = int(w*pr_scale)
h = int(h*pr_scale)
#multi-scale
scales = []
factor = 0.709
factor_count = 0
minl = min(h,w)
while minl >= 12:
scales.append(pr_scale*pow(factor, factor_count))
minl *= factor
factor_count += 1
return scales
def calculateScales(img,min_face):
caffe_img = img.copy()
h,w,ch = caffe_img.shape
pr_scale = 12.0/min_face
w = int(w*pr_scale)
h = int(h*pr_scale)
#multi-scale
scales = []
factor = 0.7937
factor_count = 0
minl = min(h,w)
while minl >= 12:
scales.append(pr_scale*pow(factor, factor_count))
minl *= factor
factor_count += 1
return scales
```
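A quick way to sanity-check `rect2square` and `NMS` from the file above is to run them on a few hand-made boxes. This is only a sketch; it assumes the file is importable as `tools_matrix`, and the boxes are arbitrary examples.
```python
# Sanity check for rect2square / NMS above; assumes the file is importable as tools_matrix.
import numpy as np
import tools_matrix as tools

# Candidate boxes as [x1, y1, x2, y2, score]; the first two overlap heavily.
boxes = np.array([
    [10.0, 10.0, 50.0, 30.0, 0.90],
    [12.0, 11.0, 52.0, 32.0, 0.85],
    [200.0, 200.0, 240.0, 260.0, 0.70],
])

squared = tools.rect2square(boxes.copy())
print("squared boxes:\n%s" % squared[:, :4])        # every box now has equal width and height

kept = tools.NMS(boxes.tolist(), 0.5, 'iou')
print("kept after NMS: %s" % kept)                  # the two overlapping boxes collapse to one
```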
#### File: MTCNN-TF/Detection/fcn_detector.py
```python
import numpy as np
import tensorflow as tf
import sys
sys.path.append("../")
from train_models.MTCNN_config import config
import os
class FcnDetector(object):
#net_factory: which net
#model_path: where the params'file is
def __init__(self, net_factory, model_path):
#create a graph
graph = tf.Graph()
self.train_face = config.train_face
with graph.as_default():
#define tensor and op in graph(-1,1)
self.image_op = tf.placeholder(tf.float32, name='input_image')
self.width_op = tf.placeholder(tf.int32, name='image_width')
self.height_op = tf.placeholder(tf.int32, name='image_height')
image_reshape = tf.reshape(self.image_op, [1, self.height_op, self.width_op, 3])
#self.cls_prob batch*2
#self.bbox_pred batch*4
#construct model here
#self.cls_prob, self.bbox_pred = net_factory(image_reshape, training=False)
#contains landmark
if config.p_landmark:
self.cls_prob, self.bbox_pred, _ = net_factory(image_reshape, training=False)
else:
self.cls_prob, self.bbox_pred = net_factory(image_reshape, training=False)
#allow
self.sess = tf.Session(config=tf.ConfigProto(allow_soft_placement=True, gpu_options=tf.GPUOptions(allow_growth=True)))
saver = tf.train.Saver()
#check whether the dictionary is valid
net_name = model_path.split('/')[-1]
print("net name is ",net_name)
if self.train_face==100:
logs_dir = "../logs/%s" %(net_name)
summary_op = tf.summary.merge_all()
if os.path.exists(logs_dir) == False:
os.mkdir(logs_dir)
writer = tf.summary.FileWriter(logs_dir,self.sess.graph)
model_dict = '/'.join(model_path.split('/')[:-1])
ckpt = tf.train.get_checkpoint_state(model_dict)
print("restore model path",model_path)
readstate = ckpt and ckpt.model_checkpoint_path
assert readstate, "the params dictionary is not valid"
print ("restore models' param")
saver.restore(self.sess, model_path)
if self.train_face==100:
saver.save(self.sess,model_dict+'/resaved/'+net_name+'relu')
'''
logs_dir = "../logs/%s" %(net_factory)
summary_op = tf.summary.merge_all()
if os.path.exists(logs_dir) == False:
os.mkdir(logs_dir)
writer = tf.summary.FileWriter(logs_dir,self.sess.graph)
#summary = self.sess.run()
#writer.add_summary(summary,global_step=step)
'''
def predict(self, databatch):
height, width, _ = databatch.shape
# print(height, width)
cls_prob, bbox_pred = self.sess.run([self.cls_prob, self.bbox_pred],
feed_dict={self.image_op: databatch, self.width_op: width,
self.height_op: height})
return cls_prob, bbox_pred
```
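`FcnDetector` above builds the fully-convolutional PNet graph once and then accepts whole images of arbitrary size through `predict`. A minimal usage sketch follows; the checkpoint path is a placeholder, and the input normalisation shown is the common MTCNN convention rather than something this file performs itself.
```python
# Usage sketch for FcnDetector above; the checkpoint path is a placeholder for this repo's layout.
import sys
sys.path.append("../")
import cv2
import numpy as np
from train_models.mtcnn_model import P_Net
from Detection.fcn_detector import FcnDetector

pnet = FcnDetector(P_Net, "../data/MTCNN_model/PNet_landmark/PNet-30")   # prefix-epoch placeholder

img = cv2.imread("test.jpg")
img = (img.astype(np.float32) - 127.5) / 128.0   # common MTCNN normalisation; MtcnnDetector handles this internally
cls_prob, bbox_pred = pnet.predict(img)
print("cls_prob %s, bbox_pred %s" % (str(cls_prob.shape), str(bbox_pred.shape)))
```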
#### File: MTCNN-TF/prepare_data/gen_hard_example.py
```python
import sys
#sys.path.append("../")
sys.path.insert(0,'..')
import numpy as np
import argparse
import os
import cPickle as pickle
import cv2
from train_models.mtcnn_model import P_Net,R_Net,O_Net
from train_models.MTCNN_config import config
from loader import TestLoader
from Detection.detector import Detector
from Detection.fcn_detector import FcnDetector
from Detection.MtcnnDetector import MtcnnDetector
from utils import convert_to_square,IoU,convert_to_rect,IoU_self
from data_utils import get_path,read_annotation
import pdb
#net : 24(RNet)/48(ONet)
#data: dict()
'''
def args():
parser = argparse.ArgumentParser(description="gen_hard_example for Rnet Onet")
parser.add_argument('--net',type=str,required=True,default='RNet'
help='should be RNet of ONet')
return parser.parse_args()
'''
def save_hard_example(gen_anno_file, gen_imgs_dir,data,save_path,test_mode):
# load ground truth from annotation file
# format of each line: image/path [x1,y1,x2,y2] for each gt_box in this image
im_idx_list = data['images']
# print(images[0])
gt_boxes_list = data['bboxes']
num_of_images = len(im_idx_list)
print("processing %d images in total" % num_of_images)
# save files
print("saved hard example dir ",net)
#neg_label_file = "%s/neg_%d.txt" % (net, image_size)
neg_label_file = gen_anno_file[0]
neg_file = open(neg_label_file, 'w')
pos_label_file = gen_anno_file[1]
pos_file = open(pos_label_file, 'w')
part_label_file = gen_anno_file[2]
part_file = open(part_label_file, 'w')
#read detect result
det_boxes = pickle.load(open(os.path.join(save_path, 'detections.pkl'), 'rb'))
print("det boxes and image num: ",len(det_boxes), num_of_images)
#print len(det_boxes)
#print num_of_images
assert len(det_boxes) == num_of_images, "incorrect detections or ground truths"
# index of neg, pos and part face, used as their image names
n_idx = 0
p_idx = 0
d_idx = 0
image_done = 0
cnt_pass =0
#im_idx_list image index(list)
#det_boxes detect result(list)
#gt_boxes_list gt(list)
neg_dir,pos_dir,part_dir = gen_imgs_dir
if test_mode == "PNet" and not config.train_face:
X1_thresh = 0.45
Y1_thresh = -0.2
elif test_mode == "RNet" and not config.train_face:
Y1_thresh = -0.2
X1_thresh = 0.45
print("generate Onet")
else:
Y1_thresh = 1
X1_thresh = 1
for im_idx, dets, gts in zip(im_idx_list, det_boxes, gt_boxes_list):
gts = np.array(gts, dtype=np.float32).reshape(-1, 4)
if image_done % 100 == 0:
print("%d images done" % image_done)
image_done += 1
if dets.shape[0] == 0:
continue
img = cv2.imread(im_idx)
#change to square
dets = convert_to_square(dets)
dets[:, 0:4] = np.round(dets[:, 0:4])
neg_num = 0
for box in dets:
x_left, y_top, x_right, y_bottom, _ = box.astype(int)
width = x_right - x_left + 1
height = y_bottom - y_top + 1
# ignore box that is too small or beyond image border
#if width < 20 or x_left < 0 or y_top < 0 or x_right > img.shape[1] - 1 or y_bottom > img.shape[0] - 1:
if x_left < 0 or y_top < 0 or x_right > img.shape[1] - 1 or y_bottom > img.shape[0] - 1 or width <=10 :
#print("pass")
cnt_pass+=1
continue
# compute intersection over union(IoU) between current box and all gt boxes
Iou_ = IoU(box, gts)
Iou_gt = IoU_self(box,gts)
cropped_im = img[y_top:y_bottom + 1, x_left:x_right + 1, :]
resized_im = cv2.resize(cropped_im, (image_size, image_size),
interpolation=cv2.INTER_LINEAR)
# save negative images and write label
# Iou with all gts must below 0.3
union_max = np.max(Iou_)
gt_max = np.max(Iou_gt)
if union_max <=0.3 and neg_num < 60:
#save the examples
                idx = np.argmax(Iou_)  # best-matching ground truth (np.argmax on the scalar union_max was always 0)
assigned_gt = gts[idx]
x1, y1, x2, y2 = assigned_gt
offset_x1 = (x1 - x_left) / float(width)
offset_y1 = (y1 - y_top) / float(height)
offset_x2 = (x2 - x_right) / float(width)
offset_y2 = (y2 - y_bottom) / float(height)
save_file = get_path(neg_dir, "%s.jpg" % n_idx)
# print(save_file)
#neg_file.write(save_file + ' 0\n')
neg_file.write(save_file + ' 0 %.2f %.2f %.2f %.2f\n' % (offset_x1, offset_y1, offset_x2, offset_y2))
cv2.imwrite(save_file, resized_im)
n_idx += 1
'''
if union_max>0:
if np.abs(offset_x1) < 1 :
neg_file.write(save_file + ' 0 %.2f %.2f %.2f %.2f\n' % (offset_x1, offset_y1, offset_x2, offset_y2))
#neg_file.write(' %.2f %.2f %.2f %.2f' % (x1, y1, x2, y2))
#neg_file.write(' %.2f %.2f %.2f %.2f ' % (x_left, y_top, x_right, y_bottom))
#neg_file.write(im_idx +'\n')
cv2.imwrite(save_file, resized_im)
n_idx += 1
else:
neg_file.write(save_file + ' 0 %.2f %.2f %.2f %.2f\n' % (offset_x1, offset_y1, offset_x2, offset_y2))
cv2.imwrite(save_file, resized_im)
n_idx += 1
'''
neg_num += 1
else:
# find gt_box with the highest iou
idx = np.argmax(Iou_)
assigned_gt = gts[idx]
x1, y1, x2, y2 = assigned_gt
# compute bbox reg label
offset_x1 = (x1 - x_left) / float(width)
offset_y1 = (y1 - y_top) / float(height)
offset_x2 = (x2 - x_right) / float(width)
offset_y2 = (y2 - y_bottom) / float(height)
# save positive and part-face images and write labels
if union_max >= 0.6:
#if np.max(Iou) >= 0.65:
#if union_max >= 0.7 and offset_y1>Y1_thresh and np.abs(offset_x1)<= X1_thresh:
save_file = get_path(pos_dir, "%s.jpg" % p_idx)
pos_file.write(save_file + ' 1 %.2f %.2f %.2f %.2f\n' % (
offset_x1, offset_y1, offset_x2, offset_y2))
cv2.imwrite(save_file, resized_im)
p_idx += 1
#elif np.max(Iou) >= 0.3:
elif union_max > 0.3 and union_max <=0.4:
#elif union_max <= 0.3 and union_max >0.1 and offset_y1 <Y1_thresh and np.abs(offset_x1)<= X1_thresh:
save_file = os.path.join(part_dir, "%s.jpg" % d_idx)
part_file.write(save_file + ' -1 %.2f %.2f %.2f %.2f\n' % (
offset_x1, offset_y1, offset_x2, offset_y2))
cv2.imwrite(save_file, resized_im)
d_idx += 1
print("%s images done, pos: %s part: %s neg: %s, pass: %s"%(image_done, p_idx, d_idx, n_idx,cnt_pass))
neg_file.close()
part_file.close()
pos_file.close()
print("neg image num: ",n_idx)
print("pos image num: ",p_idx)
print("pat image num: ",d_idx)
print("pass num : ",cnt_pass)
def rd_anotation(img_saved_dir,filename,data_set_name):
data = dict()
image_path_list = []
boxes_gd = []
    with open(filename, 'r') as f:  # read the annotation file passed in (the global anno_file was used by mistake)
annotations = f.readlines()
for annotation in annotations:
annotation = annotation.strip().split()
im_path = annotation[0]
if data_set_name == "WiderFace":
im_path = im_path +'.jpg'
im_path = os.path.join(img_saved_dir,im_path)
#print("img path ",im_path)
image_path_list.append(im_path)
#boxed change to float type
bbox = map(float, annotation[1:])
#print("box : ",bbox)
#gt
boxes = np.array(bbox, dtype=np.float32).reshape(-1, 4)
boxes_gd.append(boxes)
data['images'] = image_path_list
data['bboxes'] = boxes_gd
return data
def t_net(prefix, epoch,batch_size, img_saved_dir,anno_file,gen_anno_file,gen_imgs_dir,data_set_name,ignore_det=False,test_mode="PNet",thresh=[0.6, 0.6, 0.7], min_face_size=25,\
stride=2):
slide_window=False
detectors = [None, None, None]
print("Test model: ", test_mode)
#PNet-echo
print("epoch num ",epoch[0])
''' #for Pnet test
epoch_num = epoch[0]
epoch_c = np.arange(2,epoch_num,2)
prefix_c = []
prefix = prefix[0]
[prefix_c.append(prefix) for i in range(len(epoch_c))]
'''
print("prefixs is ",prefix)
model_path = ['%s-%s' % (x, y) for x, y in zip(prefix, epoch)]
#print("after zip model_path is ",model_path)
#model_path[0] = prefix + '-'+str(epoch_num) #for Pnet test
print("model_path 0 is ",model_path[0])
# load pnet model
if slide_window:
PNet = Detector(P_Net, 12, batch_size[0], model_path[0])
else:
PNet = FcnDetector(P_Net, model_path[0])
detectors[0] = PNet
# load rnet model
if test_mode in ["RNet", "ONet"]:
print("==================================", test_mode)
RNet = Detector(R_Net, 24, batch_size[1], model_path[1])
detectors[1] = RNet
# load onet model
if test_mode == "ONet":
print("==================================", test_mode)
ONet = Detector(O_Net, 48, batch_size[2], model_path[2])
detectors[2] = ONet
#read annatation(type:dict)
#img_box_dic = read_annotation(img_saved_dir,anno_file)
img_box_dic = rd_anotation(img_saved_dir,anno_file,data_set_name)
print("gen_hardexample threshold ",thresh)
if not ignore_det:
mtcnn_detector = MtcnnDetector(detectors=detectors, min_face_size=min_face_size,
stride=stride, threshold=thresh)
print("==================================")
    # Note: the detector runs in "test" mode here
# imdb = IMDB("wider", image_set, root_path, dataset_path, 'test')
# gt_imdb = imdb.gt_imdb()
test_data = TestLoader(img_box_dic['images'])
#list
if not ignore_det:
detections,_ = mtcnn_detector.detect_face(test_data)
if test_mode == "PNet":
save_net = "RNet"
save_path = '24/RNet'
elif test_mode == "RNet":
save_net = "ONet"
save_path = "48/ONet"
#save detect result
#save_path = os.path.join(data_dir, save_net)
print ("save path is",save_path)
if not os.path.exists(save_path):
os.mkdir(save_path)
save_file = os.path.join(save_path, "detections.pkl")
if not ignore_det:
with open(save_file, 'wb') as f:
pickle.dump(detections, f,1)
f.close()
print("%s Test is Over and begin OHEM" % image_size)
save_hard_example(gen_anno_file, gen_imgs_dir,img_box_dic, save_path,test_mode)
def parse_args():
parser = argparse.ArgumentParser(description='Test mtcnn',
formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('--test_mode', dest='test_mode', help='test net type, can be PNet, RNet or ONet',
default='PNet', type=str)
parser.add_argument('--prefix', dest='prefix', help='prefix of model name', nargs="+",
default=["../data/MTCNN_model/PNet_landmark/v1_trained/PNet", "../data/MTCNN_model/RNet_landmark/RNet", "../data/MTCNN_model/ONet_landmark/ONet"],
type=str)
parser.add_argument('--epoch', dest='epoch', help='epoch number of model to load', nargs="+",
default=[32, 2900, 22], type=int)
parser.add_argument('--batch_size', dest='batch_size', help='list of batch size used in prediction', nargs="+",
default=[1, 2048, 16], type=int)
parser.add_argument('--thresh', dest='thresh', help='list of thresh for pnet, rnet, onet', nargs="+",
default=[0.4, 0.6, 0.7], type=float)
parser.add_argument('--min_face', dest='min_face', help='minimum face size for detection',
default=24, type=int)
parser.add_argument('--stride', dest='stride', help='stride of sliding window',
default=2, type=int)
parser.add_argument('--anno_file',type=str,default="./wider_face_train.txt",\
help='annotation saved file path')
parser.add_argument('--img_saved_dir',type=str,default="./WIDER_train/images/",\
help='images saved path')
parser.add_argument('--pos_txt',type=str,default="pos24.txt",\
        help='positive images annotation file ')
parser.add_argument('--neg_txt',type=str,default="neg24.txt",\
        help='negative images annotation file ')
parser.add_argument('--part_txt',type=str,default="part24.txt",\
        help='part images annotation file ')
parser.add_argument('--train_data_set',type=str,default="WiderFace",\
help='the model will be trained in the dataset ')
parser.add_argument('--ignore_det',type=bool,default=False,\
help='only run save_hard_example ')
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
#net = 'RNet'
img_saved_dir = args.img_saved_dir
anno_file = args.anno_file
neg_label_file = args.train_data_set+"_"+args.neg_txt
pos_label_file = args.train_data_set+"_"+args.pos_txt
part_label_file = args.train_data_set+"_"+args.part_txt
prefix = args.prefix
epoch_list = args.epoch
batch_size = args.batch_size
stride = args.stride
test_mode = args.test_mode
score_thresh = args.thresh
min_face_size = args.min_face
ignore_det = args.ignore_det
if args.test_mode == "ONet":
image_size = 48
if args.test_mode =="PNet":
net = "RNet"
elif args.test_mode == "RNet":
net = "ONet"
if net == "RNet":
image_size = 24
if net == "ONet":
image_size = 48
data_dir = '%s' % str(image_size)
neg_label_file = os.path.join(data_dir,neg_label_file)
pos_label_file = os.path.join(data_dir,pos_label_file)
part_label_file = os.path.join(data_dir,part_label_file)
gen_anno_file = [neg_label_file,pos_label_file,part_label_file]
data_set_name = args.train_data_set
neg_dir = get_path(data_dir, '%s_negative' %(data_set_name))
pos_dir = get_path(data_dir, '%s_positive' %(data_set_name))
part_dir = get_path(data_dir, '%s_part' %(data_set_name))
gen_imgs_dir = [neg_dir,pos_dir,part_dir]
    # create output directories if they do not exist
for dir_path in [neg_dir, pos_dir, part_dir]:
if not os.path.exists(dir_path):
os.makedirs(dir_path)
print ('Called with argument:')
print("config ",config.train_face)
t_net(prefix, epoch_list,batch_size, img_saved_dir,anno_file,gen_anno_file,gen_imgs_dir,data_set_name,ignore_det,test_mode,score_thresh, min_face_size,stride)
```
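Stripped of the bookkeeping, the hard-example labelling above is an IoU rule (<= 0.3 negative, (0.3, 0.4] part, >= 0.6 positive) plus bounding-box regression offsets normalised by the detection's width and height. The sketch below re-implements just that arithmetic for illustration; `iou` and `label_detection` are illustrative helpers, not functions from this repo.
```python
import numpy as np

def iou(box, gts):
    """IoU of one [x1, y1, x2, y2] box against an (N, 4) array of ground-truth boxes."""
    xx1 = np.maximum(box[0], gts[:, 0]); yy1 = np.maximum(box[1], gts[:, 1])
    xx2 = np.minimum(box[2], gts[:, 2]); yy2 = np.minimum(box[3], gts[:, 3])
    inter = np.maximum(0.0, xx2 - xx1 + 1) * np.maximum(0.0, yy2 - yy1 + 1)
    area_box = (box[2] - box[0] + 1) * (box[3] - box[1] + 1)
    area_gts = (gts[:, 2] - gts[:, 0] + 1) * (gts[:, 3] - gts[:, 1] + 1)
    return inter / (area_box + area_gts - inter)

def label_detection(det, gts):
    """Label one detection the way save_hard_example does and return its regression offsets."""
    overlaps = iou(det, gts)
    best = gts[overlaps.argmax()]
    w = det[2] - det[0] + 1.0
    h = det[3] - det[1] + 1.0
    offsets = ((best[0] - det[0]) / w, (best[1] - det[1]) / h,
               (best[2] - det[2]) / w, (best[3] - det[3]) / h)
    if overlaps.max() <= 0.3:
        return "neg", offsets
    if overlaps.max() >= 0.6:
        return "pos", offsets
    if 0.3 < overlaps.max() <= 0.4:
        return "part", offsets
    return "ignore", offsets       # the 0.4-0.6 band is discarded, as in the script

gt = np.array([[100.0, 100.0, 200.0, 200.0]])
print(label_detection(np.array([105.0, 98.0, 198.0, 205.0]), gt))   # -> ('pos', ...)
```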
#### File: jimeffry/MTCNN-TF/test.py
```python
import xml.etree.cElementTree as ET
import sys
import cv2
import shutil
'''
tree = ET.parse("/home/lxy/Downloads/DataSet/VOC_Person/VOC2012/Annotations/2007_000664.xml")
root = tree.getroot()
for child_of_root in root:
if child_of_root.tag == 'object':
for child_item in child_of_root:
print(child_item.tag)
'''
def cp_img():
img_path=sys.argv[1]
img_file=open(img_path,'r')
count=0
lines_ = img_file.readlines()
for line in lines_ :
count = count+1
if count >49 and count <4000:
tem_str=line.strip().split()
file1="/home/lxy/Develop/Center_Loss/MTCNN-Tensorflow/prepare_data/"+tem_str[0]
shutil.copyfile(file1,"/home/lxy/Develop/Center_Loss/MTCNN-Tensorflow/prepare_data/48/market_part/"+str(count)+".jpg")
#img_name="img"+str(count)
#shutil.copyfile(line.strip(),"/home/lxy/"+img_name+".jpg")
print("id ",count)
elif count > 4002:
break
'''
for j in range(1,len(tem_str)):
shutil.copyfile("/data/common/HighRailway/photo/"+tem_str[j].strip(),"/data/common/forSZ/photo/"+tem_str[j].strip())
print("photo ",j)
'''
def gen_txt():
txt_path = sys.argv[1]
f_out = open(txt_path,'w')
count = 0
base_dir = "48/market_part/"
for i in range(4000):
path_ = base_dir + str(count)+".jpg"
f_out.write("{} {}\n".format(path_,-1))
count+=1
if __name__=='__main__':
#cp_img()
gen_txt()
``` |
{
"source": "jimeggleston/wombatwiki",
"score": 2
} |
#### File: jimeggleston/wombatwiki/app.py
```python
from bottle import Bottle
from bottle import mako_view as view
from bottle import static_file
staticroot = "b:/websites/bottle/static"
app = Bottle()
@app.route('/<filename:re:.*\..*>')
def send_file(filename):
return static_file(filename, root=staticroot)
@app.route('/hello')
@app.route('/hello/')
@app.route('/hello/<name>')
@view('hello_template.mako')
def hello(name='World'):
return dict(name=name, platform="Bottle")
app.run(host='localhost', port=80, server='tornado', debug=True)
```
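Once the app above is running, the routes can be exercised from a separate shell or script; a tiny smoke test (Python 2, matching the app) might look like this.
```python
# Smoke test for the routes above; run it while app.py is serving on localhost:80.
import urllib2

print(urllib2.urlopen("http://localhost/hello").read())         # hello_template.mako rendered with name='World'
print(urllib2.urlopen("http://localhost/hello/Wombat").read())  # rendered with name='Wombat'
```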
#### File: jimeggleston/wombatwiki/bottle_app.py
```python
import bottle_config as cfg
import os, sys, re, traceback
import bottle, beaker
bottle.debug(True)
from bottle import route, mount, run, hook, request, static_file, redirect, app as apps
from bottle_wiki import application as wikiapp
mainapp = apps.push()
@hook('before_request')
def setup_request():
request.session = request.environ.get('beaker.session',[])
@route('/<filename:re:.*\..*>')
def send_file(filename):
return static_file(filename, root=cfg.staticroot)
@route('/')
@route('/home')
def home():
redirect(cfg.wiki_virtdir)
mainapp.mount(cfg.wiki_virtdir, wikiapp)
application = beaker.middleware.SessionMiddleware(mainapp, cfg.session_opts)
# Error handling helper function
def eh():
html = '<hr><pre>'
html += '\r\n'.join(traceback.format_exception(sys.exc_type, sys.exc_value, sys.exc_traceback))
html += '</pre>'
return html
def dbgprt(*items):
text = ' '.join([str(i) for i in items])
print >>sys.stderr, '<pre>%s</pre><br>' % text
if cfg.runtime_env == "DEV": bottle.run(app=application, host='localhost', port=8080, server="tornado")
```
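`bottle_config` is referenced above but not included in this dump. A minimal stand-in that satisfies the names used (`runtime_env`, `staticroot`, `wiki_virtdir`, `session_opts`) could look like the following; the values are illustrative, only the Beaker session keys themselves are standard.
```python
# bottle_config.py stand-in; only the names referenced by bottle_app.py, values are illustrative.
runtime_env = "DEV"                              # anything else skips the built-in dev server
staticroot = "b:/websites/bottle/static"
wiki_virtdir = "/wiki"

# Beaker SessionMiddleware options; these keys are Beaker's standard configuration keys.
session_opts = {
    'session.type': 'file',
    'session.data_dir': './session_data',
    'session.cookie_expires': 300,
    'session.auto': True,
}
```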
#### File: lib/beaker/cache.py
```python
import warnings
import beaker.container as container
import beaker.util as util
from beaker.crypto.util import sha1
from beaker.exceptions import BeakerException, InvalidCacheBackendError
from beaker.synchronization import _threading
import beaker.ext.memcached as memcached
import beaker.ext.database as database
import beaker.ext.sqla as sqla
import beaker.ext.google as google
# Initialize the cache region dict
cache_regions = {}
"""Dictionary of 'region' arguments.
A "region" is a string name that refers to a series of cache
configuration arguments. An application may have multiple
"regions" - one which stores things in a memory cache, one
which writes data to files, etc.
The dictionary stores string key names mapped to dictionaries
of configuration arguments. Example::
from beaker.cache import cache_regions
cache_regions.update({
'short_term':{
'expire':'60',
'type':'memory'
},
'long_term':{
'expire':'1800',
'type':'dbm',
'data_dir':'/tmp',
}
})
"""
cache_managers = {}
class _backends(object):
initialized = False
def __init__(self, clsmap):
self._clsmap = clsmap
self._mutex = _threading.Lock()
def __getitem__(self, key):
try:
return self._clsmap[key]
except KeyError, e:
if not self.initialized:
self._mutex.acquire()
try:
if not self.initialized:
self._init()
self.initialized = True
return self._clsmap[key]
finally:
self._mutex.release()
raise e
def _init(self):
try:
import pkg_resources
# Load up the additional entry point defined backends
for entry_point in pkg_resources.iter_entry_points('beaker.backends'):
try:
namespace_manager = entry_point.load()
name = entry_point.name
if name in self._clsmap:
raise BeakerException("NamespaceManager name conflict,'%s' "
"already loaded" % name)
self._clsmap[name] = namespace_manager
except (InvalidCacheBackendError, SyntaxError):
# Ignore invalid backends
pass
except:
import sys
from pkg_resources import DistributionNotFound
# Warn when there's a problem loading a NamespaceManager
if not isinstance(sys.exc_info()[1], DistributionNotFound):
import traceback
from StringIO import StringIO
tb = StringIO()
traceback.print_exc(file=tb)
warnings.warn(
"Unable to load NamespaceManager "
"entry point: '%s': %s" % (
entry_point,
tb.getvalue()),
RuntimeWarning, 2)
except ImportError:
pass
# Initialize the basic available backends
clsmap = _backends({
'memory': container.MemoryNamespaceManager,
'dbm': container.DBMNamespaceManager,
'file': container.FileNamespaceManager,
'ext:memcached': memcached.MemcachedNamespaceManager,
'ext:database': database.DatabaseNamespaceManager,
'ext:sqla': sqla.SqlaNamespaceManager,
'ext:google': google.GoogleNamespaceManager,
})
def cache_region(region, *args):
"""Decorate a function such that its return result is cached,
using a "region" to indicate the cache arguments.
Example::
from beaker.cache import cache_regions, cache_region
# configure regions
cache_regions.update({
'short_term':{
'expire':'60',
'type':'memory'
}
})
@cache_region('short_term', 'load_things')
def load(search_term, limit, offset):
'''Load from a database given a search term, limit, offset.'''
return database.query(search_term)[offset:offset + limit]
The decorator can also be used with object methods. The ``self``
argument is not part of the cache key. This is based on the
actual string name ``self`` being in the first argument
position (new in 1.6)::
class MyThing(object):
@cache_region('short_term', 'load_things')
def load(self, search_term, limit, offset):
'''Load from a database given a search term, limit, offset.'''
return database.query(search_term)[offset:offset + limit]
Classmethods work as well - use ``cls`` as the name of the class argument,
and place the decorator around the function underneath ``@classmethod``
(new in 1.6)::
class MyThing(object):
@classmethod
@cache_region('short_term', 'load_things')
def load(cls, search_term, limit, offset):
'''Load from a database given a search term, limit, offset.'''
return database.query(search_term)[offset:offset + limit]
:param region: String name of the region corresponding to the desired
caching arguments, established in :attr:`.cache_regions`.
:param \*args: Optional ``str()``-compatible arguments which will uniquely
identify the key used by this decorated function, in addition
to the positional arguments passed to the function itself at call time.
This is recommended as it is needed to distinguish between any two functions
or methods that have the same name (regardless of parent class or not).
.. note::
The function being decorated must only be called with
positional arguments, and the arguments must support
being stringified with ``str()``. The concatenation
of the ``str()`` version of each argument, combined
with that of the ``*args`` sent to the decorator,
forms the unique cache key.
.. note::
When a method on a class is decorated, the ``self`` or ``cls``
argument in the first position is
not included in the "key" used for caching. New in 1.6.
"""
return _cache_decorate(args, None, None, region)
def region_invalidate(namespace, region, *args):
"""Invalidate a cache region corresponding to a function
decorated with :func:`.cache_region`.
:param namespace: The namespace of the cache to invalidate. This is typically
a reference to the original function (as returned by the :func:`.cache_region`
decorator), where the :func:`.cache_region` decorator applies a "memo" to
the function in order to locate the string name of the namespace.
:param region: String name of the region used with the decorator. This can be
``None`` in the usual case that the decorated function itself is passed,
not the string name of the namespace.
:param args: Stringifyable arguments that are used to locate the correct
key. This consists of the ``*args`` sent to the :func:`.cache_region`
decorator itself, plus the ``*args`` sent to the function itself
at runtime.
Example::
from beaker.cache import cache_regions, cache_region, region_invalidate
# configure regions
cache_regions.update({
'short_term':{
'expire':'60',
'type':'memory'
}
})
@cache_region('short_term', 'load_data')
def load(search_term, limit, offset):
'''Load from a database given a search term, limit, offset.'''
return database.query(search_term)[offset:offset + limit]
def invalidate_search(search_term, limit, offset):
'''Invalidate the cached storage for a given search term, limit, offset.'''
region_invalidate(load, 'short_term', 'load_data', search_term, limit, offset)
Note that when a method on a class is decorated, the first argument ``cls``
or ``self`` is not included in the cache key. This means you don't send
it to :func:`.region_invalidate`::
class MyThing(object):
@cache_region('short_term', 'some_data')
def load(self, search_term, limit, offset):
'''Load from a database given a search term, limit, offset.'''
return database.query(search_term)[offset:offset + limit]
def invalidate_search(self, search_term, limit, offset):
'''Invalidate the cached storage for a given search term, limit, offset.'''
region_invalidate(self.load, 'short_term', 'some_data', search_term, limit, offset)
"""
if callable(namespace):
if not region:
region = namespace._arg_region
namespace = namespace._arg_namespace
if not region:
raise BeakerException("Region or callable function "
"namespace is required")
else:
region = cache_regions[region]
cache = Cache._get_cache(namespace, region)
_cache_decorator_invalidate(cache, region['key_length'], args)
class Cache(object):
"""Front-end to the containment API implementing a data cache.
:param namespace: the namespace of this Cache
:param type: type of cache to use
:param expire: seconds to keep cached data
:param expiretime: seconds to keep cached data (legacy support)
    :param starttime: time when the cache was created
"""
def __init__(self, namespace, type='memory', expiretime=None,
starttime=None, expire=None, **nsargs):
try:
cls = clsmap[type]
if isinstance(cls, InvalidCacheBackendError):
raise cls
except KeyError:
raise TypeError("Unknown cache implementation %r" % type)
self.namespace_name = namespace
self.namespace = cls(namespace, **nsargs)
self.expiretime = expiretime or expire
self.starttime = starttime
self.nsargs = nsargs
@classmethod
def _get_cache(cls, namespace, kw):
key = namespace + str(kw)
try:
return cache_managers[key]
except KeyError:
cache_managers[key] = cache = cls(namespace, **kw)
return cache
def put(self, key, value, **kw):
self._get_value(key, **kw).set_value(value)
set_value = put
def get(self, key, **kw):
"""Retrieve a cached value from the container"""
return self._get_value(key, **kw).get_value()
get_value = get
def remove_value(self, key, **kw):
mycontainer = self._get_value(key, **kw)
mycontainer.clear_value()
remove = remove_value
def _get_value(self, key, **kw):
if isinstance(key, unicode):
key = key.encode('ascii', 'backslashreplace')
if 'type' in kw:
return self._legacy_get_value(key, **kw)
kw.setdefault('expiretime', self.expiretime)
kw.setdefault('starttime', self.starttime)
return container.Value(key, self.namespace, **kw)
@util.deprecated("Specifying a "
"'type' and other namespace configuration with cache.get()/put()/etc. "
"is deprecated. Specify 'type' and other namespace configuration to "
"cache_manager.get_cache() and/or the Cache constructor instead.")
def _legacy_get_value(self, key, type, **kw):
expiretime = kw.pop('expiretime', self.expiretime)
starttime = kw.pop('starttime', None)
createfunc = kw.pop('createfunc', None)
kwargs = self.nsargs.copy()
kwargs.update(kw)
c = Cache(self.namespace.namespace, type=type, **kwargs)
return c._get_value(key, expiretime=expiretime, createfunc=createfunc,
starttime=starttime)
def clear(self):
"""Clear all the values from the namespace"""
self.namespace.remove()
# dict interface
def __getitem__(self, key):
return self.get(key)
def __contains__(self, key):
return self._get_value(key).has_current_value()
def has_key(self, key):
return key in self
def __delitem__(self, key):
self.remove_value(key)
def __setitem__(self, key, value):
self.put(key, value)
class CacheManager(object):
def __init__(self, **kwargs):
"""Initialize a CacheManager object with a set of options
Options should be parsed with the
:func:`~beaker.util.parse_cache_config_options` function to
ensure only valid options are used.
"""
self.kwargs = kwargs
self.regions = kwargs.pop('cache_regions', {})
# Add these regions to the module global
cache_regions.update(self.regions)
def get_cache(self, name, **kwargs):
kw = self.kwargs.copy()
kw.update(kwargs)
return Cache._get_cache(name, kw)
def get_cache_region(self, name, region):
if region not in self.regions:
raise BeakerException('Cache region not configured: %s' % region)
kw = self.regions[region]
return Cache._get_cache(name, kw)
def region(self, region, *args):
"""Decorate a function to cache itself using a cache region
The region decorator requires arguments if there are more than
two of the same named function, in the same module. This is
because the namespace used for the functions cache is based on
the functions name and the module.
Example::
# Assuming a cache object is available like:
cache = CacheManager(dict_of_config_options)
def populate_things():
@cache.region('short_term', 'some_data')
def load(search_term, limit, offset):
return load_the_data(search_term, limit, offset)
return load('rabbits', 20, 0)
.. note::
The function being decorated must only be called with
positional arguments.
"""
return cache_region(region, *args)
def region_invalidate(self, namespace, region, *args):
"""Invalidate a cache region namespace or decorated function
This function only invalidates cache spaces created with the
cache_region decorator.
:param namespace: Either the namespace of the result to invalidate, or the
cached function
:param region: The region the function was cached to. If the function was
cached to a single region then this argument can be None
:param args: Arguments that were used to differentiate the cached
function as well as the arguments passed to the decorated
function
Example::
# Assuming a cache object is available like:
cache = CacheManager(dict_of_config_options)
def populate_things(invalidate=False):
@cache.region('short_term', 'some_data')
def load(search_term, limit, offset):
return load_the_data(search_term, limit, offset)
# If the results should be invalidated first
if invalidate:
cache.region_invalidate(load, None, 'some_data',
'rabbits', 20, 0)
return load('rabbits', 20, 0)
"""
return region_invalidate(namespace, region, *args)
def cache(self, *args, **kwargs):
"""Decorate a function to cache itself with supplied parameters
:param args: Used to make the key unique for this function, as in region()
above.
:param kwargs: Parameters to be passed to get_cache(), will override defaults
Example::
# Assuming a cache object is available like:
cache = CacheManager(dict_of_config_options)
def populate_things():
@cache.cache('mycache', expire=15)
def load(search_term, limit, offset):
return load_the_data(search_term, limit, offset)
return load('rabbits', 20, 0)
.. note::
The function being decorated must only be called with
positional arguments.
"""
return _cache_decorate(args, self, kwargs, None)
def invalidate(self, func, *args, **kwargs):
"""Invalidate a cache decorated function
This function only invalidates cache spaces created with the
cache decorator.
:param func: Decorated function to invalidate
:param args: Used to make the key unique for this function, as in region()
above.
:param kwargs: Parameters that were passed for use by get_cache(), note that
this is only required if a ``type`` was specified for the
function
Example::
# Assuming a cache object is available like:
cache = CacheManager(dict_of_config_options)
def populate_things(invalidate=False):
@cache.cache('mycache', type="file", expire=15)
def load(search_term, limit, offset):
return load_the_data(search_term, limit, offset)
# If the results should be invalidated first
if invalidate:
cache.invalidate(load, 'mycache', 'rabbits', 20, 0, type="file")
return load('rabbits', 20, 0)
"""
namespace = func._arg_namespace
cache = self.get_cache(namespace, **kwargs)
if hasattr(func, '_arg_region'):
key_length = cache_regions[func._arg_region]['key_length']
else:
key_length = kwargs.pop('key_length', 250)
_cache_decorator_invalidate(cache, key_length, args)
def _cache_decorate(deco_args, manager, kwargs, region):
"""Return a caching function decorator."""
cache = [None]
def decorate(func):
namespace = util.func_namespace(func)
skip_self = util.has_self_arg(func)
def cached(*args):
if not cache[0]:
if region is not None:
if region not in cache_regions:
raise BeakerException(
'Cache region not configured: %s' % region)
reg = cache_regions[region]
if not reg.get('enabled', True):
return func(*args)
cache[0] = Cache._get_cache(namespace, reg)
elif manager:
cache[0] = manager.get_cache(namespace, **kwargs)
else:
raise Exception("'manager + kwargs' or 'region' "
"argument is required")
if skip_self:
try:
cache_key = " ".join(map(str, deco_args + args[1:]))
except UnicodeEncodeError:
cache_key = " ".join(map(unicode, deco_args + args[1:]))
else:
try:
cache_key = " ".join(map(str, deco_args + args))
except UnicodeEncodeError:
cache_key = " ".join(map(unicode, deco_args + args))
if region:
key_length = cache_regions[region]['key_length']
else:
key_length = kwargs.pop('key_length', 250)
if len(cache_key) + len(namespace) > int(key_length):
cache_key = sha1(cache_key).hexdigest()
def go():
return func(*args)
return cache[0].get_value(cache_key, createfunc=go)
cached._arg_namespace = namespace
if region is not None:
cached._arg_region = region
return cached
return decorate
def _cache_decorator_invalidate(cache, key_length, args):
"""Invalidate a cache key based on function arguments."""
try:
cache_key = " ".join(map(str, args))
except UnicodeEncodeError:
cache_key = " ".join(map(unicode, args))
if len(cache_key) + len(cache.namespace_name) > key_length:
cache_key = sha1(cache_key).hexdigest()
cache.remove_value(cache_key)
```
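The module above is normally driven through `CacheManager`, typically after normalising a config dict with `beaker.util.parse_cache_config_options`. A small memory-cache sketch follows; the cached function is made up for illustration.
```python
# Memory-cache sketch driving the CacheManager above; the cached function is illustrative.
from beaker.cache import CacheManager
from beaker.util import parse_cache_config_options

cache_opts = {
    'cache.type': 'memory',
    'cache.expire': 30,
}
cache = CacheManager(**parse_cache_config_options(cache_opts))

@cache.cache('expensive_lookup', expire=30)
def lookup(term):
    print("computing %s" % term)    # printed only on a cache miss
    return term.upper()

print(lookup('wombat'))   # miss: computed and stored
print(lookup('wombat'))   # hit: served from the memory namespace
```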
#### File: beaker/crypto/pbkdf2.py
```python
__version__ = "1.1"
from struct import pack
from binascii import b2a_hex
from random import randint
from base64 import b64encode
from beaker.crypto.util import hmac as HMAC, hmac_sha1 as SHA1
def strxor(a, b):
return "".join([chr(ord(x) ^ ord(y)) for (x, y) in zip(a, b)])
class PBKDF2(object):
"""PBKDF2.py : PKCS#5 v2.0 Password-Based Key Derivation
This implementation takes a passphrase and a salt (and optionally an
iteration count, a digest module, and a MAC module) and provides a
file-like object from which an arbitrarily-sized key can be read.
If the passphrase and/or salt are unicode objects, they are encoded as
UTF-8 before they are processed.
The idea behind PBKDF2 is to derive a cryptographic key from a
passphrase and a salt.
PBKDF2 may also be used as a strong salted password hash. The
'crypt' function is provided for that purpose.
Remember: Keys generated using PBKDF2 are only as strong as the
passphrases they are derived from.
"""
def __init__(self, passphrase, salt, iterations=1000,
digestmodule=SHA1, macmodule=HMAC):
if not callable(macmodule):
macmodule = macmodule.new
self.__macmodule = macmodule
self.__digestmodule = digestmodule
self._setup(passphrase, salt, iterations, self._pseudorandom)
def _pseudorandom(self, key, msg):
"""Pseudorandom function. e.g. HMAC-SHA1"""
return self.__macmodule(key=key, msg=msg,
digestmod=self.__digestmodule).digest()
def read(self, bytes):
"""Read the specified number of key bytes."""
if self.closed:
raise ValueError("file-like object is closed")
size = len(self.__buf)
blocks = [self.__buf]
i = self.__blockNum
while size < bytes:
i += 1
if i > 0xffffffff:
                # We could return "" here, but raising makes the overflow explicit
raise OverflowError("derived key too long")
block = self.__f(i)
blocks.append(block)
size += len(block)
buf = "".join(blocks)
retval = buf[:bytes]
self.__buf = buf[bytes:]
self.__blockNum = i
return retval
def __f(self, i):
# i must fit within 32 bits
assert (1 <= i and i <= 0xffffffff)
U = self.__prf(self.__passphrase, self.__salt + pack("!L", i))
result = U
for j in xrange(2, 1 + self.__iterations):
U = self.__prf(self.__passphrase, U)
result = strxor(result, U)
return result
def hexread(self, octets):
"""Read the specified number of octets. Return them as hexadecimal.
Note that len(obj.hexread(n)) == 2*n.
"""
return b2a_hex(self.read(octets))
def _setup(self, passphrase, salt, iterations, prf):
# Sanity checks:
# passphrase and salt must be str or unicode (in the latter
# case, we convert to UTF-8)
if isinstance(passphrase, unicode):
passphrase = passphrase.encode("UTF-8")
if not isinstance(passphrase, str):
raise TypeError("passphrase must be str or unicode")
if isinstance(salt, unicode):
salt = salt.encode("UTF-8")
if not isinstance(salt, str):
raise TypeError("salt must be str or unicode")
# iterations must be an integer >= 1
if not isinstance(iterations, (int, long)):
raise TypeError("iterations must be an integer")
if iterations < 1:
raise ValueError("iterations must be at least 1")
# prf must be callable
if not callable(prf):
raise TypeError("prf must be callable")
self.__passphrase = passphrase
self.__salt = salt
self.__iterations = iterations
self.__prf = prf
self.__blockNum = 0
self.__buf = ""
self.closed = False
def close(self):
"""Close the stream."""
if not self.closed:
del self.__passphrase
del self.__salt
del self.__iterations
del self.__prf
del self.__blockNum
del self.__buf
self.closed = True
def crypt(word, salt=None, iterations=None):
"""PBKDF2-based unix crypt(3) replacement.
The number of iterations specified in the salt overrides the 'iterations'
parameter.
The effective hash length is 192 bits.
"""
# Generate a (pseudo-)random salt if the user hasn't provided one.
if salt is None:
salt = _makesalt()
# salt must be a string or the us-ascii subset of unicode
if isinstance(salt, unicode):
salt = salt.encode("us-ascii")
if not isinstance(salt, str):
raise TypeError("salt must be a string")
# word must be a string or unicode (in the latter case, we convert to UTF-8)
if isinstance(word, unicode):
word = word.encode("UTF-8")
if not isinstance(word, str):
raise TypeError("word must be a string or unicode")
# Try to extract the real salt and iteration count from the salt
if salt.startswith("$p5k2$"):
(iterations, salt, dummy) = salt.split("$")[2:5]
if iterations == "":
iterations = 400
else:
converted = int(iterations, 16)
if iterations != "%x" % converted: # lowercase hex, minimum digits
raise ValueError("Invalid salt")
iterations = converted
if not (iterations >= 1):
raise ValueError("Invalid salt")
# Make sure the salt matches the allowed character set
allowed = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789./"
for ch in salt:
if ch not in allowed:
raise ValueError("Illegal character %r in salt" % (ch,))
if iterations is None or iterations == 400:
iterations = 400
salt = "$p5k2$$" + salt
else:
salt = "$p5k2$%x$%s" % (iterations, salt)
rawhash = PBKDF2(word, salt, iterations).read(24)
return salt + "$" + b64encode(rawhash, "./")
# Add crypt as a static method of the PBKDF2 class
# This makes it easier to do "from PBKDF2 import PBKDF2" and still use
# crypt.
PBKDF2.crypt = staticmethod(crypt)
def _makesalt():
"""Return a 48-bit pseudorandom salt for crypt().
This function is not suitable for generating cryptographic secrets.
"""
binarysalt = "".join([pack("@H", randint(0, 0xffff)) for i in range(3)])
return b64encode(binarysalt, "./")
def test_pbkdf2():
"""Module self-test"""
from binascii import a2b_hex
#
# Test vectors from RFC 3962
#
# Test 1
result = PBKDF2("password", "<PASSWORD>", 1).read(16)
expected = a2b_hex("cdedb5281bb2f801565a1122b2563515")
if result != expected:
raise RuntimeError("self-test failed")
# Test 2
result = PBKDF2("password", "<PASSWORD>", 1200).hexread(32)
expected = ("5c08eb61fdf<PASSWORD>"
"a7e52ddbc5e51<PASSWORD>")
if result != expected:
raise RuntimeError("self-test failed")
# Test 3
result = PBKDF2("X" * 64, "pass phrase equals block size", 1200).hexread(32)
expected = ("139c30c0966bc32ba55fdbf212530ac9"
"c5ec59f1a452f5cc9ad940fea0598ed1")
if result != expected:
raise RuntimeError("self-test failed")
# Test 4
result = PBKDF2("X" * 65, "pass phrase exceeds block size", 1200).hexread(32)
expected = ("9ccad6d468770cd51b10e6a68721be61"
"1a8b4d282601db3b36be9246915ec82a")
if result != expected:
raise RuntimeError("self-test failed")
#
# Other test vectors
#
# Chunked read
f = PBKDF2("kickstart", "workbench", 256)
result = f.read(17)
result += f.read(17)
result += f.read(1)
result += f.read(2)
result += f.read(3)
expected = PBKDF2("kickstart", "workbench", 256).read(40)
if result != expected:
raise RuntimeError("self-test failed")
#
# crypt() test vectors
#
# crypt 1
result = crypt("cloadm", "exec")
expected = '$p5k2$$exec$r1EWMCMk7Rlv3L/RNcFXviDefYa0hlql'
if result != expected:
raise RuntimeError("self-test failed")
# crypt 2
result = crypt("gnu", '$p5k2$c$u9HvcT4d$.....')
expected = '$p5k2$c$u9HvcT4d$Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g'
if result != expected:
raise RuntimeError("self-test failed")
# crypt 3
result = crypt("dcl", "tUsch7fU", iterations=13)
expected = "$p5k2$d$tUsch7fU$nqDkaxMDOFBeJsTSfABsyn.PYUXilHwL"
if result != expected:
raise RuntimeError("self-test failed")
# crypt 4 (unicode)
result = crypt(u'\u0399\u03c9\u03b1\u03bd\u03bd\u03b7\u03c2',
'$p5k2$$KosHgqNo$9mjN8gqjt02hDoP0c2J0ABtLIwtot8cQ')
expected = '$p5k2$$KosHgqNo$9mjN8gqjt02hDoP0c2J0ABtLIwtot8cQ'
if result != expected:
raise RuntimeError("self-test failed")
if __name__ == '__main__':
test_pbkdf2()
# vim:set ts=4 sw=4 sts=4 expandtab:
```
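Typical use of the module above is either deriving raw key material from a passphrase or producing and verifying a `$p5k2$` password hash. A short sketch, assuming the file is importable as `pbkdf2` (passphrases, salts and byte counts here are examples only):
```python
# Usage sketch for the PBKDF2 module above.
from pbkdf2 import PBKDF2, crypt

# Derive 32 bytes of key material from a passphrase and salt.
key = PBKDF2("correct horse battery staple", "NaCl", iterations=1000).read(32)
print(len(key))                                  # 32

# Salted, iterated password hash in the $p5k2$ format produced by crypt().
hashed = crypt("my password", iterations=400)
print(hashed.startswith("$p5k2$"))               # True
print(crypt("my password", hashed) == hashed)    # True: verification round-trip
```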
#### File: lib/cork/cork.py
```python
from base64 import b64encode, b64decode
from beaker import crypto
from datetime import datetime, timedelta
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
from logging import getLogger
from smtplib import SMTP, SMTP_SSL
from threading import Thread
from time import time
import bottle
import os
import re
import uuid
try:
import scrypt
scrypt_available = True
except ImportError: # pragma: no cover
scrypt_available = False
from backends import JsonBackend
log = getLogger(__name__)
class AAAException(Exception):
"""Generic Authentication/Authorization Exception"""
pass
class AuthException(AAAException):
"""Authentication Exception: incorrect username/password pair"""
pass
class Cork(object):
def __init__(self, directory=None, backend=None, email_sender=None,
initialize=False, session_domain=None, smtp_server=None,
smtp_url='localhost'):
"""Auth/Authorization/Accounting class
:param directory: configuration directory
:type directory: str.
:param users_fname: users filename (without .json), defaults to 'users'
:type users_fname: str.
:param roles_fname: roles filename (without .json), defaults to 'roles'
:type roles_fname: str.
"""
if smtp_server:
smtp_url = smtp_server
self.mailer = Mailer(email_sender, smtp_url)
self.password_reset_timeout = 3600 * 24
self.session_domain = session_domain
self.preferred_hashing_algorithm = 'PBKDF2'
# Setup JsonBackend by default for backward compatibility.
if backend is None:
self._store = JsonBackend(directory, users_fname='users',
roles_fname='roles', pending_reg_fname='register',
initialize=initialize)
else:
self._store = backend
def login(self, username, password, success_redirect=None,
fail_redirect=None):
"""Check login credentials for an existing user.
Optionally redirect the user to another page (typically /login)
:param username: username
:type username: str.
:param password: <PASSWORD>
:type password: str.
:param success_redirect: redirect authorized users (optional)
:type success_redirect: str.
:param fail_redirect: redirect unauthorized users (optional)
:type fail_redirect: str.
:returns: True for successful logins, else False
"""
assert isinstance(username, str), "the username must be a string"
assert isinstance(password, str), "the password must be a string"
if username in self._store.users:
if self._verify_password(username, password,
self._store.users[username]['hash']):
# Setup session data
self._setup_cookie(username)
self._store.users[username]['last_login'] = str(datetime.utcnow())
self._store.save_users()
if success_redirect:
bottle.redirect(success_redirect)
return True
if fail_redirect:
bottle.redirect(fail_redirect)
return False
def logout(self, success_redirect='/login', fail_redirect='/login'):
"""Log the user out, remove cookie
:param success_redirect: redirect the user after logging out
:type success_redirect: str.
:param fail_redirect: redirect the user if it is not logged in
:type fail_redirect: str.
"""
try:
session = self._beaker_session
session.delete()
except Exception, e:
log.debug("Exception %s while logging out." % repr(e))
bottle.redirect(fail_redirect)
bottle.redirect(success_redirect)
def require(self, username=None, role=None, fixed_role=False,
fail_redirect=None):
"""Ensure the user is logged in has the required role (or higher).
Optionally redirect the user to another page (typically /login)
If both `username` and `role` are specified, both conditions need to be
satisfied.
If none is specified, any authenticated user will be authorized.
By default, any role with higher level than `role` will be authorized;
set fixed_role=True to prevent this.
:param username: username (optional)
:type username: str.
:param role: role
:type role: str.
:param fixed_role: require user role to match `role` strictly
:type fixed_role: bool.
:param redirect: redirect unauthorized users (optional)
:type redirect: str.
"""
# Parameter validation
if username is not None:
if username not in self._store.users:
raise AAAException("Nonexistent user")
if fixed_role and role is None:
raise AAAException(
"""A role must be specified if fixed_role has been set""")
if role is not None and role not in self._store.roles:
raise AAAException("Role not found")
# Authentication
try:
cu = self.current_user
except AAAException:
if fail_redirect is None:
raise AuthException("Unauthenticated user")
else:
bottle.redirect(fail_redirect)
# Authorization
if cu.role not in self._store.roles:
raise AAAException("Role not found for the current user")
if username is not None:
if username != self.current_user.username:
if fail_redirect is None:
raise AuthException("Unauthorized access: incorrect"
" username")
else:
bottle.redirect(fail_redirect)
if fixed_role:
if role == self.current_user.role:
return
if fail_redirect is None:
raise AuthException("Unauthorized access: incorrect role")
else:
bottle.redirect(fail_redirect)
else:
if role is not None:
# Any role with higher level is allowed
current_lvl = self._store.roles[self.current_user.role]
threshold_lvl = self._store.roles[role]
if current_lvl >= threshold_lvl:
return
if fail_redirect is None:
raise AuthException("Unauthorized access: ")
else:
bottle.redirect(fail_redirect)
return
def create_role(self, role, level):
"""Create a new role.
:param role: role name
:type role: str.
:param level: role level (0=lowest, 100=admin)
:type level: int.
:raises: AuthException on errors
"""
if self.current_user.level < 100:
raise AuthException("The current user is not authorized to ")
if role in self._store.roles:
raise AAAException("The role is already existing")
try:
int(level)
except ValueError:
raise AAAException("The level must be numeric.")
self._store.roles[role] = level
self._store.save_roles()
def delete_role(self, role):
"""Deleta a role.
:param role: role name
:type role: str.
:raises: AuthException on errors
"""
if self.current_user.level < 100:
raise AuthException("The current user is not authorized to ")
if role not in self._store.roles:
raise AAAException("Nonexistent role.")
self._store.roles.pop(role)
self._store.save_roles()
def list_roles(self):
"""List roles.
:returns: (role, role_level) generator (sorted by role)
"""
for role in sorted(self._store.roles):
yield (role, self._store.roles[role])
def create_user(self, username, role, password, email_addr=None,
description=None):
"""Create a new user account.
This method is available to users with level>=100
:param username: username
:type username: str.
:param role: role
:type role: str.
:param password: <PASSWORD>
:type password: str.
:param email_addr: email address (optional)
:type email_addr: str.
:param description: description (free form)
:type description: str.
:raises: AuthException on errors
"""
assert username, "Username must be provided."
if self.current_user.level < 100:
raise AuthException("The current user is not authorized" \
" to create users.")
if username in self._store.users:
raise AAAException("User is already existing.")
if role not in self._store.roles:
raise AAAException("Nonexistent user role.")
tstamp = str(datetime.utcnow())
self._store.users[username] = {
'role': role,
'hash': self._hash(username, password),
'email_addr': email_addr,
'desc': description,
'creation_date': tstamp,
'last_login': tstamp
}
self._store.save_users()
def delete_user(self, username):
"""Delete a user account.
This method is available to users with level>=100
:param username: username
:type username: str.
:raises: Exceptions on errors
"""
if self.current_user.level < 100:
raise AuthException("The current user is not authorized to ")
if username not in self._store.users:
raise AAAException("Nonexistent user.")
self.user(username).delete()
def list_users(self):
"""List users.
:return: (username, role, email_addr, description) generator (sorted by
username)
"""
for un in sorted(self._store.users):
d = self._store.users[un]
yield (un, d['role'], d['email_addr'], d['desc'])
@property
def current_user(self):
"""Current autenticated user
:returns: User() instance, if authenticated
:raises: AuthException otherwise
"""
session = self._beaker_session
username = session.get('username', None)
if username is None:
raise AuthException("Unauthenticated user")
if username is not None and username in self._store.users:
return User(username, self, session=session)
raise AuthException("Unknown user: %s" % username)
@property
def user_is_anonymous(self):
"""Check if the current user is anonymous.
:returns: True if the user is anonymous, False otherwise
:raises: AuthException if the session username is unknown
"""
try:
username = self._beaker_session['username']
except KeyError:
return True
if username not in self._store.users:
raise AuthException("Unknown user: %s" % username)
return False
def user(self, username):
"""Existing user
:returns: User() instance if the user exist, None otherwise
"""
if username is not None and username in self._store.users:
return User(username, self)
return None
def register(self, username, password, email_addr, role='user',
max_level=50, subject="Signup confirmation",
email_template='views/registration_email.tpl',
description=None):
"""Register a new user account. An email with a registration validation
is sent to the user.
WARNING: this method is available to unauthenticated users
:param username: username
:type username: str.
:param password: password
:type password: str.
:param role: role (optional), defaults to 'user'
:type role: str.
:param max_level: maximum role level (optional), defaults to 50
:type max_level: int.
:param email_addr: email address
:type email_addr: str.
:param subject: email subject
:type subject: str.
:param email_template: email template filename
:type email_template: str.
:param description: description (free form)
:type description: str.
:raises: AssertionError or AAAException on errors
"""
assert username, "Username must be provided."
assert password, "A password must be provided."
assert email_addr, "An email address must be provided."
if username in self._store.users:
raise AAAException("User is already existing.")
if role not in self._store.roles:
raise AAAException("Nonexistent role")
if self._store.roles[role] > max_level:
raise AAAException("Unauthorized role")
registration_code = uuid.uuid4().hex
creation_date = str(datetime.utcnow())
# send registration email
email_text = bottle.template(email_template,
username=username,
email_addr=email_addr,
role=role,
creation_date=creation_date,
registration_code=registration_code
)
self.mailer.send_email(email_addr, subject, email_text)
# store pending registration
self._store.pending_registrations[registration_code] = {
'username': username,
'role': role,
'hash': self._hash(username, password),
'email_addr': email_addr,
'desc': description,
'creation_date': creation_date,
}
self._store.save_pending_registrations()
def validate_registration(self, registration_code):
"""Validate pending account registration, create a new account if
successful.
:param registration_code: registration code
:type registration_code: str.
"""
try:
data = self._store.pending_registrations.pop(registration_code)
except KeyError:
raise AuthException("Invalid registration code.")
username = data['username']
if username in self._store.users:
raise AAAException("User is already existing.")
# the user data is moved from pending_registrations to _users
self._store.users[username] = {
'role': data['role'],
'hash': data['hash'],
'email_addr': data['email_addr'],
'desc': data['desc'],
'creation_date': data['creation_date'],
'last_login': str(datetime.utcnow())
}
self._store.save_users()
def send_password_reset_email(self, username=None, email_addr=None,
subject="Password reset confirmation",
email_template='views/password_reset_email'):
"""Email the user with a link to reset his/her password
If only one parameter is passed, fetch the other from the users
database. If both are passed they will be matched against the users
database as a security check.
:param username: username
:type username: str.
:param email_addr: email address
:type email_addr: str.
:param subject: email subject
:type subject: str.
:param email_template: email template filename
:type email_template: str.
:raises: AAAException on missing username or email_addr,
AuthException on incorrect username/email_addr pair
"""
if username is None:
if email_addr is None:
raise AAAException("At least `username` or `email_addr` must" \
" be specified.")
# only email_addr is specified: fetch the username
for k, v in self._store.users.iteritems():
if v['email_addr'] == email_addr:
username = k
break
else:
raise AAAException("Email address not found.")
else: # username is provided
if username not in self._store.users:
raise AAAException("Nonexistent user.")
if email_addr is None:
email_addr = self._store.users[username].get('email_addr', None)
if not email_addr:
raise AAAException("Email address not available.")
else:
# both username and email_addr are provided: check them
stored_email_addr = self._store.users[username]['email_addr']
if email_addr != stored_email_addr:
raise AuthException("Username/email address pair not found.")
# generate a reset_code token
reset_code = self._reset_code(username, email_addr)
# send reset email
email_text = bottle.template(email_template,
username=username,
email_addr=email_addr,
reset_code=reset_code
)
self.mailer.send_email(email_addr, subject, email_text)
def reset_password(self, reset_code, password):
"""Validate reset_code and update the account password
The username is extracted from the reset_code token
:param reset_code: reset token
:type reset_code: str.
:param password: new password
:type password: str.
:raises: AuthException for invalid reset tokens, AAAException
"""
try:
reset_code = b64decode(reset_code)
username, email_addr, tstamp, h = reset_code.split(':', 3)
tstamp = int(tstamp)
except (TypeError, ValueError):
raise AuthException("Invalid reset code.")
if time() - tstamp > self.password_reset_timeout:
raise AuthException("Expired reset code.")
if not self._verify_password(username, email_addr, h):
raise AuthException("Invalid reset code.")
user = self.user(username)
if user is None:
raise AAAException("Nonexistent user.")
user.update(pwd=password)
def make_auth_decorator(self, username=None, role=None, fixed_role=False, fail_redirect='/login'):
'''
Create a decorator to be used for authentication and authorization
:param username: A resource can be protected for a specific user
:param role: Minimum role level required for authorization
:param fixed_role: Only this role gets authorized
:param fail_redirect: The URL to redirect to if a login is required.
'''
session_manager = self
def auth_require(username=username, role=role, fixed_role=fixed_role,
fail_redirect=fail_redirect):
def decorator(func):
import functools
@functools.wraps(func)
def wrapper(*a, **ka):
session_manager.require(username=username, role=role, fixed_role=fixed_role,
fail_redirect=fail_redirect)
return func(*a, **ka)
return wrapper
return decorator
return(auth_require)
## Private methods
@property
def _beaker_session(self):
"""Get Beaker session"""
return bottle.request.environ.get('beaker.session')
def _setup_cookie(self, username):
"""Setup cookie for a user that just logged in"""
session = self._beaker_session
session['username'] = username
if self.session_domain is not None:
session.domain = self.session_domain
session.save()
def _hash(self, username, pwd, salt=None, algo=None):
"""Hash username and password, generating salt value if required
"""
if algo is None:
algo = self.preferred_hashing_algorithm
if algo == 'PBKDF2':
return self._hash_pbkdf2(username, pwd, salt=salt)
if algo == 'scrypt':
return self._hash_scrypt(username, pwd, salt=salt)
raise RuntimeError("Unknown hashing algorithm requested: %s" % algo)
@staticmethod
def _hash_scrypt(username, pwd, salt=None):
"""Hash username and password, generating salt value if required
Use scrypt.
:returns: base-64 encoded str.
"""
if not scrypt_available:
raise Exception("scrypt.hash required."
" Please install the scrypt library.")
if salt is None:
salt = os.urandom(32)
assert len(salt) == 32, "Incorrect salt length"
cleartext = "%s\0%s" % (username, pwd)
h = scrypt.hash(cleartext, salt)
# 's' for scrypt
return b64encode('s' + salt + h)
@staticmethod
def _hash_pbkdf2(username, pwd, salt=None):
"""Hash username and password, generating salt value if required
Use PBKDF2 from Beaker
:returns: base-64 encoded str.
"""
if salt is None:
salt = os.urandom(32)
assert len(salt) == 32, "Incorrect salt length"
cleartext = "%s\0%s" % (username, pwd)
h = crypto.generateCryptoKeys(cleartext, salt, 10)
if len(h) != 32:
raise RuntimeError("The PBKDF2 hash is %d bytes long instead"
"of 32. The pycrypto library might be missing." % len(h))
# 'p' for PBKDF2
return b64encode('p' + salt + h)
def _verify_password(self, username, pwd, salted_hash):
"""Verity username/password pair against a salted hash
:returns: bool
"""
decoded = b64decode(salted_hash)
hash_type = decoded[0]
salt = decoded[1:33]
if hash_type == 'p': # PBKDF2
h = self._hash_pbkdf2(username, pwd, salt)
return salted_hash == h
if hash_type == 's': # scrypt
h = self._hash_scrypt(username, pwd, salt)
return salted_hash == h
raise RuntimeError("Unknown hashing algorithm: %s" % hash_type)
def _purge_expired_registrations(self, exp_time=96):
"""Purge expired registration requests.
:param exp_time: expiration time (hours)
:type exp_time: float.
"""
for uuid, data in self._store.pending_registrations.items():
creation = datetime.strptime(data['creation_date'],
"%Y-%m-%d %H:%M:%S.%f")
now = datetime.utcnow()
maxdelta = timedelta(hours=exp_time)
if now - creation > maxdelta:
self._store.pending_registrations.pop(uuid)
def _reset_code(self, username, email_addr):
"""generate a reset_code token
:param username: username
:type username: str.
:param email_addr: email address
:type email_addr: str.
:returns: Base-64 encoded token
"""
h = self._hash(username, email_addr)
t = "%d" % time()
reset_code = ':'.join((username, email_addr, t, h))
return b64encode(reset_code)
class User(object):
def __init__(self, username, cork_obj, session=None):
"""Represent an authenticated user, exposing useful attributes:
username, role, level, description, email_addr, session_creation_time,
session_accessed_time, session_id. The session-related attributes are
available for the current user only.
:param username: username
:type username: str.
:param cork_obj: instance of :class:`Cork`
"""
self._cork = cork_obj
assert username in self._cork._store.users, "Unknown user"
self.username = username
user_data = self._cork._store.users[username]
self.role = user_data['role']
self.description = user_data['desc']
self.email_addr = user_data['email_addr']
self.level = self._cork._store.roles[self.role]
if session is not None:
try:
self.session_creation_time = session['_creation_time']
self.session_accessed_time = session['_accessed_time']
self.session_id = session['_id']
except:
pass
def update(self, role=None, pwd=None, email_addr=None):
"""Update an user account data
:param role: change user role, if specified
:type role: str.
:param pwd: change user password, if specified
:type pwd: str.
:param email_addr: change user email address, if specified
:type email_addr: str.
:raises: AAAException on nonexistent user or role.
"""
username = self.username
if username not in self._cork._store.users:
raise AAAException("User does not exist.")
if role is not None:
if role not in self._cork._store.roles:
raise AAAException("Nonexistent role.")
self._cork._store.users[username]['role'] = role
if pwd is not None:
self._cork._store.users[username]['hash'] = self._cork._hash(
username, pwd)
if email_addr is not None:
self._cork._store.users[username]['email_addr'] = email_addr
self._cork._store.save_users()
def delete(self):
"""Delete user account
:raises: AAAException on nonexistent user.
"""
try:
self._cork._store.users.pop(self.username)
except KeyError:
raise AAAException("Nonexistent user.")
self._cork._store.save_users()
class Mailer(object):
def __init__(self, sender, smtp_url, join_timeout=5):
"""Send emails asyncronously
:param sender: Sender email address
:type sender: str.
:param smtp_url: SMTP server URL
:type smtp_url: str.
"""
self.sender = sender
self.join_timeout = join_timeout
self._threads = []
self._conf = self._parse_smtp_url(smtp_url)
def _parse_smtp_url(self, url):
"""Parse SMTP URL"""
match = re.match(r"""
( # Optional protocol
(?P<proto>smtp|starttls|ssl) # Protocol name
://
)?
( # Optional user:pass@
(?P<user>[^:]*) # Match every char except ':'
(: (?P<pass>.*) )? @ # Optional :pass
)?
(?P<fqdn> # Required FQDN or IP address
()| # Empty string
( # FQDN
[a-zA-Z_\-] # First character cannot be a number
[a-zA-Z0-9_\-\.]{,254}
)
|( # IPv4
([0-9]{1,3}\.){3}
[0-9]{1,3}
)
|( # IPv6
\[ # Square brackets
([0-9a-f]{,4}:){1,8}
[0-9a-f]{,4}
\]
)
)
( # Optional :port
:
(?P<port>[0-9]{,5}) # Up to 5-digits port
)?
[/]?
$
""", url, re.VERBOSE)
if not match:
raise RuntimeError("SMTP URL seems incorrect")
d = match.groupdict()
if d['proto'] is None:
d['proto'] = 'smtp'
if d['port'] is None:
d['port'] = 25
else:
d['port'] = int(d['port'])
if not 0 < d['port'] < 65536:
raise RuntimeError("Incorrect SMTP port")
return d
def send_email(self, email_addr, subject, email_text):
"""Send an email
:param email_addr: email address
:type email_addr: str.
:param subject: subject
:type subject: str.
:param email_text: email text
:type email_text: str.
:raises: AAAException if smtp_server and/or sender are not set
"""
if not (self._conf['fqdn'] and self.sender):
raise AAAException("SMTP server or sender not set")
msg = MIMEMultipart('alternative')
msg['Subject'] = subject
msg['From'] = self.sender
msg['To'] = email_addr
part = MIMEText(email_text, 'html')
msg.attach(part)
log.debug("Sending email using %s" % self._conf['fqdn'])
thread = Thread(target=self._send, args=(email_addr, msg.as_string()))
thread.start()
self._threads.append(thread)
def _send(self, email_addr, msg): # pragma: no cover
"""Deliver an email using SMTP
:param email_addr: recipient
:type email_addr: str.
:param msg: email text
:type msg: str.
"""
proto = self._conf['proto']
assert proto in ('smtp', 'starttls', 'ssl'), \
"Incorrect protocol: %s" % proto
try:
if proto == 'ssl':
log.debug("Setting up SSL")
session = SMTP_SSL(self._conf['fqdn'])
else:
session = SMTP(self._conf['fqdn'])
if proto == 'starttls':
log.debug('Sending EHLO and STARTTLS')
session.ehlo()
session.starttls()
session.ehlo()
if self._conf['user'] is not None:
log.debug('Performing login')
session.login(self._conf['user'], self._conf['pass'])
log.debug('Sending')
session.sendmail(self.sender, email_addr, msg)
session.quit()
log.info('Email sent')
except Exception as e:
log.error("Error sending email: %s" % e, exc_info=True)
def join(self):
"""Flush email queue by waiting the completion of the existing threads
:returns: None
"""
return [t.join(self.join_timeout) for t in self._threads]
def __del__(self):
"""Class destructor: wait for threads to terminate within a timeout"""
self.join()
```
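The `make_auth_decorator` factory above wraps the session manager's `require` check so Bottle routes can be protected declaratively. A minimal usage sketch follows, assuming a JSON-backed `Cork` instance, Beaker session middleware already wrapping the app, and hypothetical route names; the `login` helper it calls is part of Cork but not shown in this excerpt.

```python
# Sketch only: assumes an 'example_conf' directory with users/roles JSON files
# and Beaker session middleware wrapping the Bottle app.
import bottle
from cork import Cork

aaa = Cork('example_conf')
authorize = aaa.make_auth_decorator(fail_redirect='/login', role='user')

@bottle.post('/login')
def login():
    username = bottle.request.forms.get('username')
    password = bottle.request.forms.get('password')
    # login() is defined elsewhere in Cork; it sets the session cookie on success.
    aaa.login(username, password, success_redirect='/', fail_redirect='/login')

@bottle.route('/admin')
@authorize(role='admin', fixed_role=True)
def admin_page():
    # Only users whose role is exactly 'admin' reach this point.
    return 'Welcome, %s' % aaa.current_user.username
```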
#### File: lib/cork/mongodb_backend.py
```python
from logging import getLogger
log = getLogger(__name__)
from .base_backend import Backend, Table
try:
import pymongo
try:
from pymongo import MongoClient
except ImportError: # pragma: no cover
# Backward compatibility with PyMongo 2.2
from pymongo import Connection as MongoClient
pymongo_available = True
except ImportError: # pragma: no cover
pymongo_available = False
class MongoTable(Table):
"""Abstract MongoDB Table.
Allow dictionary-like access.
"""
def __init__(self, name, key_name, collection):
self._name = name
self._key_name = key_name
self._coll = collection
def create_index(self):
"""Create collection index."""
self._coll.create_index(
self._key_name,
drop_dups=True,
unique=True,
)
def __len__(self):
return self._coll.count()
def __contains__(self, value):
r = self._coll.find_one({self._key_name: value})
return r is not None
def __iter__(self):
"""Iter on dictionary keys"""
r = self._coll.find(fields=[self._key_name,])
return (i[self._key_name] for i in r)
def iteritems(self):
"""Iter on dictionary items.
:returns: generator of (key, value) tuples
"""
r = self._coll.find()
for i in r:
d = i.copy()
d.pop(self._key_name)
d.pop('_id')
yield (i[self._key_name], d)
def pop(self, key_val):
"""Remove a dictionary item"""
r = self[key_val]
self._coll.remove({self._key_name: key_val}, safe=True)
return r
class MongoSingleValueTable(MongoTable):
"""MongoDB table accessible as a simple key -> value dictionary.
Used to store roles.
"""
# Values are stored in a MongoDB "column" named "val"
def __init__(self, *args, **kw):
super(MongoSingleValueTable, self).__init__(*args, **kw)
def __setitem__(self, key_val, data):
assert not isinstance(data, dict)
spec = {self._key_name: key_val}
data = {self._key_name: key_val, 'val': data}
self._coll.update(spec, data, upsert=True, safe=True)
def __getitem__(self, key_val):
r = self._coll.find_one({self._key_name: key_val})
if r is None:
raise KeyError(key_val)
return r['val']
class MongoMutableDict(dict):
"""Represent an item from a Table. Acts as a dictionary.
"""
def __init__(self, parent, root_key, d):
"""Create a MongoMutableDict instance.
:param parent: Table instance
:type parent: :class:`MongoTable`
"""
super(MongoMutableDict, self).__init__(d)
self._parent = parent
self._root_key = root_key
def __setitem__(self, k, v):
super(MongoMutableDict, self).__setitem__(k, v)
self._parent[self._root_key] = self
class MongoMultiValueTable(MongoTable):
"""MongoDB table accessible as a dictionary.
"""
def __init__(self, *args, **kw):
super(MongoMultiValueTable, self).__init__(*args, **kw)
def __setitem__(self, key_val, data):
assert isinstance(data, dict)
key_name = self._key_name
if key_name in data:
assert data[key_name] == key_val
else:
data[key_name] = key_val
spec = {key_name: key_val}
self._coll.update(spec, data, upsert=True)
def __getitem__(self, key_val):
r = self._coll.find_one({self._key_name: key_val})
if r is None:
raise KeyError(key_val)
return MongoMutableDict(self, key_val, r)
class MongoDBBackend(Backend):
def __init__(self, db_name='cork', hostname='localhost', port=27017, initialize=False):
"""Initialize MongoDB Backend"""
connection = MongoClient(host=hostname, port=port)
db = connection[db_name]
self.users = MongoMultiValueTable('users', 'login', db.users)
self.pending_registrations = MongoMultiValueTable(
'pending_registrations',
'pending_registration',
db.pending_registrations
)
self.roles = MongoSingleValueTable('roles', 'role', db.roles)
if initialize:
self._initialize_storage()
def _initialize_storage(self):
"""Create MongoDB indexes."""
for c in (self.users, self.roles, self.pending_registrations):
c.create_index()
def save_users(self):
pass
def save_roles(self):
pass
def save_pending_registrations(self):
pass
```
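Since the tables above emulate dictionaries, the MongoDB backend can be exercised directly before handing it to Cork. A rough sketch, written against the PyMongo 2-era API used above (note the `safe=True` / `drop_dups` options) and assuming a local MongoDB server:

```python
# Assumes MongoDB reachable on localhost:27017 (PyMongo 2.x, matching the backend above).
backend = MongoDBBackend(db_name='cork_example', initialize=True)

# Roles act as a key -> level mapping (MongoSingleValueTable).
backend.roles['admin'] = 100
backend.roles['user'] = 50
print('admin' in backend.roles)    # True
print(backend.roles['admin'])      # 100

# Users act as a key -> document mapping (MongoMultiValueTable).
backend.users['alice'] = {'role': 'user', 'email_addr': 'alice@example.com'}
print(backend.users['alice']['role'])   # 'user'
```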
#### File: lib/wombatwiki/parser.py
```python
import re
import emoticons, interwiki
NOFOLLOW_OUTLINKS = 1
EMPH_RE = re.compile(
r'(?P<nowiki1>\{\{\{.*?\}\}\})'
+ r'|(?P<emph>\'{2,3})'
+ r'|(?P<bold>\*\*.*?\*\*)'
+ r'|(?P<code>\{\{.*?\}\})'
)
# + r'|(?P<toc><TableOfContents.*?>)'
MAIN_RE = re.compile(
r'(?P<nowiki2>\{\{\{.*?\}\}\})'
+ r'|(?P<toc>\<TableOfContents.*?\>)'
+ r'|(?P<para>\n\s*$)'
+ r'|(?P<list>\n\s+[*#]\s+?)'
+ r'|(?P<heading>\n[=_]{1,6}.+[=_]{1,6}\s*$)'
+ r'|(?P<std_line_start>^\n)'
#+ r'|(?P<brcm>\n?\\{2})'
+ r'|(?P<rule>\n?-{4,})'
+ r'|(?P<sformat>\{{2,}|\}{2,})'
+ r'|(?P<comment>\[\[.*?\]\])'
+ r'|\[(?P<link>(http|https|ftp|nntp|news|file)\:[^\s]+\s+[^]]+)\]'
+ r'|\[(?P<interwiki>\w+?;.*?;.*)\]'
+ r'|\[(?P<wikilink>(?:[A-Z]+[a-z]+-*[_A-Z0-9][_A-Za-z0-9]+)\s+[^\]]+)\]'
+ r'|(?P<wiki>\b(?:[A-Z]+[a-z]+-*[_A-Z0-9][_A-Za-z0-9]+)\b)'
+ r'|(?P<image>\bimg[lcr]?:\S*\b)'
+ r'|(?P<url>(http|https|ftp|nntp|news|file)\:[^\s\'"]+)'
+ r'|(?P<www>www\.[^\s\'"]+)'
+ r'|(?P<email>(mailto:)?[-\w.+]+@[a-zA-Z0-9\-.]+)'
+ r'|(?P<break><br>)'
+ r'|(?P<emoticon>[\(\/][A-Z8\;\:\-\|\(\)\*\@]{1,3}[\)\\])'
)
#~ # Save this for later
#~ interwikiprompt = '''
#~ <script language="javascript" type="text/javascript" charset="ISO-8859-1"><!--
#~ function ask(pURL) {
#~ var x = prompt("Enter the word you're searching for:", "");
#~ if (x != null) {
#~ var pos = pURL.indexOf("$1");
#~ if (pos > 0) {
#~ top.location.assign(pURL.substring(0, pos) + x + pURL.substring(pos + 2, pURL.length));
#~ } else {
#~ top.location.assign(pURL + x);
#~ }
#~ }
#~ }
#~ //--></script>
#~ '''
def formatwikiname(name):
return re.sub('([a-z])([A-Z])', r'\1 \2', name).replace('_', ' ')
class WikiParser(object):
def __init__(self,virtualdir=''):
self.virtualdir = virtualdir
self.emph_re = EMPH_RE
self.main_re = MAIN_RE
self.toggles = {'i': 0, 'b': 0, 'pre': 0, 'nowiki': 0}
#all methods in the form _*_repl are helpers for replace, substituting wiki markup tokens
#with appropriate HTML tags
def _sformat_repl(self, s):
"for special formatting: either 'preformatted text', or 'no wiki translation'"
r=''
if '{' in s:
if len(s) not in (3,4) and not self.toggles['pre']:
r += '<pre>'
self.toggles['pre'] = 1
s = s[2:]
if len(s) >= 3 and not self.toggles['nowiki']:
r += '<nowiki>' #yes I know there's no such tag, but it's useful to see!
self.toggles['nowiki'] = 1
s = s[3:]
else:
if len(s) >= 3 and self.toggles['nowiki']:
r += '</nowiki>'
self.toggles['nowiki'] = 0
s = s[3:]
if len(s) >=2 and self.toggles['pre']:
r += '</pre>'
self.toggles['pre'] = 0
s = s[2:]
return r + s
def _code_repl(self, s):
return '<code>%s</code>' % s[2:-2]
def _nowiki1_repl(self, s):
return s
def _nowiki2_repl(self, s):
return '<nowiki>%s</nowiki>' % s[3:-3]
def _comment_repl(self, s):
if s.count('--'): #likely to be invalid comment
return s
else:
return '<!--%s-->' % s[2:-2]
def _toc_repl(self, s):
self.toc_requested = 1
m = re.search(r':([0-9]+),*([0-9]*)>', s)
if m is not None:
self.toc_minlevel = int(m.groups(0)[0])
if m.groups(0)[1] == '':
self.toc_maxlevel = self.toc_minlevel
else:
self.toc_maxlevel = int(m.groups(0)[1])
return '<TableOfContents>'
def _heading_repl(self, s):
m = re.search(r'([=_]{1,6})(.+)\1', s) #refine resolution of heading
if m:
hlevel = len(m.group(1))
hcount = len(self.headings) + 1
text = m.group(2)
self.headings.append((hcount, hlevel, text))
return self.dedent() + '\n<a name="h%s"><h%s>%s</h%s></a>\r' % (hcount, hlevel, text, hlevel)
else:
return s
def _para_repl(self, s):
if self.toggles['pre']:
return '\n\r'
else:
return self.dedent() + '\n<p />\r'
def _brcm_repl(self, s):
return '\n<br clear="all">\r'
def _emph_repl(self, s):
if len(s) == 3:
self.toggles['b'] = not self.toggles['b']
return ('</b>', '<b>')[self.toggles['b']]
else:
self.toggles['i'] = not self.toggles['i']
return ('</i>', '<i>')[self.toggles['i']]
def _bold_repl(self, s):
return '<b>%s</b>' % s[2:-2]
def _italic_repl(self, s):
return '<i>%s</i>' % s[2:-2]
def _wiki_repl(self, s):
return '<a class="wiki" href="%s/%s">%s</a>' % (self.virtualdir, s, formatwikiname(s))
def _rule_repl(self, s):
size = s.count('-') - 3
if size > 8: size = 8
return '\n<hr size=%s>\r' % size
def _image_repl(self, s):
r = '<img src="%s"' % s.split(':', 1)[1]
if s[3] == 'l':
r += ' align="left"'
elif s[3] == 'r':
r += ' align="right"'
r += ' />'
if s[3] == 'c':
r = '\n<p align="center">%s</p>\r' % r
return r
def _url_repl(self, s):
rel = ('', ' rel="nofollow"')[NOFOLLOW_OUTLINKS]
return '<a class="external" target="external" href="%s"%s>%s</a>' % (s, rel, s)
def _link_repl(self, s):
rel = ('', ' rel="nofollow"')[NOFOLLOW_OUTLINKS]
h, a = s.split(' ', 1)
return '<a class="external" target="external" href="%s"%s>%s</a>' % (h, rel, a)
def _wikilink_repl(self, s):
h, a = s.split(' ', 1)
#w = WikiPage(h)
return '<a class="wiki" href="/%s">%s</a>' % (h, a)
#if w.existcode:
# return '<a class="wiki" href="%s">%s</a>' % (w.get_href(), a)
#else:
# return '[%s<a class="nonexistent" href="%s">?</a> %s]' % (h, w.get_href(), a)
def _interwiki_repl(self, s):
parts = s.split(';')
i = parts[0]
a = parts[-1]
p = tuple(parts[1:-1])
if interwiki.interwiki.has_key(i):
h = interwiki.interwiki[i] % p
return '<a class="wikilink" href="%s">%s</a>' % (h, a)
else:
return '[%s]' % s
def _www_repl(self, s):
return self._url_repl('http://' + s)
def _email_repl(self, s):
if s[:7] == 'mailto:':
href = s
else:
href = 'mailto:' + s
return '<a href="%s">%s</a>' % (href, s)
def _list_repl(self, s):
if self.toggles['pre']: return s
s = s[1:].rstrip()
listtype, itemtag, indent = {'*': ('ul', '<li>', len(s) - 1),
'#': ('ol', '<li>', len(s) - 1)}.get(
s[-1],('blockquote', '<br>', len(s)))
oldlistlevel = len(self.listqueue)
r = ''
for i in range(indent, oldlistlevel):#if indent<oldlistlevel
r += '\n%s</%s>\r' % (' ' * i, self.listqueue.pop())
for i in range(oldlistlevel, indent):#if indent>oldlistlevel
r += '\n%s<%s>\r' % (' ' * i, listtype); self.listqueue.append(listtype)
if listtype != self.listqueue[-1]:#same indent but different flavour list
r += '\n%s</%s>%s<%s>\r' % (' ' * indent, self.listqueue.pop(), ' ' * indent, listtype)
self.listqueue.append(listtype)
r += '\n' + ' ' * indent + itemtag
return r
def _std_line_start_repl(self, s):
if self.toggles['pre']:
r = '\n'
else:
r = self.dedent()
return r
def _break_repl(self, s):
return '<br>'
def _emoticon_repl(self, s):
r = s
i = emoticons.emoticon_image(s)
if i is not None:
r = '<img src="%s/%s" %s>' % ('icons', i, emoticons.img_properties)
return r
def dedent(self):
'closes lists when required'
r = ''
while self.listqueue:
r += '\n' + ' ' * (len(self.listqueue)-1) + '</%s>\r' % self.listqueue.pop()
return r
def togglesoff(self):
'closes b,i,pre and nowiki tags in case the user has not defined closing tags'
r = ''
for k, v in self.toggles.items():
if v:
r += '</%s>' % k
self.toggles[k] = 0
return r
def replace(self, match):
'calls appropriate helper (based on named RE group) to replace each type of token'
tokentype = match.lastgroup
token = match.groupdict()[tokentype]
if self.toggles['nowiki'] and token[:3] != '}}}':
return token
else:
return getattr(self, '_' + tokentype + '_repl')(token)
def __call__(self, page, text, clearmargins=0):
'main HTML formatter function'
if text is None:
return None
if not text.strip(): return ''
self.listqueue = []
self.headings = []
self.toc_requested = 0
self.toc_minlevel = 1
self.toc_maxlevel = 9999
#text = cgi.escape(text, 1)
intable = 0
html = '\r'
n = 0
for line in text.splitlines():
new_html = '\n' + line
new_html = re.sub(self.emph_re, self.replace, new_html)
new_html = re.sub(self.emph_re, self.replace, new_html)
new_html = re.sub(self.main_re, self.replace, new_html)
# Table processing
sym = line[:2]
if sym in ("||", "!!") :
if not intable:
intable = 1
if sym == "||":
html += '<table border="1" cellspacing="0">\n'
else:
html += '<table border="0" cellspacing="0">\n'
else:
if intable:
html += "\n</table>\n"
intable = 0
if intable:
tag1 = '<td valign="top">'
tag2 = "</td>"
#~ if sym == "!!":
#~ tag1 = "<th>"
#~ tag2 = "</th>"
cells = (' %s%s' % (tag2, tag1)).join(new_html.split(sym)[1:-1])
new_html = '<tr>%s%s %s</tr>\n' % (tag1, cells, tag2)
boundary = html[-1] + new_html[0]
if '\r' not in boundary and '\n' not in boundary:
html += '<br>\r\n'
html += new_html
if intable: html += "</table>"
if self.toc_requested:
toc = '\n<ul>\n'
minl = min([l for c,l,h in self.headings])
lastl = minl
for c,l,h in self.headings:
if self.toc_minlevel <= l <= self.toc_maxlevel:
if l > lastl: toc += '\n<ul>\n'
if l < lastl: toc += '\n</ul>\n'
lastl = l
toc += '<li><a href="#h%s">%s</a></li>\n' % (c,h)
for l in range(lastl, minl-1, -1):
toc += '</ul>\n'
html = html.replace('<TableOfContents>', toc)
html += self.togglesoff() + self.dedent()
if clearmargins: html += '<br clear="all">'
html = html.replace('\r\n', '\n'); html = html.replace('\r', '\n')
return html
def processvisited(visited, page, default='FrontPage'):
if not visited:
visited = [default]
if page not in visited:
visited.append(page)
else:
visited = visited[0:visited.index(page)+1]
if len(visited) > 5:
visited = [default] + visited[-4:]
return visited
def formatbreadcrumbs(pages, virtualdir=''):
return ' > '.join(['<a href="%s/%s">%s</a>' % (virtualdir, page, page) for page in pages])
``` |
{
"source": "JimenaAndrea/architext",
"score": 2
} |
#### File: architext/entities/exit.py
```python
import mongoengine
from . import item as item_module
from . import room as room_module
class Exit(mongoengine.Document):
name = mongoengine.StringField(required=True)
destination = mongoengine.ReferenceField('Room', required=True)
description = mongoengine.StringField()
visible = mongoengine.StringField(choices=['listed', 'hidden', 'obvious'], default='listed')
is_open = mongoengine.BooleanField(default=True)
key_names = mongoengine.ListField(mongoengine.StringField())
room = mongoengine.ReferenceField('Room', default=None)
def __init__(self, *args, save_on_creation=True, **kwargs):
super().__init__(*args, **kwargs)
if self.id is None and save_on_creation:
self.save()
def save(self):
self.ensure_i_am_valid()
super().save()
def ensure_i_am_valid(self):
name_conditions = self._get_name_validation_conditions(self.name, self.room, self)
for condition in name_conditions.values():
if not condition['condition']:
raise condition['exception']
@classmethod
def _get_name_validation_conditions(cls, exit_name, local_room, ignore_item=None):
return item_module.Item._get_name_validation_conditions(exit_name, local_room, ignore_item)
@classmethod
def name_is_valid(cls, exit_name, local_room, ignore_item=None):
return item_module.Item.name_is_valid(exit_name, local_room, ignore_item)
def add_key(self, item_name):
self.key_names.append(item_name)
self.save()
def remove_key(self, item_name):
self.key_names.remove(item_name)
self.save()
def open(self):
self.is_open = True
self.save()
def close(self):
self.is_open = False
self.save()
def is_obvious(self):
return self.visible == 'obvious'
def is_listed(self):
return self.visible == 'listed'
def is_hidden(self):
return self.visible == 'hidden'
def clone(self, new_destination, new_room=None):
new_exit = Exit(name=self.name, destination=new_destination, room=new_room, visible=self.visible, is_open=self.is_open, key_names=self.key_names.copy())
return new_exit
@classmethod
def get_exits_in_world_state(cls, world_state):
exits_in_world_state = []
for room in room_module.Room.objects(world_state=world_state):
exits_in_world_state += room.exits
return exits_in_world_state
```
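Because `Exit` saves itself on creation and after every mutator, working with it is a sequence of plain method calls. A small sketch, assuming an active mongoengine connection and two existing `Room` documents (`hall` and `vault` are hypothetical names):

```python
# Hypothetical rooms; Room is defined elsewhere in the entities package.
door = Exit(name='iron door', destination=vault, room=hall, is_open=False)
door.add_key('rusty key')      # items named 'rusty key' can now open it
print(door.is_listed())        # True: default visibility is 'listed'
door.open()                    # persists is_open=True immediately

# clone() also saves on creation, since save_on_creation defaults to True.
copy = door.clone(new_destination=vault, new_room=hall)
```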
#### File: architext/entities/user.py
```python
import mongoengine
from . import inventory as inventory_module
from . import location_save as location_save_module
from . import exceptions
from .. import util
from .. import entities
import hashlib
def validate_user_name(name):
if '\n' in name:
raise exceptions.ValueWithLineBreaks()
elif len(name) > entities.User.NAME_MAX_LENGTH:
raise exceptions.ValueTooLong()
elif name == "":
raise exceptions.EmptyName()
class User(mongoengine.Document):
NAME_MAX_LENGTH = 26
name = mongoengine.StringField(required=True, validation=validate_user_name)
room = mongoengine.ReferenceField('Room')
client_id = mongoengine.IntField(default=None)
master_mode = mongoengine.BooleanField(default=False)
joined_worlds = mongoengine.ListField(mongoengine.ReferenceField('World'))
_password_hash = mongoengine.BinaryField(required=True)
def __init__(self, *args, password=None, save_on_creation=True, **kwargs):
super().__init__(*args, **kwargs)
if self.id is None and save_on_creation:
self._password_hash = self.hash_password(password)
self.save()
def match_password(self, password):
hash = self.hash_password(password)
return self._password_hash == hash
def hash_password(self, password):
return hashlib.sha256(bytes(password, 'utf-8')).digest()
def move(self, exit_name):
if exit_name in [exit.name for exit in self.room.exits]:
self.room = self.room.get_exit(exit_name).destination
self.save()
def teleport(self, room):
self.room = room
self.save()
def get_location_save(self, world):
return next(location_save_module.LocationSave.objects(user=self, world=world), None)
def enter_world(self, world):
location_save = self.get_location_save(world)
if location_save is not None and location_save.room.world_state == world.world_state:
self.room = location_save.room
else:
self.room = world.world_state.starting_room
if world not in self.joined_worlds:
self.joined_worlds.append(world)
self.save()
def leave_world(self):
if self.room is not None:
current_world = self.room.world_state.get_world()
location_save = self.get_location_save(current_world)
if location_save is not None:
location_save.change_room(self.room)
else:
location_save_module.LocationSave(user=self, world=self.room.world_state.get_world(), room=self.room)
self.room = None
self.save()
def save_item(self, item):
item_snapshot = item.clone()
item_snapshot.saved_in = self.room.world_state
item_snapshot.item_id = item_snapshot._generate_item_id()
item_snapshot.save()
self.save()
return item_snapshot
def connect(self, client_id):
self.client_id = client_id
self.save()
def disconnect(self):
self.client_id = None
self.save()
def enter_master_mode(self):
self.master_mode = True
self.save()
def leave_master_mode(self):
self.master_mode = False
self.save()
def get_inventory_from(self, world_state):
inventory = next(inventory_module.Inventory.objects(user=self, world_state=world_state), None)
if inventory is None:
inventory = inventory_module.Inventory(user=self, world_state=world_state)
return inventory
def get_current_world_inventory(self):
return self.get_inventory_from(self.room.world_state)
```
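The `User` document owns its password hashing and its per-world bookkeeping. A brief sketch of the password round-trip and the enter/leave flow, assuming an active mongoengine connection and an existing `World` document named `world` (both are assumptions, not part of this file):

```python
# Hypothetical objects: `world` is an existing World document.
alice = User(name='Alice', password='correct horse')   # hashed and saved on creation
print(alice.match_password('correct horse'))   # True
print(alice.match_password('wrong'))           # False

alice.enter_world(world)   # placed at the starting room, or at her saved location
alice.leave_world()        # stores a LocationSave so she resumes there next time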
#### File: architext/verbs/craft.py
```python
from . import verb
from .. import entities
from .. import util
import architext.strings as strings
class Craft(verb.Verb):
"""This verb allows users to create items that are placed in their current room"""
command = _('craft')
save_command = _('craftsaved')
permissions = verb.PRIVILEGED
def __init__(self, session):
super().__init__(session)
self.new_item = entities.Item(room=self.session.user.room, save_on_creation=False)
self.current_process_function = self.process_first_message
self.save_create = False # if true, the item will be saved and not placed in the room.
def process(self, message):
if message == '/':
self.session.send_to_client(strings.cancelled)
self.finish_interaction()
else:
self.current_process_function(message)
def process_first_message(self, message):
if message == self.save_command:
self.save_create = True
title = _("You are save-crafting an item.")
body = _(
"It won't be created at this room but as a saved item that you'll be able to spawn later.\n"
"\n"
"Enter the following fields\n"
" ⚑ Item's name"
)
else:
title = _("You start crafting an item")
body = _("Enter the following fields\n ⚑ Item's name")
out_message = strings.format(title, body, cancel=True)
self.session.send_to_client(out_message)
self.current_process_function = self.process_item_name
def process_item_name(self, message):
self.new_item.name = message
try:
self.new_item.ensure_i_am_valid()
except entities.EmptyName:
self.session.send_to_client(strings.is_empty)
except entities.WrongNameFormat:
self.session.send_to_client(strings.wrong_format)
except entities.RoomNameClash:
self.session.send_to_client(strings.room_name_clash)
except entities.TakableItemNameClash:
self.session.send_to_client(strings.takable_name_clash)
else:
self.session.send_to_client(_(' 👁 Description [default "{default_description}"]').format(default_description=strings.default_description))
self.current_process_function = self.process_item_description
def process_item_description(self, message):
self.new_item.description = message
self.session.send_to_client(_(
' 🔍 Visibility\n'
' Write:\n'
) +
strings.visibility_list
)
self.current_process_function = self.process_visibility
def process_visibility(self, message):
if message.lower() in strings.visible_input_options:
self.new_item.visible = 'obvious'
elif message.lower() in strings.listed_input_options:
self.new_item.visible = 'listed'
elif message.lower() in strings.hidden_input_options:
self.new_item.visible = 'hidden'
elif message.lower() in strings.takable_input_options:
self.new_item.visible = 'takable'
else:
self.session.send_to_client(_('Answer "listed", "visible", "hidden" or "takable".'))
return
if self.save_create:
self.new_item.saved_in = self.session.user.room.world_state
self.new_item.item_id = self.new_item._generate_item_id()
self.new_item.room = None
try:
self.new_item.save()
except entities.NameNotGloballyUnique:
self.session.send_to_client(_("There is an item or exit with that name in this world. Takable items need an unique name. Choose another visibility or start over to choose another name."))
else:
if not self.save_create:
self.new_item.put_in_room(self.session.user.room)
self.session.send_to_client(_("Item crafted!"))
if not self.session.user.master_mode:
self.session.send_to_others_in_room(_("{user_name} has crafted something here.").format(user_name=self.session.user.name))
self.finish_interaction()
```
#### File: architext/verbs/custom_verb.py
```python
from .. import entities
from .. import util
from .verb import Verb
from .. import session
import architext.strings as strings
class CustomVerb(Verb):
command = ''
@classmethod
def can_process(cls, message, session):
'''true if any custom verb corresponds to the message'''
return super().can_process(message, session) and cls.search_for_custom_verb(message, session) is not None
@classmethod
def search_for_custom_verb(cls, message, session):
if len(message.split(" ", 1)) == 2: # if the message has the form "verb item"
target_verb_name, target_item_name = message.split(" ", 1)
candidate_items = session.user.room.items + session.user.get_current_world_inventory().items
items_they_may_be_referring_to = util.possible_meanings(target_item_name, [i.name for i in candidate_items])
if len(items_they_may_be_referring_to) == 1:
target_item_name = items_they_may_be_referring_to[0]
suitable_item_found = next(filter(lambda i: i.name==target_item_name, candidate_items))
suitable_verb_found_in_item = next(filter(lambda v: v.is_name(target_verb_name), suitable_item_found.custom_verbs), None)
if suitable_verb_found_in_item is not None:
return suitable_verb_found_in_item
else:
target_verb_name = message
suitable_verb_found_in_room = next(filter(lambda v: v.is_name(target_verb_name), session.user.room.custom_verbs), None)
if suitable_verb_found_in_room is not None:
return suitable_verb_found_in_room
world = session.user.room.world_state
suitable_verb_found_in_world = next(filter(lambda v: v.is_name(target_verb_name), world.custom_verbs), None)
if suitable_verb_found_in_world is not None:
return suitable_verb_found_in_world
return None
def __init__(self, session):
super().__init__(session)
self.custom_verb_definition = None
def process(self, message):
if self.custom_verb_definition is None:
self.custom_verb_definition = self.search_for_custom_verb(message, self.session)
if self.custom_verb_definition is None:
raise Exception('Invalid message passed to verbs.CustomVerb')
self.execute_custom_verb(self.custom_verb_definition)
self.finish_interaction()
def execute_custom_verb(self, custom_verb):
from .. import session
if isinstance(self.session, session.GhostSession):
depth = self.session.depth + 1
else:
depth = 0
try:
creator_session = self.session if not isinstance(self.session, session.GhostSession) else self.session.creator_session
ghost = session.GhostSession(self.session.server, self.session.user.room, creator_session, depth=depth)
except session.GhostSessionMaxDepthExceeded:
#TODO log max recursion depth exceeded (this self.session has no logger)
pass
else:
for message in custom_verb.commands:
formatted_message = self.format_custom_verb_message(message)
ghost.process_message(formatted_message)
ghost.disconnect()
def format_custom_verb_message(self, message):
if isinstance(self.session, session.GhostSession):
working_session = self.session.creator_session
else:
working_session = self.session
message = message.replace(strings.user_name_placeholder, working_session.user.name)
return message
```
#### File: architext/verbs/edit_world.py
```python
from . import verb
import textwrap
from .. import entities
import architext.strings as strings
class EditWorld(verb.Verb):
command = _('editworld')
permissions = verb.CREATOR
def __init__(self, session):
super().__init__(session)
self.world = self.session.user.room.world_state.get_world()
self.option_number = None
self.current_process_function = self.process_first_message
def process(self, message):
if message == '/':
self.session.send_to_client(strings.cancelled)
self.finish_interaction()
else:
self.current_process_function(message)
def process_first_message(self, message):
title = _('Editing this world: "{world_name}"').format(world_name=self.world.name)
body = _(
'Enter the number of the value you want to edit.\n'
' 0 - Name\n'
' 1 - Make public/private\n'
' 2 - Edit freedom'
)
out_message = strings.format(title, body, cancel=True)
self.session.send_to_client(out_message)
self.current_process_function = self.process_option_number
def process_option_number(self, message):
try:
message = int(message)
except ValueError:
self.session.send_to_client(strings.not_a_number)
return
options = {
0: {
"out_message": _('Enter the new name:'),
"next_process_function": self.process_new_world_name,
},
1: {
"out_message": _(
'This world is {actual_value}.\n'
'Do you want to change it to {new_value}? [yes/no]'
).format(
actual_value=(strings.public if self.world.public else strings.private),
new_value=(strings.public if not self.world.public else strings.private)
),
"next_process_function": self.process_public_choice,
},
2: {
"out_message": _(
'Who should be able to edit the world?\n'
' 0 - All users.\n'
' 1 - Only you and your designated editors.'
),
"next_process_function": self.process_edit_freedom_option,
}
}
try:
chosen_option = options[message]
except KeyError:
self.session.send_to_client(strings.wrong_value)
return
self.session.send_to_client(chosen_option["out_message"])
self.current_process_function = chosen_option["next_process_function"]
def process_new_world_name(self, message):
if not message:
self.session.send_to_client(strings.is_empty)
return
world = self.session.user.room.world_state.get_world()
world.name = message
world.save()
self.finish_interaction()
self.session.send_to_client(_("The name has been successfully changed."))
return
def process_public_choice(self, message):
if message.lower() in strings.yes_input_options:
try:
self.world.toggle_public()
except entities.PublicWorldLimitReached:
self.session.send_to_client(_('You have reached the limit of public worlds in this server. Try to make another world private or ask the admin to increase your limit.'))
self.finish_interaction()
return
self.session.send_to_client(_('This world is now {public_or_private}.').format(public_or_private=(strings.public if self.world.public else strings.private)))
self.finish_interaction()
elif message.lower() in strings.no_input_options:
self.session.send_to_client(_('OK. The world remains {public_or_private}').format(public_or_private=(strings.public if self.world.public else strings.private)))
self.finish_interaction()
else:
self.session.send_to_client(_('Please enter "yes" or "no".'))
def process_edit_freedom_option(self, message):
if message == '0':
self.session.user.room.world_state.get_world().set_to_free_edition()
self.session.send_to_client(_("Everybody can edit this world now."))
self.finish_interaction()
elif message == '1':
self.session.user.room.world_state.get_world().set_to_privileged_edition()
self.session.send_to_client(_("Only your designated editors and you can edit this world now."))
self.finish_interaction()
else:
self.session.send_to_client(strings.wrong_value)
```
#### File: architext/verbs/export.py
```python
from . import verb
from .. import entities
import json
import textwrap
from .. import util
class ExportWorld(verb.Verb):
command = _("export")
pretty_command = _("export pretty")
permissions = verb.PRIVILEGED
def process(self, message):
world_state = self.session.user.room.world_state
world_state_dict_representation = self.dump_world_state(world_state)
if message == self.pretty_command:
export = json.dumps(
world_state_dict_representation,
indent=4,
separators=(',', ': ')
)
else:
export = util.encode_dict(world_state_dict_representation)
header = _(
'Your world:\n'
'────────────────────────────────────────────────────────────\n'
)
footer = _(
'────────────────────────────────────────────────────────────\n'
'\n'
'You have exported your current world. Copy the text between the horizontal lines and save it anywhere.\n'
'You can import this and any exported world at the lobby.\n'
'Note that the "export pretty" option may mess with the whitespace in your names and descriptions.'
)
self.session.send_to_client(header + export + '\n' + footer)
self.finish_interaction()
def dump_item(self, item):
custom_verbs = [self.dump_custom_verb(verb) for verb in item.custom_verbs]
return {
"item_id": item.item_id,
"name": item.name,
"description": item.description,
"visible": item.visible,
"custom_verbs": custom_verbs
}
def dump_custom_verb(self, custom_verb):
return {
"names": custom_verb.names,
"commands": custom_verb.commands,
}
def dump_exit(self, exit):
return {
"name": exit.name,
"description": exit.description,
"destination": exit.destination.alias,
"room": exit.room.alias,
"visible": exit.visible,
"is_open": exit.is_open,
"key_names": exit.key_names
}
def dump_room(self, room):
custom_verbs = [self.dump_custom_verb(verb) for verb in room.custom_verbs]
items = [self.dump_item(item) for item in entities.Item.objects(room=room)]
return {
"name": room.name,
"alias": room.alias,
"description": room.description,
"custom_verbs": custom_verbs,
"items": items
}
def dump_inventories(self, inventories):
items = []
for inventory in inventories:
items += inventory.items
return [self.dump_item(item) for item in items]
def dump_world_state(self, world_state):
starting_room = world_state.starting_room
other_rooms = entities.Room.objects(world_state=world_state, alias__ne=starting_room.alias)
other_rooms = [self.dump_room(room) for room in other_rooms]
custom_verbs = [self.dump_custom_verb(verb) for verb in world_state.custom_verbs]
exits = []
for room in entities.Room.objects(world_state=world_state):
exits += room.exits
exits = [self.dump_exit(exit) for exit in exits]
# all items in inventories are extracted to be placed at the importer inventory.
inventories = entities.Inventory.objects(world_state=world_state)
inventory = self.dump_inventories(inventories)
saved_items = entities.Item.objects(saved_in=world_state)
saved_items = [self.dump_item(item) for item in saved_items]
return {
"next_room_id": world_state._next_room_id,
"starting_room": self.dump_room(starting_room),
"other_rooms": other_rooms,
"custom_verbs": custom_verbs,
"exits": exits,
"inventory": inventory,
"saved_items": saved_items
}
```
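For reference, the dictionary produced by `dump_world_state` (and therefore the decoded payload of an export) has roughly the following shape; the keys mirror the dump methods above, while the sample values are invented:

```python
# Illustrative only: keys follow dump_world_state/dump_room/dump_exit, values are made up.
example_export = {
    "next_room_id": 3,
    "starting_room": {
        "name": "Lobby", "alias": "0", "description": "...",
        "custom_verbs": [], "items": [],
    },
    "other_rooms": [],
    "custom_verbs": [{"names": ["dance"], "commands": ["emote dances around"]}],
    "exits": [{
        "name": "wooden door", "description": "", "destination": "1",
        "room": "0", "visible": "listed", "is_open": True, "key_names": [],
    }],
    "inventory": [],      # items from every inventory are flattened here
    "saved_items": [],
}
```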
#### File: architext/verbs/inventory.py
```python
from .verb import Verb
from .. import util
import functools
from .. import entities
import architext.strings as strings
class Take(Verb):
'''Takes an item into your inventory.
usage:
command item_name
'''
command = _('take ')
def process(self, message):
partial_name = message[len(self.command):]
selected_item = util.name_to_entity(self.session, partial_name, substr_match=['room_items'])
if selected_item == 'many':
self.session.send_to_client(strings.many_found)
elif selected_item is None:
self.session.send_to_client(strings.not_found)
elif selected_item.visible != 'takable':
self.session.send_to_client(_('{item_name}: You can\'t take that item.').format(item_name=selected_item.name))
else:
self.session.user.get_current_world_inventory().add_item(selected_item)
selected_item.remove_from_room()
self.session.send_to_client(_('You took {item_name}.').format(item_name=selected_item.name))
self.finish_interaction()
class Drop(Verb):
'''Drops an item from your inventory.
usage:
command item_name
'''
command = _('drop ')
def process(self, message):
partial_name = message[len(self.command):]
selected_item = util.name_to_entity(self.session, partial_name, substr_match=['inventory'])
if selected_item is None:
self.session.send_to_client(_('You don\'t have that item.'))
elif selected_item == "many":
self.session.send_to_client(_('There is more than one item with a similar name in your inventory. Be more specific.'))
else:
self.session.user.get_current_world_inventory().remove_item(selected_item)
selected_item.put_in_room(self.session.user.room)
self.session.send_to_client(_('You dropped {item_name}.').format(item_name=selected_item.name))
self.finish_interaction()
class Inventory(Verb):
'''Shows what you have in your inventory'''
command = _('inventory')
def process(self, message):
if len(self.session.user.get_current_world_inventory().items) < 1:
self.session.send_to_client(_('Your inventory is empty'))
else:
item_names = [item.name for item in self.session.user.get_current_world_inventory().items]
item_list_items = [f'● {name}' for name in item_names]
inventory_list = '\n'.join(item_list_items)
self.session.send_to_client(_('You carry:\n{inventory_list}').format(inventory_list=inventory_list))
self.finish_interaction()
class Give(Verb):
command = _("give '")
def process(self, message):
message = message[len(self.command):]
target_user_name, target_item_name = message.split("' ", 1)
target_user = next(entities.User.objects(name=target_user_name, room=self.session.user.room, client_id__ne=None), None)
item = next(entities.Item.objects(name=target_item_name, room=self.session.user.room, visible='takable'), None)
if target_user is not None and item is not None:
target_user.get_current_world_inventory().add_item(item)
self.session.send_to_client(_('Done.'))
else:
self.session.send_to_client(_("The item/user is not in this room."))
self.finish_interaction()
class TakeFrom(Verb):
command = _("takefrom '")
def process(self, message):
message = message[len(self.command):]
target_user_name, target_item_name = message.split("' ", 1)
target_user = next(entities.User.objects(name=target_user_name, room=self.session.user.room, client_id__ne=None), None)
if target_user is not None:
target_item = next(filter(lambda i: i.name==target_item_name, target_user.get_current_world_inventory().items), None)
if target_item is not None:
target_user.get_current_world_inventory().remove_item(target_item)
target_item.put_in_room(target_user.room)
self.session.send_to_client(_('Done.'))
else:
self.session.send_to_client(_('The item is not in that user\'s inventory.'))
else:
self.session.send_to_client(_('That user is not here.'))
self.finish_interaction()
```
#### File: architext/verbs/remodel.py
```python
from . import verb
import architext.strings as strings
class Remodel(verb.Verb):
"""Lets players edit every aspect of a room"""
command = _('reform')
permissions = verb.PRIVILEGED
def __init__(self, session):
super().__init__(session)
self.option_number = None
self.current_process_function = self.process_first_message
def process(self, message):
if message == '/':
self.session.send_to_client(strings.cancelled)
self.finish_interaction()
else:
self.current_process_function(message)
def process_first_message(self, message):
title = _('Reforming room "{room_name}"').format(room_name=self.session.user.room.name)
body = _(
'Enter the number of the field to modify\n'
' 0 - Name\n'
' 1 - Description'
)
out_message = strings.format(title, body, cancel=True)
self.session.send_to_client(out_message)
self.current_process_function = self.process_reform_option
def process_reform_option(self, message):
try:
message = int(message)
except ValueError:
self.session.send_to_client(strings.not_a_number)
return
max_number = 1
if 0 <= message <= max_number:
self.option_number = message
self.session.send_to_client(_('Enter the new value'))
self.current_process_function = self.process_reform_value
else:
self.session.send_to_client(strings.wrong_value)
def process_reform_value(self, message):
option = self.option_number
if message:
if option == 0:
self.session.user.room.name = message
elif option == 1:
self.session.user.room.description = message
self.session.user.room.save()
self.session.send_to_client(_('Reform completed.'))
self.finish_interaction()
else:
self.session.send_to_client(strings.is_empty)
```
#### File: architext/verbs/shout.py
```python
from .verb import Verb
class Shout(Verb):
"""Let players send messages to every player connected in the same world"""
command = _('shout ')
def process(self, message):
command_length = len(self.command)
out_message = f'{self.session.user.name} shouts "¡¡{message[command_length:].upper()}!!"'
self.session.send_to_all(out_message)
self.finish_interaction()
``` |
{
"source": "JimenaMV/ideuy-py",
"score": 3
} |
#### File: ideuy-py/script/build_national_grid_shapefile.py
```python
import os
import fiona
from ideuy.download import BASE_HOST, DATA_PATH, download_grid
def create_national_grid_geojson(*, original_grid, output_dir):
# Create a new geojson with the same schema, CRS and features as the original,
# but with an extra column 'data_path' derived from each feature's Remesa number.
dst_path = os.path.join(output_dir, 'national_grid.geojson')
with fiona.open(original_grid) as src:
schema = src.schema.copy()
schema['properties']['data_path'] = 'str:180'
with fiona.open(dst_path,
'w',
driver='GeoJSON',
crs=src.crs,
schema=schema) as dst:
for feat in src:
feat = feat.copy()
remesa = feat['properties']['Remesa']
data_path = f'CN_Remesa_{remesa:0>2}/02_Ortoimagenes/'
feat['properties']['data_path'] = data_path
dst.write(feat)
return dst_path
def main():
output_dir = './out/'
# First, download original urban grid
original_grid = download_grid('national', output_dir=output_dir)
# Create new urban grid using features from original one,
# and listings from data repository
new_grid = create_national_grid_geojson(original_grid=original_grid,
output_dir=output_dir)
print("New national grid shapefile written at:", new_grid)
if __name__ == '__main__':
main()
```
#### File: ideuy-py/script/build_urban_grid_shapefile.py
```python
import os
from urllib.parse import urlparse
import fiona
import requests
from bs4 import BeautifulSoup
from shapely.geometry import shape
from ideuy.download import BASE_HOST, DATA_PATH, download_grid
IMAGE_FORMAT_PATH = '02_RGBI_8bits'
def list_all_images():
res = []
for remesa in range(1, 11):
# Build remesa URL for 02_Ortoimagenes, and download directory listing
url = f'{BASE_HOST}{DATA_PATH}CU_Remesa_{remesa:0>2}/02_Ortoimagenes/'
city_dirs = list_directory(url)
for city_dir in city_dirs:
# Build city URL
city_url = f'{city_dir}{IMAGE_FORMAT_PATH}/'
urls = list_directory(city_url)
print(city_dir, len(urls))
res.extend(urls)
return res
def list_directory(url):
res = requests.get(url)
if not res.ok:
raise RuntimeError(
f'Failed to list files at {url}. Please retry later.')
# Parse HTML response
soup = BeautifulSoup(res.content, 'html.parser')
# Gather all file links in directory listing
files = []
for row in soup.table.find_all('td'):
for link in row.find_all('a'):
files.append(link)
# Ignore parent directory link
files = [f for f in files if f.text != 'Parent Directory']
# Build list of absolute URLs for each link
return [f'{url}{f.get("href")}' for f in files]
def build_data_path_by_coord_dictionary(urls):
res = {}
for url in urls:
path = urlparse(url).path
data_path = path[path.index(DATA_PATH) +
len(DATA_PATH):path.index(IMAGE_FORMAT_PATH)]
filename = path.split('/')[-1]
parts = filename.split('_')
coord = parts[0]
res[coord] = data_path
return res
def create_urban_grid_geojson(*, original_grid, image_urls, output_dir):
# Build dict of (MGRS coord, image dirname)
paths_by_coord = build_data_path_by_coord_dictionary(image_urls)
# Create a new geojson with the same schema, CRS and features as the original,
# but with an extra column 'data_path', using the recently built map.
dst_path = os.path.join(output_dir, 'urban_grid.geojson')
with fiona.open(original_grid) as src:
schema = src.schema.copy()
schema['properties']['data_path'] = 'str:180'
with fiona.open(dst_path,
'w',
driver='GeoJSON',
crs=src.crs,
schema=schema) as dst:
for feat in src:
feat = feat.copy()
coord = feat['properties']['Nombre']
feat['properties']['data_path'] = paths_by_coord[coord]
dst.write(feat)
return dst_path
def main():
output_dir = './out/'
# First, download original urban grid
original_grid = download_grid('urban', output_dir=output_dir)
# List directories recursively in data repository
image_urls = list_all_images()
# Create new urban grid using features from original one,
# and listings from data repository
new_grid = create_urban_grid_geojson(original_grid=original_grid,
image_urls=image_urls,
output_dir=output_dir)
print("New urban grid shapefile written at:", new_grid)
if __name__ == '__main__':
main()
```
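The mapping built by `build_data_path_by_coord_dictionary` keys each MGRS coordinate to the remesa/city directory that holds its images. A tiny sketch with an invented URL structured like the listings scraped above:

```python
# Invented URL: only its structure matters here.
urls = [
    'https://visualizador.ide.uy/descargas/datos/'
    'CU_Remesa_03/02_Ortoimagenes/Ciudad_X/02_RGBI_8bits/ABC123_RGBI_16_Remesa_03_X.tif'
]
print(build_data_path_by_coord_dictionary(urls))
# {'ABC123': 'CU_Remesa_03/02_Ortoimagenes/Ciudad_X/'}
```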
#### File: src/ideuy/download.py
```python
import logging
import os
from functools import partial
from multiprocessing.pool import ThreadPool
from urllib.parse import urlparse
from urllib.request import urlopen
import fiona
import requests
from tqdm import tqdm
BASE_HOST = 'https://visualizador.ide.uy'
DATA_PATH = '/descargas/datos/'
DEFAULT_CRS = 'epsg:4326'
# deprecated: move to script/build_*
GRID_SHP_EXTS = ['cpg', 'dbf', 'prj', 'shp', 'shx']
GRID_PATHS_BY_TYPE = {
'national': 'Grillas/Nacional/Articulacion_Ortoimagenes_Nacional',
'urban': 'Grillas/Urbana/Articulacion_Ortoimagenes_Urbana',
}
DIRS_BY_FORMAT = {
'rgbi_16bit': '01_RGBI_16bits',
'rgbi_8bit': '02_RGBI_8bits',
'rgb_8bit': '03_RGB_8bits'
}
FORMAT_PART_BY_FORMAT = {
'rgbi_16bit': 'RGBI_16',
'rgbi_8bit': 'RGBI_16',
'rgb_8bit': 'RGB_8'
}
FILE_ID_BY_TYPE = {
'national': '{coord}_{format}_Remesa_{remesa_id:0>2}',
'urban': '{coord}_{format}_Remesa_{remesa_id:0>2}_{city_id}'
}
EXTS_BY_FORMAT = {
'rgbi_16bit': ['tif'],
'rgbi_8bit': ['tif'],
'rgb_8bit': ['jpg', 'jgw']
}
# deprecated
IMAGE_URL = f'{BASE_HOST}{DATA_PATH}' '{type_dir}/02_Ortoimagenes/{product_path}'
_logger = logging.getLogger(__name__)
def download_images_from_grid_vector(grid_vector,
num_jobs=1,
*,
output_dir,
type_id,
product_type_id):
with fiona.open(grid_vector) as src:
features = list(src)
with ThreadPool(num_jobs) as pool:
worker = partial(download_feature_image,
output_dir=output_dir,
type_id=type_id,
product_type_id=product_type_id)
with tqdm(total=len(features)) as pbar:
for _ in enumerate(pool.imap_unordered(worker, features)):
pbar.update()
def download_feature_image(feat, *, output_dir, type_id, product_type_id):
props = feat['properties']
coord = props['Nombre']
data_path = props['data_path']
download_image(output_dir=output_dir,
type_id=type_id,
product_type_id=product_type_id,
data_path=data_path,
coord=coord)
def download_image(dry_run=False,
*,
output_dir,
type_id,
product_type_id,
data_path,
coord):
if type_id not in ('national', 'urban'):
raise RuntimeError(
"Invalid type_id. Should be either 'national' or 'urban'")
format_part = FORMAT_PART_BY_FORMAT[product_type_id]
remesa_id = int(data_path.split('/')[0].split('_')[2]) # e.g CU_Remesa_03/...
    if type_id == 'national':
name = FILE_ID_BY_TYPE[type_id].format(coord=coord,
format=format_part,
remesa_id=remesa_id)
else:
city_id = [p for p in data_path.split('/') if p][-1].split('_')[-1]
name = FILE_ID_BY_TYPE[type_id].format(coord=coord,
format=format_part,
remesa_id=remesa_id,
city_id=city_id)
exts = EXTS_BY_FORMAT[product_type_id]
filenames = [f'{name}.{ext}' for ext in exts]
product_type_dir = DIRS_BY_FORMAT[product_type_id]
urls = [
f'{BASE_HOST}{DATA_PATH}{data_path}{product_type_dir}/{filename}'
for filename in filenames
]
# Workaround: for some reason, CU_Remesa_10 contains JPG2000 files, which
# have extensions .jp2/.j2w instead of .jpg/.jgw
if type_id == 'urban' and remesa_id == 10:
urls = [
url.replace('.jpg', '.jp2').replace('.jgw', '.j2w') for url in urls
]
res = []
for url in urls:
res.append(download_from_url(url, output_dir))
return res
def download_all(urls,
num_jobs=1,
file_size=None,
flatten=True,
*,
output_dir):
with ThreadPool(num_jobs) as pool:
worker = partial(download_from_url,
output_dir=output_dir,
flatten=flatten,
file_size=file_size)
with tqdm(total=len(urls)) as pbar:
for _ in enumerate(pool.imap_unordered(worker, urls)):
pbar.update()
# deprecated: move to script/build_*
def download_grid(type_id, *, output_dir):
if type_id not in GRID_PATHS_BY_TYPE.keys():
raise RuntimeError("type_id is invalid")
base_url = f'{BASE_HOST}{DATA_PATH}{GRID_PATHS_BY_TYPE[type_id]}'
res = None
for ext in GRID_SHP_EXTS:
url = f'{base_url}.{ext}'
output_path, _ = download_from_url(url, output_dir)
if ext == 'shp':
res = output_path
return res
def download_from_url(url, output_dir, file_size=None, flatten=True):
"""
Download from a URL
@param: url to download file
@param: output_dir place to put the file
@param: file_size specify the output file size, only downloads up to this point
@param: flatten: keep original dir structure in URL or not
"""
_logger.info(f"Download {url} to {output_dir}")
# Extract path from url (without the leading slash)
path = urlparse(url).path[1:]
if flatten:
filename = path.split("/")[-1]
dst = os.path.join(output_dir, filename)
else:
dst = os.path.join(output_dir, path)
real_file_size = int(urlopen(url).info().get('Content-Length', -1))
if not file_size or file_size > real_file_size:
file_size = real_file_size
if os.path.exists(dst):
first_byte = os.path.getsize(dst)
else:
first_byte = 0
if first_byte >= file_size:
return dst, file_size
header = {"Range": "bytes=%s-%s" % (first_byte, file_size)}
pbar = tqdm(total=file_size,
initial=first_byte,
unit='B',
unit_scale=True,
desc=url.split('/')[-1])
req = requests.get(url, headers=header, stream=True)
os.makedirs(os.path.dirname(dst), exist_ok=True)
with open(dst, 'ab') as f:
for chunk in req.iter_content(chunk_size=1024):
if chunk:
f.write(chunk)
pbar.update(1024)
pbar.close()
return dst, file_size
```
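A hedged usage sketch of the bulk download helper above. It assumes a grid GeoJSON that already carries the 'data_path' property (as built by the script earlier in this repository); the paths and worker count are illustrative, not taken from the project:
```python
from ideuy.download import download_images_from_grid_vector

# Fetch the orthoimage files referenced by every feature of the grid,
# using 4 download threads. download_from_url resumes partially written
# files via an HTTP Range header, so re-running the command is safe.
download_images_from_grid_vector('./out/urban_grid.geojson',
                                 num_jobs=4,
                                 output_dir='./out/images/',
                                 type_id='urban',
                                 product_type_id='rgb_8bit')
```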
#### File: src/ideuy/query.py
```python
import json
import logging
from fnmatch import fnmatch
from itertools import zip_longest, islice
import requests
from shapely.ops import transform
from ideuy.vector import get_vector_bounds_and_crs, reproject_shape, flip
HOSTNAME = "https://visualizador.ide.uy"
SERVICE_PATH = "/geonetwork/srv/eng/q"
SERVICE_URL = f"{HOSTNAME}{SERVICE_PATH}"
MAX_PAGE_SIZE = 100
DEFAULT_CRS = 'epsg:4326'
DEFAULT_PARAMS = {
"_content_type": "json",
"bucket": "s101",
"fast": "index",
"resultType": "details",
"sortBy": "relevance"
}
_logger = logging.getLogger(__name__)
def query(query=None, aoi=None, limit=None, categories=[], file_filters=[]):
if not categories:
categories = []
params = {**DEFAULT_PARAMS, 'facet.q': '&'.join(categories)}
if query:
params['title'] = f'{query}*'
if aoi:
# TODO: Query for each feature geometry bounds in AOI file...
bounds, crs = get_vector_bounds_and_crs(aoi)
if crs != DEFAULT_CRS:
# If crs is not the default one, reproject
bounds = reproject_shape(bounds, crs, DEFAULT_CRS)
# Flip (latitude,longitude) because the web service expects it the other way...
bounds = transform(flip, bounds)
params['geometry'] = bounds.wkt
gen = query_all_pages(params)
if limit:
gen = islice(gen, limit)
products = build_products(gen)
return products
def build_products(raw_products):
res = []
for result in raw_products:
files = []
# Build list of downloadable files in product
links = result['link']
# Make sure links is a list (e.g. when there is only 1 link)
if not isinstance(links, list):
links = [links]
for link in links:
parts = link.split("|")
link_id, name, url = parts[0], parts[1], parts[2]
# Replace file:// URL for current https static assets URL
if url.startswith('file://'):
url = url.replace("file:///opt/", f"{HOSTNAME}/")
files.append(dict(id=link_id, name=name, url=url))
res.append(dict(**result, __files=files))
return res
def filter_products_by_files(products, file_filters=[]):
res = []
for product in products:
files = []
# For each file filter, add filtered files to new files list
for filt in file_filters:
key, pattern = filt.split('/')
files.extend(
[f for f in product['__files'] if fnmatch(f[key], pattern)])
# Only return product if it has any file, after filtering
if files:
product['__files'] = files
res.append(product)
return res
def grouper(iterable, n, fillvalue=None):
"Collect data into fixed-length chunks or blocks"
    # grouper('ABCDEFG', 3, 'x') --> ABC DEF Gxx
args = [iter(iterable)] * n
return zip_longest(*args, fillvalue=fillvalue)
def query_all_pages(params):
"""Generates results for all pages"""
i = 1
while True:
page_params = {**params, 'from': i, 'to': (i + MAX_PAGE_SIZE - 1)}
_logger.info(f"Query: {page_params}")
res = requests.get(SERVICE_URL, params=page_params)
if not res.ok:
            raise RuntimeError(
                f"Status code: {res.status_code}. Response: {res.content}")
body = json.loads(res.content)
metadata = body.get('metadata', [])
# Make sure metadata is a list (e.g. when there is only 1 result)
if not isinstance(metadata, list):
metadata = [metadata]
for row in metadata:
yield row
# If page results count is less than max page size,
# this is the last page, so return:
if len(metadata) < MAX_PAGE_SIZE:
return
# Otherwise, increment item_from and item_to to query next page
i += MAX_PAGE_SIZE
```
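A hedged usage sketch of the query helpers above; the search term and the file filter are illustrative only:
```python
from ideuy.query import query, filter_products_by_files

# Search the catalog by title, keep at most 20 results, then retain only
# products that expose at least one GeoTIFF download.
products = query(query='Ortoimagen', limit=20)
products = filter_products_by_files(products, file_filters=['name/*.tif'])
for product in products:
    for f in product['__files']:
        print(f['name'], f['url'])
```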
#### File: src/ideuy/vector.py
```python
import json
import logging
import os
import tempfile
from functools import partial
import fiona
import pkg_resources
import pyproj
from shapely.geometry import box, mapping, shape
from shapely.ops import transform, unary_union
from ideuy.download import download_grid
_logger = logging.getLogger(__name__)
DATA_DIR = pkg_resources.resource_filename('ideuy', 'data')
URBAN_GRID_PATH = os.path.join(DATA_DIR, 'urban_grid.geojson')
NATIONAL_GRID_PATH = os.path.join(DATA_DIR, 'national_grid.geojson')
GRIDS_BY_TYPE = {'urban': URBAN_GRID_PATH, 'national': NATIONAL_GRID_PATH}
def get_vector_bounds_and_crs(vector):
with fiona.open(vector) as src:
return box(*src.bounds), src.crs['init']
def reproject_shape(shp, from_crs, to_crs):
project = partial(pyproj.transform, pyproj.Proj(from_crs),
pyproj.Proj(to_crs))
return transform(project, shp)
def flip(x, y):
"""Flips the x and y coordinate values"""
return y, x
def filter_by_aoi(aoi_vector, *, output, type_id, grid_vector):
"""
Filter a grid vector using polygons from the AOI vector,
and create a filtered grid GeoJSON as output.
"""
if not grid_vector:
grid_vector = GRIDS_BY_TYPE[type_id]
# Open aoi_vector, union all polygons into a single AOI polygon
with fiona.open(aoi_vector) as src:
aoi_polys = [shape(f['geometry']) for f in src]
aoi_polys = [shp for shp in aoi_polys if shp.is_valid]
aoi_crs = src.crs
# Union over all AOI shapes to form a single AOI multipolygon,
# in case there are many.
aoi_poly = unary_union(aoi_polys)
# For each feature in grid vector, filter those polygons that
# intersect with AOI
with fiona.open(grid_vector) as src:
if aoi_crs != src.crs:
raise RuntimeError("AOI vector has different CRS than grid. "
"Please make sure it is EPSG:5381.")
with fiona.open(output,
'w',
driver='GeoJSON',
crs=src.crs,
schema=src.schema) as dst:
for feat in src:
shp = shape(feat['geometry'])
if shp.intersects(aoi_poly):
dst.write(feat)
``` |
{
"source": "jimenbian/-",
"score": 3
} |
#### File: -/3.1/Systematic_sampling.py
```python
import random
def SystematicSampling(dataMat, number):
    length = len(dataMat)
    # Step size: take every k-th element (integer division so k can be
    # used as a list index)
    k = length // number
    sample = []
    i = 0
    if k > 0:
        while len(sample) != number:
            sample.append(dataMat[0 + i * k])
            i += 1
        return sample
    else:
        # Fewer items than requested: fall back to simple random sampling
        # (RandomSampling is defined in a companion script of this repo)
        return RandomSampling(dataMat, number)
```
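A quick, made-up check of the sampler above: with 10 items and a sample size of 5 the step k is 2, so every second element is returned.
```python
data = list(range(10))
print(SystematicSampling(data, 5))  # -> [0, 2, 4, 6, 8]
```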
#### File: -/5.2/DBSCAN.py
```python
def regionQuery(self,P,eps):
result = []
for d in self.dataSet:
if (((d[0]-P[0])**2 + (d[1] - P[1])**2)**0.5)<=eps:
result.append(d)
return result
def expandCluster(self,point,NeighbourPoints,C,eps,MinPts):
C.addPoint(point)
for p in NeighbourPoints:
if p not in self.visited:
self.visited.append(p)
np = self.regionQuery(p,eps)
if len(np) >= MinPts:
for n in np:
if n not in NeighbourPoints:
NeighbourPoints.append(n)
for c in self.Clusters:
if not c.has(p):
if not C.has(p):
C.addPoint(p)
if len(self.Clusters) == 0:
if not C.has(p):
C.addPoint(p)
self.Clusters.append(C)
```
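The two methods above belong to a class that is not shown in this excerpt; they rely on `self.dataSet`, `self.visited`, `self.Clusters` and a cluster object exposing `addPoint`/`has`. A minimal, hypothetical skeleton they could plug into (names are assumptions, not the original author's code):
```python
class Cluster:
    """Simple container for the points assigned to one cluster."""
    def __init__(self, name):
        self.name = name
        self.points = []

    def addPoint(self, point):
        self.points.append(point)

    def has(self, point):
        return point in self.points


class DBSCAN:
    def __init__(self, dataSet):
        self.dataSet = dataSet   # list of (x, y) points
        self.visited = []        # points already visited
        self.Clusters = []       # Cluster objects found so far

    # regionQuery and expandCluster from the excerpt above would be
    # defined here as methods of this class.
```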
#### File: -/7.3/predict_pic_generate.py
```python
from PIL import Image, ImageFilter

def imageprepare(argv):
    """
    This function returns the pixel values.
    The input is a png file location.
"""
im = Image.open(argv).convert('L')
width = float(im.size[0])
height = float(im.size[1])
newImage = Image.new('L', (28, 28), (255)) #creates white canvas of 28x28 pixels
if width > height: #check which dimension is bigger
#Width is bigger. Width becomes 20 pixels.
nheight = int(round((20.0/width*height),0)) #resize height according to ratio width
        if (nheight == 0): #rare case but minimum is 1 pixel
            nheight = 1
# resize and sharpen
img = im.resize((20,nheight), Image.ANTIALIAS).filter(ImageFilter.SHARPEN)
        wtop = int(round(((28 - nheight)/2),0)) #calculate vertical position
newImage.paste(img, (4, wtop)) #paste resized image on white canvas
else:
        #Height is bigger. Height becomes 20 pixels.
nwidth = int(round((20.0/height*width),0)) #resize width according to ratio height
if (nwidth == 0): #rare case but minimum is 1 pixel
nwidth = 1
# resize and sharpen
img = im.resize((nwidth,20), Image.ANTIALIAS).filter(ImageFilter.SHARPEN)
        wleft = int(round(((28 - nwidth)/2),0)) #calculate horizontal position
newImage.paste(img, (wleft, 4)) #paste resized image on white canvas
#newImage.save("sample.png")
tv = list(newImage.getdata()) #get pixel values
#normalize pixels to 0 and 1. 0 is pure white, 1 is pure black.
tva = [ (255-x)*1.0/255.0 for x in tv]
return tva
``` |