def strip_accents(s):
    nfkd = unicodedata.normalize('NFKD', unicode(s))
    return u''.join(ch for ch in nfkd if not unicodedata.combining(ch))
Strip accents to prepare for slugification.
def slugify(s):
    s = strip_accents(s.replace("'", '').lower())
    return re.sub('[^a-z0-9]+', ' ', s).strip().replace(' ', '-')
Converts the given string to a URL slug.
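A quick round trip through these two helpers (hypothetical inputs; assumes the unicodedata and re imports of the snippets above, under Python 2 where unicode is built in):

print(slugify(u"Crème Brûlée!"))  # -> 'creme-brulee'
print(slugify(u"Hello, World"))   # -> 'hello-world'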
def _legacy_status(stat):
    # 2d0c00002a0000
    if stat[:2] == '30' or stat[:2] == '47':  # RX1 CT
        ooo = stat[4:5]
        # console.log("legstat. " + o);
        if ooo == '0':
            return 0
        if ooo == '8':
            return 100
    if stat == '7e':
        return 0
    if stat == '7f':
        return 100
    if len(stat) == 6:  # old
        try:
            val = int(stat[4:], 16)
        except ValueError:
            val = 0
        hwt = stat[:2]
        if hwt == '01':  # old dim
            return round(((125 - val) / 125) * 100)
        if hwt == '02':  # old rel
            return 100 if val == 127 else 0
        if hwt == '28':  # LED DIM
            if stat[2:4] == '01':
                if stat[4:] == '78':
                    return 0
            return round(((120 - val) / 120) * 100)
    # Additional decodes not part of qsmobile.js
    if stat.upper().find('ON') >= 0:  # Relay
        return 100
    if (not stat) or stat.upper().find('OFF') >= 0:
        return 0
    if stat.endswith('%'):  # New style dimmers
        if stat[:-1].isdigit():
            return int(stat[:-1])
    _LOGGER.debug("val='%s' used a -1 fallback in legacy_status", stat)
    return -1
Legacy status method from the 'qsmobile.js' library. Pass in the 'val' from &devices or the 'data' received after calling a specific ID.
def decode_qwikcord(packet, channel=1):
    val = str(packet.get('val', ''))
    if len(val) != 16:
        return None
    if channel == 1:
        return int(val[6:12], 16)  # CTavg
    return int(val[12:], 16)  # CTsum
Extract the qwikcord current measurements from val (CTavg, CTsum).
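A worked example with a hypothetical 16-character reading; the hex layout (characters 6-11 CTavg, 12-15 CTsum) is taken from the slicing above:

packet = {'val': '00000000006400c8'}
decode_qwikcord(packet, channel=1)  # int('000064', 16) -> 100 (CTavg)
decode_qwikcord(packet, channel=2)  # int('00c8', 16) -> 200 (CTsum)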
def decode_door(packet, channel=1):
    val = str(packet.get(QSDATA, ''))
    if len(val) == 6 and val.startswith('46') and channel == 1:
        return val[-1] == '0'
    return None
Decode a door sensor.
def decode_imod(packet, channel=1):
    val = str(packet.get(QSDATA, ''))
    if len(val) == 8 and val.startswith('4e'):
        try:
            _map = ((5, 1), (5, 2), (5, 4), (4, 1), (5, 1), (5, 2))[channel - 1]
            return (int(val[_map[0]], 16) & _map[1]) == 0
        except IndexError:
            return None
    return None
Decode a 4-channel imod. May support 6 channels.
def decode_pir(packet, channel=1):
    val = str(packet.get(QSDATA, ''))
    if len(val) == 8 and val.startswith('0f') and channel == 1:
        return int(val[-4:], 16) > 0
    return None
Decode a PIR.
def decode_temperature(packet, channel=1):
    val = str(packet.get(QSDATA, ''))
    if len(val) == 12 and val.startswith('34') and channel == 1:
        temperature = int(val[-4:], 16)
        return round(float(-46.85 + (175.72 * (temperature / pow(2, 16)))))
    return None
Decode the temperature.
def decode_humidity(packet, channel=1):
    val = str(packet.get(QSDATA, ''))
    if len(val) == 12 and val.startswith('34') and channel == 1:
        humidity = int(val[4:-4], 16)
        return round(float(-6 + (125 * (humidity / pow(2, 16)))))
    return None
Decode the humidity.
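These match the common SHT2x-style conversion formulas. A worked example with a hypothetical packet (QSDATA is the data key assumed by the decoders above):

packet = {QSDATA: '340060006a00'}
decode_temperature(packet)  # int('6a00', 16) = 27136 -> round(-46.85 + 175.72 * 27136 / 2**16) = 26
decode_humidity(packet)     # int('6000', 16) = 24576 -> round(-6 + 125 * 24576 / 2**16) = 41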
def set_value(self, qsid, new):
    # Set value & encode new to be passed to QSUSB
    try:
        dev = self[qsid]
    except KeyError:
        raise KeyError("Device {} not found".format(qsid))
    if new < 0:
        new = 0
    if new == dev.value:
        return
    if dev.is_dimmer:
        new = _MAX if new > (_MAX * .9) else new
    else:  # QSType.relay and any other
        new = _MAX if new > 0 else 0

    def success():
        """Success closure to update value."""
        self[qsid].value = new
        _LOGGER.debug("set success %s=%s", qsid, new)
        self._cb_value_changed(self, qsid, new)

    newqs = round(math.pow(round(new / _MAX * 100), 1 / self.dim_adj))
    _LOGGER.debug("%s hass=%s --> %s", qsid, new, newqs)
    self._cb_set_qsvalue(qsid, newqs, success)
Set a value.
def update_devices(self, devices):
    for qspacket in devices:
        try:
            qsid = qspacket[QS_ID]
        except KeyError:
            _LOGGER.debug("Device without ID: %s", qspacket)
            continue
        if qsid not in self:
            self[qsid] = QSDev(data=qspacket)
        dev = self[qsid]
        dev.data = qspacket
        # Decode value from QSUSB
        newqs = _legacy_status(qspacket[QS_VALUE])
        if dev.is_dimmer:
            # Adjust dimmer exponentially to get a smoother effect
            newqs = min(round(math.pow(newqs, self.dim_adj)), 100)
        newin = round(newqs * _MAX / 100)
        if abs(dev.value - newin) > 1:  # Significant change
            _LOGGER.debug("%s qs=%s --> %s", qsid, newqs, newin)
            dev.value = newin
            self._cb_value_changed(self, qsid, newin)
Update values from response of URL_DEVICES, callback if changed.
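The two power mappings are intended to be inverses: set_value raises to 1 / dim_adj on the way out, update_devices raises to dim_adj on the way back in. A sketch of the round trip, assuming _MAX = 255 and dim_adj = 1.2:

import math

_MAX, dim_adj = 255, 1.2
new = 128                                                      # incoming value, 0.._MAX
newqs = round(math.pow(round(new / _MAX * 100), 1 / dim_adj))  # -> 26, level sent to QSUSB
back = min(round(math.pow(newqs, dim_adj)), 100)               # -> 50, percent recovered on update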
def geist_replay(wrapped, instance, args, kwargs):
    path_parts = []
    file_parts = []
    if hasattr(wrapped, '__module__'):
        module = wrapped.__module__
        module_file = sys.modules[module].__file__
        root, _file = os.path.split(module_file)
        path_parts.append(root)
        _file, _ = os.path.splitext(_file)
        file_parts.append(_file)
    if hasattr(wrapped, '__objclass__'):
        file_parts.append(wrapped.__objclass__.__name__)
    elif hasattr(wrapped, '__self__'):
        file_parts.append(wrapped.__self__.__class__.__name__)
    file_parts.append(wrapped.__name__ + '.log')
    path_parts.append('_'.join(file_parts))
    filename = os.path.join(*path_parts)
    if is_in_record_mode():
        platform_backend = get_platform_backend()
        backend = RecordingBackend(
            source_backend=platform_backend,
            recording_filename=filename
        )
    else:
        backend = PlaybackBackend(recording_filename=filename)
    gui = GUI(backend)
    return wrapped(gui, *args, **kwargs)
Wraps a test or other function and injects a Geist GUI which enables replay (set the environment variable GEIST_REPLAY_MODE to 'record' to activate record mode).
def reverse_cyk_transforms(root):
    # type: (Nonterminal) -> Nonterminal
    root = InverseContextFree.transform_from_chomsky_normal_form(root)
    root = InverseContextFree.unit_rules_restore(root)
    root = InverseContextFree.epsilon_rules_restore(root)
    return root
Reverse transformations made to the grammar before CYK.

Performs the following steps:
- transform from Chomsky normal form
- restore unit rules
- restore epsilon rules

:param root: Root node of the parsed tree.
:return: Restored parsed tree.
def _geom_solve_p_from_mu(mu, b):
    def p_eq(x, mu, b):
        x, mu, b = Decimal(x), Decimal(mu), Decimal(b)
        return (x / (1 - x)) - ((b + 1) / (x**-b - 1)) - mu

    # x here is the param raised to the k_agg power, or 1 - p
    return 1 - optim.brentq(p_eq, 1e-16, 100, args=(mu, b), disp=True)
For the geom_uptrunc, given mu and b, return p. Ref: Harte 2011, Oxford U Press. Eq. 7.50.
def _nbinom_ztrunc_p(mu, k_agg):
    p_eq = lambda p, mu, k_agg: (k_agg * p) / (1 - (1 + p)**-k_agg) - mu
    # The upper bound needs to be large. p will increase with increasing mu
    # and decreasing k_agg
    p = optim.brentq(p_eq, 1e-10, 1e10, args=(mu, k_agg))
    return p
Calculates p parameter for truncated negative binomial. Function given in Sampford 1955, equation 4. Note that omega = 1 / (1 + p) in Sampford.
def _ln_choose(n, k_agg):
    '''
    log binomial coefficient with extended gamma factorials. n and k_agg may
    be int or array - if both array, must be the same length.
    '''
    gammaln = special.gammaln
    return gammaln(n + 1) - (gammaln(k_agg + 1) + gammaln(n - k_agg + 1))
log binomial coefficient with extended gamma factorials. n and k_agg may be int or array - if both array, must be the same length.
def _solve_k_from_mu(data, k_array, nll, *args):
    # TODO: See if a root finder like fminbound would work with Decimal used
    # in logpmf method (will this work with arrays?)
    nll_array = np.zeros(len(k_array))
    for i in range(len(k_array)):
        nll_array[i] = nll(data, k_array[i], *args)
    min_nll_idx = np.argmin(nll_array)
    return k_array[min_nll_idx]
For given args, return k_agg from searching some k_array.

Parameters
----------
data : array
k_array : array
nll : function
args :

Returns
-------
float
    Minimum k_agg
def _trunc_logser_solver(bins, b):
    if bins == b:
        p = 0
    else:
        BOUNDS = [0, 1]
        DIST_FROM_BOUND = 10 ** -15
        m = np.array(np.arange(1, np.int(b) + 1))
        y = lambda x: np.sum(x ** m / b * bins) - np.sum((x ** m) / m)
        p = optim.bisect(y, BOUNDS[0] + DIST_FROM_BOUND,
                         min((sys.float_info[0] / bins) ** (1 / b), 2),
                         xtol=1.490116e-08, maxiter=1000)
    return p
Given bins (S) and b (N) solve for MLE of truncated logseries parameter p

Parameters
----------
bins : float
    Number of bins. Considered S in an ecological context
b : float
    Upper truncation of distribution. Considered N in an ecological context

Returns
-------
float
    MLE estimate of p

Notes
-----
Adapted from Ethan White's macroecology_tools
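A hypothetical call, solving for p with 10 species and 100 individuals:

p = _trunc_logser_solver(bins=10, b=100)  # MLE of p for S=10, N=100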
def _expon_solve_lam_from_mu(mu, b):
    def lam_eq(lam, mu, b):
        # Small offset added to denominator to avoid 0/0 errors
        lam, mu, b = Decimal(lam), Decimal(mu), Decimal(b)
        return ((1 - (lam * b + 1) * np.exp(-lam * b)) /
                (lam - lam * np.exp(-lam * b) + Decimal(1e-32)) - mu)

    return optim.brentq(lam_eq, -100, 100, args=(mu, b), disp=True)
For the expon_uptrunc, given mu and b, return lam. Similar to geom_uptrunc
def _make_rank(dist_obj, n, mu, sigma, crit=0.5, upper=10000, xtol=1):
    qs = (np.arange(1, n + 1) - 0.5) / n
    rank = np.empty(len(qs))
    brute_ppf = lambda val, prob: prob - dist_obj.cdf(val, mu, sigma)
    qs_less = qs <= crit
    ind = np.sum(qs_less)
    # Use ppf if qs are below crit
    rank[qs_less] = dist_obj.ppf(qs[qs_less], mu, sigma)
    # Use brute force if they are above
    for i, tq in enumerate(qs[~qs_less]):
        j = ind + i
        try:
            # TODO: Use an adaptable lower bound to increase speed
            rank[j] = np.abs(np.ceil(optim.brentq(brute_ppf, -1, upper,
                                                  args=(tq,), xtol=xtol)))
        except ValueError:
            # If it is above the upper bound set all remaining values
            # to the previous value
            rank[j:] = np.repeat(rank[j - 1], len(rank[j:]))
            break
    return rank
Make rank distribution using both ppf and brute force.

Setting crit = 1 is equivalent to just using the ppf.

Parameters
----------
{0}
def _mean_var(vals, pmf):
    mean = np.sum(vals * pmf)
    var = np.sum(vals ** 2 * pmf) - mean ** 2
    return mean, var
Calculates the mean and variance from vals and pmf

Parameters
----------
vals : ndarray
    Value range for a distribution
pmf : ndarray
    pmf values corresponding with vals

Returns
-------
tuple
    (mean, variance)
def rank(self, n, *args):
    return self.ppf((np.arange(1, n + 1) - 0.5) / n, *args)
{0}
def rvs_alt(self, *args, **kwargs):
    l = kwargs.get('l', 1)
    b = kwargs.get('b', 1e5)
    size = kwargs.get('size', 1)
    model_cdf = self.cdf(np.arange(l, b + 1), *args)
    unif_rands = np.random.random(size)
    model_rands = np.array([np.where(tx <= model_cdf)[0][0] + l
                            for tx in unif_rands])
    return model_rands
{0}
def fit_mle(self, data, b=None):
    # Take mean of data as MLE of distribution mean, then calculate p
    mu = np.mean(data)
    if not b:
        b = np.sum(data)
    p = _geom_solve_p_from_mu_vect(mu, b)
    # Just return float, not len 1 array
    if len(np.atleast_1d(p)) == 1:
        return float(p), b
    else:
        return p, b
%(super)s In addition to data, requires ``b``, the upper limit of the distribution.
def fit_mle(self, data, init_vals=(80, 80)):
    if len(data) > 1:
        mu = np.mean(data)
        var = np.var(data)
        theta0 = var / mu
        alpha0 = mu / theta0
    else:
        alpha0 = init_vals[0]
        theta0 = init_vals[1]

    def mle(params):
        return -np.sum(np.log(self.pmf(data, params[0], params[1])))

    # Bounded fmin?
    alpha, theta = optim.fmin(mle, x0=[alpha0, theta0], disp=0)
    return alpha, theta
%(super)s In addition to data, can take init_vals which allows the user to specify initial values for (alpha, theta) during the optimization.
def fit_mle(self, data, k_array=np.arange(0.1, 100, 0.1)):
    # todo: check and mention in docstring biases of mle for k_agg
    data = np.array(data)
    mu = np.mean(data)
    return mu, _solve_k_from_mu(data, k_array, nbinom_nll, mu)
%(super)s In addition to data, gives an optional keyword argument k_array containing the values to search for k_agg. A brute force search is then used to find the parameter k_agg.
def translate_args(self, mu, k_agg, return_p=False):
    if return_p:
        return nbinom_ztrunc_p(mu, k_agg), k_agg
    else:
        return mu, k_agg
%(super)s The keyword argument return_p computes the p values used to define the truncated negative binomial
def fit_mle(self, data, k_agg0=0.5):
    mu = np.mean(data)

    def mle(k):
        return -np.sum(np.log(self.pmf(data, mu, k)))

    k = optim.fmin(mle, x0=k_agg0, disp=0)
    return mu, k[0]
%(super)s In addition to data, gives an optional keyword argument k_agg0 that specifies the initial value of k_agg used in the optimization.
def fit_mle(self, data, b=None):
    data = np.array(data)
    length = len(data)
    if not b:
        b = np.sum(data)
    return _trunc_logser_solver(length, b), b
%(super)s
b : float
    The upper bound of the distribution. If None, fixed at sum(data)
def rank(self, n, mu, sigma, crit=.5, upper=10000, xtol=1):
    return _make_rank(self, n, mu, sigma, crit=crit, upper=upper, xtol=xtol)
%(super)s

Additional Parameters
---------------------
{0}
def fit_mle(self, data, b=None):
    # Take mean of data as MLE of distribution mean, then calculate lam
    mu = np.mean(data)
    if not b:
        b = np.sum(data)
    lam = _expon_solve_lam_from_mu_vect(mu, b)
    # Just return float, not len 1 array
    if len(np.atleast_1d(lam)) == 1:
        return float(lam), b
    else:
        return lam, b
%(super)s

Additional Parameters
---------------------
b : float
    The upper limit of the distribution
def fit_mle(self, data, fix_mean=False):
    if not fix_mean:
        sigma, _, scale = stats.lognorm.fit(data, floc=0)
        return np.log(scale), sigma
    else:
        mean = np.mean(data)
        # MLE fxn to be optimized
        mle = lambda sigma, x, mean: -1 * np.sum(self._pdf_w_mean(x, mean, sigma))
        sigma = optim.fmin(mle, np.array([np.std(np.log(data), ddof=1)]),
                           args=(data, mean), disp=0)[0]
        return self.translate_args(mean, sigma)
%(super)s

Additional Parameters
---------------------
fix_mean : bool
    Default False. If True, fixes mean before optimizing sigma
def _pdf_w_mean(self, x, mean, sigma):
    # Lognorm pmf with mean for optimization
    mu, sigma = self.translate_args(mean, sigma)
    return self.logpdf(x, mu, sigma)
Calculates the pdf of a lognormal distribution with parameters mean and sigma

Parameters
----------
mean : float or ndarray
    Mean of the lognormal distribution
sigma : float or ndarray
    Sigma parameter of the lognormal distribution

Returns
-------
float or ndarray
    pdf of x
def union_join(left, right, left_as='left', right_as='right'):
    attrs = {}
    attrs.update(get_object_attrs(right))
    attrs.update(get_object_attrs(left))
    attrs[left_as] = left
    attrs[right_as] = right
    if isinstance(left, dict) and isinstance(right, dict):
        return attrs
    else:
        joined_class = type(left.__class__.__name__ + right.__class__.__name__,
                            (Union,), {})
        return joined_class(attrs)
Join function truest to the SQL style join. Merges both objects together
in a sum-type, saving references to each parent in ``left`` and ``right``
attributes.

>>> Dog = namedtuple('Dog', ['name', 'woof', 'weight'])
>>> dog = Dog('gatsby', 'Ruff!', 15)
>>> Cat = namedtuple('Cat', ['name', 'meow', 'weight'])
>>> cat = Cat('pleo', 'roooowwwr', 12)
>>> catdog = union_join(cat, dog, 'cat', 'dog')
>>> catdog.name
pleo
>>> catdog.woof
Ruff!
>>> catdog.dog.name
gatsby

:param left: left object to be joined with right
:param right: right object to be joined with left
:return: joined object with attrs/methods from both parents available
def removeKeyButtonEvent(self, buttons=[]):
    for i in range(0, len(buttons)):
        GPIO.remove_event_detect(buttons[i])
! Remove key button event callbacks
@param buttons: an array of button Ids. eg. [ 12, 13, 15, ... ]
def configKeyButtons(self, enableButtons=[], bounceTime=DEF_BOUNCE_TIME_NORMAL,
                     pullUpDown=GPIO.PUD_UP, event=GPIO.BOTH):
    for key in enableButtons:
        self.setKeyButton(key["id"], key["callback"], bounceTime, pullUpDown, event)
! Config multi key buttons IO and events at the same time
@param enableButtons: an array of key button configs. eg.
    [{ "id": BUTTON_ACT_A, "callback": aCallbackFun }, ... ]
@param bounceTime: Default set to DEF_BOUNCE_TIME_NORMAL
@param pullUpDown: Default set to GPIO.PUD_UP
@param event: Default set to GPIO.BOTH. It can be: { GPIO.RISING | GPIO.FALLING | GPIO.BOTH }

@see DEF_BOUNCE_TIME_SHORT_MON (10ms)
@see DEF_BOUNCE_TIME_SHORT (50ms)
@see DEF_BOUNCE_TIME_NORMAL (100ms)
@see DEF_BOUNCE_TIME_LONG (200ms)
def best_match_from_list(item, options, fuzzy=90, fname_match=True,
                         fuzzy_fragment=None, guess=False):
    '''Returns the best match from :meth:`matches_from_list` or ``None`` if no good matches'''
    matches = matches_from_list(item, options, fuzzy, fname_match, fuzzy_fragment, guess)
    if len(matches) > 0:
        return matches[0]
    return None
Returns the best match from :meth:`matches_from_list` or ``None`` if no good matches
def best_item_from_list(item, options, fuzzy=90, fname_match=True,
                        fuzzy_fragment=None, guess=False):
    '''Returns just the best item, or ``None``'''
    match = best_match_from_list(item, options, fuzzy, fname_match, fuzzy_fragment, guess)
    if match:
        return match[0]
    return None
Returns just the best item, or ``None``
async def create_hlk_sw16_connection(port=None, host=None,
                                     disconnect_callback=None,
                                     reconnect_callback=None, loop=None,
                                     logger=None, timeout=None,
                                     reconnect_interval=None):
    client = SW16Client(host, port=port,
                        disconnect_callback=disconnect_callback,
                        reconnect_callback=reconnect_callback,
                        loop=loop, logger=logger, timeout=timeout,
                        reconnect_interval=reconnect_interval)
    await client.setup()
    return client
Create HLK-SW16 Client class.
def _reset_timeout(self):
    if self._timeout:
        self._timeout.cancel()
    self._timeout = self.loop.call_later(self.client.timeout,
                                         self.transport.close)
Reset timeout for data keep alive.
def reset_cmd_timeout(self):
    if self._cmd_timeout:
        self._cmd_timeout.cancel()
    self._cmd_timeout = self.loop.call_later(self.client.timeout,
                                             self.transport.close)
Reset timeout for command execution.
def _handle_lines(self):
    while b'\xdd' in self._buffer:
        linebuf, self._buffer = self._buffer.rsplit(b'\xdd', 1)
        line = linebuf[-19:]
        self._buffer += linebuf[:-19]
        if self._valid_packet(line):
            self._handle_raw_packet(line)
        else:
            self.logger.warning('dropping invalid data: %s',
                                binascii.hexlify(line))
Assemble incoming data into per-line packets.
def _valid_packet(raw_packet):
    if raw_packet[0:1] != b'\xcc':
        return False
    if len(raw_packet) != 19:
        return False
    checksum = 0
    for i in range(1, 17):
        checksum += raw_packet[i]
    if checksum != raw_packet[18]:
        return False
    return True
Validate incoming packet.
def send_packet(self):
    waiter, packet = self.client.waiters.popleft()
    self.logger.debug('sending packet: %s', binascii.hexlify(packet))
    self.client.active_transaction = waiter
    self.client.in_transaction = True
    self.client.active_packet = packet
    self.reset_cmd_timeout()
    self.transport.write(packet)
Write next packet in send queue.
def format_packet(command):
    frame_header = b"\xaa"
    verify = b"\x0b"
    send_delim = b"\xbb"
    return frame_header + command.ljust(17, b"\x00") + verify + send_delim
Format packet to be sent.
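Every outgoing frame is therefore exactly 20 bytes: a 0xaa header, the command left-justified to 17 bytes with 0x00 padding, a fixed 0x0b verify byte, and a 0xbb delimiter. For example:

format_packet(b"\x1e")
# -> b'\xaa\x1e' + b'\x00' * 16 + b'\x0b\xbb'  (20 bytes)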
def connection_lost(self, exc):
    if exc:
        self.logger.error('disconnected due to error')
    else:
        self.logger.info('disconnected because of close/abort.')
    if self.disconnect_callback:
        asyncio.ensure_future(self.disconnect_callback(), loop=self.loop)
Log when connection is closed, if needed call callback.
async def setup(self):
    while True:
        fut = self.loop.create_connection(
            lambda: SW16Protocol(
                self,
                disconnect_callback=self.handle_disconnect_callback,
                loop=self.loop, logger=self.logger),
            host=self.host, port=self.port)
        try:
            self.transport, self.protocol = \
                await asyncio.wait_for(fut, timeout=self.timeout)
        except asyncio.TimeoutError:
            self.logger.warning("Could not connect due to timeout error.")
        except OSError as exc:
            self.logger.warning("Could not connect due to error: %s", str(exc))
        else:
            self.is_connected = True
            if self.reconnect_callback:
                self.reconnect_callback()
            break
        await asyncio.sleep(self.reconnect_interval)
Set up the connection with automatic retry.
def stop(self):
    self.reconnect = False
    self.logger.debug("Shutting down.")
    if self.transport:
        self.transport.close()
Shut down transport.
async def handle_disconnect_callback(self):
    self.is_connected = False
    if self.disconnect_callback:
        self.disconnect_callback()
    if self.reconnect:
        self.logger.debug("Protocol disconnected...reconnecting")
        await self.setup()
        self.protocol.reset_cmd_timeout()
        if self.in_transaction:
            self.protocol.transport.write(self.active_packet)
        else:
            packet = self.protocol.format_packet(b"\x1e")
            self.protocol.transport.write(packet)
Reconnect automatically unless stopping.
def register_status_callback(self, callback, switch):
    if self.status_callbacks.get(switch, None) is None:
        self.status_callbacks[switch] = []
    self.status_callbacks[switch].append(callback)
Register a callback which will fire when state changes.
def _send(self, packet):
    fut = self.loop.create_future()
    self.waiters.append((fut, packet))
    if self.waiters and self.in_transaction is False:
        self.protocol.send_packet()
    return fut
Add packet to send queue.
async def turn_on(self, switch=None):
    if switch is not None:
        switch = codecs.decode(switch.rjust(2, '0'), 'hex')
        packet = self.protocol.format_packet(b"\x10" + switch + b"\x01")
    else:
        packet = self.protocol.format_packet(b"\x0a")
    states = await self._send(packet)
    return states
Turn on relay.
async def turn_off(self, switch=None):
    if switch is not None:
        switch = codecs.decode(switch.rjust(2, '0'), 'hex')
        packet = self.protocol.format_packet(b"\x10" + switch + b"\x02")
    else:
        packet = self.protocol.format_packet(b"\x0b")
    states = await self._send(packet)
    return states
Turn off relay.
async def status(self, switch=None):
    if switch is not None:
        if self.waiters or self.in_transaction:
            fut = self.loop.create_future()
            self.status_waiters.append(fut)
            states = await fut
            state = states[switch]
        else:
            packet = self.protocol.format_packet(b"\x1e")
            states = await self._send(packet)
            state = states[switch]
    else:
        if self.waiters or self.in_transaction:
            fut = self.loop.create_future()
            self.status_waiters.append(fut)
            state = await fut
        else:
            packet = self.protocol.format_packet(b"\x1e")
            state = await self._send(packet)
    return state
Get current relay status.
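Putting the client methods together, a minimal usage sketch; the host, port, and switch ID are hypothetical (switch IDs appear to be hex strings, judging by the rjust(2, '0') handling above):

import asyncio

async def main():
    client = await create_hlk_sw16_connection(host='192.168.1.10', port=8080)
    await client.turn_on('0')        # switch relay 0 on
    states = await client.status()   # fetch all relay states
    client.stop()

asyncio.run(main())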
def value(self, item):
    # type: (Any) -> Any
    if isinstance(item, weakref.ReferenceType):
        if item() is None:
            raise TreeDeletedException()
        return item()
    return item
Return value stored in weakref.

:param item: Object from which to get the value.
:return: Value stored in the weakref, otherwise the original value.
:raise TreeDeletedException: when the weakref is already deleted.
def remove_all(self, item):
    # type: (Any) -> None
    item = self.ref(item)
    while list.__contains__(self, item):
        list.remove(self, item)
Remove all occurrences of the parameter.

:param item: Value to delete from the WeakList.
def index(self, item, **kwargs):
    # type: (Any, dict) -> int
    return list.index(self, self.ref(item), **kwargs)
Get index of the parameter.

:param item: Item for which to get the index.
:return: Index of the parameter in the WeakList.
def insert(self, index, item):
    # type: (int, Any) -> None
    return list.insert(self, index, self.ref(item))
Insert item at the specific index.

:param index: Index where to insert the item.
:param item: Item to insert.
def sort(self, *, key: Optional[Callable[[Any], Any]] = None, reverse: bool = False) -> None:
    return list.sort(self, key=self._sort_key(key), reverse=reverse)
Sort _WeakList.

:param key: Key by which to sort, default None.
:param reverse: If True, sort in reverse order; False by default.
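A short sketch of how these overrides behave; the class name _WeakList is taken from the sort annotation above, and Node is a hypothetical referent type:

class Node(object):
    pass

a, b = Node(), Node()
wl = _WeakList([a, b, a])
wl.index(b)       # 1 -- lookups go through self.ref(), so plain objects match their weakrefs
wl.remove_all(a)  # drops every occurrence of a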
def get_feed(self, datasource_id):
    info = self.__metadb.one(
        """
        SELECT to_json(ds) as datasource
             , to_json(fc) as connector
             , to_json(fct) as connector_type
             , to_json(ctp) as connector_type_preset
             , json_build_object('email', u.email, 'full_name', u.full_name) as author_user
        FROM meta.feed_datasource ds
        LEFT JOIN meta.feed_connector fc ON fc.id = ds.connector_id
        LEFT JOIN meta.feed_connector_type fct ON fct.id = fc.connector_type_id
        LEFT JOIN meta.feed_connector_type_preset ctp ON ctp.id = ds.connector_type_preset_id
        LEFT JOIN meta.user_list u ON u.id = ds.author_user_id
        WHERE ds.id = :datasource_id::uuid
        """,
        {"datasource_id": datasource_id}
    )
    return FeedDataSource(**info)
Fetch the settings for a feed

:param datasource_id: feed identifier
:return: FeedDataSource
def get_data(self, datasource, callback):
    task = self.__app.worker.current_task
    media_metadata = datasource.connector_type_preset['preset_data']['media_metadata']
    result_data = task['result_data']
    tmp_file = NamedTemporaryFile(
        delete=False,
        suffix=SOURCE_FORMAT_EXTENSION.get(media_metadata['sourceFormat']))
    self.__app.log.info("Opening file", {"filename": tmp_file.name})
    with open(tmp_file.name, 'wb') as f:
        callback(f)
    self.__app.log.info("start media upload")
    result_data['stage_id'] = "persist_media_file"
    self.__starter.update_task_result_data(task)
    result = self.__media.upload(open(tmp_file.name), {
        "ttlInSec": 60 * 60 * 24,  # 24h
        "entityId": 2770,
        "objectId": task.get('data', {}).get("ds_id"),
        "info": {"metadata": media_metadata}
    })
    result_data['stage_id'] = "generate_media_finish"
    result_data['media_id'] = result['id']
    self.__starter.update_task_result_data(task)
    return result
Persist a media file

:param task:
:param media_metadata:
:param file_suffix:
:param callback:
:return:
def datasource_process(self, datasource_id):
    # TODO Remove later; this class is used for something else now
    # TODO Entity fields are not selected without applicationId. Consider how NOT normal this is
    response = self.__app.native_api_call(
        'feed', 'datasource/' + datasource_id + '/process?applicationId=1',
        {}, self.__options, False, None, False, http_method="POST")
    return json.loads(response.text)
deprecated
Runs the configured processing jobs in the feed

:param datasource_id: uuid
def _delta_dir():
    repo = Repo()
    current_dir = os.getcwd()
    repo_dir = repo.tree().abspath
    delta_dir = current_dir.replace(repo_dir, '')
    if delta_dir:
        return delta_dir + '/'
    else:
        return ''
Returns the relative path of the current directory to the git repository.
This path will be added to the 'filename' path to find the file. If
current_dir is the git root, this function returns an empty string.

Keyword Arguments:
    <none>

Returns:
    str -- relative path of the current dir to git root dir
           empty string if current dir is the git root dir
def commit(filename):
    try:
        repo = Repo()
        # gitcmd = repo.git
        # gitcmd.commit(filename)
        index = repo.index
        index.commit("Updated file: {0}".format(filename))
    except Exception as e:
        print("exception while commit: %s" % e.message)
Commit (git) a specified file

This method does the same as::

    $ git commit -a "message"

Keyword Arguments:
    :filename: (str) -- name of the file to commit

Returns:
    <nothing>
def add_file_to_repo(filename):
    try:
        repo = Repo()
        index = repo.index
        index.add([_delta_dir() + filename])
    except Exception as e:
        print("exception while gitadding file: %s" % e.message)
Add a file to the git repo

This method does the same as::

    $ git add filename

Keyword Arguments:
    :filename: (str) -- name of the file to commit

Returns:
    <nothing>
def reset_to_last_commit():
    try:
        repo = Repo()
        gitcmd = repo.git
        gitcmd.reset(hard=True)
    except Exception:
        pass
Reset a modified file to its last commit status

This method does the same as::

    $ git reset --hard

Keyword Arguments:
    <none>

Returns:
    <nothing>
def commit_history(filename):
    result = []
    repo = Repo()
    for commit in repo.head.commit.iter_parents(paths=_delta_dir() + filename):
        result.append({'date': datetime.fromtimestamp(commit.committed_date +
                                                      commit.committer_tz_offset),
                       'hexsha': commit.hexsha})
    return result
Retrieve the commit history for a given filename.

Keyword Arguments:
    :filename: (str) -- full name of the file

Returns:
    list of dicts -- list of commits
    if the file is not found, returns an empty list
def read_committed_file(gitref, filename):
    repo = Repo()
    commitobj = repo.commit(gitref)
    blob = commitobj.tree[_delta_dir() + filename]
    return blob.data_stream.read()
Retrieve the content of a file in an old commit and return it.

Keyword Arguments:
    :gitref: (str) -- full reference of the git commit
    :filename: (str) -- name (full path) of the file

Returns:
    str -- content of the file
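A usage sketch combining these helpers, run from inside a git working tree with a hypothetical file name:

add_file_to_repo('notes.txt')
commit('notes.txt')
history = commit_history('notes.txt')
if history:
    oldest = history[-1]['hexsha']
    content = read_committed_file(oldest, 'notes.txt')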
def get(self, key, _else=None):
    with self._lock:
        self.expired()  # see if everything expired
        try:
            value = self._dict[key].get()
            return value
        except KeyError:
            return _else
        except ValueError:
            return _else
The method to get an asset's value
def set(self, key, value, expires=None, future=None):
    # assert the values above
    with self._lock:
        try:
            self._dict[key].set(value, expires=expires, future=future)
        except KeyError:
            self._dict[key] = moment(value, expires=expires, future=future,
                                     lock=self._lock)
        return value
Set a value
def values(self):
    self.expired()
    values = []
    for key in self._dict.keys():
        try:
            value = self._dict[key].get()
            values.append(value)
        except:
            continue
    return values
Will only return the current values
def has_key(self, key):
    if key in self._dict:
        try:
            self[key]
            return True
        except ValueError:
            return False
        except KeyError:
            return False
    return False
Does the key exist? This method will check to see if it has expired too.
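A sketch of the expiring-store API these methods build up; the container class name and the expires semantics are assumptions:

store = Cache()                            # hypothetical class holding get/set/values/has_key
store.set('token', 'abc123', expires=60)   # assumed: expire 60 seconds from now
store.get('token', _else='missing')        # 'abc123' while fresh, 'missing' after expiry
store.has_key('token')                     # also checks expiry as a side effect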
def unit_rules_restore(root):
    # type: (Nonterminal) -> Nonterminal
    items = Traversing.post_order(root)
    items = filter(lambda x: isinstance(x, ReducedUnitRule), items)
    for rule in items:
        parent_nonterm = rule.from_symbols[0]  # type: Nonterminal
        # restore chain of unit rules
        for r in rule.by_rules:
            created_rule = r()  # type: Rule
            parent_nonterm._set_to_rule(created_rule)
            created_rule._from_symbols.append(parent_nonterm)
            created_nonterm = r.toSymbol()  # type: Nonterminal
            created_rule._to_symbols.append(created_nonterm)
            created_nonterm._set_from_rule(created_rule)
            parent_nonterm = created_nonterm
        # restore last rule
        last_rule = rule.end_rule()  # type: Rule
        last_rule._from_symbols.append(parent_nonterm)
        parent_nonterm._set_to_rule(last_rule)
        for ch in rule.to_symbols:  # type: Nonterminal
            ch._set_from_rule(last_rule)
            last_rule._to_symbols.append(ch)
    return root
Transform parsed tree for grammar with removed unit rules. The unit rules will be returned back to the tree. :param root: Root of the parsed tree. :return: Modified tree.
def remaining_bytes(self, meta=True):
    pos, self._pos = self._pos, len(self.buffer)
    return self.buffer[pos:]
Returns the remaining, unread bytes from the buffer.
def decode(self, bytes):
    self.buffer = bytes
    self._pos = 0
    Packet = identifier.get_packet_from_id(self._read_variunt())
    # unknown packets will be None from the identifier
    if Packet is None:
        return None
    packet = Packet()
    packet.ParseFromString(self.remaining_bytes())
    return packet
Decodes the packet off the byte string.
def encode(self, packet):
    id = identifier.get_packet_id(packet)
    if id is None:
        raise EncoderException('unknown packet')
    self._write_variunt(id)
    self._write(packet.SerializeToString())
    return bytes(self.buffer)
Pushes a packet to the writer, encoding it on the internal buffer.
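A hypothetical round trip through the codec; the wrapper class names and packet instance are assumptions, since only the methods are shown:

encoder, decoder = Encoder(), Decoder()  # hypothetical classes owning encode()/decode()
data = encoder.encode(my_packet)         # varint packet id + serialized protobuf body
packet = decoder.decode(data)            # None if the id is unknown to the identifier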
def setup_logging(level):
    logging.root.setLevel(level)
    logging.root.addHandler(STREAM_HANDLER)
Setup logger.
def create(self, data):
    # todo: copy-paste code from representation.validate -> refactor
    if data is None:
        return None

    prototype = {}
    errors = {}

    # create and populate the prototype
    for field_name, field_spec in self.spec.fields.items():
        try:
            value = self._create_value(data, field_name, self.spec)
        except ValidationError, e:
            if field_name not in self.default_create_values:
                if hasattr(e, 'message_dict'):
                    # prefix error keys with top level field name
                    errors.update(dict(zip(
                        [field_name + '.' + key for key in e.message_dict.keys()],
                        e.message_dict.values())))
                else:
                    errors[field_name] = e.messages
        else:
            key_name = self.property_name_map[field_name]
            prototype[key_name] = value

    # check extra fields
    if self.prevent_extra_fields:
        extras = set(data.keys()) - set(self.property_name_map.keys())
        if extras:
            errors[', '.join(extras)] = ['field(s) not allowed']

    # if errors, raise ValidationError
    if errors:
        raise ValidationError(errors)

    # return dict or object based on the prototype
    _data = deepcopy(self.default_create_values)
    _data.update(prototype)
    if self.klass:
        instance = self.klass()
        instance.__dict__.update(prototype)
        return instance
    else:
        return prototype
Create object from the given data.

The given data may or may not have been validated prior to calling this
function. This function will try its best in creating the object. If the
resulting object cannot be produced, raises ``ValidationError``.

The spec can affect how individual fields will be created by implementing
``clean()`` for the fields needing customization.

:param data: the data as a dictionary.
:return: instance of ``klass`` or dictionary.
:raises: ``ValidationError`` if factory is unable to create object.
def serialize(self, entity, request=None):
    def should_we_insert(value, field_spec):
        return value not in self.missing or field_spec.required

    errors = {}
    ret = {}

    for field_name, field_spec in self.spec.fields.items():
        value = self._get_value_for_serialization(entity, field_name, field_spec)
        func = self._get_serialize_func(field_name, self.spec)
        try:
            # perform serialization
            value = func(value, entity, request)
            if should_we_insert(value, field_spec):
                ret[field_name] = value
        except ValidationError, e:
            if hasattr(e, 'message_dict'):
                # prefix error keys with top level field name
                errors.update(dict(zip(
                    [field_name + '.' + key for key in e.message_dict.keys()],
                    e.message_dict.values())))
            else:
                errors[field_name] = e.messages

    if errors:
        raise ValidationError(errors)

    return None if ret == {} else ret
Serialize entity into dictionary.

The spec can affect how individual fields will be serialized by
implementing ``serialize()`` for the fields needing customization.

:returns: dictionary
def _create_value(self, data, name, spec):
    field = getattr(self, 'create_' + name, None)
    if field:
        # this factory has a special creator function for this field
        return field(data, name, spec)
    value = data.get(name)
    return spec.fields[name].clean(value)
Create the value for a field.

:param data: the whole data for the entity (all fields).
:param name: name of the initialized field.
:param spec: spec for the whole entity.
def _get_serialize_func(self, name, spec):
    func = getattr(self, 'serialize_' + name, None)
    if func:
        # this factory has a special serializer function for this field
        return func
    func = getattr(spec.fields[name], 'serialize', None)
    if func:
        return func
    return lambda value, entity, request: value
Return the function that is used for serialization.
def _get_value_for_serialization(self, data, name, spec):
    name = self.property_name_map[name]
    return getattr(data, name, None)
Return the value of the field in entity (or ``None``).
def _create_mappings(self, spec):
    ret = dict(zip(set(spec.fields), set(spec.fields)))
    ret.update(dict([(n, s.alias) for n, s in spec.fields.items() if s.alias]))
    return ret
Create property name map based on aliases.
def all_substrings(s):
    ''' yields all substrings of a string '''
    join = ''.join
    for i in range(1, len(s) + 1):
        for sub in window(s, i):
            yield join(sub)
yields all substrings of a string
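For example:

list(all_substrings('abc'))  # -> ['a', 'b', 'c', 'ab', 'bc', 'abc']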
def equivalent_release_for_product(self, product):
    releases = self._default_manager.filter(
        version__startswith=self.major_version() + '.',
        channel=self.channel, product=product).order_by('-version')
    if not getattr(settings, 'DEV', False):
        releases = releases.filter(is_public=True)
    if releases:
        return sorted(
            sorted(releases, reverse=True,
                   key=lambda r: len(r.version.split('.'))),
            reverse=True, key=lambda r: r.version.split('.')[1])[0]
Returns the release for a specified product with the same channel and major version, choosing the highest minor version, or None if no such release exists.
def notes(self, public_only=False):
    tag_index = dict((tag, i) for i, tag in enumerate(Note.TAGS))
    notes = self.note_set.order_by('-sort_num', 'created')
    if public_only:
        notes = notes.filter(is_public=True)
    known_issues = [n for n in notes if n.is_known_issue_for(self)]
    new_features = sorted(
        sorted(
            (n for n in notes if not n.is_known_issue_for(self)),
            key=lambda note: tag_index.get(note.tag, 0)),
        key=lambda n: n.tag == 'Fixed' and n.note.startswith(self.version),
        reverse=True)
    return new_features, known_issues
Retrieve a list of Note instances that should be shown for this release, grouped as either new features or known issues. Both groups are sorted first by sort_num highest to lowest, then by created date. New features are additionally sorted by tag in the order specified by Note.TAGS, with untagged notes coming first; finally, any note with the Fixed tag whose text starts with the release version is moved to the top, for what we call "dot fixes".
def to_dict(self):
    data = model_to_dict(self, exclude=['id'])
    data['title'] = unicode(self)
    data['slug'] = self.slug
    data['release_date'] = self.release_date.date().isoformat()
    data['created'] = self.created.isoformat()
    data['modified'] = self.modified.isoformat()
    new_features, known_issues = self.notes(public_only=False)
    for note in known_issues:
        note.tag = 'Known'
    data['notes'] = [n.to_dict(self) for n in chain(new_features, known_issues)]
    return data
Return a dict of all data about the release
def to_simple_dict(self):
    return {
        'version': self.version,
        'product': self.product,
        'channel': self.channel,
        'is_public': self.is_public,
        'slug': self.slug,
        'title': unicode(self),
    }
Return a dict of only the basic data about the release
def playTone(self, freq, reps=1, delay=0.1, muteDelay=0.0):
    if freq == 0:
        self.stopTone()
        self._delay(delay)  # sleep(delay)
        return False
    if self._pwmPlayer is None:
        self._initPWMPlayer(freq)
    for r in range(0, reps):
        self._pwmPlayer.start(self.TONE_DUTY)
        self._pwmPlayer.ChangeFrequency(freq)
        self._delay(delay)  # sleep(delay)
        if muteDelay > 0:
            self.stopTone()
            self._delay(muteDelay)  # sleep(muteDelay)
    return True
! Play a tone

@param freq: frequency
@param reps: number of repeats
@param delay: >= 0(s). If 0, do not delay; tone playback stops immediately
@param muteDelay: >= 0(s). If 0, no pause after playing; the next note plays immediately
def playToneList(self, playList=None):
    if playList is None:
        return False
    for t in playList:
        self.playTone(t["freq"], t["reps"], t["delay"], t["muteDelay"])
    self.stopTone()
    return True
! Play tones from a tone list
@param playList: an array of tones

@note <b>playList</b> format:

    [
        {"freq": 440, "reps": 1, "delay": 0.08, "muteDelay": 0.15},
        {"freq": 567, "reps": 3, "delay": 0.08, "muteDelay": 0.15},
        ...
    ]

\e delay: >= 0(s). If 0, do not delay; the tone stops immediately
\e muteDelay: >= 0(s). If 0, no pause after playing; the next note plays immediately
def all(self, instance):
    url = self._url.format(instance=instance)
    response = requests.get(url, **self._default_request_kwargs)
    data = self._get_response_data(response)
    return self._concrete_acl_list(data)
Get all ACLs associated with the instance specified by name.

:param str instance: The name of the instance from which to fetch ACLs.
:returns: A list of :py:class:`Acl` objects associated with the specified instance.
:rtype: list
def create(self, instance, cidr_mask, description, **kwargs):
    # Build up request data.
    url = self._url.format(instance=instance)
    request_data = {
        'cidr_mask': cidr_mask,
        'description': description
    }
    request_data.update(kwargs)

    # Call to create an instance.
    response = requests.post(
        url,
        data=json.dumps(request_data),
        **self._default_request_kwargs
    )

    # Log outcome of instance creation request.
    if response.status_code == 200:
        logger.info('Successfully created a new ACL for instance {} with: {}.'
                    .format(instance, request_data))
    else:
        logger.info('Failed to create a new ACL for instance {} with: {}.'
                    .format(instance, request_data))

    data = self._get_response_data(response)
    return self._concrete_acl(data)
Create an ACL entry for the specified instance.

:param str instance: The name of the instance to associate the new ACL entry with.
:param str cidr_mask: The IPv4 CIDR mask for the new ACL entry.
:param str description: A short description for the new ACL entry.
:param collector kwargs: (optional) Additional key=value pairs to be supplied to the
    creation payload. **Caution:** fields unrecognized by the API will cause this
    request to fail with a 400 from the API.
def get(self, instance, acl):
    base_url = self._url.format(instance=instance)
    url = '{base}{aclid}/'.format(base=base_url, aclid=acl)
    response = requests.get(url, **self._default_request_kwargs)
    data = self._get_response_data(response)
    return self._concrete_acl(data)
Get an ACL by ID belonging to the instance specified by name.

:param str instance: The name of the instance from which to fetch the ACL.
:param str acl: The ID of the ACL to fetch.
:returns: An :py:class:`Acl` object, or None if ACL does not exist.
:rtype: :py:class:`Acl`
def delete(self, instance, acl):
    base_url = self._url.format(instance=instance)
    url = '{base}{aclid}/'.format(base=base_url, aclid=acl)
    response = requests.delete(url, **self._default_request_kwargs)
    if response.status_code == 200:
        logger.info('Successfully deleted ACL {}'.format(acl))
    else:
        logger.info('Failed to delete ACL {}'.format(acl))
        logger.info('Response: [{0}] {1}'.format(response.status_code,
                                                 response.content))
        raise errors.ObjectRocketException('Failed to delete ACL.')
Delete an ACL by ID belonging to the instance specified by name.

:param str instance: The name of the instance on which the ACL exists.
:param str acl: The ID of the ACL to delete.
def _concrete_acl(self, acl_doc):
    if not isinstance(acl_doc, dict):
        return None

    # Attempt to instantiate an Acl object with the given dict.
    try:
        return Acl(document=acl_doc, acls=self)

    # If construction fails, log the exception and return None.
    except Exception as ex:
        logger.exception(ex)
        logger.error('Could not instantiate ACL document. You probably need to upgrade '
                     'to a recent version of the client. Document which caused this '
                     'error: {}'.format(acl_doc))
        return None
Concretize an ACL document.

:param dict acl_doc: A document describing an ACL entry. Should come from the API.
:returns: An :py:class:`Acl`, or None.
:rtype: :py:class:`Acl`
def _concrete_acl_list(self, acl_docs):
    if not acl_docs:
        return []
    return list(filter(None, [self._concrete_acl(acl_doc=doc) for doc in acl_docs]))
Concretize a list of ACL documents.

:param list acl_docs: A list of ACL documents. Should come from the API.
:returns: A list of :py:class:`Acl` objects.
:rtype: list
def _default_request_kwargs(self):
    defaults = copy.deepcopy(super(Acls, self)._default_request_kwargs)
    defaults.setdefault('headers', {}).update({
        'X-Auth-Token': self._client.auth._token
    })
    return defaults
The default request keyword arguments to be passed to the requests library.
def _url(self):
    base_url = self._client._url.rstrip('/')
    return '{}/instances/{}/acls/{}/'.format(base_url, self.instance_name, self.id)
The URL of this ACL object.
def first(pipe, items=1):
    '''
    first is essentially the next() function except its second argument
    determines how many of the first items you want. If items is more than
    1 the output is an islice of the generator. If items is 1, the first
    item is returned
    '''
    pipe = iter(pipe)
    return next(pipe) if items == 1 else islice(pipe, 0, items)
first is essentially the next() function except its second argument determines how many of the first items you want. If items is more than 1 the output is an islice of the generator. If items is 1, the first item is returned
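For example:

first(range(10))           # -> 0
list(first(range(10), 3))  # -> [0, 1, 2]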
def create_state(cls, state, **kwargs):
    kwargs['_return_http_data_only'] = True
    if kwargs.get('async'):
        return cls._create_state_with_http_info(state, **kwargs)
    else:
        (data) = cls._create_state_with_http_info(state, **kwargs)
        return data
Create State

Create a new State.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async=True

>>> thread = api.create_state(state, async=True)
>>> result = thread.get()

:param async bool
:param State state: Attributes of state to create (required)
:return: State
         If the method is called asynchronously, returns the request thread.