code
stringlengths
52
7.75k
docs
stringlengths
1
5.85k
def encode(cls, string, errors='strict'):
    """Return the encoded version of a string.

    :param string: The input string to encode.
    :type string: `basestring`
    :param errors: The error handling scheme. Only 'strict' is supported.
    :type errors: `basestring`
    :return: Tuple of encoded string and number of input bytes consumed.
    :rtype: `tuple` (`unicode`, `int`)
    """
    if errors != 'strict':
        raise UnicodeError('Unsupported error handling {0}'.format(errors))
    as_unicode = cls._ensure_unicode_string(string)
    return as_unicode.translate(cls._encoding_table), len(string)
Return the encoded version of a string. :param string: The input string to encode. :type string: `basestring` :param errors: The error handling scheme. Only 'strict' is supported. :type errors: `basestring` :return: Tuple of encoded string and number of input bytes consumed. :rtype: `tuple` (`unicode`, `int`)
def decode(cls, string, errors='strict'):
    """Return the decoded version of a string.

    :param string: The input string to decode.
    :type string: `basestring`
    :param errors: The error handling scheme. Only 'strict' is supported.
    :type errors: `basestring`
    :return: Tuple of decoded string and number of input bytes consumed.
    :rtype: `tuple` (`unicode`, `int`)
    """
    if errors != 'strict':
        raise UnicodeError('Unsupported error handling {0}'.format(errors))
    as_unicode = cls._ensure_unicode_string(string)
    return as_unicode.translate(cls._decoding_table), len(string)
Return the decoded version of a string. :param string: The input string to decode. :type string: `basestring` :param errors: The error handling scheme. Only 'strict' is supported. :type errors: `basestring` :return: Tuple of decoded string and number of input bytes consumed. :rtype: `tuple` (`unicode`, `int`)
def search_function(cls, encoding):
    """Search function to find the 'rotunicode' codec.

    Returns a :class:`codecs.CodecInfo` for our codec name, or None for
    any other encoding name.
    """
    if encoding != cls._codec_name:
        return None
    return codecs.CodecInfo(
        name=cls._codec_name,
        encode=cls.encode,
        decode=cls.decode,
    )
Search function to find 'rotunicode' codec.
def _ensure_unicode_string(string):
    """Returns a unicode string for string.

    :param string: The input string.
    :type string: `basestring`
    :returns: A unicode string (bytes input is decoded as UTF-8).
    :rtype: `unicode`
    """
    if isinstance(string, six.text_type):
        return string
    return string.decode('utf-8')
Returns a unicode string for string. :param string: The input string. :type string: `basestring` :returns: A unicode string. :rtype: `unicode`
def dict(self, **kwargs):
    """Dictionary representation of this message.

    :param kwargs: extra key/value pairs merged into the result.
    :rtype: dict
    """
    # NOTE: calling the builtin `dict` here; duplicate keys passed via
    # kwargs (e.g. time=...) raise TypeError, which callers may rely on.
    return dict(
        time=self.timestamp,
        mesg=self.raw,
        **kwargs
    )
Dictionary representation.
def api(name, url, token, **kwargs):
    """Shortcut for calling methods on `Client(token, version)`."""
    client = Client(token)
    return client.api(name, url, **kwargs)
Shortcut for calling methods on `Client(token, version)`.
def cli():
    """Command line tool.

    Parses arguments, optionally reads the document body from a file or
    STDIN, calls the requested Diffbot API and prints the JSON response.
    """
    parser = argparse.ArgumentParser()
    # Fixed help-text typo: "One one of" -> "One of".
    parser.add_argument('api', help="""
        API to call. One of 'article', 'frontpage', 'product', 'image',
        'analyze', or 'discussion'.
    """)
    parser.add_argument('url', help="""
        URL to pass as the 'url' parameter.
    """)
    parser.add_argument('token', help="""
        API key (token). Get one at https://www.diffbot.com/.
    """)
    parser.add_argument('-a', '--all', help="""
        Request all fields.
    """, action='store_true')
    parser.add_argument('-f', '--file', help="""
        File to read data from. Use '-' to read from STDIN.
    """)

    fields = text = html = None
    _args = parser.parse_args()
    if _args.all:
        fields = '*'

    # Document body may come from STDIN or from a local file; .html/.htm
    # files are posted as HTML, anything else as plain text.
    if _args.file == '-':
        text = sys.stdin.read()
    elif _args.file:
        with open(_args.file, 'rb') as src:
            if os.path.splitext(_args.file)[1] in ('.html', '.htm'):
                html = src.read().decode(ENCODING)
            else:
                text = src.read().decode(ENCODING)

    result = api(_args.api, _args.url, _args.token,
                 html=html or None, text=text or None, fields=fields)
    print(json.dumps(result, sort_keys=True, indent=2))
Command line tool.
def _get(url, params=None):
    """HTTP GET request.

    Uses ``requests`` when that name is defined; otherwise (NameError)
    falls back to the Python 2 ``urllib``/``urllib2`` stack.
    """
    try:
        response = requests.get(url, params=params)
        response.raise_for_status()
        # If JSON fails, return raw data
        # (e.g. when downloading CSV job logs).
        try:
            return response.json()
        except ValueError:
            return response.text
    except NameError:
        # `requests` is not available: build the query string by hand.
        url = '{0}?{1}'.format(url, urllib.urlencode(params))
        return json.loads(urllib2.urlopen(url).read().decode(ENCODING))
HTTP GET request.
def _post(url, data, content_type, params=None):
    """HTTP POST request.

    Uses ``requests`` when that name is defined; otherwise (NameError)
    falls back to the Python 2 ``urllib``/``urllib2`` stack.
    """
    try:
        response = requests.post(url, params=params, data=data, headers={
            'Content-Type': content_type,
        })
        response.raise_for_status()
        return response.json()
    except NameError:
        # `requests` is not available: build the query string by hand.
        url = '{0}?{1}'.format(url, urllib.urlencode(params))
        req = urllib2.Request(url, data.encode(ENCODING), {
            'Content-Type': content_type,
        })
        return json.loads(urllib2.urlopen(req).read().decode(ENCODING))
HTTP POST request.
def api(self, name, url, **kwargs):
    """Generic API method.

    :param name: API name; must be one of the keys of ``self._apis``.
    :param url: URL to pass as the 'url' parameter.
    :param kwargs: optional 'fields', 'timeout', 'text', 'html' arguments.
    :return: parsed JSON response.
    :raises ValueError: on an unknown API name, or when both `text` and
        `html` are supplied.
    """
    if name not in self._apis:
        raise ValueError('API name must be one of {0}, not {1!r}.'.format(
            tuple(self._apis), name))
    fields = kwargs.get('fields')
    timeout = kwargs.get('timeout')
    text = kwargs.get('text')
    html = kwargs.get('html')
    if text and html:
        raise ValueError(u'Both `text` and `html` arguments provided!')
    params = {'url': url, 'token': self._token}
    if timeout:
        params['timeout'] = timeout
    if fields:
        # Allow an iterable of field names as well as a ready-made string.
        if not isinstance(fields, str):
            fields = ','.join(sorted(fields))
        params['fields'] = fields
    url = self.endpoint(name)
    if text or html:
        # POST the supplied content instead of having Diffbot fetch the URL.
        content_type = html and 'text/html' or 'text/plain'
        return self._post(url, text or html, content_type, params=params)
    return self._get(url, params=params)
Generic API method.
def crawl(self, urls, name='crawl', api='analyze', **kwargs):
    """Crawlbot API.

    Returns a diffbot.Job object to check and retrieve crawl status.
    """
    # If multiple seed URLs are specified, join with whitespace.
    if isinstance(urls, list):
        urls = ' '.join(urls)
    url = self.endpoint('crawl')
    process_url = self.endpoint(api)
    params = {
        'token': self._token,
        'seeds': urls,
        'name': name,
        'apiUrl': process_url,
    }
    # Add any additional named parameters as accepted by Crawlbot
    # (this maxToCrawl default can be overridden via kwargs).
    params['maxToCrawl'] = 10
    params.update(kwargs)
    self._get(url, params=params)
    return Job(self._token, name, self._version)
Crawlbot API. Returns a diffbot.Job object to check and retrieve crawl status.
def main():
    """Example application that prints messages from the panel to the
    terminal."""
    try:
        # Retrieve the first USB device
        device = AlarmDecoder(SerialDevice(interface=SERIAL_DEVICE))

        # Set up an event handler and open the device
        device.on_lrr_message += handle_lrr_message
        with device.open(baudrate=BAUDRATE):
            # Keep the main thread alive while the reader thread works.
            while True:
                time.sleep(1)
    except Exception as ex:
        print('Exception:', ex)
Example application that prints messages from the panel to the terminal.
def handle_lrr_message(sender, message):
    """Handles message events from the AlarmDecoder by printing them."""
    fields = (sender, message.partition, message.event_type, message.event_data)
    print(*fields)
Handles message events from the AlarmDecoder.
def close(self):
    """Closes the device."""
    try:
        # Best-effort shutdown: stop the reader thread and close the
        # underlying device; teardown errors are deliberately ignored.
        self._running = False
        self._read_thread.stop()
        self._device.close()
    except Exception:
        pass
    self.on_close()
Closes the device.
def main():
    """Example application that sends an email when an alarm event is
    detected."""
    try:
        # Retrieve the first USB device
        device = AlarmDecoder(SerialDevice(interface=SERIAL_DEVICE))

        # Set up an event handler and open the device
        device.on_alarm += handle_alarm
        with device.open(baudrate=BAUDRATE):
            # Keep the main thread alive while the reader thread works.
            while True:
                time.sleep(1)
    except Exception as ex:
        print('Exception:', ex)
Example application that sends an email when an alarm event is detected.
def handle_alarm(sender, **kwargs):
    """Handles alarm events from the AlarmDecoder by sending an email.

    :param sender: the device that raised the event.
    :param kwargs: event arguments; 'zone' identifies the tripped zone.
    """
    zone = kwargs.pop('zone', None)
    text = "Alarm: Zone {0}".format(zone)

    # Build the email message
    msg = MIMEText(text)
    msg['Subject'] = SUBJECT
    msg['From'] = FROM_ADDRESS
    msg['To'] = TO_ADDRESS
    s = smtplib.SMTP(SMTP_SERVER)

    # Authenticate if needed
    if SMTP_USERNAME is not None:
        s.login(SMTP_USERNAME, SMTP_PASSWORD)

    # Send the email
    s.sendmail(FROM_ADDRESS, TO_ADDRESS, msg.as_string())
    s.quit()
    print('sent alarm email:', text)
Handles alarm events from the AlarmDecoder.
def _parse_message(self, data):
    """Parses the raw message from the device.

    :param data: message data to parse
    :type data: string
    :raises: :py:class:`~alarmdecoder.util.InvalidMessageError`
    """
    try:
        _, values = data.split(':')
        values = values.split(',')

        # Handle older-format events
        if len(values) <= 3:
            self.event_data, self.partition, self.event_type = values
            self.version = 1

        # Newer-format events
        else:
            self.event_data, self.partition, self.event_type, self.report_code = values
            self.version = 2

            event_type_data = self.event_type.split('_')
            self.event_prefix = event_type_data[0]  # Ex: CID
            self.event_source = get_event_source(self.event_prefix)  # Ex: LRR_EVENT_TYPE.CID
            self.event_status = int(event_type_data[1][0])  # Ex: 1 or 3
            self.event_code = int(event_type_data[1][1:], 16)  # Ex: 0x100 = Medical

            # replace last 2 digits of event_code with report_code, if applicable.
            if not self.skip_report_override and self.report_code not in ['00', 'ff']:
                self.event_code = int(event_type_data[1][1] + self.report_code, 16)

            self.event_description = get_event_description(self.event_source, self.event_code)

    except ValueError:
        # Any unpacking/int-conversion failure means a malformed message.
        raise InvalidMessageError('Received invalid message: {0}'.format(data))
Parses the raw message from the device. :param data: message data to parse :type data: string :raises: :py:class:`~alarmdecoder.util.InvalidMessageError`
def dict(self, **kwargs):
    """Dictionary representation of the LRR message.

    :param kwargs: extra key/value pairs merged into the result.
    :rtype: dict
    """
    return dict(
        time=self.timestamp,
        event_data=self.event_data,
        event_type=self.event_type,
        partition=self.partition,
        report_code=self.report_code,
        event_prefix=self.event_prefix,
        event_source=self.event_source,
        event_status=self.event_status,
        event_code=hex(self.event_code),
        event_description=self.event_description,
        **kwargs
    )
Dictionary representation
def main():
    """Example application that opens a device that has been exposed to the
    network with ser2sock and SSL encryption and authentication."""
    try:
        # Retrieve an AD2 device that has been exposed with ser2sock on localhost:10000.
        ssl_device = SocketDevice(interface=('localhost', 10000))

        # Enable SSL and set the certificates to be used.
        #
        # The key/cert attributes can either be a filesystem path or an X509/PKey
        # object from pyopenssl.
        ssl_device.ssl = True
        ssl_device.ssl_ca = SSL_CA                # CA certificate
        ssl_device.ssl_key = SSL_KEY              # Client private key
        ssl_device.ssl_certificate = SSL_CERT     # Client certificate

        device = AlarmDecoder(ssl_device)

        # Set up an event handler and open the device
        device.on_message += handle_message
        with device.open():
            while True:
                time.sleep(1)
    except Exception as ex:
        print('Exception:', ex)
Example application that opens a device that has been exposed to the network with ser2sock and SSL encryption and authentication.
def rotunicode(io_object, decode=False):
    """Rotate ASCII <-> non-ASCII characters in a file.

    :param io_object: The file object to convert.
    :type io_object: :class:`io.TextIOWrapper`
    :param decode: If True, perform a rotunicode-decode (rotate from
        non-ASCII to ASCII). Defaults to False.
    :type decode: `bool`
    :return: Iterator over the converted lines of the file.
    :rtype: iterator of `unicode`
    """
    convert = get_rotunicode_function_for_decode_argument(decode=decode)
    unicode_lines = map(safe_unicode, stream_file_lines(io_object))
    return map(convert, unicode_lines)
Rotate ASCII <-> non-ASCII characters in a file. :param io_object: The file object to convert. :type io_object: :class:`io.TextIOWrapper` :param decode: If True, perform a rotunicode-decode (rotate from non-ASCII to ASCII). Defaults to False (rotate from ASCII to non-ASCII). :type decode: `bool` :return: Yield the converted lines of the file. :rtype: `generator` of `unicode`
def ruencode(string, extension=False):
    """Encode a string using the 'rotunicode' codec.

    :param string: The input string to encode.
    :type string: `basestring`
    :param extension: True if the entire input string should be encoded.
        False to split the input string using :func:`os.path.splitext` and
        encode only the file name portion, keeping the extension as is.
    :type extension: `bool`
    :return: Encoded string.
    :rtype: `unicode`
    """
    if extension:
        name, ext = string, ''
    else:
        name, ext = splitext(string)
    encoded, _consumed = _ROT_UNICODE.encode(name)
    return encoded + ext
Encode a string using 'rotunicode' codec. :param string: The input string to encode. :type string: `basestring` :param extension: True if the entire input string should be encoded. False to split the input string using :func:`os.path.splitext` and encode only the file name portion keeping the extension as is. :type extension: `bool` :return: Encoded string. :rtype: `unicode`
def find_all(cls, vid=None, pid=None):
    """Returns all FTDI devices matching our vendor and product IDs.

    :returns: list of devices
    :raises: :py:class:`~alarmdecoder.util.CommError`
    """
    if not have_pyftdi:
        raise ImportError('The USBDevice class has been disabled due to missing requirement: pyftdi or pyusb.')
    cls.__devices = []

    # Default to the full list of supported product IDs unless a specific
    # vendor/product pair was requested.
    query = cls.PRODUCT_IDS
    if vid and pid:
        query = [(vid, pid)]
    try:
        cls.__devices = Ftdi.find_all(query, nocache=True)
    except (usb.core.USBError, FtdiError) as err:
        raise CommError('Error enumerating AD2USB devices: {0}'.format(str(err)), err)
    return cls.__devices
Returns all FTDI devices matching our vendor and product IDs. :returns: list of devices :raises: :py:class:`~alarmdecoder.util.CommError`
def find(cls, device=None):
    """Factory method that returns the requested :py:class:`USBDevice`
    device, or the first device.

    :param device: Tuple describing the USB device to open, as returned
                   by find_all().
    :type device: tuple
    :returns: :py:class:`USBDevice` object utilizing the specified device
    :raises: :py:class:`~alarmdecoder.util.NoDeviceError`
    """
    if not have_pyftdi:
        raise ImportError('The USBDevice class has been disabled due to missing requirement: pyftdi or pyusb.')
    cls.find_all()
    if len(cls.__devices) == 0:
        raise NoDeviceError('No AD2USB devices present.')

    # Default to the first enumerated device.
    if device is None:
        device = cls.__devices[0]
    vendor, product, sernum, ifcount, description = device
    return USBDevice(interface=sernum, vid=vendor, pid=product)
Factory method that returns the requested :py:class:`USBDevice` device, or the first device. :param device: Tuple describing the USB device to open, as returned by find_all(). :type device: tuple :returns: :py:class:`USBDevice` object utilizing the specified device :raises: :py:class:`~alarmdecoder.util.NoDeviceError`
def start_detection(cls, on_attached=None, on_detached=None):
    """Starts the device detection thread.

    :param on_attached: function to be called when a device is attached
        **Callback definition:** *def callback(thread, device)*
    :type on_attached: function
    :param on_detached: function to be called when a device is detached
        **Callback definition:** *def callback(thread, device)*
    :type on_detached: function
    """
    if not have_pyftdi:
        raise ImportError('The USBDevice class has been disabled due to missing requirement: pyftdi or pyusb.')
    cls.__detect_thread = USBDevice.DetectThread(on_attached, on_detached)

    # Prime the device list; enumeration failures here are non-fatal.
    try:
        cls.find_all()
    except CommError:
        pass
    cls.__detect_thread.start()
Starts the device detection thread. :param on_attached: function to be called when a device is attached **Callback definition:** *def callback(thread, device)* :type on_attached: function :param on_detached: function to be called when a device is detached **Callback definition:** *def callback(thread, device)* :type on_detached: function
def interface(self, value):
    """Sets the interface used to connect to the device.

    :param value: may specify either the serial number or the device index
    :type value: string or int
    """
    self._interface = value
    # Integers select a device by enumeration index; anything else is
    # treated as a serial number.
    attr = '_device_number' if isinstance(value, int) else '_serial_number'
    setattr(self, attr, value)
Sets the interface used to connect to the device. :param value: may specify either the serial number or the device index :type value: string or int
def open(self, baudrate=BAUDRATE, no_reader_thread=False):
    """Opens the device.

    :param baudrate: baudrate to use
    :type baudrate: int
    :param no_reader_thread: whether or not to automatically start the
                             reader thread.
    :type no_reader_thread: bool
    :raises: :py:class:`~alarmdecoder.util.NoDeviceError`
    """
    # Set up defaults
    if baudrate is None:
        baudrate = USBDevice.BAUDRATE
    self._read_thread = Device.ReadThread(self)

    # Open the device and start up the thread.
    try:
        self._device.open(self._vendor_id, self._product_id, self._endpoint, self._device_number, self._serial_number, self._description)
        self._device.set_baudrate(baudrate)

        # Fall back to the FTDI-reported serial number when none was given.
        if not self._serial_number:
            self._serial_number = self._get_serial_number()
        self._id = self._serial_number
    except (usb.core.USBError, FtdiError) as err:
        raise NoDeviceError('Error opening device: {0}'.format(str(err)), err)
    except KeyError as err:
        # NOTE(review): err[0] indexing is Python 2-style exception access.
        raise NoDeviceError('Unsupported device. ({0:04x}:{1:04x}) You probably need a newer version of pyftdi.'.format(err[0][0], err[0][1]))
    else:
        self._running = True
        self.on_open()
        if not no_reader_thread:
            self._read_thread.start()
    return self
Opens the device. :param baudrate: baudrate to use :type baudrate: int :param no_reader_thread: whether or not to automatically start the reader thread. :type no_reader_thread: bool :raises: :py:class:`~alarmdecoder.util.NoDeviceError`
def close(self):
    """Closes the device and re-attaches the kernel driver (best effort)."""
    try:
        Device.close(self)

        # HACK: Probably should fork pyftdi and make this call in .close()
        self._device.usb_dev.attach_kernel_driver(self._device_number)
    except Exception:
        pass
Closes the device.
def write(self, data):
    """Writes data to the device.

    :param data: data to write
    :type data: string
    :raises: :py:class:`~alarmdecoder.util.CommError`
    """
    try:
        self._device.write_data(data)
        # Notify listeners after a successful write.
        self.on_write(data=data)
    except FtdiError as err:
        raise CommError('Error writing to device: {0}'.format(str(err)), err)
Writes data to the device. :param data: data to write :type data: string :raises: :py:class:`~alarmdecoder.util.CommError`
def read(self):
    """Reads a single character from the device.

    :returns: character read from the device
    :raises: :py:class:`~alarmdecoder.util.CommError`
    """
    try:
        return self._device.read_data(1)
    except (usb.core.USBError, FtdiError) as err:
        raise CommError('Error reading from device: {0}'.format(str(err)), err)
Reads a single character from the device. :returns: character read from the device :raises: :py:class:`~alarmdecoder.util.CommError`
def read_line(self, timeout=0.0, purge_buffer=False):
    """Reads a line from the device.

    :param timeout: read timeout
    :type timeout: float
    :param purge_buffer: Indicates whether to purge the buffer prior to
                         reading.
    :type purge_buffer: bool
    :returns: line that was read
    :raises: :py:class:`~alarmdecoder.util.CommError`,
             :py:class:`~alarmdecoder.util.TimeoutError`
    """
    def timeout_event():
        """Handles read timeout event"""
        timeout_event.reading = False
    timeout_event.reading = True

    if purge_buffer:
        self._buffer = b''
    got_line, ret = False, None

    # A timer flips the flag so the read loop below terminates.
    timer = threading.Timer(timeout, timeout_event)
    if timeout > 0:
        timer.start()
    try:
        while timeout_event.reading:
            buf = self._device.read_data(1)
            if buf != b'':
                ub = bytes_hack(buf)
                self._buffer += ub
                if ub == b"\n":
                    self._buffer = self._buffer.rstrip(b"\r\n")

                    # Ignore empty lines (bare CR/LF).
                    if len(self._buffer) > 0:
                        got_line = True
                        break
            else:
                # Nothing available; avoid busy-waiting.
                time.sleep(0.01)
    except (usb.core.USBError, FtdiError) as err:
        raise CommError('Error reading from device: {0}'.format(str(err)), err)
    else:
        if got_line:
            ret, self._buffer = self._buffer, b''
            self.on_read(data=ret)
        else:
            raise TimeoutError('Timeout while waiting for line terminator.')
    finally:
        timer.cancel()
    return ret
Reads a line from the device. :param timeout: read timeout :type timeout: float :param purge_buffer: Indicates whether to purge the buffer prior to reading. :type purge_buffer: bool :returns: line that was read :raises: :py:class:`~alarmdecoder.util.CommError`, :py:class:`~alarmdecoder.util.TimeoutError`
def _get_serial_number(self):
    """Retrieves the FTDI device serial number.

    :returns: string containing the device serial number
    """
    usb_dev = self._device.usb_dev
    # 64 is the string-descriptor buffer length; iSerialNumber is the
    # descriptor index of the serial number string.
    return usb.util.get_string(usb_dev, 64, usb_dev.iSerialNumber)
Retrieves the FTDI device serial number. :returns: string containing the device serial number
def parse_color(color):
    """Parse a color value.

    Accepts 3, 4, 6 or 8 hex digits (RGB, RGBA, RRGGBB, RRGGBBAA); no
    leading '#' is expected because it's a comment character in some shells.

    >>> parse_color('4bf') == (0x44, 0xbb, 0xff, 0xff)
    True
    >>> parse_color('12345678') == (0x12, 0x34, 0x56, 0x78)
    True

    Raises ValueError on errors.
    """
    n = len(color)
    if n not in (3, 4, 6, 8):
        raise ValueError('bad color %s' % repr(color))
    if n in (3, 4):
        # Short form: each digit is doubled (e.g. '4' -> 0x44).
        r, g, b = (int(c, 16) * 0x11 for c in color[:3])
    else:
        r, g, b = (int(color[i:i + 2], 16) for i in (0, 2, 4))
    if n == 4:
        a = int(color[3], 16) * 0x11
    elif n == 8:
        a = int(color[6:8], 16)
    else:
        a = 0xff
    return (r, g, b, a)
Parse a color value. I've decided not to expect a leading '#' because it's a comment character in some shells. >>> parse_color('4bf') == (0x44, 0xbb, 0xff, 0xff) True >>> parse_color('ccce') == (0xcc, 0xcc, 0xcc, 0xee) True >>> parse_color('d8b4a2') == (0xd8, 0xb4, 0xa2, 0xff) True >>> parse_color('12345678') == (0x12, 0x34, 0x56, 0x78) True Raises ValueError on errors.
def check_color(option, opt, value):
    """Validate and convert an option value of type 'color'.

    ``option`` is an optparse.Option instance.
    ``opt`` is a string with the user-supplied option name (e.g. '--bgcolor').
    ``value`` is the user-supplied value.
    """
    try:
        return parse_color(value)
    except ValueError:
        message = "option %s: invalid color value: %r" % (opt, value)
        raise optparse.OptionValueError(message)
Validate and convert an option value of type 'color'. ``option`` is an optparse.Option instance. ``opt`` is a string with the user-supplied option name (e.g. '--bgcolor'). ``value`` is the user-supplied value.
def pick_orientation(img1, img2, spacing, desired_aspect=1.618):
    """Pick a tiling orientation for two images.

    Returns either 'lr' for left-and-right, or 'tb' for top-and-bottom —
    whichever makes the combined image's aspect ratio closer to
    ``desired_aspect`` (1:1.618 by default).
    """
    w1, h1 = img1.size
    w2, h2 = img2.size

    def goodness(w, h):
        # Ratio in (0, 1]; 1.0 means exactly the desired aspect.
        aspect = w / h
        return min(desired_aspect, aspect) / max(desired_aspect, aspect)

    side_by_side = goodness(w1 + spacing + w2, max(h1, h2, 1))
    stacked = goodness(max(w1, w2, 1), h1 + spacing + h2)
    return 'lr' if side_by_side >= stacked else 'tb'
Pick a tiling orientation for two images. Returns either 'lr' for left-and-right, or 'tb' for top-and-bottom. Picks the one that makes the combined image have a better aspect ratio, where 'better' is defined as 'closer to 1:1.618'.
def tile_images(img1, img2, mask1, mask2, opts):
    """Combine two images into one by tiling them.

    ``mask1`` and ``mask2`` provide optional masks for alpha-blending;
    pass None to avoid. Fills unused areas with ``opts.bgcolor``. Puts an
    ``opts.spacing``-wide bar with a thin line of ``opts.sepcolor`` color
    between them. ``opts.orientation`` can be 'lr', 'tb', or 'auto'.
    """
    w1, h1 = img1.size
    w2, h2 = img2.size
    if opts.orientation == 'auto':
        opts.orientation = pick_orientation(img1, img2, opts.spacing)
    B, S = opts.border, opts.spacing
    if opts.orientation == 'lr':
        # Side by side: vertically center each image in the canvas.
        w, h = (B + w1 + S + w2 + B, B + max(h1, h2) + B)
        pos1 = (B, (h - h1) // 2)
        pos2 = (B + w1 + S, (h - h2) // 2)
        separator_line = [(B + w1 + S//2, 0), (B + w1 + S//2, h)]
    else:
        # Stacked: horizontally center each image in the canvas.
        w, h = (B + max(w1, w2) + B, B + h1 + S + h2 + B)
        pos1 = ((w - w1) // 2, B)
        pos2 = ((w - w2) // 2, B + h1 + S)
        separator_line = [(0, B + h1 + S//2), (w, B + h1 + S//2)]
    img = Image.new('RGBA', (w, h), opts.bgcolor)
    img.paste(img1, pos1, mask1)
    img.paste(img2, pos2, mask2)
    ImageDraw.Draw(img).line(separator_line, fill=opts.sepcolor)
    return img
Combine two images into one by tiling them. ``mask1`` and ``mask2`` provide optional masks for alpha-blending; pass None to avoid. Fills unused areas with ``opts.bgcolor``. Puts a ``opts.spacing``-wide bar with a thin line of ``opts.sepcolor`` color between them. ``opts.orientation`` can be 'lr' for left-and-right, 'tb' for top-and-bottom, or 'auto' for automatic.
def spawn_viewer(viewer, img, filename, grace):
    """Launch an external program to view an image.

    ``img`` is an Image object. ``viewer`` is a command name; exactly one
    argument will be passed: the name of the image file. ``filename`` is
    the suggested filename for a temporary file. ``grace`` is the number of
    seconds to wait after spawning the viewer before removing the temporary
    file, for viewers that fork into background before opening the file.
    """
    tempdir = tempfile.mkdtemp(prefix='imgdiff-')
    try:
        imgfile = os.path.join(tempdir, filename)
        img.save(imgfile)
        started = time.time()
        subprocess.call([viewer, imgfile])
        elapsed = time.time() - started
        if elapsed < grace:
            # Program exited too quickly.  I think it forked and so may not
            # have had enough time to even start looking for the temp file
            # we just created.  Wait a bit before removing the temp file.
            time.sleep(grace - elapsed)
    finally:
        shutil.rmtree(tempdir)
Launch an external program to view an image. ``img`` is an Image object. ``viewer`` is a command name. Arguments are not allowed; exactly one argument will be passed: the name of the image file. ``filename`` is the suggested filename for a temporary file. ``grace`` is the number of seconds to wait after spawning the viewer before removing the temporary file. Useful if your viewer forks into background before it opens the file.
def tweak_diff(diff, opacity):
    """Adjust a difference map into an opacity mask for a given lowest
    opacity.

    Performs a linear map from [0; 255] to [opacity; 255]: similar areas
    end up at ``opacity`` while dissimilar areas stay opaque.
    """
    span = 255 - opacity
    return diff.point(lambda value: opacity + value * span // 255)
Adjust a difference map into an opacity mask for a given lowest opacity. Performs a linear map from [0; 255] to [opacity; 255]. The result is that similar areas will have a given opacity, while dissimilar areas will be opaque.
def diff_badness(diff):
    """Estimate the "badness" value of a difference map.

    Returns 0 for identical pictures (all-black difference map) and up to
    ``255 * width * height`` for completely different ones (all-white map).
    """
    # Weight each histogram bucket by its pixel value: black (identical)
    # pixels contribute 0, white (different) pixels contribute 255 each.
    histogram = diff.histogram()
    return sum(value * count for value, count in enumerate(histogram))
Estimate the "badness" value of a difference map. Returns 0 if the pictures are identical Returns a large number if the pictures are completely different (e.g. a black field and a white field). More specifically, returns ``255 * width * height`` where ``(width, height) == diff.size``. Returns something in between for other situations.
def best_diff(img1, img2, opts):
    """Find the best alignment of two images that minimizes the differences.

    Returns (diff, alignments) where ``diff`` is a difference map, and
    ``alignments`` is a tuple ((x1, y1), (x2, y2)).  See ``diff()`` for the
    description of the alignment numbers.
    """
    w1, h1 = img1.size
    w2, h2 = img2.size
    w, h = min(w1, w2), min(h1, h2)
    best = None
    # Upper bound: worse than any possible badness value.
    best_value = 255 * w * h + 1
    xr = abs(w1 - w2) + 1
    yr = abs(h1 - h2) + 1
    p = Progress(xr * yr, timeout=opts.timeout)
    # Slide the smaller image over the larger one on each axis.
    for x in range(xr):
        if w1 > w2:
            x1, x2 = x, 0
        else:
            x1, x2 = 0, x
        for y in range(yr):
            if h1 > h2:
                y1, y2 = y, 0
            else:
                y1, y2 = 0, y
            p.next()
            this = diff(img1, img2, (x1, y1), (x2, y2))
            this_value = diff_badness(this)
            if this_value < best_value:
                best = this
                best_value = this_value
                best_pos = (x1, y1), (x2, y2)
    return best, best_pos
Find the best alignment of two images that minimizes the differences. Returns (diff, alignments) where ``diff`` is a difference map, and ``alignments`` is a tuple ((x1, y2), (x2, y2)). See ``diff()`` for the description of the alignment numbers.
def simple_highlight(img1, img2, opts):
    """Try to align the two images to minimize pixel differences.

    Produces two masks for img1 and img2, or (None, None) when the search
    is interrupted with Ctrl-C.
    """
    try:
        diff, ((x1, y1), (x2, y2)) = best_diff(img1, img2, opts)
    except KeyboardInterrupt:
        return None, None
    # Smooth the map to reduce spurious matches in perceptibly different
    # areas (e.g. text).
    diff = diff.filter(ImageFilter.MaxFilter(9))
    diff = tweak_diff(diff, opts.opacity)
    # If the images have different sizes, the areas outside the alignment
    # zone are considered to be dissimilar -- filling them with 0xff.
    # Perhaps it would be better to compare those bits with bars of solid
    # color, filled with opts.bgcolor?
    mask1 = Image.new('L', img1.size, 0xff)
    mask2 = Image.new('L', img2.size, 0xff)
    mask1.paste(diff, (x1, y1))
    mask2.paste(diff, (x2, y2))
    return mask1, mask2
Try to align the two images to minimize pixel differences. Produces two masks for img1 and img2. The algorithm works by comparing every possible alignment of the images, finding the alignment that minimizes the differences, and then smoothing it a bit to reduce spurious matches in areas that are perceptibly different (e.g. text).
def open(path, broken=False):
    """Context manager for opening and reading json lines files.

    If file extension suggests gzip (.gz or .gzip), the file is
    decompressed on the fly.  Pass broken=True if you expect the file can
    be truncated or broken otherwise; reader will try to recover as much
    data as possible in this case.
    """
    with maybe_gzip_open(path) as f:
        yield reader(f, broken=broken)
Context manager for opening and reading json lines files. If file extension suggests gzip (.gz or .gzip), file is decompressed on fly. Pass broken=True if you expect the file can be truncated or broken otherwise; reader will try to recover as much data as possible in this case.
def controlParameters(self, module, status):
    """Returns control parameters as XML.

    :type module: str
    :type status: str
    :param module: The module number/ID
    :param status: The state to set (i.e. true (on) or false (off))
    :return: XML string to join with payload
    """
    # The legacy protocol additionally requires a <Controller> element.
    if self.use_legacy_protocol:
        return '''{}<NickName>Socket 1</NickName><Description>Socket 1</Description>
<OPStatus>{}</OPStatus><Controller>1</Controller>'''.format(self.moduleParameters(module), status)
    else:
        return '''{}<NickName>Socket 1</NickName><Description>Socket 1</Description>
<OPStatus>{}</OPStatus>'''.format(self.moduleParameters(module), status)
Returns control parameters as XML. :type module: str :type status: str :param module: The module number/ID :param status: The state to set (i.e. true (on) or false (off)) :return XML string to join with payload
def fetchMyCgi(self):
    """Fetches statistics from /my_cgi.cgi on the device.

    :return: dict mapping stat names to values, or None when the device
             could not be reached (``self._error_report`` is set).
    """
    try:
        response = urlopen(Request('http://{}/my_cgi.cgi'.format(self.ip), b'request=create_chklst'))
    except (HTTPError, URLError):
        _LOGGER.warning("Failed to open url to {}".format(self.ip))
        self._error_report = True
        return None

    stats = {}
    for line in response.readlines():
        # Decode once and split on the FIRST ':' only, so values that
        # themselves contain ':' (e.g. MAC addresses) are not truncated
        # (the previous split(':')[1] kept just the middle segment).
        key, _, value = line.decode().partition(':')
        stats[key.strip()] = value.strip()
    return stats
Fetches statistics from my_cgi.cgi
def current_consumption(self):
    """Get the current power consumption in Watt.

    :return: consumption as a float, or 'N/A' when unavailable.
    """
    res = 'N/A'
    if self.use_legacy_protocol:
        # Use /my_cgi.cgi to retrieve current consumption
        try:
            res = self.fetchMyCgi()['Meter Watt']
        except Exception:
            # Narrowed from a bare `except:` so SystemExit and
            # KeyboardInterrupt are no longer swallowed.
            return 'N/A'
    else:
        try:
            res = self.SOAPAction('GetCurrentPowerConsumption', 'CurrentConsumption', self.moduleParameters("2"))
        except Exception:
            return 'N/A'

    if res is None:
        return 'N/A'

    try:
        res = float(res)
    except ValueError:
        # Keep the raw value; callers historically receive it unchanged.
        _LOGGER.error("Failed to retrieve current power consumption from SmartPlug")
    return res
Get the current power consumption in Watt.
def total_consumption(self):
    """Get the total power consumption over the device lifetime.

    :return: total consumption string, or 'N/A' when unavailable.
    """
    if self.use_legacy_protocol:
        # TotalConsumption currently fails on the legacy protocol and
        # creates a mess in the logs. Just return 'N/A' for now.
        return 'N/A'

    res = 'N/A'
    try:
        res = self.SOAPAction("GetPMWarningThreshold", "TotalConsumption", self.moduleParameters("2"))
    except Exception:
        # Narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt are no longer swallowed.
        return 'N/A'

    if res is None:
        return 'N/A'

    try:
        float(res)  # validation only; the raw string is returned as-is
    except ValueError:
        _LOGGER.error("Failed to retrieve total power consumption from SmartPlug")
    return res
Get the total power consumption over the device lifetime.
def state(self):
    """Get the device state (i.e. ON or OFF)."""
    response = self.SOAPAction('GetSocketSettings', 'OPStatus', self.moduleParameters("1"))
    if response is None:
        return 'unknown'
    normalized = response.lower()
    if normalized == 'true':
        return ON
    if normalized == 'false':
        return OFF
    _LOGGER.warning("Unknown state %s returned" % str(normalized))
    return 'unknown'
Get the device state (i.e. ON or OFF).
def state(self, value):
    """Set device state.

    :type value: str
    :param value: Future state (either ON or OFF)
    :raises TypeError: when value is neither ON nor OFF.
    """
    desired = value.upper()
    if desired == ON:
        flag = "true"
    elif desired == OFF:
        flag = "false"
    else:
        raise TypeError("State %s is not valid." % str(value))
    return self.SOAPAction('SetSocketSettings', 'SetSocketSettingsResult', self.controlParameters("1", flag))
Set device state. :type value: str :param value: Future state (either ON or OFF)
def auth_payload(self, login_pwd):
    """Generate a new payload containing generated hash information.

    :type login_pwd: str
    :param login_pwd: hashed password generated by the auth function.
    :return: UTF-8 encoded SOAP Login envelope (bytes).
    """
    payload = '''<?xml version="1.0" encoding="utf-8"?>
<soap:Envelope xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns:xsd="http://www.w3.org/2001/XMLSchema" xmlns:soap="http://schemas.xmlsoap.org/soap/envelope/">
<soap:Body>
<Login xmlns="http://purenetworks.com/HNAP1/">
<Action>login</Action>
<Username>{}</Username>
<LoginPassword>{}</LoginPassword>
<Captcha/>
</Login>
</soap:Body>
</soap:Envelope>
'''.format(self.user, login_pwd)
    return payload.encode()
Generate a new payload containing generated hash information. :type login_pwd: str :param login_pwd: hashed password generated by the auth function.
def get_known_read_position(fp, buffered=True):
    """Return a position in a file which is known to be read & handled.

    It assumes a buffered file and streaming processing: anything more
    than one I/O buffer behind the current position must already have been
    consumed.
    """
    lag = io.DEFAULT_BUFFER_SIZE if buffered else 0
    position = fp.tell() - lag
    return position if position > 0 else 0
Return a position in a file which is known to be read & handled. It assumes a buffered file and streaming processing.
def recover(gzfile, last_good_position):
    # type: (gzip.GzipFile, int) -> gzip.GzipFile
    """Skip to the next possibly decompressable part of a gzip file.

    Return a new GzipFile object if such part is found or None if it is
    not found.
    """
    pos = get_recover_position(gzfile, last_good_position=last_good_position)
    if pos == -1:
        return None
    raw = gzfile.fileobj
    raw.seek(pos)
    return gzip.GzipFile(fileobj=raw, mode='r')
Skip to the next possibly decompressable part of a gzip file. Return a new GzipFile object if such part is found or None if it is not found.
def get_recover_position(gzfile, last_good_position):
    # type: (gzip.GzipFile, int) -> int
    """Return position of the next gzip stream in a GzipFile, or -1 if it
    is not found.

    XXX: caller must ensure that the same last_good_position is not used
    multiple times for the same gzfile.
    """
    mapped = mmap.mmap(gzfile.fileno(), 0, access=mmap.ACCESS_READ)
    with closing(mapped):
        # Search strictly after the last known-good offset.
        return mapped.find(GZIP_SIGNATURE, last_good_position + 1)
Return position of a next gzip stream in a GzipFile, or -1 if it is not found. XXX: caller must ensure that the same last_good_position is not used multiple times for the same gzfile.
def maybe_gzip_open(path, *args, **kwargs):
    """Open file with either open or gzip.open, depending on file
    extension.

    This function doesn't handle json lines format, just opens a file in a
    way it is decoded transparently if needed.
    """
    path = path_to_str(path)
    opener = gzip.open if path.endswith(('.gz', '.gzip')) else open
    return opener(path, *args, **kwargs)
Open file with either open or gzip.open, depending on file extension. This function doesn't handle json lines format, just opens a file in a way it is decoded transparently if needed.
def path_to_str(path):
    """Convert pathlib.Path objects to str; return other objects as-is."""
    try:
        from pathlib import Path as _PathType
    except ImportError:  # Python < 3.4: no pathlib, so nothing can match
        return path
    return str(path) if isinstance(path, _PathType) else path
Convert pathlib.Path objects to str; return other objects as-is.
def get_factor_list(self):
    """Return the factors as a flat list of ints.

    get_factors: [['2', 3], ['3', 2]]
    Returns: [2, 2, 2, 3, 3]
    """
    factors = self.get_factor_from_api()
    if not factors:
        return []
    # Repeat each base `count` times, e.g. ('2', 3) -> 2, 2, 2.
    return [int(base) for base, count in factors for _ in range(count)]
get_factors: [['2', 3], ['3', 2]] Returns: [2, 2, 2, 3, 3]
def calculate_signature(key, data, timestamp=None):
    """Calculates the signature for the given request data.

    :param key: shared secret key (str or bytes).
    :param data: mapping of request parameters; items are concatenated as
        ``key`` + ``value`` pairs in sorted key order.
    :param timestamp: Unix timestamp to sign; defaults to the current time.
    :return: hex-encoded HMAC-SHA256 signature (RFC 2104).
    """
    # Create a timestamp if one was not given
    if timestamp is None:
        timestamp = int(time.time())

    # Construct the message from the timestamp and the data in the request
    message = str(timestamp) + ''.join(
        "%s%s" % (k, v) for k, v in sorted(data.items()))

    # hmac requires bytes key/message on Python 3; the original
    # str()-based call only worked on Python 2.
    if not isinstance(key, bytes):
        key = str(key).encode('utf-8')

    # Calculate the signature (HMAC SHA256) according to RFC 2104
    signature = hmac.HMAC(key, message.encode('utf-8'), hashlib.sha256).hexdigest()
    return signature
Calculates the signature for the given request data.
def authenticate(self):
    """Indicate to the client that it needs to authenticate via a 401.

    When credentials were supplied but rejected, the WWW-Authenticate
    challenge additionally carries error="invalid_token".
    """
    if request.headers.get('Authorization') or request.args.get('access_token'):
        realm = 'Bearer realm="%s", error="invalid_token"' % __package__
    else:
        realm = 'Bearer realm="%s"' % __package__
    resp = Response(None, 401, {'WWW-Authenticate': realm})
    abort(401, description='Please provide proper credentials', response=resp)
Indicate to the client that it needs to authenticate via a 401.
def check_token(self, token, allowed_roles, resource, method):
    """Validate an access token for a resource.

    Called when a token is sent through the access_token parameter or the
    Authorization header. Audiences come from the resource-level
    'audiences' setting, falling back to the global JWT_AUDIENCES
    configuration.
    """
    resource_conf = config.DOMAIN[resource]
    audiences = resource_conf.get('audiences', config.JWT_AUDIENCES)
    return self._perform_verification(token, audiences, allowed_roles)
This function is called when a token is sent through the access_token parameter or the Authorization header as specified in the OAuth 2 specification. The provided token is validated with the JWT_SECRET defined in the Eve configuration. The token issuer (iss claim) must be the one specified by JWT_ISSUER and the audience (aud claim) must be one of the value(s) defined by either the "audiences" resource parameter or the global JWT_AUDIENCES configuration. If JWT_ROLES_CLAIM is defined and a claim by that name is present in the token, roles are checked using this claim. If a JWT_SCOPE_CLAIM is defined and a claim by that name is present in the token, the claim value is checked, and if "viewer" is present, only GET and HEAD methods will be allowed. The scope name is then added to the list of roles with the scope: prefix. If the validation succeeds, the claims are stored and accessible through the get_authen_claims() method.
def requires_token(self, audiences=None, allowed_roles=None):
    """Decorator for functions that will be protected with token
    authentication.

    The token must be provided either through the access_token parameter
    or the Authorization header. See check_token() for further details.
    """
    def requires_token_wrapper(f):
        @wraps(f)
        def decorated(*args, **kwargs):
            try:
                token = request.args['access_token']
            except KeyError:
                # Fall back to the Authorization header; partition(' ')
                # strips the "Bearer" scheme prefix.
                token = request.headers.get('Authorization', '').partition(' ')[2]
            if not self._perform_verification(token, audiences, allowed_roles):
                abort(401)
            return f(*args, **kwargs)
        return decorated
    return requires_token_wrapper
Decorator for functions that will be protected with token authentication. Token must be provided either through access_token parameter or Authorization header. See check_token() method for further details.
def load_library(self, libname):
    """Given the name of a library, load it.

    Tries every candidate path in turn and loads the first that exists on
    disk; raises ImportError when none does.
    """
    for candidate in self.getpaths(libname):
        if os.path.exists(candidate):
            return self.load(candidate)
    raise ImportError("%s not found." % libname)
Given the name of a library, load it.
def getpaths(self, libname):
    """Yield the paths where the library might be found."""
    if os.path.isabs(libname):
        # An absolute path can only refer to itself.
        yield libname
        return
    # FIXME / TODO return '.' and os.path.dirname(__file__)
    for candidate in self.getplatformpaths(libname):
        yield candidate
    found = ctypes.util.find_library(libname)
    if found:
        yield found
Return a list of paths where the library might be found.
def to_json(content, indent=None):
    """Serializes a python object as JSON

    Uses the DecimalEncoder to ensure that python objects such as Decimal
    objects are properly serialized. It can also serialize Django QuerySet
    objects.
    """
    if isinstance(content, QuerySet):
        # QuerySets need Django's own serializer framework.
        json_serializer = serializers.get_serializer('json')()
        serialized_content = json_serializer.serialize(content, ensure_ascii=False, indent=indent)
    else:
        try:
            serialized_content = json.dumps(content, cls=DecimalEncoder, ensure_ascii=False, indent=indent)
        except TypeError:
            # Fix for Django 1.5
            serialized_content = json.dumps(content, ensure_ascii=False, indent=indent)
    return serialized_content
Serializes a python object as JSON This method uses the DJangoJSONEncoder to to ensure that python objects such as Decimal objects are properly serialized. It can also serialize Django QuerySet objects.
def to_html(data):
    """Serializes a python object as an HTML page of formatted JSON.

    The object is rendered as indented JSON inside a minimal HTML
    document via ``to_json``.  If pygments is installed, the JSON is
    syntax-highlighted and the matching CSS is embedded in the page.
    """
    # NOTE(review): '<pre></code>' below looks like it should be
    # '<pre><code>' -- confirm before changing rendered output.
    base_html_template = Template('''
    <html>
        <head>
            {% if style %}
            <style type="text/css">
                {{ style }}
            </style>
            {% endif %}
        </head>
        <body>
            {% if style %}
            {{ body|safe }}
            {% else %}
            <pre></code>{{ body }}</code></pre>
            {% endif %}
        </body>
    </html>
    ''')
    code = to_json(data, indent=4)
    if PYGMENTS_INSTALLED:
        # Highlighted markup plus the CSS rules needed to style it
        c = Context({
            'body': highlight(code, JSONLexer(), HtmlFormatter()),
            'style': HtmlFormatter().get_style_defs('.highlight')
        })
        html = base_html_template.render(c)
    else:
        c = Context({'body': code})
        html = base_html_template.render(c)
    return html
Serializes a python object as HTML This method uses the to_json method to turn the given data object into formatted JSON that is displayed in an HTML page. If pygments is installed, syntax highlighting will also be applied to the JSON.
def to_text(data):
    """Serializes a python object as plain text.

    If the data can be serialized as JSON it is formatted via
    ``to_json``; otherwise the data is returned as-is.
    """
    try:
        return to_json(data, indent=4)
    except Exception:
        # Fixed: was `except Exception, e` (Python-2-only syntax, and
        # `e` was never used).  Non-serializable data falls through raw.
        return data
Serializes a python object as plain text If the data can be serialized as JSON, this method will use the to_json method to format the data, otherwise the data is returned as is.
def auth_required(secret_key_func):
    """Allow access when the request carries a valid signature OR the
    requesting user is actively logged in.

    :param secret_key_func: callable(request, *args, **kwargs) returning
        the secret key used to validate the request signature.
    """
    def actual_decorator(obj):
        def check(request, *args, **kwargs):
            key = secret_key_func(request, *args, **kwargs)
            if validate_signature(request, key):
                return True
            return request.user.is_authenticated()
        return wrap_object(obj, request_passes_test(check))
    return actual_decorator
Requires that the user be authenticated either by a signature or by being actively logged in.
def login_required(obj):
    """Restrict access to the decorated resource to logged-in users."""
    def is_logged_in(request, *args, **kwargs):
        return request.user.is_authenticated()
    return wrap_object(obj, request_passes_test(is_logged_in))
Requires that the user be logged in in order to gain access to the resource at the specified URI.
def admin_required(obj):
    """Restrict access to logged-in users flagged as superusers."""
    def is_superuser(request, *args, **kwargs):
        return request.user.is_superuser
    return wrap_object(obj, request_passes_test(is_superuser))
Requires that the user be logged in AND be set as a superuser
def signature_required(secret_key_func):
    """Allow access only when the request carries a valid signature.

    :param secret_key_func: callable(request, *args, **kwargs) returning
        the secret key used to validate the request signature.
    """
    def actual_decorator(obj):
        def has_valid_signature(request, *args, **kwargs):
            key = secret_key_func(request, *args, **kwargs)
            return validate_signature(request, key)
        return wrap_object(obj, request_passes_test(has_valid_signature))
    return actual_decorator
Requires that the request contain a valid signature to gain access to a specified resource.
def validate_signature(request, secret_key):
    """Validates the signature associated with the given request.

    The request must carry a ``sig`` parameter (the signature) and a
    ``t`` parameter (a UTC unix timestamp).  The timestamp must fall
    within +/- 5 minutes of server time, which tolerates modest client
    clock skew.  Returns ``True`` only if the recomputed signature over
    the remaining parameters matches ``sig``.
    """
    # Extract the request parameters according to the HTTP method
    data = request.GET.copy()
    if request.method != 'GET':
        data.update(getattr(request, request.method, {}))

    # Make sure the request contains a signature
    sig = data.get('sig')
    if not sig:
        return False
    del data['sig']

    # Make sure the request contains a timestamp
    if not data.get('t'):
        return False
    timestamp = int(data['t'])
    del data['t']

    # Make sure the signature has not expired.  Accept a window of
    # current time +/- 5 minutes to cope with client/server clock skew.
    local_time = datetime.utcnow()
    remote_time = datetime.utcfromtimestamp(timestamp)
    delta = abs(local_time - remote_time)
    # BUG FIX: previously compared ``delta.seconds``, which ignores the
    # ``days`` component -- a signature days old whose sub-day remainder
    # was under 5 minutes would be accepted.  total_seconds() accounts
    # for the full difference.
    if delta.total_seconds() > 5 * 60:
        return False

    # Make sure the signature is valid
    return sig == calculate_signature(secret_key, data, timestamp)
Validates the signature associated with the given request.
def sample_wr(population, k):
    """Chooses k random elements (with replacement) from a population"""
    # Reconstructed: the previous text was corrupted by a duplicated,
    # spliced copy of the function body ("return resulf sample_wr...").
    # Also uses range() instead of Python-2-only xrange().
    n = len(population)
    _random, _int = random.random, int  # speed hack: local lookups
    result = [None] * k
    for i in range(k):
        result[i] = population[_int(_random() * n)]
    return result
Chooses k random elements (with replacement) from a population
def dynamodb_connection_factory():
    """Return the per-worker DynamoDB resource, creating it lazily.

    SessionStore runs on every page view, so the boto3 session and the
    DynamoDB resource are cached in module-level globals instead of
    being re-established per request.  boto3 ``resource('dynamodb')``
    objects are stateless (aside from security tokens), so sharing one
    per worker raises no major thread-safety concerns.
    """
    global _BOTO_SESSION
    global _DYNAMODB_CONN

    if _DYNAMODB_CONN:
        return _DYNAMODB_CONN

    logger.debug("Creating a DynamoDB connection.")
    if not _BOTO_SESSION:
        _BOTO_SESSION = Boto3Session(
            aws_access_key_id=AWS_ACCESS_KEY_ID,
            aws_secret_access_key=AWS_SECRET_ACCESS_KEY,
            region_name=AWS_REGION_NAME)
    _DYNAMODB_CONN = _BOTO_SESSION.resource('dynamodb')
    return _DYNAMODB_CONN
Since SessionStore is called for every single page view, we'd be establishing new connections so frequently that performance would be hugely impacted. We'll lazy-load this here on a per-worker basis. Since boto3.resource('dynamodb') objects are stateless (aside from security tokens), we're not too concerned about thread safety issues.
def load(self):
    """Load this session's data from DynamoDB.

    :rtype: dict
    :returns: The decoded session data; an empty dict (after creating a
        fresh session) when no row exists for the current key.
    """
    response = self.table.get_item(
        Key={'session_key': self.session_key},
        ConsistentRead=ALWAYS_CONSISTENT)
    item = response.get('Item')
    if item is None:
        # No stored session for this key: start a brand new one.
        self.create()
        return {}
    return self.decode(item['data'])
Loads session data from DynamoDB, runs it through the session data de-coder (base64->dict), sets ``self.session``. :rtype: dict :returns: The de-coded session data, as a dict.
def exists(self, session_key):
    """Check whether a session with the given key is stored in DynamoDB.

    :rtype: bool
    :returns: ``True`` if a session with the given key exists.
    """
    response = self.table.get_item(
        Key={'session_key': session_key},
        ConsistentRead=ALWAYS_CONSISTENT)
    return 'Item' in response
Checks to see if a session currently exists in DynamoDB. :rtype: bool :returns: ``True`` if a session with the given key exists in the DB, ``False`` if not.
def save(self, must_create=False):
    """Saves the current session data to the database.

    :keyword bool must_create: If ``True``, a ``CreateError`` exception
        will be raised if the saving operation doesn't create a *new*
        entry (as opposed to possibly updating an existing entry).
    :raises: ``CreateError`` if ``must_create`` is ``True`` and a
        session with the current session key already exists.
    """
    # If the save method is called with must_create equal to True, I'm
    # setting self._session_key equal to None and when
    # self.get_or_create_session_key is called the new
    # session_key will be created.
    if must_create:
        self._session_key = None

    self._get_or_create_session_key()
    update_kwargs = {
        'Key': {'session_key': self.session_key},
    }
    # '#data' aliases the 'data' attribute -- presumably because 'data'
    # collides with a DynamoDB reserved word; confirm before removing.
    attribute_names = {'#data': 'data'}
    attribute_values = {
        ':data': self.encode(self._get_session(no_load=must_create))
    }
    set_updates = ['#data = :data']
    if must_create:
        # Set condition to ensure session with same key doesnt exist
        update_kwargs['ConditionExpression'] = \
            DynamoConditionAttr('session_key').not_exists()
        attribute_values[':created'] = int(time.time())
        set_updates.append('created = :created')

    update_kwargs['UpdateExpression'] = 'SET ' + ','.join(set_updates)
    update_kwargs['ExpressionAttributeValues'] = attribute_values
    update_kwargs['ExpressionAttributeNames'] = attribute_names
    try:
        self.table.update_item(**update_kwargs)
    except ClientError as e:
        error_code = e.response['Error']['Code']
        if error_code == 'ConditionalCheckFailedException':
            # Conditional write failed: a session with this key already
            # exists -- surface it as Django's CreateError.
            raise CreateError
        raise
Saves the current session data to the database. :keyword bool must_create: If ``True``, a ``CreateError`` exception will be raised if the saving operation doesn't create a *new* entry (as opposed to possibly updating an existing entry). :raises: ``CreateError`` if ``must_create`` is ``True`` and a session with the current session key already exists.
def delete(self, session_key=None):
    """Delete the current session, or the one named by ``session_key``.

    :keyword str session_key: Optional session key to delete instead of
        the current one.  A no-op when neither is available.
    """
    key = session_key if session_key is not None else self.session_key
    if key is None:
        # No explicit key and no current session: nothing to delete.
        return
    self.table.delete_item(Key={'session_key': key})
Deletes the current session, or the one specified in ``session_key``. :keyword str session_key: Optionally, override the session key to delete.
def minimal_raw_seqs(self):
    """m.minimal_raw_seqs() -- Return minimal list of seqs that represent consensus

    Each two-base ambiguity code in self.oneletter is expanded into its
    two constituent bases (via one2two); plain letters go into both
    sequences unchanged.  Returns one sequence when no ambiguity codes
    are present, otherwise both expansions.
    """
    # Reconstructed: the previous text was corrupted by a duplicated,
    # spliced copy of the function.  Also fixes two defects: Python-2-only
    # dict.has_key(), and the final return which yielded seqs[0] twice
    # instead of both expansions.
    seqs = [[], []]
    for letter in self.oneletter:
        if letter in one2two:
            seqs[0].append(one2two[letter][0])
            seqs[1].append(one2two[letter][1])
        else:
            seqs[0].append(letter)
            seqs[1].append(letter)
    first = ''.join(seqs[0])
    second = ''.join(seqs[1])
    if first == second:
        return [first]
    return [first, second]
m.minimal_raw_seqs() -- Return minimal list of seqs that represent consensus
def _compute_oneletter(self):
    """m._compute_oneletter() -- [utility] Set the oneletter member variable

    Builds a one-letter-per-position consensus string: '.' for
    positions with under 0.25 bits of information, a lowercase letter
    for positions under 1.0 bit, and the top one or two positive-scoring
    bases (two-base combinations mapped through two2one) otherwise.
    """
    letters = []
    for i in range(self.width):
        downcase = None
        # Nearly uninformative position: render as '.'
        if self.bits[i] < 0.25:
            letters.append('.')
            continue
        # Weakly informative position: letter will be lowercased
        if self.bits[i] < 1.0:
            downcase = 'True'
        # Candidate bases: those scoring above background (ll > 0)
        tups = [(self.ll[i][x],x) for x in ACGT if self.ll[i][x] > 0.0]
        if not tups: #Kludge if all values are negative (can this really happen?)
            tups = [(self.ll[i][x],x) for x in ACGT]
            tups.sort()
            tups.reverse()
            tups = [tups[0]]
            downcase = 'True'
        tups.sort()   #Rank by LL
        tups.reverse()
        # Keep the top two bases at most; two bases map to an ambiguity
        # code via two2one, one base stands for itself.
        bases = [x[1] for x in tups[0:2]]
        bases.sort()
        if len(bases) == 2:
            L = two2one[''.join(bases)]
        else:
            L = bases[0]
        if downcase:
            L = L.lower()
        letters.append(L)
    self.oneletter = ''.join(letters)
m._compute_oneletter() -- [utility] Set the oneletter member variable
def _parse_seqs(self, LOS): self.nseqs = len(LOS) self.width = len(LOS[0]) for i in range(self.width): Dc = {'A': 0, 'C': 0, 'T': 0, 'G': 0, 'N': 0} for seq in LOS: key = seq[i] Dc[key] = Dc[key] + 1 del(Dc['N']) self.counts.append(Dc)
m._parse_seqs(LOS) -- [utility] Build a matrix of counts from a list of sequences
def _compute_ll(self):
    """m._compute_ll() -- [utility] Compute the log-likelihood matrix from the count matrix

    For every position, derives the base fractions (self.fracs, aliased
    as self.P), log2-probabilities (self.logP, floored at -100 for zero
    counts) and log2 likelihood-vs-background scores (self.ll, smoothed
    by self.bgscale background pseudocounts), then refreshes the derived
    members (bits, ambiguity scores, score extrema).
    """
    self.fracs = []
    self.logP = []
    self.ll = []
    for i in range(self.width):
        Dll = {'A': 0, 'C': 0, 'T': 0, 'G': 0}
        Df = {'A': 0, 'C': 0, 'T': 0, 'G': 0}
        DlogP= {'A': 0, 'C': 0, 'T': 0, 'G': 0}
        for key in self.counts[i].keys():
            #print i,key,self.counts[i][key],self.nseqs
            # Observed fraction of this base at position i
            Pij = self.counts[i][key] / float(self.nseqs)
            Df [key] = Pij
            # log2 of (background-smoothed fraction / background freq)
            Dll[key] = (math.log( (self.counts[i][key] + self.bgscale*self.background[key] ) / ((self.nseqs + self.bgscale) * self.background[key]) ) / math.log(2))
            if Pij > 0:
                DlogP[key] = math.log(Pij)/math.log(2)
            else:
                DlogP[key] = -100  #Near zero
        self.fracs.append(Df)
        self.logP.append (DlogP)
        self.ll.append (Dll)
    self.P = self.fracs
    self._compute_bits()
    self._compute_ambig_ll()
    self._maxscore()
m._compute_ll() -- [utility] Compute the log-likelihood matrix from the count matrix
def compute_from_ll(self,ll):
    """m.compute_from_ll(ll) -- Build motif from an inputed log-likelihood matrix

    Reverse-calculates the probability matrix and background
    frequencies that were used to construct the log-likelihood matrix,
    then refreshes every derived member (ambiguity scores, information
    bits, one-letter consensus, score extrema).
    """
    self.ll = ll
    self.width = len(ll)
    # Order matters: the background must be recovered first because
    # _compute_logP_from_ll reads self.background.
    self._compute_bg_from_ll()
    self._compute_logP_from_ll()
    self._compute_ambig_ll()
    self._compute_bits()
    self._compute_oneletter()
    self._maxscore()
m.compute_from_ll(ll) -- Build motif from an inputed log-likelihood matrix (This function reverse-calculates the probability matrix and background frequencies that were used to construct the log-likelihood matrix)
def _computeP(self):
    """m._computeP() -- [utility] Recompute the probability matrix from
    the internal log-probability matrix: P[i][L] = 2 ** logP[i][L].
    """
    self.P = [
        {L: math.pow(2.0, self.logP[i][L]) for L in ACGT}
        for i in range(self.width)
    ]
m._computeP() -- [utility] Compute the probability matrix (from the internal log-probability matrix)
def _compute_bits(self):
    """m._compute_bits() -- [utility] Per-position information content.

    Sets self.bits to a list with one value per position and
    self.totalbits to their sum.  Each position's content is its
    x*log2(x) sum (negative entropy) minus the background's, floored
    at zero.  (Docstrings elsewhere refer to 'totbits'; the attribute
    actually set is 'totalbits'.)
    """
    bits = []
    totbits = 0
    bgbits = 0
    bg = self.background
    # x*log2(x); summed over the four letters this is -entropy
    UNCERT = lambda x: x*math.log(x)/math.log(2.0)
    for letter in ACGT:
        bgbits = bgbits + UNCERT(bg[letter])
    for i in range(self.width):
        tot = 0
        for letter in ACGT:
            Pij = pow(2.0, self.logP[i][letter])
            tot = tot + UNCERT(Pij)
            #bit = Pij * self.ll[i][letter]
            #if bit > 0:
            #    tot = tot + bit
        #print tot, bgbits, tot-bgbits
        bits.append(max(0,tot-bgbits))
        totbits = totbits + max(0,tot-bgbits)
    self.bits = bits
    self.totalbits = totbits
m._compute_bits() -- [utility] Set m.totbits to the number of bits and m.bits to a list of bits at each position
def denoise(self,bitthresh=0.5):
    """m.denoise(bitthresh=0.5) -- Set low-information positions (below bitthresh) to Ns

    Columns whose information estimate falls below ``bitthresh`` have
    every log-likelihood entry zeroed (scoring like background / 'N'),
    then the motif is rebuilt from the modified matrix.
    """
    for i in range(self.width):
        tot = 0
        for letter in ACGT:
            # Base probability: from logP when available, otherwise
            # reconstructed from ll and the background frequencies.
            if self.logP:
                Pij = pow(2.0, self.logP[i][letter])
            else:
                Pij = pow(2.0, self.ll[i][letter]) * self.background[letter]
            if Pij > 0.01:
                bit = Pij * self.ll[i][letter]
                tot = tot + bit
        if tot < bitthresh:  #Zero Column
            for letter in ACGT:
                self.ll[i][letter] = 0.0
    # Rebuild all derived members from the edited matrix
    self.compute_from_ll(self.ll)
m.denoise(bitthresh=0.5) -- Set low-information positions (below bitthresh) to Ns
def giflogo(self,id,title=None,scale=0.8,info_str=''):
    """m.giflogo(id,title=None,scale=0.8) -- (Requires seqlogo package) Make a gif sequence logo

    Delegates to the module-level ``giflogo`` helper; ``info_str`` is
    accepted for interface compatibility but not forwarded.
    """
    args = (self, id, title, scale)
    return giflogo(*args)
m.giflogo(id,title=None,scale=0.8) -- (Requires seqlogo package) Make a gif sequence logo
def _compute_ambig_ll(self):
    """m._compute_ambig_ll() -- Extend the log-likelihood matrix with
    ambiguity codes.

    Each two-base code (e.g. 'S') scores as the max of its two
    constituent bases; 'N' and 'B' score 0.
    """
    for column in self.ll:
        for code in one2two.keys():
            pair = one2two[code]
            column[code] = max(column[pair[0]], column[pair[1]])
        column['N'] = 0.0
        column['B'] = 0.0
m._compute_ambig_ll() -- Extend log-likelihood matrix to include ambiguity codes e.g. What the score of a 'S'? Here we use the max of C and G.
def compute_from_text(self,text,beta=0.001):
    """m.compute_from_text(text,beta=0.001) -- Compute matrix values
    from a text string of ambiguity codes.

    Use the Motif_from_text utility instead to build motifs on the fly.

    :param text: IUPAC-style consensus text; '.' and '-' map to 'N'.
    :param beta: pseudocount weight forwarded to compute_from_counts.
    """
    # Three-base codes are handled by excluding their complement base:
    # B = not A, D = not C, V = not T, H = not G.
    prevlett = {'B':'A', 'D':'C', 'V':'T', 'H':'G'}
    countmat = []
    text = re.sub(r'[\.\-]','N',text.upper())
    for i in range(len(text)):
        D = {'A': 0, 'C': 0, 'T': 0, 'G': 0}
        letter = text[i]
        if letter in ['B', 'D', 'V', 'H']:  #B == no "A", etc...
            _omit = prevlett[letter]
            for L in ACGT:
                if L != _omit:
                    D[L] = 0.3333
        elif letter in one2two:  #Covers WSMYRK (was has_key -- Py2-only)
            for L in list(one2two[letter]):
                D[L] = 0.5
        elif letter == 'N':
            # Unknown base: use the background distribution
            for L in D.keys():
                D[L] = self.background[L]
        elif letter == '@':
            # Background with a slight bias toward 'A'
            for L in D.keys():
                D[L] = self.background[L]-(0.0001)
            D['A'] = D['A'] + 0.0004
        else:
            D[letter] = 1.0
        countmat.append(D)
    self.compute_from_counts(countmat,beta)
m.compute_from_text(text,beta=0.001) -- Compute matrix values from a text string of ambiguity codes. Use the Motif_from_text utility instead to build motifs on the fly.
def new_bg(self,bg):
    """m.new_bg(bg) -- Switch to the given ACGT background frequencies.

    Recovers a count matrix from the current log-probabilities, installs
    the new background, and rebuilds the motif (log-likelihoods etc.)
    against it.
    """
    counts = []
    for column in self.logP:
        counts.append(dict((base, math.pow(2.0, lp))
                           for base, lp in column.items()))
    self.background = bg
    self.compute_from_counts(counts, 0)
m.new_bg(bg) -- Change the ACGT background frequencies to those in the supplied dictionary. Recompute log-likelihood, etc. with the new background.
def compute_from_counts(self,countmat,beta=0):
    """m.compute_from_counts(countmat,beta=0) -- Utility function to
    build a motif object from a matrix of letter counts.

    Columns with fewer observations than the deepest column are padded
    with background-distributed pseudo-observations; ``beta`` adds an
    extra background pseudocount fraction before renormalizing.
    """
    self.counts = countmat
    self.width = len(countmat)
    self.bgscale = 0
    maxcount = 0
    #Determine Biggest column
    for col in countmat:
        tot = 0
        for v in col.values():
            tot = tot + v
        if tot > maxcount:
            maxcount = tot
    #Pad counts of remaining columns (shallower columns are topped up
    #with background-distributed observations so every column sums to
    #the same depth)
    for col in countmat:
        tot = 0
        for c in col.values():
            tot = tot + c
        pad = maxcount - tot
        for L in col.keys():
            col[L] = col[L] + pad * self.background[L]
    self.nseqs = maxcount
    nseqs = maxcount
    #Add pseudocounts
    if beta > 0:
        multfactor = {}  # NOTE(review): unused
        bgprob = self.background
        pcounts= {}
        for L in bgprob.keys():
            pcounts[L] = beta*bgprob[L]*nseqs
        for i in range(self.width):
            for L in countmat[i].keys():
                _t = (countmat[i][L] + pcounts[L])  #Add pseudo
                _t = _t / (1.0 + beta)              #Renormalize
                countmat[i][L] = _t
    #Build Motif
    self.counts = countmat
    self._compute_ll()
    self._compute_oneletter()
    self._maxscore()
m.compute_from_counts(countmat,beta=0) -- Utility function to build a motif object from a matrix of letter counts.
def _compute_logP_from_ll(self):
    """m._compute_logP_from_ll() -- Compute self's logP matrix from
    self.ll (log-likelihood).

    Since ll = log2(p/b), it follows that log2(p) = ll + log2(b).
    """
    log2 = lambda v: math.log(v) / math.log(2.)
    self.logP = [
        {L: self.ll[i][L] + log2(self.background[L]) for L in ACGT}
        for i in range(self.width)
    ]
m._compute_logP_from_ll() -- Compute self's logP matrix from the self.ll (log-likelihood)
def _print_ll(self):
    """m._print_ll() -- Print log-likelihood (scoring) matrix

    Emits a '#'-prefixed header row of column indices followed by one
    row per base (A, C, T, G).  NOTE: Python 2 print statements
    (trailing commas suppress newlines).
    """
    print "# ",
    for i in range(self.width):
        print " %4d "%i,
    print
    for L in ['A', 'C', 'T', 'G']:
        print "#%s "%L,
        for i in range(self.width):
            print "%8.3f "%self.ll[i][L],
        print
m._print_ll() -- Print log-likelihood (scoring) matrix
def _print_p(self):
    """m._print_p() -- Print probability (frequency) matrix

    Probabilities are reconstructed from the log2-probability matrix
    (2 ** logP).  NOTE: Python 2 print statements (trailing commas
    suppress newlines).
    """
    print "# ",
    for i in range(self.width):
        print " %4d "%i,
    print
    for L in ['A', 'C', 'T', 'G']:
        print "#%s "%L,
        for i in range(self.width):
            print "%8.3f "%math.pow(2,self.logP[i][L]),
        print
m._print_p() -- Print probability (frequency) matrix
def _print_counts(self):
    """m._print_counts() -- Print count matrix

    One '#'-prefixed header row of column indices, then one row per
    base (A, C, T, G).  NOTE: Python 2 print statements (trailing
    commas suppress newlines).
    """
    print "# ",
    for i in range(self.width):
        print " %4d "%i,
    print
    for L in ['A', 'C', 'T', 'G']:
        print "#%s "%L,
        for i in range(self.width):
            print "%8.3f "%self.counts[i][L],
        print
m._print_counts() -- Print count matrix
def _maxscore(self): total = 0 lowtot= 0 for lli in self.ll: total = total + max(lli.values()) lowtot= lowtot+ min(lli.values()) self.maxscore = total self.minscore = lowtot
m._maxscore() -- Sets self.maxscore and self.minscore
def _compute_threshold(self,z=2.0):
    """m._compute_threshold(z=2.0) -- For Motif objects assembled from a
    set of sequences, set self.threshold to (mean - z * stdev) over the
    first reported match score of each original input sequence."""
    tally = []
    for seq in self.seqs:
        matches, endpoints, scores = self.scan(seq, -100)
        tally.append(scores[0])
    ave, std = avestd(tally)
    self.threshold = ave - z * std
m._compute_threshold(z=2.0) -- For Motif objects assembled from a set of sequence, compute a self.threshold with a z-score based on the distribution of scores in among the original input sequences.
def bestscanseq(self,seq):
    """m.bestscanseq(seq) -- Return (score, sequence) of the best match
    to the motif in the supplied sequence."""
    matches, endpoints, scores = self.scan(seq, -100)
    # max over (score, match) pairs replaces the old zip+sort+[-1],
    # which fails on Python 3 where zip() returns an iterator with no
    # .sort(); for a total order the selected pair is identical.
    bestscore, bestseq = max(zip(scores, matches))
    return bestscore, bestseq
m.bestscanseq(seq) -- Return score,sequence of the best match to the motif in the supplied sequence
def bestscan(self,seq):
    """m.bestscan(seq) -- Return the score of the best match to the
    motif in the supplied sequence, or -100 when there are no matches."""
    matches, endpoints, scores = self.scan(seq, -100)
    if not scores:
        return -100
    return max(scores)
m.bestscan(seq) -- Return the score of the best match to the motif in the supplied sequence
def matchstartorient(self,seq, factor=0.7):
    """m.matchstartorient(seq, factor=0.7) -- Return a list of
    (start, orientation) pairs for motif matches in the sequence.

    ``factor`` is multiplied by m.maxscore to derive the match
    threshold.  Orientation is True when the forward-strand match
    scores at least as well as its reverse complement.
    """
    results = []
    txts, endpoints, scores = self.scan(seq, factor=factor)
    for txt, (start, stop) in zip(txts, endpoints):
        forward = self.bestscore(txt, 1)
        reverse = self.bestscore(revcomplement(txt), 1)
        results.append((start, forward >= reverse))
    return results
m.matchstartorient(seq, factor=0.7) -- Returns a list of (start,orientation) coordinate pairs of matches to the motif in the supplied sequence. Factor is multiplied by m.maxscore to get a match threshold.
def scan(self, seq, threshold = '', factor=0.7):
    """m.scan(seq, threshold='', factor=0.7) -- Scan the sequence for
    motif matches.

    Returns three parallel lists: matching subsequences, endpoints, and
    scores.  ``factor`` is multiplied by m.maxscore to derive a match
    threshold when none is supplied.  Sequences shorter than the motif
    are routed to a dedicated code path.
    """
    if len(seq) >= self.width:
        return self._scan(seq, threshold, factor=factor)
    return self._scan_smaller(seq, threshold)
m.scan(seq, threshold = '', factor=0.7) -- Scan the sequence. Returns three lists: matching sequences, endpoints, and scores. The value of 'factor' is multiplied by m.maxscore to get a match threshold if none is supplied
def scansum(self,seq,threshold = -1000):
    """m.scansum(seq,threshold = -1000) -- Sum of scores over every window in the sequence.

    Scores both strands at every offset, keeping the better strand per
    window.  Returns (total score, #windows >= threshold, average
    score, log of summed exp(score)).

    NOTE(review): math.log(etotal) raises ValueError when no window
    meets the threshold (etotal == 0) -- confirm callers guarantee a hit.
    """
    ll = self.ll
    width = self.width
    # Pair each motif position with its mirror position.  Materialized
    # as a list so it can be re-iterated for every window: a bare zip()
    # iterator is exhausted after the first window on Python 3.  (Also
    # removed an unused `sum = 0` that shadowed the builtin.)
    width_ranges = list(zip(range(width), range(width-1, -1, -1)))
    seqcomp = seq.translate(revcompTBL)
    total = 0
    hits = 0
    etotal = 0
    for offset in range(len(seq)-width+1):
        total_f = 0
        total_r = 0
        for i, ir in width_ranges:
            pos = offset+i
            total_f = total_f + ll[i][seq[pos]]
            # NOTE(review): scores the complement strand with ll[i],
            # not the mirrored ll[ir] (ir is unused) -- preserved as-is;
            # confirm this is the intended reverse-strand scoring.
            total_r = total_r + ll[i][seqcomp[pos]]
        total_max = max(total_f, total_r)
        if total_max >= threshold:
            total = total + total_max
            etotal = etotal + math.exp(total_max)
            hits = hits + 1
    if not hits:
        ave = 0
    else:
        ave = float(total)/float(hits)
    return (total, hits, ave, math.log(etotal))
m.scansum(seq,threshold = -1000) -- Sum of scores over every window in the sequence. Returns total, number of matches above threshold, average score, sum of exp(score)
def score(self, seq, fwd='Y'):
    """m.score(seq, fwd='Y') -- Score the first w bases of the sequence
    against the motif, where w is the motif width."""
    matches, endpoints, scores = self._scan(
        seq, threshold=-100000, forw_only=fwd)
    return scores[0]
m.score(seq, fwd='Y') -- Returns the score of the first w-bases of the sequence, where w is the motif width.
def bestscore(self,seq, fwd=''):
    """m.bestscore(seq, fwd='') -- Return the score of the best-matching
    subsequence in seq, or -1000 when there are no matches."""
    matches, endpoints, scores = self._scan(
        seq, threshold=-100000, forw_only=fwd)
    return max(scores) if scores else -1000
m.bestscore(seq, fwd='') -- Returns the score of the best matching subsequence in seq.