Dataset columns (name: type, value range):

- id: int32, range 0 to 252k
- repo: string, length 7 to 55
- path: string, length 4 to 127
- func_name: string, length 1 to 88
- original_string: string, length 75 to 19.8k
- language: string, 1 distinct value
- code: string, length 75 to 19.8k (identical to original_string in the rows shown)
- code_tokens: sequence
- docstring: string, length 3 to 17.3k
- docstring_tokens: sequence
- sha: string, length 40
- url: string, length 87 to 242
248,400
dariosky/wfcli
wfcli/tossl.py
WebfactionWebsiteToSsl.create_le_verification_app
def create_le_verification_app(self):
    """ Create the let's encrypt app to verify the ownership of the domain """
    if self.LETSENCRYPT_VERIFY_APP_NAME in self._apps:
        logger.debug(
            "The LE verification APP already exists as %s" % self.LETSENCRYPT_VERIFY_APP_NAME
        )
        verification_app = self._apps[self.LETSENCRYPT_VERIFY_APP_NAME]
    else:
        logger.info("Creating the identity-verification app for let's encrypt")
        verification_app = self.api.create_app(
            self.LETSENCRYPT_VERIFY_APP_NAME,
            'static_php70',
        )
        self._apps[self.LETSENCRYPT_VERIFY_APP_NAME] = verification_app

    # LE uses the .well-known subfolder of the domain to do its verifications.
    # We mount the app on the .well-known path, so we apply a hack to serve
    # the app-folder/.well-known on root.
    app_root = os.path.join('~/webapps', self.LETSENCRYPT_VERIFY_APP_NAME)
    well_known_folder = os.path.join(app_root, '.well-known')
    if not is_link(well_known_folder):
        logger.info("Preparing static app for the verification")
        run('ln -s {app_root} {well_known_folder}'.format(**locals()))
    return verification_app
python
[ "def", "create_le_verification_app", "(", "self", ")", ":", "if", "self", ".", "LETSENCRYPT_VERIFY_APP_NAME", "in", "self", ".", "_apps", ":", "logger", ".", "debug", "(", "\"The LE verification APP already exists as %s\"", "%", "self", ".", "LETSENCRYPT_VERIFY_APP_NAME", ")", "verification_app", "=", "self", ".", "_apps", "[", "self", ".", "LETSENCRYPT_VERIFY_APP_NAME", "]", "else", ":", "logger", ".", "info", "(", "\"Creating the identity-verification app for let's encrypt\"", ")", "verification_app", "=", "self", ".", "api", ".", "create_app", "(", "self", ".", "LETSENCRYPT_VERIFY_APP_NAME", ",", "'static_php70'", ",", ")", "self", ".", "_apps", "[", "self", ".", "LETSENCRYPT_VERIFY_APP_NAME", "]", "=", "verification_app", "# LE use the .well-known subfolder of the domain to do its verifications.", "# we will mount the app in the .well-known path, so we apply an hack to serve", "# the app-folder/.well-known on root", "app_root", "=", "os", ".", "path", ".", "join", "(", "'~/webapps'", ",", "self", ".", "LETSENCRYPT_VERIFY_APP_NAME", ")", "well_known_folder", "=", "os", ".", "path", ".", "join", "(", "app_root", ",", "'.well-known'", ")", "if", "not", "is_link", "(", "well_known_folder", ")", ":", "logger", ".", "info", "(", "\"Preparing static app for the verification\"", ")", "run", "(", "'ln -s {app_root} {well_known_folder}'", ".", "format", "(", "*", "*", "locals", "(", ")", ")", ")", "return", "verification_app" ]
Create the let's encrypt app to verify the ownership of the domain
[ "Create", "the", "let", "s", "encrypt", "app", "to", "verify", "the", "ownership", "of", "the", "domain" ]
87a9ed30dbd456f801135a55099f0541b0614ccb
https://github.com/dariosky/wfcli/blob/87a9ed30dbd456f801135a55099f0541b0614ccb/wfcli/tossl.py#L239-L263
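For illustration, a minimal, self-contained sketch of why the symlink trick above works, using a temporary directory in place of ~/webapps/<app> (all paths here are placeholders, and it assumes a POSIX filesystem):

import os
import tempfile

app_root = tempfile.mkdtemp()                      # stands in for ~/webapps/<app>
well_known = os.path.join(app_root, '.well-known')
os.symlink(app_root, well_known)                   # same link run('ln -s ...') creates

# A challenge file written under app_root/.well-known/... lands in app_root/...,
# which is what the app mounted on the /.well-known URL path actually serves.
os.makedirs(os.path.join(well_known, 'acme-challenge'))
with open(os.path.join(well_known, 'acme-challenge', 'token'), 'w') as f:
    f.write('challenge-response')

print(os.path.exists(os.path.join(app_root, 'acme-challenge', 'token')))  # True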
248,401
dariosky/wfcli
wfcli/tossl.py
WebfactionWebsiteToSsl.sync_certificates
def sync_certificates(self, subdomains=None):
    """ Check all certificates available from acme.sh on the host
        and sync them with the Webfaction certificates """
    result = run(".acme.sh/acme.sh --list", quiet=True)
    logger.info("Syncing Webfaction certificates")
    for acme_certificate_description in result.split('\n')[1:]:
        main_domain = acme_certificate_description.split()[0]
        if subdomains and main_domain not in subdomains:
            continue
        if exists(os.path.join("~/.acme.sh/", main_domain)):
            certificate_cer = self.get_remote_content(
                os.path.join("~/.acme.sh/", main_domain, main_domain + ".cer")
            )
            certificate_key = self.get_remote_content(
                os.path.join("~/.acme.sh/", main_domain, main_domain + ".key")
            )
            certificate_ca = self.get_remote_content(
                os.path.join("~/.acme.sh/", main_domain, "ca.cer")
            )
            certificate_name = self.slugify(main_domain)
            certificate = self._certificates.get(certificate_name)
            if (certificate is None
                    or certificate['certificate'] != certificate_cer
                    or certificate['private_key'] != certificate_key
                    or certificate['intermediates'] != certificate_ca):
                new_certificate = dict(
                    name=certificate_name,
                    certificate=certificate_cer,
                    private_key=certificate_key,
                    intermediates=certificate_ca,
                )
                if certificate is None:
                    logger.info("Creating new certificate for %s" % main_domain)
                    self.api.create_certificate(new_certificate)
                else:
                    logger.info("Updating certificate for %s" % main_domain)
                    self.api.update_certificate(new_certificate)
                self._certificates[certificate_name] = new_certificate
python
[ "def", "sync_certificates", "(", "self", ",", "subdomains", "=", "None", ")", ":", "result", "=", "run", "(", "\".acme.sh/acme.sh --list\"", ",", "quiet", "=", "True", ")", "logger", ".", "info", "(", "\"Syncing Webfaction certificates\"", ")", "for", "acme_certificate_description", "in", "result", ".", "split", "(", "'\\n'", ")", "[", "1", ":", "]", ":", "main_domain", "=", "acme_certificate_description", ".", "split", "(", ")", "[", "0", "]", "if", "subdomains", "and", "main_domain", "not", "in", "subdomains", ":", "continue", "if", "exists", "(", "os", ".", "path", ".", "join", "(", "\"~/.acme.sh/\"", ",", "main_domain", ")", ")", ":", "certificate_cer", "=", "self", ".", "get_remote_content", "(", "os", ".", "path", ".", "join", "(", "\"~/.acme.sh/\"", ",", "main_domain", ",", "main_domain", "+", "\".cer\"", ")", ")", "certificate_key", "=", "self", ".", "get_remote_content", "(", "os", ".", "path", ".", "join", "(", "\"~/.acme.sh/\"", ",", "main_domain", ",", "main_domain", "+", "\".key\"", ")", ")", "certificate_ca", "=", "self", ".", "get_remote_content", "(", "os", ".", "path", ".", "join", "(", "\"~/.acme.sh/\"", ",", "main_domain", ",", "\"ca.cer\"", ")", ")", "certificate_name", "=", "self", ".", "slugify", "(", "main_domain", ")", "certificate", "=", "self", ".", "_certificates", ".", "get", "(", "certificate_name", ")", "if", "(", "certificate", "is", "None", "or", "certificate", "[", "'certificate'", "]", "!=", "certificate_cer", "or", "certificate", "[", "'private_key'", "]", "!=", "certificate_key", "or", "certificate", "[", "'intermediates'", "]", "!=", "certificate_ca", ")", ":", "new_certificate", "=", "dict", "(", "name", "=", "certificate_name", ",", "certificate", "=", "certificate_cer", ",", "private_key", "=", "certificate_key", ",", "intermediates", "=", "certificate_ca", ",", ")", "if", "certificate", "is", "None", ":", "logger", ".", "info", "(", "\"Creating new certificate for %s\"", "%", "main_domain", ")", "self", ".", "api", ".", "create_certificate", "(", "new_certificate", ")", "else", ":", "logger", ".", "info", "(", "\"Updating certificate for %s\"", "%", "main_domain", ")", "self", ".", "api", ".", "update_certificate", "(", "new_certificate", ")", "self", ".", "_certificates", "[", "certificate_name", "]", "=", "new_certificate" ]
Check all certificates available from acme.sh on the host and sync them with the Webfaction certificates
[ "Check", "all", "certificates", "available", "in", "acme", "in", "the", "host", "and", "sync", "them", "with", "the", "webfaction", "certificates" ]
87a9ed30dbd456f801135a55099f0541b0614ccb
https://github.com/dariosky/wfcli/blob/87a9ed30dbd456f801135a55099f0541b0614ccb/wfcli/tossl.py#L323-L362
248,402
dariosky/wfcli
wfcli/tossl.py
WebfactionWebsiteToSsl.get_remote_content
def get_remote_content(filepath):
    """ A handy wrapper to get a remote file's content """
    with hide('running'):
        temp = BytesIO()
        get(filepath, temp)
        content = temp.getvalue().decode('utf-8')
    return content.strip()
python
[ "def", "get_remote_content", "(", "filepath", ")", ":", "with", "hide", "(", "'running'", ")", ":", "temp", "=", "BytesIO", "(", ")", "get", "(", "filepath", ",", "temp", ")", "content", "=", "temp", ".", "getvalue", "(", ")", ".", "decode", "(", "'utf-8'", ")", "return", "content", ".", "strip", "(", ")" ]
A handy wrapper to get a remote file's content
[ "A", "handy", "wrapper", "to", "get", "a", "remote", "file", "content" ]
87a9ed30dbd456f801135a55099f0541b0614ccb
https://github.com/dariosky/wfcli/blob/87a9ed30dbd456f801135a55099f0541b0614ccb/wfcli/tossl.py#L365-L371
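The get() and hide() calls above are the Fabric 1.x API. A hedged usage sketch of the same in-memory download pattern; the host and remote path below are illustrative placeholders, not values from the repository:

# Sketch only: assumes Fabric 1.x (fabric.api) and a reachable SSH host.
from io import BytesIO

from fabric.api import env, get, hide

env.host_string = 'user@server.example.com'    # hypothetical SSH target

with hide('running'):
    buf = BytesIO()
    get('~/.acme.sh/example.com/ca.cer', buf)  # download into memory, no temp file
    text = buf.getvalue().decode('utf-8').strip()
print(text[:64])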
248,403
dariosky/wfcli
wfcli/tossl.py
WebfactionWebsiteToSsl.get_main_domain
def get_main_domain(self, website):
    """ Given a list of subdomains, return their main domain.

        If the subdomains span multiple domains, we cannot have
        a single website: it should be split.
    """
    subdomains = website['subdomains']
    main_domains = set()
    for sub in subdomains:
        for d in self._domains:
            if sub == d or sub.endswith("." + d):
                main_domains.add(d)
    if len(main_domains) > 1:
        logger.error(
            "The secure site %s covers multiple domains, it should be split" % website['name']
        )
        exit(1)
    elif not main_domains:
        logger.error("We cannot find the main domain for %s" % website['name'])
    return list(main_domains)[0]
python
[ "def", "get_main_domain", "(", "self", ",", "website", ")", ":", "subdomains", "=", "website", "[", "'subdomains'", "]", "main_domains", "=", "set", "(", ")", "for", "sub", "in", "subdomains", ":", "for", "d", "in", "self", ".", "_domains", ":", "if", "sub", "==", "d", "or", "sub", ".", "endswith", "(", "\".\"", "+", "d", ")", ":", "main_domains", ".", "add", "(", "d", ")", "if", "len", "(", "main_domains", ")", ">", "1", ":", "logger", ".", "error", "(", "\"The secure site %s cover multiple domains, it should be splitted\"", "%", "website", "[", "'name'", "]", ")", "exit", "(", "1", ")", "elif", "not", "main_domains", ":", "logger", ".", "error", "(", "\"We cannot find the main domain for %s\"", "%", "website", "[", "'name'", "]", ")", "return", "list", "(", "main_domains", ")", "[", "0", "]" ]
Given a list of subdomains, return their main domain. If the subdomains span multiple domains, we cannot have a single website: it should be split.
[ "Given", "a", "list", "of", "subdomains", "return", "the", "main", "domain", "of", "them", "If", "the", "subdomain", "are", "across", "multiple", "domain", "then", "we", "cannot", "have", "a", "single", "website", "it", "should", "be", "splitted" ]
87a9ed30dbd456f801135a55099f0541b0614ccb
https://github.com/dariosky/wfcli/blob/87a9ed30dbd456f801135a55099f0541b0614ccb/wfcli/tossl.py#L404-L422
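A minimal, self-contained rerun of the suffix-matching logic above; the domain lists are made-up examples, not data from the repository:

def main_domains_for(subdomains, known_domains):
    found = set()
    for sub in subdomains:
        for d in known_domains:
            # a subdomain matches when it equals the domain or ends with "." + domain
            if sub == d or sub.endswith("." + d):
                found.add(d)
    return found

print(main_domains_for(['www.example.com', 'example.com'], ['example.com', 'other.org']))
# {'example.com'}
print(main_domains_for(['www.example.com', 'blog.other.org'], ['example.com', 'other.org']))
# {'example.com', 'other.org'} -> would trigger the "should be split" error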
248,404
ryanjdillon/pyotelem
pyotelem/glides.py
get_stroke_freq
def get_stroke_freq(Ax, Az, fs_a, nperseg, peak_thresh, stroke_ratio=None):
    '''Determine stroke frequency to use as a cutoff for filtering

    Args
    ----
    Ax: numpy.ndarray, shape (n,)
        x-axis accelerometer data (longitudinal)
    Ay: numpy.ndarray, shape (n,)
        y-axis accelerometer data (lateral)
    Az: numpy.ndarray, shape (n,)
        z-axis accelerometer data (dorso-ventral)
    fs_a: float
        sampling frequency (i.e. number of samples per second)
    nperseg: int
        length of each segment (i.e. number of samples per frequency band in
        the PSD calculation). Defaults to 512 (the scipy.signal.welch()
        default is 256)
    peak_thresh: float
        PSD power level threshold. Only peaks over this threshold are returned.

    Returns
    -------
    cutoff_frq: float
        cutoff frequency of the signal (Hz) to be used for low/high-pass filtering
    stroke_frq: float
        frequency of the dominant wavelength in the signal
    stroke_ratio: float

    Notes
    -----
    Mainly steady swimming occurs during the descent and ascent phases. When
    calculated for the whole dive it may be difficult to differentiate the
    peak at which the stroking rate occurs, as there are movements other than
    steady swimming.

    Here the power spectra of the longitudinal and dorso-ventral accelerometer
    signals are calculated during descents and ascents to determine the
    dominant stroke frequency for each animal in each phase, using nperseg
    samples per segment (default 512) and a sampling rate of fs_a.

    Output: S is the amount of power in each particular frequency (f)
    '''
    import numpy

    from . import dsp
    from . import utils
    from .plots import plotdsp

    # Axes to be used for determining `stroke_frq`
    stroke_axes = [(0, 'x', 'dorsa-ventral', Ax),
                   (2, 'z', 'lateral', Az)]

    # Lists for appending values from each axis
    cutoff_frqs = list()
    stroke_frqs = list()
    stroke_ratios = list()

    # Iterate over axes in `stroke_axes` list appending output to above lists
    for i, i_alph, name, data in stroke_axes:
        frqs, S, _, _ = dsp.calc_PSD_welch(data, fs_a, nperseg)

        # Find index positions of local maxima and minima in PSD
        delta = S.max() / 1000
        max_ind, min_ind = dsp.simple_peakfinder(range(len(S)), S, delta)

        max0 = max_ind[0]

        # TODO hack fix, improve later
        try:
            min0 = min_ind[0]
        except:
            min0 = None
            stroke_ratio = 0.4

        stroke_frq = frqs[max0]

        # Prompt user for `cutoff_frq` value after inspecting PSD plot
        title = 'PSD - {} axis (n={}), {}'.format(i_alph, i, name)

        # Plot power spectrum against frequency distribution
        plotdsp.plot_welch_peaks(frqs, S, max_ind, title=title)

        # Get user input of cutoff frequency identified off plots
        cutoff_frq = utils.recursive_input('cutoff frequency', float)

        # Append values for axis to list
        cutoff_frqs.append(cutoff_frq)
        stroke_frqs.append(stroke_frq)
        stroke_ratios.append(stroke_ratio)

    # Average values for all axes
    cutoff_frq = float(numpy.mean(cutoff_frqs))
    stroke_frq = float(numpy.mean(stroke_frqs))

    # Handle exception of manual selection when `stroke_ratio == None`
    # TODO with fix
    try:
        stroke_ratio = float(numpy.mean(stroke_ratios))
    except:
        stroke_ratio = None

    return cutoff_frq, stroke_frq, stroke_ratio
python
[ "def", "get_stroke_freq", "(", "Ax", ",", "Az", ",", "fs_a", ",", "nperseg", ",", "peak_thresh", ",", "stroke_ratio", "=", "None", ")", ":", "import", "numpy", "from", ".", "import", "dsp", "from", ".", "import", "utils", "from", ".", "plots", "import", "plotdsp", "# Axes to be used for determining `stroke_frq`", "stroke_axes", "=", "[", "(", "0", ",", "'x'", ",", "'dorsa-ventral'", ",", "Ax", ")", ",", "(", "2", ",", "'z'", ",", "'lateral'", ",", "Az", ")", "]", "# Lists for appending values from each axis", "cutoff_frqs", "=", "list", "(", ")", "stroke_frqs", "=", "list", "(", ")", "stroke_ratios", "=", "list", "(", ")", "# Iterate over axes in `stroke_axes` list appending output to above lists", "for", "i", ",", "i_alph", ",", "name", ",", "data", "in", "stroke_axes", ":", "frqs", ",", "S", ",", "_", ",", "_", "=", "dsp", ".", "calc_PSD_welch", "(", "data", ",", "fs_a", ",", "nperseg", ")", "# Find index positions of local maxima and minima in PSD", "delta", "=", "S", ".", "max", "(", ")", "/", "1000", "max_ind", ",", "min_ind", "=", "dsp", ".", "simple_peakfinder", "(", "range", "(", "len", "(", "S", ")", ")", ",", "S", ",", "delta", ")", "max0", "=", "max_ind", "[", "0", "]", "# TODO hack fix, improve later", "try", ":", "min0", "=", "min_ind", "[", "0", "]", "except", ":", "min0", "=", "None", "stroke_ratio", "=", "0.4", "stroke_frq", "=", "frqs", "[", "max0", "]", "# Prompt user for `cutoff_frq` value after inspecting PSD plot", "title", "=", "'PSD - {} axis (n={}), {}'", ".", "format", "(", "i_alph", ",", "i", ",", "name", ")", "# Plot power spectrum against frequency distribution", "plotdsp", ".", "plot_welch_peaks", "(", "frqs", ",", "S", ",", "max_ind", ",", "title", "=", "title", ")", "# Get user input of cutoff frequency identified off plots", "cutoff_frq", "=", "utils", ".", "recursive_input", "(", "'cutoff frequency'", ",", "float", ")", "# Append values for axis to list", "cutoff_frqs", ".", "append", "(", "cutoff_frq", ")", "stroke_frqs", ".", "append", "(", "stroke_frq", ")", "stroke_ratios", ".", "append", "(", "stroke_ratio", ")", "# Average values for all axes", "cutoff_frq", "=", "float", "(", "numpy", ".", "mean", "(", "cutoff_frqs", ")", ")", "stroke_frq", "=", "float", "(", "numpy", ".", "mean", "(", "stroke_frqs", ")", ")", "# Handle exception of manual selection when `stroke_ratio == None`", "# TODO with fix", "try", ":", "stroke_ratio", "=", "float", "(", "numpy", ".", "mean", "(", "stroke_ratios", ")", ")", "except", ":", "stroke_ratio", "=", "None", "return", "cutoff_frq", ",", "stroke_frq", ",", "stroke_ratio" ]
Determine stroke frequency to use as a cutoff for filtering

Args
----
Ax: numpy.ndarray, shape (n,)
    x-axis accelerometer data (longitudinal)
Ay: numpy.ndarray, shape (n,)
    y-axis accelerometer data (lateral)
Az: numpy.ndarray, shape (n,)
    z-axis accelerometer data (dorso-ventral)
fs_a: float
    sampling frequency (i.e. number of samples per second)
nperseg: int
    length of each segment (i.e. number of samples per frequency band in the
    PSD calculation). Defaults to 512 (the scipy.signal.welch() default is 256)
peak_thresh: float
    PSD power level threshold. Only peaks over this threshold are returned.

Returns
-------
cutoff_frq: float
    cutoff frequency of the signal (Hz) to be used for low/high-pass filtering
stroke_frq: float
    frequency of the dominant wavelength in the signal
stroke_ratio: float

Notes
-----
Mainly steady swimming occurs during the descent and ascent phases. When
calculated for the whole dive it may be difficult to differentiate the peak at
which the stroking rate occurs, as there are movements other than steady
swimming.

Here the power spectra of the longitudinal and dorso-ventral accelerometer
signals are calculated during descents and ascents to determine the dominant
stroke frequency for each animal in each phase, using nperseg samples per
segment (default 512) and a sampling rate of fs_a.

Output: S is the amount of power in each particular frequency (f)
[ "Determine", "stroke", "frequency", "to", "use", "as", "a", "cutoff", "for", "filtering" ]
816563a9c3feb3fa416f1c2921c6b75db34111ad
https://github.com/ryanjdillon/pyotelem/blob/816563a9c3feb3fa416f1c2921c6b75db34111ad/pyotelem/glides.py#L2-L104
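A self-contained sketch of the Welch PSD step that dsp.calc_PSD_welch wraps here; the scipy call is the standard library API, while the 2 Hz signal and sampling rate below are made-up example values:

import numpy as np
from scipy.signal import welch

fs_a = 16.0                                   # samples per second
t = np.arange(0, 60, 1 / fs_a)
acc = np.sin(2 * np.pi * 2.0 * t) + 0.1 * np.random.randn(t.size)

frqs, S = welch(acc, fs=fs_a, nperseg=512)    # nperseg=512 as in the docstring
print('dominant frequency: %.2f Hz' % frqs[S.argmax()])  # ~2.00 Hz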
248,405
ryanjdillon/pyotelem
pyotelem/glides.py
get_stroke_glide_indices
def get_stroke_glide_indices(A_g_hf, fs_a, J, t_max):
    '''Get stroke and glide indices from high-pass accelerometer data

    Args
    ----
    A_g_hf: 1-D ndarray
        Animal frame triaxial accelerometer matrix at sampling rate fs_a.
    fs_a: int
        Number of accelerometer samples per second
    J: float
        Frequency threshold for detecting a fluke stroke in m/s^2. If J is
        not given, fluke strokes will not be located but the rotations signal
        (pry) will be computed.
    t_max: int
        Maximum duration allowable for a fluke stroke in seconds. A fluke
        stroke is counted whenever there is a cyclic variation in the pitch
        deviation with peak-to-peak magnitude greater than +/-J and
        consistent with a fluke stroke duration of less than t_max seconds,
        e.g., for Mesoplodon choose t_max=4.

    Returns
    -------
    GL: 1-D ndarray
        Matrix containing the start time (first column) and end time (2nd
        column) of any glides (i.e., no zero crossings in t_max or more
        seconds). Times are in seconds.

    Note
    ----
    If no J or t_max is given, J=[], or t_max=[], GL is returned as None
    '''
    import numpy

    from . import dsp

    # Check if input array is 1-D
    if A_g_hf.ndim > 1:
        raise IndexError('A_g_hf multidimensional: Glide index determination '
                         'requires 1-D acceleration array as input')

    # Convert t_max to number of samples
    n_max = t_max * fs_a

    # Find zero-crossing start/stops in pry(:,n), rotations around n axis.
    zc = dsp.findzc(A_g_hf, J, n_max / 2)

    # Find glides - any interval between zero crossings greater than `t_max`
    ind = numpy.where(zc[1:, 0] - zc[0:-1, 1] > n_max)[0]
    gl_ind = numpy.vstack([zc[ind, 0] - 1, zc[ind + 1, 1] + 1]).T

    # Compute mean index position of glide, only include sections with jerk < J
    gl_mean_idx = numpy.round(numpy.mean(gl_ind, 1)).astype(int)
    gl_ind = numpy.round(gl_ind).astype(int)

    for i in range(len(gl_mean_idx)):
        col = range(gl_mean_idx[i], gl_ind[i, 0], -1)
        test = numpy.where(numpy.isnan(A_g_hf[col]))[0]
        if test.size != 0:
            gl_mean_idx[i] = numpy.nan
            gl_ind[i, 0] = numpy.nan
            gl_ind[i, 1] = numpy.nan
        else:
            over_J1 = numpy.where(abs(A_g_hf[col]) >= J)[0][0]
            gl_ind[i, 0] = gl_mean_idx[i] - over_J1 + 1

            col = range(gl_mean_idx[i], gl_ind[i, 1])
            over_J2 = numpy.where(abs(A_g_hf[col]) >= J)[0][0]
            gl_ind[i, 1] = gl_mean_idx[i] + over_J2 - 1

    GL = gl_ind
    GL = GL[numpy.where(GL[:, 1] - GL[:, 0] > n_max / 2)[0], :]

    return GL
python
[ "def", "get_stroke_glide_indices", "(", "A_g_hf", ",", "fs_a", ",", "J", ",", "t_max", ")", ":", "import", "numpy", "from", ".", "import", "dsp", "# Check if input array is 1-D", "if", "A_g_hf", ".", "ndim", ">", "1", ":", "raise", "IndexError", "(", "'A_g_hf multidimensional: Glide index determination '", "'requires 1-D acceleration array as input'", ")", "# Convert t_max to number of samples", "n_max", "=", "t_max", "*", "fs_a", "# Find zero-crossing start/stops in pry(:,n), rotations around n axis.", "zc", "=", "dsp", ".", "findzc", "(", "A_g_hf", ",", "J", ",", "n_max", "/", "2", ")", "# find glides - any interval between zeros crossings greater than `t_max`", "ind", "=", "numpy", ".", "where", "(", "zc", "[", "1", ":", ",", "0", "]", "-", "zc", "[", "0", ":", "-", "1", ",", "1", "]", ">", "n_max", ")", "[", "0", "]", "gl_ind", "=", "numpy", ".", "vstack", "(", "[", "zc", "[", "ind", ",", "0", "]", "-", "1", ",", "zc", "[", "ind", "+", "1", ",", "1", "]", "+", "1", "]", ")", ".", "T", "# Compute mean index position of glide, Only include sections with jerk < J", "gl_mean_idx", "=", "numpy", ".", "round", "(", "numpy", ".", "mean", "(", "gl_ind", ",", "1", ")", ")", ".", "astype", "(", "int", ")", "gl_ind", "=", "numpy", ".", "round", "(", "gl_ind", ")", ".", "astype", "(", "int", ")", "for", "i", "in", "range", "(", "len", "(", "gl_mean_idx", ")", ")", ":", "col", "=", "range", "(", "gl_mean_idx", "[", "i", "]", ",", "gl_ind", "[", "i", ",", "0", "]", ",", "-", "1", ")", "test", "=", "numpy", ".", "where", "(", "numpy", ".", "isnan", "(", "A_g_hf", "[", "col", "]", ")", ")", "[", "0", "]", "if", "test", ".", "size", "!=", "0", ":", "gl_mean_idx", "[", "i", "]", "=", "numpy", ".", "nan", "gl_ind", "[", "i", ",", "0", "]", "=", "numpy", ".", "nan", "gl_ind", "[", "i", ",", "1", "]", "=", "numpy", ".", "nan", "else", ":", "over_J1", "=", "numpy", ".", "where", "(", "abs", "(", "A_g_hf", "[", "col", "]", ")", ">=", "J", ")", "[", "0", "]", "[", "0", "]", "gl_ind", "[", "i", ",", "0", "]", "=", "gl_mean_idx", "[", "i", "]", "-", "over_J1", "+", "1", "col", "=", "range", "(", "gl_mean_idx", "[", "i", "]", ",", "gl_ind", "[", "i", ",", "1", "]", ")", "over_J2", "=", "numpy", ".", "where", "(", "abs", "(", "A_g_hf", "[", "col", "]", ")", ">=", "J", ")", "[", "0", "]", "[", "0", "]", "gl_ind", "[", "i", ",", "1", "]", "=", "gl_mean_idx", "[", "i", "]", "+", "over_J2", "-", "1", "GL", "=", "gl_ind", "GL", "=", "GL", "[", "numpy", ".", "where", "(", "GL", "[", ":", ",", "1", "]", "-", "GL", "[", ":", ",", "0", "]", ">", "n_max", "/", "2", ")", "[", "0", "]", ",", ":", "]", "return", "GL" ]
Get stroke and glide indices from high-pass accelerometer data

Args
----
A_g_hf: 1-D ndarray
    Animal frame triaxial accelerometer matrix at sampling rate fs_a.
fs_a: int
    Number of accelerometer samples per second
J: float
    Frequency threshold for detecting a fluke stroke in m/s^2. If J is not
    given, fluke strokes will not be located but the rotations signal (pry)
    will be computed.
t_max: int
    Maximum duration allowable for a fluke stroke in seconds. A fluke stroke
    is counted whenever there is a cyclic variation in the pitch deviation
    with peak-to-peak magnitude greater than +/-J and consistent with a fluke
    stroke duration of less than t_max seconds, e.g., for Mesoplodon choose
    t_max=4.

Returns
-------
GL: 1-D ndarray
    Matrix containing the start time (first column) and end time (2nd column)
    of any glides (i.e., no zero crossings in t_max or more seconds). Times
    are in seconds.

Note
----
If no J or t_max is given, J=[], or t_max=[], GL is returned as None
[ "Get", "stroke", "and", "glide", "indices", "from", "high", "-", "pass", "accelerometer", "data" ]
816563a9c3feb3fa416f1c2921c6b75db34111ad
https://github.com/ryanjdillon/pyotelem/blob/816563a9c3feb3fa416f1c2921c6b75db34111ad/pyotelem/glides.py#L107-L185
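A toy rerun of the gap-detection step above; the zero-crossing table zc is fabricated for illustration (columns: start index, end index of each crossing):

import numpy as np

fs_a, t_max = 16, 4
n_max = t_max * fs_a                      # glide = no crossings for > 64 samples

zc = np.array([[10, 20],
               [30, 40],
               [150, 160],                # big gap after sample 40 -> glide
               [170, 180]])

ind = np.where(zc[1:, 0] - zc[0:-1, 1] > n_max)[0]
gl_ind = np.vstack([zc[ind, 0] - 1, zc[ind + 1, 1] + 1]).T
print(gl_ind)                             # [[ 29 161]] -> glide between crossings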
248,406
mikejarrett/pipcheck
pipcheck/checker.py
Checker.get_updates
def get_updates(
        self,
        display_all_distributions=False,
        verbose=False
):  # pragma: no cover
    """
    When called, get the environment updates and write the updates to a
    CSV file; if a new config has been provided, write a new
    configuration file.

    Args:
        display_all_distributions (bool): Return distribution even if it
            is up-to-date.
        verbose (bool): If ``True``, log messages to the terminal.
    """
    if verbose:
        logging.basicConfig(
            stream=sys.stdout,
            level=logging.INFO,
            format='%(message)s',
        )

    logging.info('Checking installed packages for updates...')
    updates = self._get_environment_updates(
        display_all_distributions=display_all_distributions
    )

    if updates:
        for update in updates:
            logging.info(update)

    if updates and self._csv_file_name:
        self.write_updates_to_csv(updates)

    if updates and self._new_config:
        self.write_new_config(updates)

    return updates
python
[ "def", "get_updates", "(", "self", ",", "display_all_distributions", "=", "False", ",", "verbose", "=", "False", ")", ":", "# pragma: no cover", "if", "verbose", ":", "logging", ".", "basicConfig", "(", "stream", "=", "sys", ".", "stdout", ",", "level", "=", "logging", ".", "INFO", ",", "format", "=", "'%(message)s'", ",", ")", "logging", ".", "info", "(", "'Checking installed packages for updates...'", ")", "updates", "=", "self", ".", "_get_environment_updates", "(", "display_all_distributions", "=", "display_all_distributions", ")", "if", "updates", ":", "for", "update", "in", "updates", ":", "logging", ".", "info", "(", "update", ")", "if", "updates", "and", "self", ".", "_csv_file_name", ":", "self", ".", "write_updates_to_csv", "(", "updates", ")", "if", "updates", "and", "self", ".", "_new_config", ":", "self", ".", "write_new_config", "(", "updates", ")", "return", "updates" ]
When called, get the environment updates and write the updates to a CSV file; if a new config has been provided, write a new configuration file.

Args:
    display_all_distributions (bool): Return distribution even if it is up-to-date.
    verbose (bool): If ``True``, log messages to the terminal.
[ "When", "called", "get", "the", "environment", "updates", "and", "write", "updates", "to", "a", "CSV", "file", "and", "if", "a", "new", "config", "has", "been", "provided", "write", "a", "new", "configuration", "file", "." ]
2ff47b9fd8914e1764c6e659ef39b77c1b1a12ad
https://github.com/mikejarrett/pipcheck/blob/2ff47b9fd8914e1764c6e659ef39b77c1b1a12ad/pipcheck/checker.py#L34-L71
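A hedged usage sketch: the Checker constructor arguments shown here (csv_file_name, new_config) are assumptions inferred from the private attributes used above, not a confirmed signature.

from pipcheck.checker import Checker

checker = Checker(
    csv_file_name='updates.csv',    # hypothetical argument name
    new_config='requirements.new',  # hypothetical argument name
)
for update in checker.get_updates(display_all_distributions=False, verbose=True):
    print(update)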
248,407
mikejarrett/pipcheck
pipcheck/checker.py
Checker.csv_writer
def csv_writer(csvfile):
    """ Get a CSV writer for the version of python that is being run. """
    if sys.version_info >= (3,):
        writer = csv.writer(csvfile, delimiter=',', lineterminator='\n')
    else:
        writer = csv.writer(csvfile, delimiter=b',', lineterminator='\n')

    return writer
python
[ "def", "csv_writer", "(", "csvfile", ")", ":", "if", "sys", ".", "version_info", ">=", "(", "3", ",", ")", ":", "writer", "=", "csv", ".", "writer", "(", "csvfile", ",", "delimiter", "=", "','", ",", "lineterminator", "=", "'\\n'", ")", "else", ":", "writer", "=", "csv", ".", "writer", "(", "csvfile", ",", "delimiter", "=", "b','", ",", "lineterminator", "=", "'\\n'", ")", "return", "writer" ]
Get a CSV writer for the version of python that is being run.
[ "Get", "a", "CSV", "writer", "for", "the", "version", "of", "python", "that", "is", "being", "run", "." ]
2ff47b9fd8914e1764c6e659ef39b77c1b1a12ad
https://github.com/mikejarrett/pipcheck/blob/2ff47b9fd8914e1764c6e659ef39b77c1b1a12ad/pipcheck/checker.py#L74-L81
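Minimal usage sketch of the Python 3 branch above; the file name and rows are illustrative, and opening with newline='' follows the csv-module documentation:

import csv

with open('updates.csv', 'w', newline='') as csvfile:
    writer = csv.writer(csvfile, delimiter=',', lineterminator='\n')
    writer.writerow(['package', 'current_version', 'new_version', 'prerelease'])
    writer.writerow(['requests', '2.0.0', '2.31.0', False])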
248,408
mikejarrett/pipcheck
pipcheck/checker.py
Checker.write_updates_to_csv
def write_updates_to_csv(self, updates):
    """
    Given a list of updates, write the updates out to the provided
    CSV file.

    Args:
        updates (list): List of Update objects.
    """
    with open(self._csv_file_name, 'w') as csvfile:
        csvwriter = self.csv_writer(csvfile)
        csvwriter.writerow(CSV_COLUMN_HEADERS)

        for update in updates:
            row = [
                update.name,
                update.current_version,
                update.new_version,
                update.prelease,  # sic: the Update attribute is spelled "prelease"
            ]
            csvwriter.writerow(row)
python
[ "def", "write_updates_to_csv", "(", "self", ",", "updates", ")", ":", "with", "open", "(", "self", ".", "_csv_file_name", ",", "'w'", ")", "as", "csvfile", ":", "csvwriter", "=", "self", ".", "csv_writer", "(", "csvfile", ")", "csvwriter", ".", "writerow", "(", "CSV_COLUMN_HEADERS", ")", "for", "update", "in", "updates", ":", "row", "=", "[", "update", ".", "name", ",", "update", ".", "current_version", ",", "update", ".", "new_version", ",", "update", ".", "prelease", ",", "]", "csvwriter", ".", "writerow", "(", "row", ")" ]
Given a list of updates, write the updates out to the provided CSV file.

Args:
    updates (list): List of Update objects.
[ "Given", "a", "list", "of", "updates", "write", "the", "updates", "out", "to", "the", "provided", "CSV", "file", "." ]
2ff47b9fd8914e1764c6e659ef39b77c1b1a12ad
https://github.com/mikejarrett/pipcheck/blob/2ff47b9fd8914e1764c6e659ef39b77c1b1a12ad/pipcheck/checker.py#L83-L102
248,409
mikejarrett/pipcheck
pipcheck/checker.py
Checker.write_new_config
def write_new_config(self, updates):
    """
    Given a list of updates, write the updates out to the provided
    configuration file.

    Args:
        updates (list): List of Update objects.
    """
    with open(self._new_config, 'w') as config_file:
        for update in updates:
            line = '{0}=={1} # The installed version is: {2}\n'.format(
                update.name,
                update.new_version,
                update.current_version
            )
            config_file.write(line)
python
[ "def", "write_new_config", "(", "self", ",", "updates", ")", ":", "with", "open", "(", "self", ".", "_new_config", ",", "'w'", ")", "as", "config_file", ":", "for", "update", "in", "updates", ":", "line", "=", "'{0}=={1} # The installed version is: {2}\\n'", ".", "format", "(", "update", ".", "name", ",", "update", ".", "new_version", ",", "update", ".", "current_version", ")", "config_file", ".", "write", "(", "line", ")" ]
Given a list of updates, write the updates out to the provided configuration file.

Args:
    updates (list): List of Update objects.
[ "Given", "a", "list", "of", "updates", "write", "the", "updates", "out", "to", "the", "provided", "configuartion", "file", "." ]
2ff47b9fd8914e1764c6e659ef39b77c1b1a12ad
https://github.com/mikejarrett/pipcheck/blob/2ff47b9fd8914e1764c6e659ef39b77c1b1a12ad/pipcheck/checker.py#L104-L120
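A quick illustration of the requirements line the loop above emits; the package and version values are made-up:

line = '{0}=={1} # The installed version is: {2}\n'.format(
    'requests', '2.31.0', '2.0.0'
)
print(line, end='')
# requests==2.31.0 # The installed version is: 2.0.0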
248,410
mikejarrett/pipcheck
pipcheck/checker.py
Checker._get_environment_updates
def _get_environment_updates(self, display_all_distributions=False):
    """
    Check all packages installed in the environment to see if there are
    any updates available.

    Args:
        display_all_distributions (bool): Return distribution even if it
            is up-to-date. Defaults to ``False``.

    Returns:
        list: A list of Update objects ordered based on ``instance.name``.
    """
    updates = []
    for distribution in self.pip.get_installed_distributions():
        versions = self.get_available_versions(distribution.project_name)
        max_version = max(versions.keys()) if versions else UNKNOW_NUM

        update = None
        distribution_version = self._parse_version(distribution.version)
        if versions and max_version > distribution_version:
            update = Update(
                distribution.project_name,
                distribution.version,
                versions[max_version],
                prelease=max_version[-1]
            )

        elif (
            display_all_distributions and
            max_version == distribution_version
        ):
            update = Update(
                distribution.project_name,
                distribution.version,
                versions[max_version],
            )

        elif display_all_distributions:
            update = Update(
                distribution.project_name,
                distribution.version,
                UNKNOWN
            )

        if update:
            updates.append(update)

    return sorted(updates, key=lambda x: x.name)
python
[ "def", "_get_environment_updates", "(", "self", ",", "display_all_distributions", "=", "False", ")", ":", "updates", "=", "[", "]", "for", "distribution", "in", "self", ".", "pip", ".", "get_installed_distributions", "(", ")", ":", "versions", "=", "self", ".", "get_available_versions", "(", "distribution", ".", "project_name", ")", "max_version", "=", "max", "(", "versions", ".", "keys", "(", ")", ")", "if", "versions", "else", "UNKNOW_NUM", "update", "=", "None", "distribution_version", "=", "self", ".", "_parse_version", "(", "distribution", ".", "version", ")", "if", "versions", "and", "max_version", ">", "distribution_version", ":", "update", "=", "Update", "(", "distribution", ".", "project_name", ",", "distribution", ".", "version", ",", "versions", "[", "max_version", "]", ",", "prelease", "=", "max_version", "[", "-", "1", "]", ")", "elif", "(", "display_all_distributions", "and", "max_version", "==", "distribution_version", ")", ":", "update", "=", "Update", "(", "distribution", ".", "project_name", ",", "distribution", ".", "version", ",", "versions", "[", "max_version", "]", ",", ")", "elif", "display_all_distributions", ":", "update", "=", "Update", "(", "distribution", ".", "project_name", ",", "distribution", ".", "version", ",", "UNKNOWN", ")", "if", "update", ":", "updates", ".", "append", "(", "update", ")", "return", "sorted", "(", "updates", ",", "key", "=", "lambda", "x", ":", "x", ".", "name", ")" ]
Check all packages installed in the environment to see if there are any updates available.

Args:
    display_all_distributions (bool): Return distribution even if it is up-to-date. Defaults to ``False``.

Returns:
    list: A list of Update objects ordered based on ``instance.name``.
[ "Check", "all", "pacakges", "installed", "in", "the", "environment", "to", "see", "if", "there", "are", "any", "updates", "availalble", "." ]
2ff47b9fd8914e1764c6e659ef39b77c1b1a12ad
https://github.com/mikejarrett/pipcheck/blob/2ff47b9fd8914e1764c6e659ef39b77c1b1a12ad/pipcheck/checker.py#L122-L170
248,411
mikejarrett/pipcheck
pipcheck/checker.py
Checker.get_available_versions
def get_available_versions(self, project_name):
    """
    Query PyPI to see if the package has any available versions.

    Args:
        project_name (str): The name of the project on PyPI.

    Returns:
        dict: Where keys are tuples of parsed versions and values are the
            versions returned by PyPI.
    """
    available_versions = self.pypi_client.package_releases(project_name)

    if not available_versions:
        available_versions = self.pypi_client.package_releases(
            project_name.capitalize()
        )

    # ``dict()`` for Python 2.6 syntax.
    return dict(
        (self._parse_version(version), version)
        for version in available_versions
    )
python
[ "def", "get_available_versions", "(", "self", ",", "project_name", ")", ":", "available_versions", "=", "self", ".", "pypi_client", ".", "package_releases", "(", "project_name", ")", "if", "not", "available_versions", ":", "available_versions", "=", "self", ".", "pypi_client", ".", "package_releases", "(", "project_name", ".", "capitalize", "(", ")", ")", "# ``dict()`` for Python 2.6 syntax.", "return", "dict", "(", "(", "self", ".", "_parse_version", "(", "version", ")", ",", "version", ")", "for", "version", "in", "available_versions", ")" ]
Query PyPI to see if the package has any available versions.

Args:
    project_name (str): The name of the project on PyPI.

Returns:
    dict: Where keys are tuples of parsed versions and values are the versions returned by PyPI.
[ "Query", "PyPI", "to", "see", "if", "package", "has", "any", "available", "versions", "." ]
2ff47b9fd8914e1764c6e659ef39b77c1b1a12ad
https://github.com/mikejarrett/pipcheck/blob/2ff47b9fd8914e1764c6e659ef39b77c1b1a12ad/pipcheck/checker.py#L172-L193
248,412
mikejarrett/pipcheck
pipcheck/checker.py
Checker._parse_version
def _parse_version(version):
    """
    Parse a version string.

    Args:
        version (str): A string representing a version e.g. '1.9rc2'

    Returns:
        tuple: major, minor, patch parts cast as integer and whether or
            not it was a pre-release version.
    """
    parsed_version = parse_version(version)
    return tuple(
        int(dot_version)
        for dot_version in parsed_version.base_version.split('.')
    ) + (parsed_version.is_prerelease,)
python
[ "def", "_parse_version", "(", "version", ")", ":", "parsed_version", "=", "parse_version", "(", "version", ")", "return", "tuple", "(", "int", "(", "dot_version", ")", "for", "dot_version", "in", "parsed_version", ".", "base_version", ".", "split", "(", "'.'", ")", ")", "+", "(", "parsed_version", ".", "is_prerelease", ",", ")" ]
Parse a version string.

Args:
    version (str): A string representing a version e.g. '1.9rc2'

Returns:
    tuple: major, minor, patch parts cast as integer and whether or not it was a pre-release version.
[ "Parse", "a", "version", "string", "." ]
2ff47b9fd8914e1764c6e659ef39b77c1b1a12ad
https://github.com/mikejarrett/pipcheck/blob/2ff47b9fd8914e1764c6e659ef39b77c1b1a12ad/pipcheck/checker.py#L196-L210
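A worked example of the tuple this returns; it assumes parse_version resolves to the pkg_resources/packaging parser, which exposes base_version and is_prerelease:

from packaging.version import parse as parse_version

def _parse_version(version):
    parsed_version = parse_version(version)
    return tuple(
        int(dot_version)
        for dot_version in parsed_version.base_version.split('.')
    ) + (parsed_version.is_prerelease,)

print(_parse_version('1.9rc2'))   # (1, 9, True)  - pre-release
print(_parse_version('2.31.0'))   # (2, 31, 0, False)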
248,413
Carreau/insupportable
insupportable/insupportable.py
S.support
def support(self, version):
    """
    Return `True` if the current Python version matches the version passed.

    Raise a deprecation warning if only PY2 or PY3 is supported, as you
    probably have a conditional that should be removed.
    """
    if not self._known_version(version):
        warn("unknown feature: %s" % version)
        return True
    else:
        if not self._get_featureset_support(version):
            warn("You are not supporting %s anymore " % str(version), UserWarning, self.level)
        if self._alone_version(version):
            warn("%s is the last supported feature of this group, "
                 "you can simplify this logic. " % str(version), UserWarning, self.level)
        return self.predicates.get(version, True)

    # NOTE: both branches above return, so the code below is unreachable.
    if (not self.PY3_supported) or (not self.PY2_supported):
        warn("You are only supporting 1 version of Python", UserWarning, self.level)

    if version == PY3:
        return sys.version_info.major == 3
    elif version == PY2:
        return sys.version_info.major == 2
python
[ "def", "support", "(", "self", ",", "version", ")", ":", "if", "not", "self", ".", "_known_version", "(", "version", ")", ":", "warn", "(", "\"unknown feature: %s\"", "%", "version", ")", "return", "True", "else", ":", "if", "not", "self", ".", "_get_featureset_support", "(", "version", ")", ":", "warn", "(", "\"You are not supporting %s anymore \"", "%", "str", "(", "version", ")", ",", "UserWarning", ",", "self", ".", "level", ")", "if", "self", ".", "_alone_version", "(", "version", ")", ":", "warn", "(", "\"%s is the last supported feature of this group, you can simplifiy this logic. \"", "%", "str", "(", "version", ")", ",", "UserWarning", ",", "self", ".", "level", ")", "return", "self", ".", "predicates", ".", "get", "(", "version", ",", "True", ")", "if", "(", "not", "self", ".", "PY3_supported", ")", "or", "(", "not", "self", ".", "PY2_supported", ")", ":", "warn", "(", "\"You are only supporting 1 version of Python\"", ",", "UserWarning", ",", "self", ".", "level", ")", "if", "version", "==", "PY3", ":", "return", "sys", ".", "version_info", ".", "major", "==", "3", "elif", "version", "==", "PY2", ":", "return", "sys", ".", "version_info", ".", "major", "==", "2" ]
Return `True` if the current Python version matches the version passed. Raise a deprecation warning if only PY2 or PY3 is supported, as you probably have a conditional that should be removed.
[ "return", "True", "if", "current", "python", "version", "match", "version", "passed", ".", "raise", "a", "deprecation", "warning", "if", "only", "PY2", "or", "PY3", "is", "supported", "as", "you", "probably", "have", "a", "conditional", "that", "should", "be", "removed", "." ]
318e05e945b33f3e7a6ead8d85a7f3a8c2b7321c
https://github.com/Carreau/insupportable/blob/318e05e945b33f3e7a6ead8d85a7f3a8c2b7321c/insupportable/insupportable.py#L161-L190
248,414
Carreau/insupportable
insupportable/insupportable.py
Context._default_warner
def _default_warner(self, message, stacklevel=1):
    """
    Default warner function: uses a PendingDeprecationWarning and
    adjusts for the correct stacklevel.
    """
    return warnings.warn(message, PendingDeprecationWarning, stacklevel=stacklevel + 4)
python
[ "def", "_default_warner", "(", "self", ",", "message", ",", "stacklevel", "=", "1", ")", ":", "return", "warnings", ".", "warn", "(", "message", ",", "PendingDeprecationWarning", ",", "stacklevel", "=", "stacklevel", "+", "4", ")" ]
Default warner function: uses a PendingDeprecationWarning and adjusts for the correct stacklevel.
[ "default", "warner", "function", "use", "a", "pending", "deprecation", "warning", "and", "correct", "for", "the", "correct", "stacklevel" ]
318e05e945b33f3e7a6ead8d85a7f3a8c2b7321c
https://github.com/Carreau/insupportable/blob/318e05e945b33f3e7a6ead8d85a7f3a8c2b7321c/insupportable/insupportable.py#L218-L225
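A small standalone demo of how stacklevel moves the reported warning location up the call stack; the +4 above presumably compensates for the library's internal frames, while the functions below are invented for illustration:

import warnings

def inner():
    # stacklevel=2 attributes the warning to inner()'s caller, not inner() itself
    warnings.warn("use the new API", PendingDeprecationWarning, stacklevel=2)

def caller():
    inner()

warnings.simplefilter("always")
caller()  # the reported location is the `inner()` line inside caller()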
248,415
FujiMakoto/IPS-Vagrant
ips_vagrant/installer/V_4_0_11.py
Installer._check_if_complete
def _check_if_complete(self, url, json_response):
    """
    Check if a request has been completed and return the redirect URL if it has
    @type   url:            str
    @type   json_response:  list or dict
    @rtype: str or bool
    """
    if '__done' in json_response and isinstance(json_response, list):
        mr_parts = list(urlparse(url))
        mr_query = parse_qs(mr_parts[4])
        mr_query['mr'] = '"' + str(json_response[0]) + '"'
        mr_parts[4] = urlencode(mr_query, True)
        mr_link = urlunparse(mr_parts)

        mr_j, mr_r = self._ajax(mr_link)
        self.log.debug('MultipleRedirect link: %s', mr_link)
        return super(Installer, self)._check_if_complete(url, mr_j)

    return False
python
[ "def", "_check_if_complete", "(", "self", ",", "url", ",", "json_response", ")", ":", "if", "'__done'", "in", "json_response", "and", "isinstance", "(", "json_response", ",", "list", ")", ":", "mr_parts", "=", "list", "(", "urlparse", "(", "url", ")", ")", "mr_query", "=", "parse_qs", "(", "mr_parts", "[", "4", "]", ")", "mr_query", "[", "'mr'", "]", "=", "'\"'", "+", "str", "(", "json_response", "[", "0", "]", ")", "+", "'\"'", "mr_parts", "[", "4", "]", "=", "urlencode", "(", "mr_query", ",", "True", ")", "mr_link", "=", "urlunparse", "(", "mr_parts", ")", "mr_j", ",", "mr_r", "=", "self", ".", "_ajax", "(", "mr_link", ")", "self", ".", "log", ".", "debug", "(", "'MultipleRedirect link: %s'", ",", "mr_link", ")", "return", "super", "(", "Installer", ",", "self", ")", ".", "_check_if_complete", "(", "url", ",", "mr_j", ")", "return", "False" ]
Check if a request has been completed and return the redirect URL if it has

@type url: str
@type json_response: list or dict
@rtype: str or bool
[ "Check", "if", "a", "request", "has", "been", "completed", "and", "return", "the", "redirect", "URL", "if", "it", "has" ]
7b1d6d095034dd8befb026d9315ecc6494d52269
https://github.com/FujiMakoto/IPS-Vagrant/blob/7b1d6d095034dd8befb026d9315ecc6494d52269/ips_vagrant/installer/V_4_0_11.py#L11-L28
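A self-contained rerun of the query-string rewrite above, shown with the Python 3 urllib.parse names; the URL and multiple-redirect token are invented for illustration:

from urllib.parse import urlparse, parse_qs, urlencode, urlunparse

url = 'http://example.com/install?step=3'
mr_parts = list(urlparse(url))
mr_query = parse_qs(mr_parts[4])          # index 4 is the query component
mr_query['mr'] = '"42"'                   # wrap the token in literal quotes
mr_parts[4] = urlencode(mr_query, True)   # doseq=True keeps list values intact
print(urlunparse(mr_parts))
# http://example.com/install?step=3&mr=%2242%22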
248,416
sykora/djournal
djournal/views.py
entry_index
def entry_index(request, limit=0, template='djournal/entry_index.html'):
    '''Returns a response of a fixed number of entries; all of them, by
    default.
    '''

    entries = Entry.public.all()

    if limit > 0:
        entries = entries[:limit]

    context = {
        'entries': entries,
    }

    return render_to_response(
        template,
        context,
        context_instance=RequestContext(request),
    )
python
def entry_index(request, limit=0, template='djournal/entry_index.html'): '''Returns a response of a fixed number of entries; all of them, by default. ''' entries = Entry.public.all() if limit > 0: entries = entries[:limit] context = { 'entries': entries, } return render_to_response( template, context, context_instance=RequestContext(request), )
[ "def", "entry_index", "(", "request", ",", "limit", "=", "0", ",", "template", "=", "'djournal/entry_index.html'", ")", ":", "entries", "=", "Entry", ".", "public", ".", "all", "(", ")", "if", "limit", ">", "0", ":", "entries", "=", "entries", "[", ":", "limit", "]", "context", "=", "{", "'entries'", ":", "entries", ",", "}", "return", "render_to_response", "(", "template", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", ")" ]
Returns a response of a fixed number of entries; all of them, by default.
[ "Returns", "a", "response", "of", "a", "fixed", "number", "of", "entries", ";", "all", "of", "them", "by", "default", "." ]
c074e1f94e07e2630034a00c7dbd768e933f85e2
https://github.com/sykora/djournal/blob/c074e1f94e07e2630034a00c7dbd768e933f85e2/djournal/views.py#L10-L26
248,417
sykora/djournal
djournal/views.py
entry_detail
def entry_detail(request, slug, template='djournal/entry_detail.html'): '''Returns a response of an individual entry, for the given slug.''' entry = get_object_or_404(Entry.public, slug=slug) context = { 'entry': entry, } return render_to_response( template, context, context_instance=RequestContext(request), )
python
def entry_detail(request, slug, template='djournal/entry_detail.html'): '''Returns a response of an individual entry, for the given slug.''' entry = get_object_or_404(Entry.public, slug=slug) context = { 'entry': entry, } return render_to_response( template, context, context_instance=RequestContext(request), )
[ "def", "entry_detail", "(", "request", ",", "slug", ",", "template", "=", "'djournal/entry_detail.html'", ")", ":", "entry", "=", "get_object_or_404", "(", "Entry", ".", "public", ",", "slug", "=", "slug", ")", "context", "=", "{", "'entry'", ":", "entry", ",", "}", "return", "render_to_response", "(", "template", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", ")" ]
Returns a response of an individual entry, for the given slug.
[ "Returns", "a", "response", "of", "an", "individual", "entry", "for", "the", "given", "slug", "." ]
c074e1f94e07e2630034a00c7dbd768e933f85e2
https://github.com/sykora/djournal/blob/c074e1f94e07e2630034a00c7dbd768e933f85e2/djournal/views.py#L28-L41
248,418
sykora/djournal
djournal/views.py
tagged_entry_index
def tagged_entry_index(request, slug, template='djournal/tagged_entry_index.html'): '''Returns a response of all entries tagged with a given tag.''' tag = get_object_or_404(Tag, slug=slug) entries = Entry.public.filter(tags__in=[tag]) context = { 'entries': entries, 'tag': tag, } return render_to_response( template, context, context_instance=RequestContext(request), )
python
def tagged_entry_index(request, slug, template='djournal/tagged_entry_index.html'): '''Returns a response of all entries tagged with a given tag.''' tag = get_object_or_404(Tag, slug=slug) entries = Entry.public.filter(tags__in=[tag]) context = { 'entries': entries, 'tag': tag, } return render_to_response( template, context, context_instance=RequestContext(request), )
[ "def", "tagged_entry_index", "(", "request", ",", "slug", ",", "template", "=", "'djournal/tagged_entry_index.html'", ")", ":", "tag", "=", "get_object_or_404", "(", "Tag", ",", "slug", "=", "slug", ")", "entries", "=", "Entry", ".", "public", ".", "filter", "(", "tags__in", "=", "[", "tag", "]", ")", "context", "=", "{", "'entries'", ":", "entries", ",", "'tag'", ":", "tag", ",", "}", "return", "render_to_response", "(", "template", ",", "context", ",", "context_instance", "=", "RequestContext", "(", "request", ")", ",", ")" ]
Returns a response of all entries tagged with a given tag.
[ "Returns", "a", "response", "of", "all", "entries", "tagged", "with", "a", "given", "tag", "." ]
c074e1f94e07e2630034a00c7dbd768e933f85e2
https://github.com/sykora/djournal/blob/c074e1f94e07e2630034a00c7dbd768e933f85e2/djournal/views.py#L43-L59
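These three djournal views predate Django 1.10 (render_to_response with context_instance was removed there), so an era-appropriate urls.py wiring them up might look like the following; the regexes, URL names, and the limit of 5 are illustrative, not from the project:

from django.conf.urls import url

from djournal.views import entry_detail, entry_index, tagged_entry_index

urlpatterns = [
    url(r'^$', entry_index, name='djournal-entry-index'),
    # pass limit through extra kwargs; values captured from the URL arrive as strings
    url(r'^latest/$', entry_index, {'limit': 5}, name='djournal-latest'),
    url(r'^entry/(?P<slug>[-\w]+)/$', entry_detail, name='djournal-entry-detail'),
    url(r'^tag/(?P<slug>[-\w]+)/$', tagged_entry_index, name='djournal-tag-index'),
]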
248,419
tschaume/ccsgp_get_started
ccsgp_get_started/examples/utils.py
getErrorComponent
def getErrorComponent(result, tag): """get total error contribution for component with specific tag""" return math.sqrt(sum( (error*2)**2 for (var, error) in result.error_components().items() if var.tag == tag ))
python
def getErrorComponent(result, tag): """get total error contribution for component with specific tag""" return math.sqrt(sum( (error*2)**2 for (var, error) in result.error_components().items() if var.tag == tag ))
[ "def", "getErrorComponent", "(", "result", ",", "tag", ")", ":", "return", "math", ".", "sqrt", "(", "sum", "(", "(", "error", "*", "2", ")", "**", "2", "for", "(", "var", ",", "error", ")", "in", "result", ".", "error_components", "(", ")", ".", "items", "(", ")", "if", "var", ".", "tag", "==", "tag", ")", ")" ]
get total error contribution for component with specific tag
[ "get", "total", "error", "contribution", "for", "component", "with", "specific", "tag" ]
e4e29844a3e6fc7574e9b4b8cd84131f28ddc3f2
https://github.com/tschaume/ccsgp_get_started/blob/e4e29844a3e6fc7574e9b4b8cd84131f28ddc3f2/ccsgp_get_started/examples/utils.py#L41-L47
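getErrorComponent filters the error_components() mapping of an uncertainties-package result by tag and sums in quadrature; note it squares error*2 rather than error, a doubling the source does not explain (possibly a 2-sigma convention). A self-contained illustration of the underlying API with plain quadrature and made-up numbers:

import math
from uncertainties import ufloat

stat = ufloat(10.0, 0.3, tag="stat")   # value carrying a statistical error
syst = ufloat(2.0, 0.5, tag="syst")    # value carrying a systematic error
result = stat + syst

# error_components() maps each underlying Variable to its error contribution
stat_err = math.sqrt(sum(
    err ** 2
    for var, err in result.error_components().items()
    if var.tag == "stat"
))
print(stat_err)  # ~0.3: only the "stat" variable contributes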
248,420
tschaume/ccsgp_get_started
ccsgp_get_started/examples/utils.py
getEdges
def getEdges(npArr): """get np array of bin edges""" edges = np.concatenate(([0], npArr[:,0] + npArr[:,2])) return np.array([Decimal(str(i)) for i in edges])
python
def getEdges(npArr): """get np array of bin edges""" edges = np.concatenate(([0], npArr[:,0] + npArr[:,2])) return np.array([Decimal(str(i)) for i in edges])
[ "def", "getEdges", "(", "npArr", ")", ":", "edges", "=", "np", ".", "concatenate", "(", "(", "[", "0", "]", ",", "npArr", "[", ":", ",", "0", "]", "+", "npArr", "[", ":", ",", "2", "]", ")", ")", "return", "np", ".", "array", "(", "[", "Decimal", "(", "str", "(", "i", ")", ")", "for", "i", "in", "edges", "]", ")" ]
get np array of bin edges
[ "get", "np", "array", "of", "bin", "edges" ]
e4e29844a3e6fc7574e9b4b8cd84131f28ddc3f2
https://github.com/tschaume/ccsgp_get_started/blob/e4e29844a3e6fc7574e9b4b8cd84131f28ddc3f2/ccsgp_get_started/examples/utils.py#L49-L52
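getEdges appears to assume a 2-D array whose column 0 holds bin centers and column 2 holds half bin widths, so center + half-width gives each upper edge, with 0 prepended as the lowest edge; the Decimal conversion allows exact edge comparisons later. Under that assumed layout:

import numpy as np
from decimal import Decimal

def getEdges(npArr):
    """get np array of bin edges"""
    edges = np.concatenate(([0], npArr[:, 0] + npArr[:, 2]))
    return np.array([Decimal(str(i)) for i in edges])

# columns: bin center, value, half bin width
data = np.array([
    [0.5, 10.0, 0.5],
    [1.5, 20.0, 0.5],
    [3.0, 30.0, 1.0],
])
print(getEdges(data))
# [Decimal('0.0') Decimal('1.0') Decimal('2.0') Decimal('4.0')]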
248,421
tschaume/ccsgp_get_started
ccsgp_get_started/examples/utils.py
getMaskIndices
def getMaskIndices(mask): """get lower and upper index of mask""" return [ list(mask).index(True), len(mask) - 1 - list(mask)[::-1].index(True) ]
python
def getMaskIndices(mask): """get lower and upper index of mask""" return [ list(mask).index(True), len(mask) - 1 - list(mask)[::-1].index(True) ]
[ "def", "getMaskIndices", "(", "mask", ")", ":", "return", "[", "list", "(", "mask", ")", ".", "index", "(", "True", ")", ",", "len", "(", "mask", ")", "-", "1", "-", "list", "(", "mask", ")", "[", ":", ":", "-", "1", "]", ".", "index", "(", "True", ")", "]" ]
get lower and upper index of mask
[ "get", "lower", "and", "upper", "index", "of", "mask" ]
e4e29844a3e6fc7574e9b4b8cd84131f28ddc3f2
https://github.com/tschaume/ccsgp_get_started/blob/e4e29844a3e6fc7574e9b4b8cd84131f28ddc3f2/ccsgp_get_started/examples/utils.py#L54-L58
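getMaskIndices just locates the first and last True in a boolean mask; the reversed-list trick for the upper index is easier to see with concrete values:

mask = [False, True, True, True, False, False]

lo = list(mask).index(True)                        # 1: first True
hi = len(mask) - 1 - list(mask)[::-1].index(True)  # 3: last True
print([lo, hi])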
248,422
tschaume/ccsgp_get_started
ccsgp_get_started/examples/utils.py
getCocktailSum
def getCocktailSum(e0, e1, eCocktail, uCocktail): """get the cocktail sum for a given data bin range""" # get mask and according indices mask = (eCocktail >= e0) & (eCocktail <= e1) # data bin range wider than single cocktail bin if np.any(mask): idx = getMaskIndices(mask) # determine coinciding flags eCl, eCu = eCocktail[idx[0]], eCocktail[idx[1]] not_coinc_low, not_coinc_upp = (eCl != e0), (eCu != e1) # get cocktail sum in data bin (always w/o last bin) uCocktailSum = fsum(uCocktail[mask[:-1]][:-1]) logging.debug(' sum: {}'.format(uCocktailSum)) # get correction for non-coinciding edges if not_coinc_low: eCl_bw = eCl - eCocktail[idx[0]-1] corr_low = (eCl - e0) / eCl_bw abs_corr_low = float(corr_low) * uCocktail[idx[0]-1] uCocktailSum += abs_corr_low logging.debug((' low: %g == %g -> %g (%g) -> %g -> {} -> {}' % ( e0, eCl, eCl - e0, eCl_bw, corr_low )).format(abs_corr_low, uCocktailSum)) if not_coinc_upp: if idx[1]+1 < len(eCocktail): eCu_bw = eCocktail[idx[1]+1] - eCu corr_upp = (e1 - eCu) / eCu_bw abs_corr_upp = float(corr_upp) * uCocktail[idx[1]] else:# catch last index (quick fix!) abs_corr_upp = eCu_bw = corr_upp = 0 uCocktailSum += abs_corr_upp logging.debug((' upp: %g == %g -> %g (%g) -> %g -> {} -> {}' % ( e1, eCu, e1 - eCu, eCu_bw, corr_upp )).format(abs_corr_upp, uCocktailSum)) else: mask = (eCocktail >= e0) idx = getMaskIndices(mask) # only use first index # catch if already at last index if idx[0] == idx[1] and idx[0] == len(eCocktail)-1: corr = (e1 - e0) / (eCocktail[idx[0]] - eCocktail[idx[0]-1]) uCocktailSum = float(corr) * uCocktail[idx[0]-1] else: # default case corr = (e1 - e0) / (eCocktail[idx[0]+1] - eCocktail[idx[0]]) uCocktailSum = float(corr) * uCocktail[idx[0]] logging.debug(' sum: {}'.format(uCocktailSum)) return uCocktailSum
python
def getCocktailSum(e0, e1, eCocktail, uCocktail): """get the cocktail sum for a given data bin range""" # get mask and according indices mask = (eCocktail >= e0) & (eCocktail <= e1) # data bin range wider than single cocktail bin if np.any(mask): idx = getMaskIndices(mask) # determine coinciding flags eCl, eCu = eCocktail[idx[0]], eCocktail[idx[1]] not_coinc_low, not_coinc_upp = (eCl != e0), (eCu != e1) # get cocktail sum in data bin (always w/o last bin) uCocktailSum = fsum(uCocktail[mask[:-1]][:-1]) logging.debug(' sum: {}'.format(uCocktailSum)) # get correction for non-coinciding edges if not_coinc_low: eCl_bw = eCl - eCocktail[idx[0]-1] corr_low = (eCl - e0) / eCl_bw abs_corr_low = float(corr_low) * uCocktail[idx[0]-1] uCocktailSum += abs_corr_low logging.debug((' low: %g == %g -> %g (%g) -> %g -> {} -> {}' % ( e0, eCl, eCl - e0, eCl_bw, corr_low )).format(abs_corr_low, uCocktailSum)) if not_coinc_upp: if idx[1]+1 < len(eCocktail): eCu_bw = eCocktail[idx[1]+1] - eCu corr_upp = (e1 - eCu) / eCu_bw abs_corr_upp = float(corr_upp) * uCocktail[idx[1]] else:# catch last index (quick fix!) abs_corr_upp = eCu_bw = corr_upp = 0 uCocktailSum += abs_corr_upp logging.debug((' upp: %g == %g -> %g (%g) -> %g -> {} -> {}' % ( e1, eCu, e1 - eCu, eCu_bw, corr_upp )).format(abs_corr_upp, uCocktailSum)) else: mask = (eCocktail >= e0) idx = getMaskIndices(mask) # only use first index # catch if already at last index if idx[0] == idx[1] and idx[0] == len(eCocktail)-1: corr = (e1 - e0) / (eCocktail[idx[0]] - eCocktail[idx[0]-1]) uCocktailSum = float(corr) * uCocktail[idx[0]-1] else: # default case corr = (e1 - e0) / (eCocktail[idx[0]+1] - eCocktail[idx[0]]) uCocktailSum = float(corr) * uCocktail[idx[0]] logging.debug(' sum: {}'.format(uCocktailSum)) return uCocktailSum
[ "def", "getCocktailSum", "(", "e0", ",", "e1", ",", "eCocktail", ",", "uCocktail", ")", ":", "# get mask and according indices", "mask", "=", "(", "eCocktail", ">=", "e0", ")", "&", "(", "eCocktail", "<=", "e1", ")", "# data bin range wider than single cocktail bin", "if", "np", ".", "any", "(", "mask", ")", ":", "idx", "=", "getMaskIndices", "(", "mask", ")", "# determine coinciding flags", "eCl", ",", "eCu", "=", "eCocktail", "[", "idx", "[", "0", "]", "]", ",", "eCocktail", "[", "idx", "[", "1", "]", "]", "not_coinc_low", ",", "not_coinc_upp", "=", "(", "eCl", "!=", "e0", ")", ",", "(", "eCu", "!=", "e1", ")", "# get cocktail sum in data bin (always w/o last bin)", "uCocktailSum", "=", "fsum", "(", "uCocktail", "[", "mask", "[", ":", "-", "1", "]", "]", "[", ":", "-", "1", "]", ")", "logging", ".", "debug", "(", "' sum: {}'", ".", "format", "(", "uCocktailSum", ")", ")", "# get correction for non-coinciding edges", "if", "not_coinc_low", ":", "eCl_bw", "=", "eCl", "-", "eCocktail", "[", "idx", "[", "0", "]", "-", "1", "]", "corr_low", "=", "(", "eCl", "-", "e0", ")", "/", "eCl_bw", "abs_corr_low", "=", "float", "(", "corr_low", ")", "*", "uCocktail", "[", "idx", "[", "0", "]", "-", "1", "]", "uCocktailSum", "+=", "abs_corr_low", "logging", ".", "debug", "(", "(", "' low: %g == %g -> %g (%g) -> %g -> {} -> {}'", "%", "(", "e0", ",", "eCl", ",", "eCl", "-", "e0", ",", "eCl_bw", ",", "corr_low", ")", ")", ".", "format", "(", "abs_corr_low", ",", "uCocktailSum", ")", ")", "if", "not_coinc_upp", ":", "if", "idx", "[", "1", "]", "+", "1", "<", "len", "(", "eCocktail", ")", ":", "eCu_bw", "=", "eCocktail", "[", "idx", "[", "1", "]", "+", "1", "]", "-", "eCu", "corr_upp", "=", "(", "e1", "-", "eCu", ")", "/", "eCu_bw", "abs_corr_upp", "=", "float", "(", "corr_upp", ")", "*", "uCocktail", "[", "idx", "[", "1", "]", "]", "else", ":", "# catch last index (quick fix!)", "abs_corr_upp", "=", "eCu_bw", "=", "corr_upp", "=", "0", "uCocktailSum", "+=", "abs_corr_upp", "logging", ".", "debug", "(", "(", "' upp: %g == %g -> %g (%g) -> %g -> {} -> {}'", "%", "(", "e1", ",", "eCu", ",", "e1", "-", "eCu", ",", "eCu_bw", ",", "corr_upp", ")", ")", ".", "format", "(", "abs_corr_upp", ",", "uCocktailSum", ")", ")", "else", ":", "mask", "=", "(", "eCocktail", ">=", "e0", ")", "idx", "=", "getMaskIndices", "(", "mask", ")", "# only use first index", "# catch if already at last index", "if", "idx", "[", "0", "]", "==", "idx", "[", "1", "]", "and", "idx", "[", "0", "]", "==", "len", "(", "eCocktail", ")", "-", "1", ":", "corr", "=", "(", "e1", "-", "e0", ")", "/", "(", "eCocktail", "[", "idx", "[", "0", "]", "]", "-", "eCocktail", "[", "idx", "[", "0", "]", "-", "1", "]", ")", "uCocktailSum", "=", "float", "(", "corr", ")", "*", "uCocktail", "[", "idx", "[", "0", "]", "-", "1", "]", "else", ":", "# default case", "corr", "=", "(", "e1", "-", "e0", ")", "/", "(", "eCocktail", "[", "idx", "[", "0", "]", "+", "1", "]", "-", "eCocktail", "[", "idx", "[", "0", "]", "]", ")", "uCocktailSum", "=", "float", "(", "corr", ")", "*", "uCocktail", "[", "idx", "[", "0", "]", "]", "logging", ".", "debug", "(", "' sum: {}'", ".", "format", "(", "uCocktailSum", ")", ")", "return", "uCocktailSum" ]
get the cocktail sum for a given data bin range
[ "get", "the", "cocktail", "sum", "for", "a", "given", "data", "bin", "range" ]
e4e29844a3e6fc7574e9b4b8cd84131f28ddc3f2
https://github.com/tschaume/ccsgp_get_started/blob/e4e29844a3e6fc7574e9b4b8cd84131f28ddc3f2/ccsgp_get_started/examples/utils.py#L64-L108
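Most of getCocktailSum handles data bins whose edges fall inside cocktail bins, in which case the straddled bins contribute only their overlapped fraction. A deliberately simplified toy of that core idea (the real function also handles Decimal edges, uncertainties values, and narrow-bin fallbacks):

# cocktail histogram: edges and per-bin contents (made-up numbers)
edges = [0.0, 1.0, 2.0, 3.0, 4.0]
contents = [10.0, 20.0, 30.0, 40.0]

def overlap_sum(e0, e1):
    """Sum bin contents over [e0, e1], weighting edge bins by their overlap."""
    total = 0.0
    for lo, hi, c in zip(edges, edges[1:], contents):
        overlap = max(0.0, min(e1, hi) - max(e0, lo))
        total += c * overlap / (hi - lo)
    return total

print(overlap_sum(0.5, 2.5))  # 0.5*10 + 1.0*20 + 0.5*30 = 40.0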
248,423
Beyond-Digital/django-gaekit
gaekit/boot.py
break_sandbox
def break_sandbox(): """Patches sandbox to add match-all regex to sandbox whitelist """ class EvilCM(object): def __enter__(self): return self def __exit__(self, exc_type, exc, tb): import re tb.tb_next.tb_next.tb_next.tb_frame.f_locals[ 'self']._enabled_regexes.append(re.compile('.*')) return True try: import sqlite3 # noqa except ImportError: with EvilCM(): __import__('sqlite3')
python
def break_sandbox(): """Patches sandbox to add match-all regex to sandbox whitelist """ class EvilCM(object): def __enter__(self): return self def __exit__(self, exc_type, exc, tb): import re tb.tb_next.tb_next.tb_next.tb_frame.f_locals[ 'self']._enabled_regexes.append(re.compile('.*')) return True try: import sqlite3 # noqa except ImportError: with EvilCM(): __import__('sqlite3')
[ "def", "break_sandbox", "(", ")", ":", "class", "EvilCM", "(", "object", ")", ":", "def", "__enter__", "(", "self", ")", ":", "return", "self", "def", "__exit__", "(", "self", ",", "exc_type", ",", "exc", ",", "tb", ")", ":", "import", "re", "tb", ".", "tb_next", ".", "tb_next", ".", "tb_next", ".", "tb_frame", ".", "f_locals", "[", "'self'", "]", ".", "_enabled_regexes", ".", "append", "(", "re", ".", "compile", "(", "'.*'", ")", ")", "return", "True", "try", ":", "import", "sqlite3", "# noqa", "except", "ImportError", ":", "with", "EvilCM", "(", ")", ":", "__import__", "(", "'sqlite3'", ")" ]
Patches sandbox to add match-all regex to sandbox whitelist
[ "Patches", "sandbox", "to", "add", "match", "-", "all", "regex", "to", "sandbox", "whitelist" ]
b587acd52b5cfd48217a70920d4b61d5f923c8c5
https://github.com/Beyond-Digital/django-gaekit/blob/b587acd52b5cfd48217a70920d4b61d5f923c8c5/gaekit/boot.py#L1-L17
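break_sandbox leans on two context-manager facts: __exit__ receives the live traceback of whatever was raised inside the with block (whose tb_next chain reaches the sandbox's internal frames and their locals), and returning True suppresses the exception. A harmless standalone demonstration of both:

class PeekCM(object):
    def __enter__(self):
        return self

    def __exit__(self, exc_type, exc, tb):
        # tb.tb_frame is the frame where the exception was raised; walking
        # tb.tb_next would descend into deeper frames (here there are none).
        print("caught %s raised at line %d" % (exc_type.__name__, tb.tb_lineno))
        return True  # suppress the exception

with PeekCM():
    raise ImportError("sqlite3 blocked by sandbox")

print("execution continues")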
248,424
kervi/kervi-core
kervi/actions/action.py
_LinkedAction.execute
def execute(self, *args, **kwargs): """Executes the action.""" timeout = kwargs.pop("timeout", -1) run_async = kwargs.pop("run_async", False) self._is_running = True result = None if self._action_lock.acquire(False): self._state = ACTION_PENDING self._action_event = threading.Event() self.spine.send_command("kervi_action_" + self._action_id, *args, **kwargs) if not run_async: if not self._action_event.wait(timeout): self._state = ACTION_FAILED raise TimeoutError("Timeout in call to action: " + self._action_id) self._action_event = None result = self._last_result else: self._action_lock.release() else: if not self._action_lock.acquire(True, timeout): return None self._action_lock.release() return result
python
def execute(self, *args, **kwargs): """Executes the action.""" timeout = kwargs.pop("timeout", -1) run_async = kwargs.pop("run_async", False) self._is_running = True result = None if self._action_lock.acquire(False): self._state = ACTION_PENDING self._action_event = threading.Event() self.spine.send_command("kervi_action_" + self._action_id, *args, **kwargs) if not run_async: if not self._action_event.wait(timeout): self._state = ACTION_FAILED raise TimeoutError("Timeout in call to action: " + self._action_id) self._action_event = None result = self._last_result else: self._action_lock.release() else: if not self._action_lock.acquire(True, timeout): return None self._action_lock.release() return result
[ "def", "execute", "(", "self", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "timeout", "=", "kwargs", ".", "pop", "(", "\"timeout\"", ",", "-", "1", ")", "run_async", "=", "kwargs", ".", "pop", "(", "\"run_async\"", ",", "False", ")", "self", ".", "_is_running", "=", "True", "result", "=", "None", "if", "self", ".", "_action_lock", ".", "acquire", "(", "False", ")", ":", "self", ".", "_state", "=", "ACTION_PENDING", "self", ".", "_action_event", "=", "threading", ".", "Event", "(", ")", "self", ".", "spine", ".", "send_command", "(", "\"kervi_action_\"", "+", "self", ".", "_action_id", ",", "*", "args", ",", "*", "*", "kwargs", ")", "if", "not", "run_async", ":", "if", "not", "self", ".", "_action_event", ".", "wait", "(", "timeout", ")", ":", "self", ".", "_state", "=", "ACTION_FAILED", "raise", "TimeoutError", "(", "\"Timeout in call to action: \"", "+", "self", ".", "_action_id", ")", "self", ".", "_action_event", "=", "None", "result", "=", "self", ".", "_last_result", "else", ":", "self", ".", "_action_lock", ".", "release", "(", ")", "else", ":", "if", "not", "self", ".", "_action_lock", ".", "acquire", "(", "True", ",", "timeout", ")", ":", "return", "None", "self", ".", "_action_lock", ".", "release", "(", ")", "return", "result" ]
Executes the action.
[ "Executes", "the", "action", "." ]
3c1e3c8a17a7b4d085d8a28b99180ff2a96b0e23
https://github.com/kervi/kervi-core/blob/3c1e3c8a17a7b4d085d8a28b99180ff2a96b0e23/kervi/actions/action.py#L94-L118
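The synchronous path of execute is the standard wait-with-timeout pattern: threading.Event.wait returns True once the event is set and False when the timeout elapses, so the falsy return is the timeout case. In isolation, with a hypothetical background worker:

import threading

done = threading.Event()

def worker():
    done.set()  # signal completion

threading.Timer(0.1, worker).start()  # stand-in for the remote action

# wait() returns True once set, False if the timeout elapses first
if not done.wait(timeout=2.0):
    raise TimeoutError("worker did not finish in time")
print("worker finished")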
248,425
kervi/kervi-core
kervi/actions/action.py
Action.set_interrupt
def set_interrupt(self, method=None, **kwargs): """ Decorator that turns a function or controller method into an action interrupt. """ def action_wrap(f): action_id = kwargs.get("action_id", f.__name__) name = kwargs.get("name", action_id) if inspect.ismethod(f): # not "." in f.__qualname__: self._interrupt = _ActionInterrupt(f) self._ui_parameters["interrupt_enabled"] = True return self._interrupt else: qual_name = getattr(f, "__qualname__", None) owner_class = kwargs.get("controller_class", None) if owner_class: qual_name = owner_class + "." + f.__name__ if qual_name: Actions.add_unbound_interrupt(qual_name, self) else: print("using upython? if yes you need to pass the name of the controller class via the controller_class parameter.") return f if method: return action_wrap(method) else: return action_wrap
python
def set_interrupt(self, method=None, **kwargs): """ Decorator that turns a function or controller method into an action interrupt. """ def action_wrap(f): action_id = kwargs.get("action_id", f.__name__) name = kwargs.get("name", action_id) if inspect.ismethod(f): # not "." in f.__qualname__: self._interrupt = _ActionInterrupt(f) self._ui_parameters["interrupt_enabled"] = True return self._interrupt else: qual_name = getattr(f, "__qualname__", None) owner_class = kwargs.get("controller_class", None) if owner_class: qual_name = owner_class + "." + f.__name__ if qual_name: Actions.add_unbound_interrupt(qual_name, self) else: print("using upython? if yes you need to pass the name of the controller class via the controller_class parameter.") return f if method: return action_wrap(method) else: return action_wrap
[ "def", "set_interrupt", "(", "self", ",", "method", "=", "None", ",", "*", "*", "kwargs", ")", ":", "def", "action_wrap", "(", "f", ")", ":", "action_id", "=", "kwargs", ".", "get", "(", "\"action_id\"", ",", "f", ".", "__name__", ")", "name", "=", "kwargs", ".", "get", "(", "\"name\"", ",", "action_id", ")", "if", "inspect", ".", "ismethod", "(", "f", ")", ":", "# not \".\" in f.__qualname__:", "self", ".", "_interrupt", "=", "_ActionInterrupt", "(", "f", ")", "self", ".", "_ui_parameters", "[", "\"interrupt_enabled\"", "]", "=", "True", "return", "self", ".", "_interrupt", "else", ":", "qual_name", "=", "getattr", "(", "f", ",", "\"__qualname__\"", ",", "None", ")", "owner_class", "=", "kwargs", ".", "get", "(", "\"controller_class\"", ",", "None", ")", "if", "owner_class", ":", "qual_name", "=", "owner_class", "+", "\".\"", "+", "f", ".", "__name__", "if", "qual_name", ":", "Actions", ".", "add_unbound_interrupt", "(", "qual_name", ",", "self", ")", "else", ":", "print", "(", "\"using upython? if yes you need to pass the name of the controller class via the controller_class parameter.\"", ")", "return", "f", "if", "method", ":", "return", "action_wrap", "(", "method", ")", "else", ":", "return", "action_wrap" ]
Decorator that turns a function or controller method into an action interrupt.
[ "Decorator", "that", "turns", "a", "function", "or", "controller", "method", "into", "an", "action", "interrupt", "." ]
3c1e3c8a17a7b4d085d8a28b99180ff2a96b0e23
https://github.com/kervi/kervi-core/blob/3c1e3c8a17a7b4d085d8a28b99180ff2a96b0e23/kervi/actions/action.py#L622-L650
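set_interrupt follows the usual recipe for a decorator that works both bare (@action.set_interrupt) and with keyword arguments (@action.set_interrupt(name=...)); the original additionally branches on inspect.ismethod, which the skeleton below omits:

def set_interrupt(method=None, **kwargs):
    def wrap(f):
        f.interrupt_name = kwargs.get("name", f.__name__)  # illustrative only
        return f
    if method:           # used bare: @set_interrupt
        return wrap(method)
    return wrap          # used with args: @set_interrupt(name="stop")

@set_interrupt
def a(): pass

@set_interrupt(name="stop")
def b(): pass

print(a.interrupt_name, b.interrupt_name)  # a stop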
248,426
dmckeone/frosty
frosty/freezers.py
_freezer_lookup
def _freezer_lookup(freezer_string): """ Translate a string that may be a freezer name into the internal freezer constant :param freezer_string :return: """ sanitized = freezer_string.lower().strip() for freezer in FREEZER.ALL: freezer_instance = freezer() freezer_name = six.text_type(freezer_instance) if freezer_name == six.text_type(sanitized): return freezer else: if sanitized != freezer_string: raise ValueError(u"Unsupported freezer type \"{0}\". (Sanitized to \"{1}\")".format(freezer_string, sanitized)) else: raise ValueError(u"Unsupported freezer type \"{0}\".".format(freezer_string))
python
def _freezer_lookup(freezer_string): """ Translate a string that may be a freezer name into the internal freezer constant :param freezer_string :return: """ sanitized = freezer_string.lower().strip() for freezer in FREEZER.ALL: freezer_instance = freezer() freezer_name = six.text_type(freezer_instance) if freezer_name == six.text_type(sanitized): return freezer else: if sanitized != freezer_string: raise ValueError(u"Unsupported freezer type \"{0}\". (Sanitized to \"{1}\")".format(freezer_string, sanitized)) else: raise ValueError(u"Unsupported freezer type \"{0}\".".format(freezer_string))
[ "def", "_freezer_lookup", "(", "freezer_string", ")", ":", "sanitized", "=", "freezer_string", ".", "lower", "(", ")", ".", "strip", "(", ")", "for", "freezer", "in", "FREEZER", ".", "ALL", ":", "freezer_instance", "=", "freezer", "(", ")", "freezer_name", "=", "six", ".", "text_type", "(", "freezer_instance", ")", "if", "freezer_name", "==", "six", ".", "text_type", "(", "sanitized", ")", ":", "return", "freezer", "else", ":", "if", "sanitized", "!=", "freezer_string", ":", "raise", "ValueError", "(", "u\"Unsupported freezer type \\\"{0}\\\". (Sanitized to \\\"{1}\\\")\"", ".", "format", "(", "freezer_string", ",", "sanitized", ")", ")", "else", ":", "raise", "ValueError", "(", "u\"Unsupported freezer type \\\"{0}\\\".\"", ".", "format", "(", "freezer_string", ")", ")" ]
Translate a string that may be a freezer name into the internal freezer constant :param freezer_string :return:
[ "Translate", "a", "string", "that", "may", "be", "a", "freezer", "name", "into", "the", "internal", "freezer", "constant" ]
868d81e72b6c8e354af3697531c20f116cd1fc9a
https://github.com/dmckeone/frosty/blob/868d81e72b6c8e354af3697531c20f116cd1fc9a/frosty/freezers.py#L174-L192
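_freezer_lookup relies on Python's for ... else: the else suite runs only when the loop finishes without returning or breaking, making it a natural home for the "not found" error. In miniature, with a hypothetical name table:

def lookup(name, table=("cx_freeze", "py2app", "py2exe")):  # made-up names
    sanitized = name.lower().strip()
    for entry in table:
        if entry == sanitized:
            return entry
    else:
        # runs only when the loop finished without returning or breaking
        raise ValueError('Unsupported freezer type "%s".' % name)

print(lookup("  Py2app "))  # py2app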
248,427
dmckeone/frosty
frosty/freezers.py
resolve_freezer
def resolve_freezer(freezer): """ Locate the appropriate freezer given FREEZER or string input from the programmer. :param freezer: FREEZER constant or string for the freezer that is requested. (None = FREEZER.DEFAULT) :return: """ # Set default freezer if there was none if not freezer: return _Default() # Allow character based lookups as well if isinstance(freezer, six.string_types): cls = _freezer_lookup(freezer) return cls() # Allow plain class definition lookups (we instantiate the class) if freezer.__class__ == type.__class__: return freezer() # Warn when a custom freezer implementation is used. if freezer not in FREEZER.ALL: warn(u"Using custom freezer implementation: {0}".format(freezer)) return freezer
python
def resolve_freezer(freezer): """ Locate the appropriate freezer given FREEZER or string input from the programmer. :param freezer: FREEZER constant or string for the freezer that is requested. (None = FREEZER.DEFAULT) :return: """ # Set default freezer if there was none if not freezer: return _Default() # Allow character based lookups as well if isinstance(freezer, six.string_types): cls = _freezer_lookup(freezer) return cls() # Allow plain class definition lookups (we instantiate the class) if freezer.__class__ == type.__class__: return freezer() # Warn when a custom freezer implementation is used. if freezer not in FREEZER.ALL: warn(u"Using custom freezer implementation: {0}".format(freezer)) return freezer
[ "def", "resolve_freezer", "(", "freezer", ")", ":", "# Set default freezer if there was none", "if", "not", "freezer", ":", "return", "_Default", "(", ")", "# Allow character based lookups as well", "if", "isinstance", "(", "freezer", ",", "six", ".", "string_types", ")", ":", "cls", "=", "_freezer_lookup", "(", "freezer", ")", "return", "cls", "(", ")", "# Allow plain class definition lookups (we instantiate the class)", "if", "freezer", ".", "__class__", "==", "type", ".", "__class__", ":", "return", "freezer", "(", ")", "# Warn when a custom freezer implementation is used.", "if", "freezer", "not", "in", "FREEZER", ".", "ALL", ":", "warn", "(", "u\"Using custom freezer implementation: {0}\"", ".", "format", "(", "freezer", ")", ")", "return", "freezer" ]
Locate the appropriate freezer given FREEZER or string input from the programmer. :param freezer: FREEZER constant or string for the freezer that is requested. (None = FREEZER.DEFAULT) :return:
[ "Locate", "the", "appropriate", "freezer", "given", "FREEZER", "or", "string", "input", "from", "the", "programmer", "." ]
868d81e72b6c8e354af3697531c20f116cd1fc9a
https://github.com/dmckeone/frosty/blob/868d81e72b6c8e354af3697531c20f116cd1fc9a/frosty/freezers.py#L195-L219
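resolve_freezer accepts four input shapes: None, a name string, a bare class, or an instance. A reduced sketch of the dispatch with stand-in classes; note the sketch uses isinstance(freezer, type) where the original compares freezer.__class__ to type.__class__, which is close but not identical for exotic metaclasses:

class _Default(object):  # stand-ins; not the frosty classes
    pass

class Py2App(object):
    pass

def resolve(freezer):
    if not freezer:
        return _Default()                 # None -> default implementation
    if isinstance(freezer, str):
        # name lookup; a real version would sanitize and raise nicely
        return {"py2app": Py2App}[freezer.lower().strip()]()
    if isinstance(freezer, type):         # a bare class: instantiate it
        return freezer()
    return freezer                        # already an instance: pass through

print(type(resolve(None)).__name__)        # _Default
print(type(resolve(" Py2App ")).__name__)  # Py2App
print(type(resolve(Py2App)).__name__)      # Py2App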
248,428
dmckeone/frosty
frosty/freezers.py
_Default._split_packages
def _split_packages(cls, include_packages): """ Split an iterable of packages into packages that need to be passed through, and those that need to have their disk location resolved. Some modules don't have a '__file__' attribute. AFAIK these aren't packages, so they can just be passed through to the includes as-is :return: 2-tuple of a list of the pass-through includes and the package_root_paths """ passthrough_includes = set([ six.text_type(package.__name__) for package in include_packages if not hasattr(package, '__file__') ]) package_file_paths = dict([ (six.text_type(os.path.abspath(package.__file__)), six.text_type(package.__name__)) for package in include_packages if hasattr(package, '__file__') ]) return passthrough_includes, package_file_paths
python
def _split_packages(cls, include_packages): """ Split an iterable of packages into packages that need to be passed through, and those that need to have their disk location resolved. Some modules don't have a '__file__' attribute. AFAIK these aren't packages, so they can just be passed through to the includes as-is :return: 2-tuple of a list of the pass-through includes and the package_root_paths """ passthrough_includes = set([ six.text_type(package.__name__) for package in include_packages if not hasattr(package, '__file__') ]) package_file_paths = dict([ (six.text_type(os.path.abspath(package.__file__)), six.text_type(package.__name__)) for package in include_packages if hasattr(package, '__file__') ]) return passthrough_includes, package_file_paths
[ "def", "_split_packages", "(", "cls", ",", "include_packages", ")", ":", "passthrough_includes", "=", "set", "(", "[", "six", ".", "text_type", "(", "package", ".", "__name__", ")", "for", "package", "in", "include_packages", "if", "not", "hasattr", "(", "package", ",", "'__file__'", ")", "]", ")", "package_file_paths", "=", "dict", "(", "[", "(", "six", ".", "text_type", "(", "os", ".", "path", ".", "abspath", "(", "package", ".", "__file__", ")", ")", ",", "six", ".", "text_type", "(", "package", ".", "__name__", ")", ")", "for", "package", "in", "include_packages", "if", "hasattr", "(", "package", ",", "'__file__'", ")", "]", ")", "return", "passthrough_includes", ",", "package_file_paths" ]
Split an iterable of packages into packages that need to be passed through, and those that need to have their disk location resolved. Some modules don't have a '__file__' attribute. AFAIK these aren't packages, so they can just be passed through to the includes as-is :return: 2-tuple of a list of the pass-through includes and the package_root_paths
[ "Split", "an", "iterable", "of", "packages", "into", "packages", "that", "need", "to", "be", "passed", "through", "and", "those", "that", "need", "to", "have", "their", "disk", "location", "resolved", "." ]
868d81e72b6c8e354af3697531c20f116cd1fc9a
https://github.com/dmckeone/frosty/blob/868d81e72b6c8e354af3697531c20f116cd1fc9a/frosty/freezers.py#L22-L41
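The premise of _split_packages is easy to verify: built-in modules like sys have no __file__, while packages loaded from disk do. The same split applied to two stdlib modules:

import os
import sys
import json

modules = [sys, json]
passthrough = {m.__name__ for m in modules if not hasattr(m, "__file__")}
paths = {os.path.abspath(m.__file__): m.__name__
         for m in modules if hasattr(m, "__file__")}
print(passthrough)  # {'sys'}: built-in, no file on disk
print(paths)        # {'.../json/__init__.py': 'json'}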
248,429
KnowledgeLinks/rdfframework
rdfframework/datasets/jsonquery.py
parse_json_qry
def parse_json_qry(qry_str): """ Parses a json query string into its parts args: qry_str: query string params: variables passed into the string """ def param_analyzer(param_list): rtn_list = [] for param in param_list: parts = param.strip().split("=") try: rtn_list.append(\ JsonQryProcessor[parts[0].strip().lower()](parts[1])) except IndexError: rtn_list.append(\ JsonQryProcessor[parts[0].strip().lower()]()) return rtn_list def part_analyzer(part, idx): nonlocal dallor, asterick, question_mark if part == "$": dallor = idx return part elif part == "*": asterick = idx return part elif part == "?": question_mark = idx return part elif part.startswith("="): return part return cssparse(part)[0] # pdb.set_trace() main_parts = qry_str.split("|") or_parts = main_parts.pop(0).strip() params = param_analyzer(main_parts) rtn_list = [] for or_part in [item.strip() for item in or_parts.split(",") if item.strip()]: dallor, asterick, question_mark = None, None, None dot_parts = or_part.split(".") rtn_list.append(([part_analyzer(part, i) \ for i, part in enumerate(dot_parts)], dallor, asterick, question_mark)) return {"qry_parts": rtn_list, "params": params}
python
def parse_json_qry(qry_str): """ Parses a json query string into its parts args: qry_str: query string params: variables passed into the string """ def param_analyzer(param_list): rtn_list = [] for param in param_list: parts = param.strip().split("=") try: rtn_list.append(\ JsonQryProcessor[parts[0].strip().lower()](parts[1])) except IndexError: rtn_list.append(\ JsonQryProcessor[parts[0].strip().lower()]()) return rtn_list def part_analyzer(part, idx): nonlocal dallor, asterick, question_mark if part == "$": dallor = idx return part elif part == "*": asterick = idx return part elif part == "?": question_mark = idx return part elif part.startswith("="): return part return cssparse(part)[0] # pdb.set_trace() main_parts = qry_str.split("|") or_parts = main_parts.pop(0).strip() params = param_analyzer(main_parts) rtn_list = [] for or_part in [item.strip() for item in or_parts.split(",") if item.strip()]: dallor, asterick, question_mark = None, None, None dot_parts = or_part.split(".") rtn_list.append(([part_analyzer(part, i) \ for i, part in enumerate(dot_parts)], dallor, asterick, question_mark)) return {"qry_parts": rtn_list, "params": params}
[ "def", "parse_json_qry", "(", "qry_str", ")", ":", "def", "param_analyzer", "(", "param_list", ")", ":", "rtn_list", "=", "[", "]", "for", "param", "in", "param_list", ":", "parts", "=", "param", ".", "strip", "(", ")", ".", "split", "(", "\"=\"", ")", "try", ":", "rtn_list", ".", "append", "(", "JsonQryProcessor", "[", "parts", "[", "0", "]", ".", "strip", "(", ")", ".", "lower", "(", ")", "]", "(", "parts", "[", "1", "]", ")", ")", "except", "IndexError", ":", "rtn_list", ".", "append", "(", "JsonQryProcessor", "[", "parts", "[", "0", "]", ".", "strip", "(", ")", ".", "lower", "(", ")", "]", "(", ")", ")", "return", "rtn_list", "def", "part_analyzer", "(", "part", ",", "idx", ")", ":", "nonlocal", "dallor", ",", "asterick", ",", "question_mark", "if", "part", "==", "\"$\"", ":", "dallor", "=", "idx", "return", "part", "elif", "part", "==", "\"*\"", ":", "asterick", "=", "idx", "return", "part", "elif", "part", "==", "\"?\"", ":", "question_mark", "=", "idx", "return", "part", "elif", "part", ".", "startswith", "(", "\"=\"", ")", ":", "return", "part", "return", "cssparse", "(", "part", ")", "[", "0", "]", "# pdb.set_trace()", "main_parts", "=", "qry_str", ".", "split", "(", "\"|\"", ")", "or_parts", "=", "main_parts", ".", "pop", "(", "0", ")", ".", "strip", "(", ")", "params", "=", "param_analyzer", "(", "main_parts", ")", "rtn_list", "=", "[", "]", "for", "or_part", "in", "[", "item", ".", "strip", "(", ")", "for", "item", "in", "or_parts", ".", "split", "(", "\",\"", ")", "if", "item", ".", "strip", "(", ")", "]", ":", "dallor", ",", "asterick", ",", "question_mark", "=", "None", ",", "None", ",", "None", "dot_parts", "=", "or_part", ".", "split", "(", "\".\"", ")", "rtn_list", ".", "append", "(", "(", "[", "part_analyzer", "(", "part", ",", "i", ")", "for", "i", ",", "part", "in", "enumerate", "(", "dot_parts", ")", "]", ",", "dallor", ",", "asterick", ",", "question_mark", ")", ")", "return", "{", "\"qry_parts\"", ":", "rtn_list", ",", "\"params\"", ":", "params", "}" ]
Parses a json query string into its parts args: qry_str: query string params: variables passed into the string
[ "Parses", "a", "json", "query", "string", "into", "its", "parts" ]
9ec32dcc4bed51650a4b392cc5c15100fef7923a
https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/datasets/jsonquery.py#L14-L63
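Inside parse_json_qry, part_analyzer records where the positional markers ($, *, ?) occur by rebinding enclosing-scope variables through nonlocal, while the list comprehension collects the analyzed parts. The mechanism in isolation:

def scan(parts):
    dollar = star = question = None

    def analyze(part, idx):
        nonlocal dollar, star, question   # rebind scan()'s locals
        if part == "$":
            dollar = idx
        elif part == "*":
            star = idx
        elif part == "?":
            question = idx
        return part

    analyzed = [analyze(p, i) for i, p in enumerate(parts)]
    return analyzed, dollar, star, question

print(scan(["$", "bf_itemOf", "*"]))
# (['$', 'bf_itemOf', '*'], 0, 2, None)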
248,430
KnowledgeLinks/rdfframework
rdfframework/datasets/jsonquery.py
json_qry
def json_qry(dataset, qry_str, params={}): """ Takes a json query string and returns the results args: dataset: RdfDataset to query against qry_str: query string params: dictionary of params """ # if qry_str.startswith("$.bf_itemOf[rdf_type=bf_Print].='print',\n"): # pdb.set_trace() if not '$' in qry_str: qry_str = ".".join(['$', qry_str.strip()]) dallor_val = params.get("$", dataset) if isinstance(dallor_val, rdflib.URIRef): dallor_val = Uri(dallor_val) if qry_str.strip() == '$': return [dallor_val] parsed_qry = parse_json_qry(qry_str) qry_parts = parsed_qry['qry_parts'] post_actions = parsed_qry['params'] # print(qry_parts) rtn_list = UniqueList() if params.get('dataset'): dataset = params['dataset'] for or_part in qry_parts: if or_part[1] == 0: if isinstance(dallor_val, dict): result = dallor_val else: try: result = dataset[dallor_val] except KeyError: try: result = dataset[Uri(dallor_val)] except KeyError: try: result = dataset[BlankNode(dallor_val)] except KeyError: continue forward = True for part in or_part[0][1:]: if part == "*": forward = not forward else: if forward: result = get_json_qry_item(result, part) else: result = get_reverse_json_qry_item(result, part, False) else: result = dataset parts = or_part[0].copy() parts.reverse() forward = False for part in parts[1:]: if part == "*": forward = not forward else: if forward: result = get_json_qry_item(result, part) else: result = get_reverse_json_qry_item(result, part, False, dallor_val) rtn_list += result for action in post_actions: rtn_list = action(rtn_list) return rtn_list
python
def json_qry(dataset, qry_str, params={}): """ Takes a json query string and returns the results args: dataset: RdfDataset to query against qry_str: query string params: dictionary of params """ # if qry_str.startswith("$.bf_itemOf[rdf_type=bf_Print].='print',\n"): # pdb.set_trace() if not '$' in qry_str: qry_str = ".".join(['$', qry_str.strip()]) dallor_val = params.get("$", dataset) if isinstance(dallor_val, rdflib.URIRef): dallor_val = Uri(dallor_val) if qry_str.strip() == '$': return [dallor_val] parsed_qry = parse_json_qry(qry_str) qry_parts = parsed_qry['qry_parts'] post_actions = parsed_qry['params'] # print(qry_parts) rtn_list = UniqueList() if params.get('dataset'): dataset = params['dataset'] for or_part in qry_parts: if or_part[1] == 0: if isinstance(dallor_val, dict): result = dallor_val else: try: result = dataset[dallor_val] except KeyError: try: result = dataset[Uri(dallor_val)] except KeyError: try: result = dataset[BlankNode(dallor_val)] except KeyError: continue forward = True for part in or_part[0][1:]: if part == "*": forward = not forward else: if forward: result = get_json_qry_item(result, part) else: result = get_reverse_json_qry_item(result, part, False) else: result = dataset parts = or_part[0].copy() parts.reverse() forward = False for part in parts[1:]: if part == "*": forward = not forward else: if forward: result = get_json_qry_item(result, part) else: result = get_reverse_json_qry_item(result, part, False, dallor_val) rtn_list += result for action in post_actions: rtn_list = action(rtn_list) return rtn_list
[ "def", "json_qry", "(", "dataset", ",", "qry_str", ",", "params", "=", "{", "}", ")", ":", "# if qry_str.startswith(\"$.bf_itemOf[rdf_type=bf_Print].='print',\\n\"):", "# pdb.set_trace()", "if", "not", "'$'", "in", "qry_str", ":", "qry_str", "=", "\".\"", ".", "join", "(", "[", "'$'", ",", "qry_str", ".", "strip", "(", ")", "]", ")", "dallor_val", "=", "params", ".", "get", "(", "\"$\"", ",", "dataset", ")", "if", "isinstance", "(", "dallor_val", ",", "rdflib", ".", "URIRef", ")", ":", "dallor_val", "=", "Uri", "(", "dallor_val", ")", "if", "qry_str", ".", "strip", "(", ")", "==", "'$'", ":", "return", "[", "dallor_val", "]", "parsed_qry", "=", "parse_json_qry", "(", "qry_str", ")", "qry_parts", "=", "parsed_qry", "[", "'qry_parts'", "]", "post_actions", "=", "parsed_qry", "[", "'params'", "]", "# print(qry_parts)", "rtn_list", "=", "UniqueList", "(", ")", "if", "params", ".", "get", "(", "'dataset'", ")", ":", "dataset", "=", "params", "[", "'dataset'", "]", "for", "or_part", "in", "qry_parts", ":", "if", "or_part", "[", "1", "]", "==", "0", ":", "if", "isinstance", "(", "dallor_val", ",", "dict", ")", ":", "result", "=", "dallor_val", "else", ":", "try", ":", "result", "=", "dataset", "[", "dallor_val", "]", "except", "KeyError", ":", "try", ":", "result", "=", "dataset", "[", "Uri", "(", "dallor_val", ")", "]", "except", "KeyError", ":", "try", ":", "result", "=", "dataset", "[", "BlankNode", "(", "dallor_val", ")", "]", "except", "KeyError", ":", "continue", "forward", "=", "True", "for", "part", "in", "or_part", "[", "0", "]", "[", "1", ":", "]", ":", "if", "part", "==", "\"*\"", ":", "forward", "=", "not", "forward", "else", ":", "if", "forward", ":", "result", "=", "get_json_qry_item", "(", "result", ",", "part", ")", "else", ":", "result", "=", "get_reverse_json_qry_item", "(", "result", ",", "part", ",", "False", ")", "else", ":", "result", "=", "dataset", "parts", "=", "or_part", "[", "0", "]", ".", "copy", "(", ")", "parts", ".", "reverse", "(", ")", "forward", "=", "False", "for", "part", "in", "parts", "[", "1", ":", "]", ":", "if", "part", "==", "\"*\"", ":", "forward", "=", "not", "forward", "else", ":", "if", "forward", ":", "result", "=", "get_json_qry_item", "(", "result", ",", "part", ")", "else", ":", "result", "=", "get_reverse_json_qry_item", "(", "result", ",", "part", ",", "False", ",", "dallor_val", ")", "rtn_list", "+=", "result", "for", "action", "in", "post_actions", ":", "rtn_list", "=", "action", "(", "rtn_list", ")", "return", "rtn_list" ]
Takes a json query string and returns the results args: dataset: RdfDataset to query against qry_str: query string params: dictionary of params
[ "Takes", "a", "json", "query", "string", "and", "returns", "the", "results" ]
9ec32dcc4bed51650a4b392cc5c15100fef7923a
https://github.com/KnowledgeLinks/rdfframework/blob/9ec32dcc4bed51650a4b392cc5c15100fef7923a/rdfframework/datasets/jsonquery.py#L393-L463
248,431
jmgilman/Neolib
neolib/pyamf/amf0.py
Decoder.readMixedArray
def readMixedArray(self): """ Read mixed array. @rtype: L{pyamf.MixedArray} """ # TODO: something with the length/strict self.stream.read_ulong() # length obj = pyamf.MixedArray() self.context.addObject(obj) attrs = self.readObjectAttributes(obj) for key, value in attrs.items(): try: key = int(key) except ValueError: pass obj[key] = value return obj
python
def readMixedArray(self): """ Read mixed array. @rtype: L{pyamf.MixedArray} """ # TODO: something with the length/strict self.stream.read_ulong() # length obj = pyamf.MixedArray() self.context.addObject(obj) attrs = self.readObjectAttributes(obj) for key, value in attrs.items(): try: key = int(key) except ValueError: pass obj[key] = value return obj
[ "def", "readMixedArray", "(", "self", ")", ":", "# TODO: something with the length/strict", "self", ".", "stream", ".", "read_ulong", "(", ")", "# length", "obj", "=", "pyamf", ".", "MixedArray", "(", ")", "self", ".", "context", ".", "addObject", "(", "obj", ")", "attrs", "=", "self", ".", "readObjectAttributes", "(", "obj", ")", "for", "key", ",", "value", "in", "attrs", ".", "items", "(", ")", ":", "try", ":", "key", "=", "int", "(", "key", ")", "except", "ValueError", ":", "pass", "obj", "[", "key", "]", "=", "value", "return", "obj" ]
Read mixed array. @rtype: L{pyamf.MixedArray}
[ "Read", "mixed", "array", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L223-L245
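AMF0 ECMA arrays serialize every key as a string, so the tail of readMixedArray coerces numeric keys back to ints while leaving named keys such as 'length' untouched. The coercion on a made-up attribute dict:

attrs = {"0": "first", "1": "second", "length": 2}  # keys arrive as strings

obj = {}
for key, value in attrs.items():
    try:
        key = int(key)      # numeric string keys become real ints
    except ValueError:
        pass                # named keys such as 'length' stay strings
    obj[key] = value
print(obj)  # {0: 'first', 1: 'second', 'length': 2}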
248,432
jmgilman/Neolib
neolib/pyamf/amf0.py
Decoder.readTypedObject
def readTypedObject(self): """ Reads an aliased ActionScript object from the stream and attempts to 'cast' it into a python class. @see: L{pyamf.register_class} """ class_alias = self.readString() try: alias = self.context.getClassAlias(class_alias) except pyamf.UnknownClassAlias: if self.strict: raise alias = pyamf.TypedObjectClassAlias(class_alias) obj = alias.createInstance(codec=self) self.context.addObject(obj) attrs = self.readObjectAttributes(obj) alias.applyAttributes(obj, attrs, codec=self) return obj
python
def readTypedObject(self): """ Reads an aliased ActionScript object from the stream and attempts to 'cast' it into a python class. @see: L{pyamf.register_class} """ class_alias = self.readString() try: alias = self.context.getClassAlias(class_alias) except pyamf.UnknownClassAlias: if self.strict: raise alias = pyamf.TypedObjectClassAlias(class_alias) obj = alias.createInstance(codec=self) self.context.addObject(obj) attrs = self.readObjectAttributes(obj) alias.applyAttributes(obj, attrs, codec=self) return obj
[ "def", "readTypedObject", "(", "self", ")", ":", "class_alias", "=", "self", ".", "readString", "(", ")", "try", ":", "alias", "=", "self", ".", "context", ".", "getClassAlias", "(", "class_alias", ")", "except", "pyamf", ".", "UnknownClassAlias", ":", "if", "self", ".", "strict", ":", "raise", "alias", "=", "pyamf", ".", "TypedObjectClassAlias", "(", "class_alias", ")", "obj", "=", "alias", ".", "createInstance", "(", "codec", "=", "self", ")", "self", ".", "context", ".", "addObject", "(", "obj", ")", "attrs", "=", "self", ".", "readObjectAttributes", "(", "obj", ")", "alias", ".", "applyAttributes", "(", "obj", ",", "attrs", ",", "codec", "=", "self", ")", "return", "obj" ]
Reads an aliased ActionScript object from the stream and attempts to 'cast' it into a python class. @see: L{pyamf.register_class}
[ "Reads", "an", "aliased", "ActionScript", "object", "from", "the", "stream", "and", "attempts", "to", "cast", "it", "into", "a", "python", "class", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L260-L283
248,433
jmgilman/Neolib
neolib/pyamf/amf0.py
Decoder.readObject
def readObject(self): """ Reads an anonymous object from the data stream. @rtype: L{ASObject<pyamf.ASObject>} """ obj = pyamf.ASObject() self.context.addObject(obj) obj.update(self.readObjectAttributes(obj)) return obj
python
def readObject(self): """ Reads an anonymous object from the data stream. @rtype: L{ASObject<pyamf.ASObject>} """ obj = pyamf.ASObject() self.context.addObject(obj) obj.update(self.readObjectAttributes(obj)) return obj
[ "def", "readObject", "(", "self", ")", ":", "obj", "=", "pyamf", ".", "ASObject", "(", ")", "self", ".", "context", ".", "addObject", "(", "obj", ")", "obj", ".", "update", "(", "self", ".", "readObjectAttributes", "(", "obj", ")", ")", "return", "obj" ]
Reads an anonymous object from the data stream. @rtype: L{ASObject<pyamf.ASObject>}
[ "Reads", "an", "anonymous", "object", "from", "the", "data", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L307-L318
248,434
jmgilman/Neolib
neolib/pyamf/amf0.py
Decoder.readReference
def readReference(self): """ Reads a reference from the data stream. @raise pyamf.ReferenceError: Unknown reference. """ idx = self.stream.read_ushort() o = self.context.getObject(idx) if o is None: raise pyamf.ReferenceError('Unknown reference %d' % (idx,)) return o
python
def readReference(self): """ Reads a reference from the data stream. @raise pyamf.ReferenceError: Unknown reference. """ idx = self.stream.read_ushort() o = self.context.getObject(idx) if o is None: raise pyamf.ReferenceError('Unknown reference %d' % (idx,)) return o
[ "def", "readReference", "(", "self", ")", ":", "idx", "=", "self", ".", "stream", ".", "read_ushort", "(", ")", "o", "=", "self", ".", "context", ".", "getObject", "(", "idx", ")", "if", "o", "is", "None", ":", "raise", "pyamf", ".", "ReferenceError", "(", "'Unknown reference %d'", "%", "(", "idx", ",", ")", ")", "return", "o" ]
Reads a reference from the data stream. @raise pyamf.ReferenceError: Unknown reference.
[ "Reads", "a", "reference", "from", "the", "data", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L320-L332
248,435
jmgilman/Neolib
neolib/pyamf/amf0.py
Decoder.readDate
def readDate(self): """ Reads a UTC date from the data stream. Client and servers are responsible for applying their own timezones. Date: C{0x0B T7 T6} .. C{T0 Z1 Z2 T7} to C{T0} form a 64 bit Big Endian number that specifies the number of milliseconds that have passed since 1/1/1970 0:00 to the specified time. This format is UTC 1970. C{Z1} and C{Z0} form a 16 bit Big Endian number indicating the indicated time's timezone in minutes. """ ms = self.stream.read_double() / 1000.0 self.stream.read_short() # tz # Timezones are ignored d = util.get_datetime(ms) if self.timezone_offset: d = d + self.timezone_offset self.context.addObject(d) return d
python
def readDate(self): """ Reads a UTC date from the data stream. Client and servers are responsible for applying their own timezones. Date: C{0x0B T7 T6} .. C{T0 Z1 Z2 T7} to C{T0} form a 64 bit Big Endian number that specifies the number of milliseconds that have passed since 1/1/1970 0:00 to the specified time. This format is UTC 1970. C{Z1} and C{Z0} form a 16 bit Big Endian number indicating the indicated time's timezone in minutes. """ ms = self.stream.read_double() / 1000.0 self.stream.read_short() # tz # Timezones are ignored d = util.get_datetime(ms) if self.timezone_offset: d = d + self.timezone_offset self.context.addObject(d) return d
[ "def", "readDate", "(", "self", ")", ":", "ms", "=", "self", ".", "stream", ".", "read_double", "(", ")", "/", "1000.0", "self", ".", "stream", ".", "read_short", "(", ")", "# tz", "# Timezones are ignored", "d", "=", "util", ".", "get_datetime", "(", "ms", ")", "if", "self", ".", "timezone_offset", ":", "d", "=", "d", "+", "self", ".", "timezone_offset", "self", ".", "context", ".", "addObject", "(", "d", ")", "return", "d" ]
Reads a UTC date from the data stream. Client and servers are responsible for applying their own timezones. Date: C{0x0B T7 T6} .. C{T0 Z1 Z2 T7} to C{T0} form a 64 bit Big Endian number that specifies the number of milliseconds that have passed since 1/1/1970 0:00 to the specified time. This format is UTC 1970. C{Z1} and C{Z0} form a 16 bit Big Endian number indicating the indicated time's timezone in minutes.
[ "Reads", "a", "UTC", "date", "from", "the", "data", "stream", ".", "Client", "and", "servers", "are", "responsible", "for", "applying", "their", "own", "timezones", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L334-L357
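On the wire the date is an 8-byte big-endian double holding milliseconds since the Unix epoch, followed by a 16-bit timezone field that decoders ignore. Decoding a hand-packed payload:

import struct
from datetime import datetime, timezone

# 8-byte big-endian double (ms since epoch) followed by a 2-byte timezone
payload = struct.pack(">dh", 1000000000000.0, 0)

ms, _tz = struct.unpack(">dh", payload)  # the timezone field is ignored
d = datetime.fromtimestamp(ms / 1000.0, tz=timezone.utc)
print(d)  # 2001-09-09 01:46:40+00:00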
248,436
jmgilman/Neolib
neolib/pyamf/amf0.py
Decoder.readLongString
def readLongString(self): """ Read UTF8 string. """ l = self.stream.read_ulong() bytes = self.stream.read(l) return self.context.getStringForBytes(bytes)
python
def readLongString(self): """ Read UTF8 string. """ l = self.stream.read_ulong() bytes = self.stream.read(l) return self.context.getStringForBytes(bytes)
[ "def", "readLongString", "(", "self", ")", ":", "l", "=", "self", ".", "stream", ".", "read_ulong", "(", ")", "bytes", "=", "self", ".", "stream", ".", "read", "(", "l", ")", "return", "self", ".", "context", ".", "getStringForBytes", "(", "bytes", ")" ]
Read UTF8 string.
[ "Read", "UTF8", "string", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L359-L367
248,437
jmgilman/Neolib
neolib/pyamf/amf0.py
Decoder.readXML
def readXML(self): """ Read XML. """ data = self.readLongString() root = xml.fromstring(data) self.context.addObject(root) return root
python
def readXML(self): """ Read XML. """ data = self.readLongString() root = xml.fromstring(data) self.context.addObject(root) return root
[ "def", "readXML", "(", "self", ")", ":", "data", "=", "self", ".", "readLongString", "(", ")", "root", "=", "xml", ".", "fromstring", "(", "data", ")", "self", ".", "context", ".", "addObject", "(", "root", ")", "return", "root" ]
Read XML.
[ "Read", "XML", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L369-L378
248,438
jmgilman/Neolib
neolib/pyamf/amf0.py
Encoder.writeList
def writeList(self, a): """ Write array to the stream. @param a: The array data to be encoded to the AMF0 data stream. """ if self.writeReference(a) != -1: return self.context.addObject(a) self.writeType(TYPE_ARRAY) self.stream.write_ulong(len(a)) for data in a: self.writeElement(data)
python
def writeList(self, a): """ Write array to the stream. @param a: The array data to be encoded to the AMF0 data stream. """ if self.writeReference(a) != -1: return self.context.addObject(a) self.writeType(TYPE_ARRAY) self.stream.write_ulong(len(a)) for data in a: self.writeElement(data)
[ "def", "writeList", "(", "self", ",", "a", ")", ":", "if", "self", ".", "writeReference", "(", "a", ")", "!=", "-", "1", ":", "return", "self", ".", "context", ".", "addObject", "(", "a", ")", "self", ".", "writeType", "(", "TYPE_ARRAY", ")", "self", ".", "stream", ".", "write_ulong", "(", "len", "(", "a", ")", ")", "for", "data", "in", "a", ":", "self", ".", "writeElement", "(", "data", ")" ]
Write array to the stream. @param a: The array data to be encoded to the AMF0 data stream.
[ "Write", "array", "to", "the", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L432-L447
248,439
jmgilman/Neolib
neolib/pyamf/amf0.py
Encoder.writeNumber
def writeNumber(self, n): """ Write number to the data stream. @param n: The number data to be encoded to the AMF0 data stream. """ self.writeType(TYPE_NUMBER) self.stream.write_double(float(n))
python
def writeNumber(self, n): """ Write number to the data stream. @param n: The number data to be encoded to the AMF0 data stream. """ self.writeType(TYPE_NUMBER) self.stream.write_double(float(n))
[ "def", "writeNumber", "(", "self", ",", "n", ")", ":", "self", ".", "writeType", "(", "TYPE_NUMBER", ")", "self", ".", "stream", ".", "write_double", "(", "float", "(", "n", ")", ")" ]
Write number to the data stream. @param n: The number data to be encoded to the AMF0 data stream.
[ "Write", "number", "to", "the", "data", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L449-L456
248,440
jmgilman/Neolib
neolib/pyamf/amf0.py
Encoder.writeBoolean
def writeBoolean(self, b): """ Write boolean to the data stream. @param b: The boolean data to be encoded to the AMF0 data stream. """ self.writeType(TYPE_BOOL) if b: self.stream.write_uchar(1) else: self.stream.write_uchar(0)
python
def writeBoolean(self, b): """ Write boolean to the data stream. @param b: The boolean data to be encoded to the AMF0 data stream. """ self.writeType(TYPE_BOOL) if b: self.stream.write_uchar(1) else: self.stream.write_uchar(0)
[ "def", "writeBoolean", "(", "self", ",", "b", ")", ":", "self", ".", "writeType", "(", "TYPE_BOOL", ")", "if", "b", ":", "self", ".", "stream", ".", "write_uchar", "(", "1", ")", "else", ":", "self", ".", "stream", ".", "write_uchar", "(", "0", ")" ]
Write boolean to the data stream. @param b: The boolean data to be encoded to the AMF0 data stream.
[ "Write", "boolean", "to", "the", "data", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L458-L469
248,441
jmgilman/Neolib
neolib/pyamf/amf0.py
Encoder.writeBytes
def writeBytes(self, s): """ Write a string of bytes to the data stream. """ l = len(s) if l > 0xffff: self.writeType(TYPE_LONGSTRING) else: self.writeType(TYPE_STRING) if l > 0xffff: self.stream.write_ulong(l) else: self.stream.write_ushort(l) self.stream.write(s)
python
def writeBytes(self, s): """ Write a string of bytes to the data stream. """ l = len(s) if l > 0xffff: self.writeType(TYPE_LONGSTRING) else: self.writeType(TYPE_STRING) if l > 0xffff: self.stream.write_ulong(l) else: self.stream.write_ushort(l) self.stream.write(s)
[ "def", "writeBytes", "(", "self", ",", "s", ")", ":", "l", "=", "len", "(", "s", ")", "if", "l", ">", "0xffff", ":", "self", ".", "writeType", "(", "TYPE_LONGSTRING", ")", "else", ":", "self", ".", "writeType", "(", "TYPE_STRING", ")", "if", "l", ">", "0xffff", ":", "self", ".", "stream", ".", "write_ulong", "(", "l", ")", "else", ":", "self", ".", "stream", ".", "write_ushort", "(", "l", ")", "self", ".", "stream", ".", "write", "(", "s", ")" ]
Write a string of bytes to the data stream.
[ "Write", "a", "string", "of", "bytes", "to", "the", "data", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L487-L503
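The 0xffff threshold in writeBytes exists because the plain AMF0 string header carries a 16-bit length, while the long-string form carries a 32-bit one. A standalone sketch of the framing, using the AMF0 marker bytes (0x02 and 0x0C):

import struct

TYPE_STRING = b"\x02"      # 16-bit length header
TYPE_LONGSTRING = b"\x0c"  # 32-bit length header

def frame_string(s):
    if len(s) > 0xffff:
        return TYPE_LONGSTRING + struct.pack(">L", len(s)) + s
    return TYPE_STRING + struct.pack(">H", len(s)) + s

print(frame_string(b"hi"))             # b'\x02\x00\x02hi'
print(frame_string(b"x" * 70000)[:5])  # b'\x0c\x00\x01\x11p'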
248,442
jmgilman/Neolib
neolib/pyamf/amf0.py
Encoder.writeString
def writeString(self, u): """ Write a unicode string to the data stream. """ s = self.context.getBytesForString(u) self.writeBytes(s)
python
def writeString(self, u): """ Write a unicode string to the data stream. """ s = self.context.getBytesForString(u) self.writeBytes(s)
[ "def", "writeString", "(", "self", ",", "u", ")", ":", "s", "=", "self", ".", "context", ".", "getBytesForString", "(", "u", ")", "self", ".", "writeBytes", "(", "s", ")" ]
Write a unicode to the data stream.
[ "Write", "a", "unicode", "to", "the", "data", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L505-L511
248,443
jmgilman/Neolib
neolib/pyamf/amf0.py
Encoder.writeReference
def writeReference(self, o):
    """
    Write reference to the data stream.

    @param o: The reference data to be encoded to the AMF0 datastream.
    """
    idx = self.context.getObjectReference(o)

    if idx == -1 or idx > 65535:
        return -1

    self.writeType(TYPE_REFERENCE)
    self.stream.write_ushort(idx)

    return idx
python
[ "def", "writeReference", "(", "self", ",", "o", ")", ":", "idx", "=", "self", ".", "context", ".", "getObjectReference", "(", "o", ")", "if", "idx", "==", "-", "1", "or", "idx", ">", "65535", ":", "return", "-", "1", "self", ".", "writeType", "(", "TYPE_REFERENCE", ")", "self", ".", "stream", ".", "write_ushort", "(", "idx", ")", "return", "idx" ]
Write reference to the data stream. @param o: The reference data to be encoded to the AMF0 datastream.
[ "Write", "reference", "to", "the", "data", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L513-L527
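The method above only emits a reference when the context has already seen the object and the index fits in a ushort. A toy model of that reference table; the names are hypothetical, not the pyamf context API.

class RefTable(object):
    def __init__(self):
        self._objects = []

    def get_reference(self, obj):
        # return the index of a previously seen object, or -1
        for idx, seen in enumerate(self._objects):
            if seen is obj:
                return idx
        return -1

    def add(self, obj):
        self._objects.append(obj)

refs = RefTable()
shared = {'a': 1}
print(refs.get_reference(shared))  # -1: first sight, must be fully encoded
refs.add(shared)
print(refs.get_reference(shared))  # 0: later occurrences shrink to 2 bytes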
248,444
jmgilman/Neolib
neolib/pyamf/amf0.py
Encoder.writeMixedArray
def writeMixedArray(self, o):
    """
    Write mixed array to the data stream.

    @type o: L{pyamf.MixedArray}
    """
    if self.writeReference(o) != -1:
        return

    self.context.addObject(o)
    self.writeType(TYPE_MIXEDARRAY)

    # TODO: optimise this
    # work out the highest integer index
    try:
        # list comprehensions to save the day
        max_index = max([y[0] for y in o.items()
                         if isinstance(y[0], (int, long))])

        if max_index < 0:
            max_index = 0
    except ValueError:
        max_index = 0

    self.stream.write_ulong(max_index)
    self._writeDict(o)
    self._writeEndObject()
python
[ "def", "writeMixedArray", "(", "self", ",", "o", ")", ":", "if", "self", ".", "writeReference", "(", "o", ")", "!=", "-", "1", ":", "return", "self", ".", "context", ".", "addObject", "(", "o", ")", "self", ".", "writeType", "(", "TYPE_MIXEDARRAY", ")", "# TODO: optimise this", "# work out the highest integer index", "try", ":", "# list comprehensions to save the day", "max_index", "=", "max", "(", "[", "y", "[", "0", "]", "for", "y", "in", "o", ".", "items", "(", ")", "if", "isinstance", "(", "y", "[", "0", "]", ",", "(", "int", ",", "long", ")", ")", "]", ")", "if", "max_index", "<", "0", ":", "max_index", "=", "0", "except", "ValueError", ":", "max_index", "=", "0", "self", ".", "stream", ".", "write_ulong", "(", "max_index", ")", "self", ".", "_writeDict", "(", "o", ")", "self", ".", "_writeEndObject", "(", ")" ]
Write mixed array to the data stream. @type o: L{pyamf.MixedArray}
[ "Write", "mixed", "array", "to", "the", "data", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L542-L569
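The ECMA-array header needs the highest integer key; a small Python 3 rendering of that computation (so no `long`), with the same clamp-to-zero fallback for negative or missing indexes:

def highest_int_index(d):
    # only integer keys count toward the array length in the header
    int_keys = [k for k in d if isinstance(k, int)]
    return max(max(int_keys), 0) if int_keys else 0

print(highest_int_index({0: 'a', 5: 'b', 'name': 'c'}))  # 5
print(highest_int_index({'only': 'strings'}))            # 0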
248,445
jmgilman/Neolib
neolib/pyamf/amf0.py
Encoder.writeObject
def writeObject(self, o):
    """
    Write a Python object to the stream.

    @param o: The object data to be encoded to the AMF0 data stream.
    """
    if self.writeReference(o) != -1:
        return

    self.context.addObject(o)
    alias = self.context.getClassAlias(o.__class__)

    alias.compile()

    if alias.amf3:
        self.writeAMF3(o)
        return

    if alias.anonymous:
        self.writeType(TYPE_OBJECT)
    else:
        self.writeType(TYPE_TYPEDOBJECT)
        self.serialiseString(alias.alias)

    attrs = alias.getEncodableAttributes(o, codec=self)

    if alias.static_attrs and attrs:
        for key in alias.static_attrs:
            value = attrs.pop(key)

            self.serialiseString(key)
            self.writeElement(value)

    if attrs:
        self._writeDict(attrs)

    self._writeEndObject()
python
[ "def", "writeObject", "(", "self", ",", "o", ")", ":", "if", "self", ".", "writeReference", "(", "o", ")", "!=", "-", "1", ":", "return", "self", ".", "context", ".", "addObject", "(", "o", ")", "alias", "=", "self", ".", "context", ".", "getClassAlias", "(", "o", ".", "__class__", ")", "alias", ".", "compile", "(", ")", "if", "alias", ".", "amf3", ":", "self", ".", "writeAMF3", "(", "o", ")", "return", "if", "alias", ".", "anonymous", ":", "self", ".", "writeType", "(", "TYPE_OBJECT", ")", "else", ":", "self", ".", "writeType", "(", "TYPE_TYPEDOBJECT", ")", "self", ".", "serialiseString", "(", "alias", ".", "alias", ")", "attrs", "=", "alias", ".", "getEncodableAttributes", "(", "o", ",", "codec", "=", "self", ")", "if", "alias", ".", "static_attrs", "and", "attrs", ":", "for", "key", "in", "alias", ".", "static_attrs", ":", "value", "=", "attrs", ".", "pop", "(", "key", ")", "self", ".", "serialiseString", "(", "key", ")", "self", ".", "writeElement", "(", "value", ")", "if", "attrs", ":", "self", ".", "_writeDict", "(", "attrs", ")", "self", ".", "_writeEndObject", "(", ")" ]
Write a Python object to the stream. @param o: The object data to be encoded to the AMF0 data stream.
[ "Write", "a", "Python", "object", "to", "the", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L574-L611
248,446
jmgilman/Neolib
neolib/pyamf/amf0.py
Encoder.writeDate
def writeDate(self, d):
    """
    Writes a date to the data stream.

    @type d: Instance of C{datetime.datetime}
    @param d: The date to be encoded to the AMF0 data stream.
    """
    if isinstance(d, datetime.time):
        raise pyamf.EncodeError('A datetime.time instance was found but '
            'AMF0 has no way to encode time objects. Please use '
            'datetime.datetime instead (got:%r)' % (d,))

    # According to the Red5 implementation of AMF0, dates references are
    # created, but not used.
    if self.timezone_offset is not None:
        d -= self.timezone_offset

    secs = util.get_timestamp(d)
    tz = 0

    self.writeType(TYPE_DATE)
    self.stream.write_double(secs * 1000.0)
    self.stream.write_short(tz)
python
[ "def", "writeDate", "(", "self", ",", "d", ")", ":", "if", "isinstance", "(", "d", ",", "datetime", ".", "time", ")", ":", "raise", "pyamf", ".", "EncodeError", "(", "'A datetime.time instance was found but '", "'AMF0 has no way to encode time objects. Please use '", "'datetime.datetime instead (got:%r)'", "%", "(", "d", ",", ")", ")", "# According to the Red5 implementation of AMF0, dates references are", "# created, but not used.", "if", "self", ".", "timezone_offset", "is", "not", "None", ":", "d", "-=", "self", ".", "timezone_offset", "secs", "=", "util", ".", "get_timestamp", "(", "d", ")", "tz", "=", "0", "self", ".", "writeType", "(", "TYPE_DATE", ")", "self", ".", "stream", ".", "write_double", "(", "secs", "*", "1000.0", ")", "self", ".", "stream", ".", "write_short", "(", "tz", ")" ]
Writes a date to the data stream. @type d: Instance of C{datetime.datetime} @param d: The date to be encoded to the AMF0 data stream.
[ "Writes", "a", "date", "to", "the", "data", "stream", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L613-L635
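The double written above is milliseconds since the Unix epoch, followed by an always-zero timezone short. A quick stdlib check of the millisecond math; calendar.timegm is the UTC inverse of time.gmtime, which is roughly what util.get_timestamp computes here (an assumption about that helper).

import calendar
import datetime

d = datetime.datetime(2012, 1, 1, 0, 0, 0)
secs = calendar.timegm(d.timetuple())
print(secs * 1000.0)  # 1325376000000.0: the double writeDate emits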
248,447
jmgilman/Neolib
neolib/pyamf/amf0.py
Encoder.writeXML
def writeXML(self, e):
    """
    Writes an XML instance.
    """
    self.writeType(TYPE_XML)

    data = xml.tostring(e)

    if isinstance(data, unicode):
        data = data.encode('utf-8')

    self.stream.write_ulong(len(data))
    self.stream.write(data)
python
[ "def", "writeXML", "(", "self", ",", "e", ")", ":", "self", ".", "writeType", "(", "TYPE_XML", ")", "data", "=", "xml", ".", "tostring", "(", "e", ")", "if", "isinstance", "(", "data", ",", "unicode", ")", ":", "data", "=", "data", ".", "encode", "(", "'utf-8'", ")", "self", ".", "stream", ".", "write_ulong", "(", "len", "(", "data", ")", ")", "self", ".", "stream", ".", "write", "(", "data", ")" ]
Writes an XML instance.
[ "Writes", "an", "XML", "instance", "." ]
228fafeaed0f3195676137732384a14820ae285c
https://github.com/jmgilman/Neolib/blob/228fafeaed0f3195676137732384a14820ae285c/neolib/pyamf/amf0.py#L637-L649
248,448
abe-winter/pg13-py
pg13/redismodel.py
RedisModel.des
def des(clas, keyblob, valblob):
    "deserialize. translate publish message, basically"
    raise NotImplementedError("don't use tuples, it breaks __eq__. this function probably isn't used in real life")
    # note: everything below is unreachable while the raise above stands
    raw_keyvals = msgpack.loads(keyblob)
    (namespace, version), keyvals = raw_keyvals[:2], raw_keyvals[2:]
    if namespace != clas.NAMESPACE or version != clas.VERSION:
        raise TypeError('des_mismatch got %s want %s' % ((namespace, version), (clas.NAMESPACE, clas.VERSION)))
    vals = tuple(msgpack.loads(valblob))
    clas.type_check(zip(*clas.KEY)[1], keyvals, 'KEY')
    clas.type_check(zip(*clas.VALUE)[1], vals, 'VALUE')
    return clas(tuple(keyvals), vals)
python
[ "def", "des", "(", "clas", ",", "keyblob", ",", "valblob", ")", ":", "raise", "NotImplementedError", "(", "\"don't use tuples, it breaks __eq__. this function probably isn't used in real life\"", ")", "raw_keyvals", "=", "msgpack", ".", "loads", "(", "keyblob", ")", "(", "namespace", ",", "version", ")", ",", "keyvals", "=", "raw_keyvals", "[", ":", "2", "]", ",", "raw_keyvals", "[", "2", ":", "]", "if", "namespace", "!=", "clas", ".", "NAMESPACE", "or", "version", "!=", "clas", ".", "VERSION", ":", "raise", "TypeError", "(", "'des_mismatch got %s want %s'", "%", "(", "(", "namespace", ",", "version", ")", ",", "(", "clas", ".", "NAMESPACE", ",", "clas", ".", "VERSION", ")", ")", ")", "vals", "=", "tuple", "(", "msgpack", ".", "loads", "(", "valblob", ")", ")", "clas", ".", "type_check", "(", "zip", "(", "*", "clas", ".", "KEY", ")", "[", "1", "]", ",", "keyvals", ",", "'KEY'", ")", "clas", ".", "type_check", "(", "zip", "(", "*", "clas", ".", "VALUE", ")", "[", "1", "]", ",", "vals", ",", "'VALUE'", ")", "return", "clas", "(", "tuple", "(", "keyvals", ")", ",", "vals", ")" ]
deserialize. translate publish message, basically
[ "deserialize", ".", "translate", "publish", "message", "basically" ]
c78806f99f35541a8756987e86edca3438aa97f5
https://github.com/abe-winter/pg13-py/blob/c78806f99f35541a8756987e86edca3438aa97f5/pg13/redismodel.py#L41-L51
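A runnable sketch of the (namespace, version) envelope this deserializer expects, using the real msgpack packb/unpackb API; the constants are invented for the demo, and whether strings come back as str or bytes depends on the msgpack version installed.

import msgpack

NAMESPACE, VERSION = 'demo', 1  # hypothetical class constants

keyblob = msgpack.packb([NAMESPACE, VERSION, 'user', 42])
raw = msgpack.unpackb(keyblob)  # msgpack >= 1.0 decodes str by default

# split off the envelope, then validate it before trusting the payload
(namespace, version), keyvals = raw[:2], raw[2:]
if [namespace, version] != [NAMESPACE, VERSION]:
    raise TypeError('des_mismatch')
print(keyvals)  # ['user', 42]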
248,449
abe-winter/pg13-py
pg13/redismodel.py
RedisSimplePubsub.wait
def wait(self):
    "wait for a message, respecting timeout"
    data = self.getcon().recv(256)  # this can raise socket.timeout
    if not data:
        raise PubsubDisco
    if self.reset:
        self.reset = False  # i.e. ack it. reset is used to tell the wait-thread there was a reconnect (though it's plausible that this never happens)
        raise PubsubDisco
    self.buf += data
    msg, self.buf = complete_message(self.buf)
    return msg
python
[ "def", "wait", "(", "self", ")", ":", "data", "=", "self", ".", "getcon", "(", ")", ".", "recv", "(", "256", ")", "# this can raise socket.timeout\r", "if", "not", "data", ":", "raise", "PubsubDisco", "if", "self", ".", "reset", ":", "self", ".", "reset", "=", "False", "# i.e. ack it. reset is used to tell the wait-thread there was a reconnect (though it's plausible that this never happens)\r", "raise", "PubsubDisco", "self", ".", "buf", "+=", "data", "msg", ",", "self", ".", "buf", "=", "complete_message", "(", "self", ".", "buf", ")", "return", "msg" ]
wait for a message, respecting timeout
[ "wait", "for", "a", "message", "respecting", "timeout" ]
c78806f99f35541a8756987e86edca3438aa97f5
https://github.com/abe-winter/pg13-py/blob/c78806f99f35541a8756987e86edca3438aa97f5/pg13/redismodel.py#L117-L126
248,450
jut-io/jut-python-tools
jut/api/auth.py
TokenManager.get_access_token
def get_access_token(self):
    """
    get a valid access token
    """
    if self.is_access_token_expired():
        if is_debug_enabled():
            debug('requesting new access_token')

        token = get_access_token(username=self.username,
                                 password=self.password,
                                 client_id=self.client_id,
                                 client_secret=self.client_secret,
                                 app_url=self.app_url)

        # let's make sure to refresh once we're halfway to expiring
        self.expires_at = time.time() + token['expires_in']/2
        self.access_token = token['access_token']

    return self.access_token
python
[ "def", "get_access_token", "(", "self", ")", ":", "if", "self", ".", "is_access_token_expired", "(", ")", ":", "if", "is_debug_enabled", "(", ")", ":", "debug", "(", "'requesting new access_token'", ")", "token", "=", "get_access_token", "(", "username", "=", "self", ".", "username", ",", "password", "=", "self", ".", "password", ",", "client_id", "=", "self", ".", "client_id", ",", "client_secret", "=", "self", ".", "client_secret", ",", "app_url", "=", "self", ".", "app_url", ")", "# lets make sure to refresh before we're halfway to expiring", "self", ".", "expires_at", "=", "time", ".", "time", "(", ")", "+", "token", "[", "'expires_in'", "]", "/", "2", "self", ".", "access_token", "=", "token", "[", "'access_token'", "]", "return", "self", ".", "access_token" ]
get a valid access token
[ "get", "a", "valid", "access", "token" ]
65574d23f51a7bbced9bb25010d02da5ca5d906f
https://github.com/jut-io/jut-python-tools/blob/65574d23f51a7bbced9bb25010d02da5ca5d906f/jut/api/auth.py#L61-L81
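The policy above treats the token as expired once half of expires_in has elapsed, so a refresh always lands well before the real deadline. A self-contained sketch of the same half-life caching; TokenCache and its fetch callback are hypothetical stand-ins, not the jut API.

import time

class TokenCache(object):
    def __init__(self, fetch):
        self._fetch = fetch   # callable returning {'access_token', 'expires_in'}
        self._token = None
        self._expires_at = 0

    def get(self):
        if time.time() >= self._expires_at:
            token = self._fetch()
            # schedule the next refresh at the halfway point, as above
            self._expires_at = time.time() + token['expires_in'] / 2.0
            self._token = token['access_token']
        return self._token

cache = TokenCache(lambda: {'access_token': 'abc', 'expires_in': 3600})
print(cache.get())  # 'abc', fetched once and reused for ~30 minutes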
248,451
rbarrois/confutils
confutils/configfile.py
ConfigLineList.find_lines
def find_lines(self, line):
    """Find all lines matching a given line."""
    for other_line in self.lines:
        if other_line.match(line):
            yield other_line
python
[ "def", "find_lines", "(", "self", ",", "line", ")", ":", "for", "other_line", "in", "self", ".", "lines", ":", "if", "other_line", ".", "match", "(", "line", ")", ":", "yield", "other_line" ]
Find all lines matching a given line.
[ "Find", "all", "lines", "matching", "a", "given", "line", "." ]
26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4
https://github.com/rbarrois/confutils/blob/26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4/confutils/configfile.py#L110-L114
248,452
rbarrois/confutils
confutils/configfile.py
Section.remove
def remove(self, line):
    """Delete all lines matching the given line."""
    nb = 0
    for block in self.blocks:
        nb += block.remove(line)
    return nb
python
[ "def", "remove", "(", "self", ",", "line", ")", ":", "nb", "=", "0", "for", "block", "in", "self", ".", "blocks", ":", "nb", "+=", "block", ".", "remove", "(", "line", ")", "return", "nb" ]
Delete all lines matching the given line.
[ "Delete", "all", "lines", "matching", "the", "given", "line", "." ]
26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4
https://github.com/rbarrois/confutils/blob/26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4/confutils/configfile.py#L227-L233
248,453
rbarrois/confutils
confutils/configfile.py
MultiValuedSectionView.add
def add(self, key, value):
    """Add a new value for a key.

    This differs from __setitem__ in adding a new value instead of
    updating the list of values, thus avoiding the need to fetch the
    previous list of values.
    """
    self.configfile.add(self.name, key, value)
python
[ "def", "add", "(", "self", ",", "key", ",", "value", ")", ":", "self", ".", "configfile", ".", "add", "(", "self", ".", "name", ",", "key", ",", "value", ")" ]
Add a new value for a key. This differs from __setitem__ in adding a new value instead of updating the list of values, thus avoiding the need to fetch the previous list of values.
[ "Add", "a", "new", "value", "for", "a", "key", "." ]
26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4
https://github.com/rbarrois/confutils/blob/26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4/confutils/configfile.py#L298-L305
248,454
rbarrois/confutils
confutils/configfile.py
ConfigFile._get_section
def _get_section(self, name, create=True):
    """Retrieve a section by name. Create it on first access."""
    try:
        return self.sections[name]
    except KeyError:
        if not create:
            raise

        section = Section(name)
        self.sections[name] = section
        return section
python
[ "def", "_get_section", "(", "self", ",", "name", ",", "create", "=", "True", ")", ":", "try", ":", "return", "self", ".", "sections", "[", "name", "]", "except", "KeyError", ":", "if", "not", "create", ":", "raise", "section", "=", "Section", "(", "name", ")", "self", ".", "sections", "[", "name", "]", "=", "section", "return", "section" ]
Retrieve a section by name. Create it on first access.
[ "Retrieve", "a", "section", "by", "name", ".", "Create", "it", "on", "first", "access", "." ]
26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4
https://github.com/rbarrois/confutils/blob/26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4/confutils/configfile.py#L339-L349
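When creation is unconditional, the same create-on-first-access pattern collapses to dict.setdefault; the try/except form above exists so create=False can propagate the KeyError. A minimal sketch with a plain dict standing in for the section registry:

sections = {}

def get_section(name, create=True):
    if create:
        # build-and-store on first access, return the existing one after
        return sections.setdefault(name, {'name': name})
    return sections[name]  # raises KeyError if absent

print(get_section('core'))         # created on first access
print(get_section('core', False))  # now retrievable without creating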
248,455
rbarrois/confutils
confutils/configfile.py
ConfigFile.get_line
def get_line(self, section, line):
    """Retrieve all lines compatible with a given line."""
    try:
        section = self._get_section(section, create=False)
    except KeyError:
        return []
    return section.find_lines(line)
python
[ "def", "get_line", "(", "self", ",", "section", ",", "line", ")", ":", "try", ":", "section", "=", "self", ".", "_get_section", "(", "section", ",", "create", "=", "False", ")", "except", "KeyError", ":", "return", "[", "]", "return", "section", ".", "find_lines", "(", "line", ")" ]
Retrieve all lines compatible with a given line.
[ "Retrieve", "all", "lines", "compatible", "with", "a", "given", "line", "." ]
26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4
https://github.com/rbarrois/confutils/blob/26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4/confutils/configfile.py#L358-L364
248,456
rbarrois/confutils
confutils/configfile.py
ConfigFile.iter_lines
def iter_lines(self, section):
    """Iterate over all lines in a section.

    This will skip 'header' lines.
    """
    try:
        section = self._get_section(section, create=False)
    except KeyError:
        return

    for block in section:
        for line in block:
            yield line
python
[ "def", "iter_lines", "(", "self", ",", "section", ")", ":", "try", ":", "section", "=", "self", ".", "_get_section", "(", "section", ",", "create", "=", "False", ")", "except", "KeyError", ":", "return", "for", "block", "in", "section", ":", "for", "line", "in", "block", ":", "yield", "line" ]
Iterate over all lines in a section. This will skip 'header' lines.
[ "Iterate", "over", "all", "lines", "in", "a", "section", "." ]
26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4
https://github.com/rbarrois/confutils/blob/26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4/confutils/configfile.py#L366-L378
248,457
rbarrois/confutils
confutils/configfile.py
ConfigFile.enter_block
def enter_block(self, name):
    """Mark 'entering a block'."""
    section = self._get_section(name)
    block = self.current_block = section.new_block()
    self.blocks.append(block)
    return block
python
[ "def", "enter_block", "(", "self", ",", "name", ")", ":", "section", "=", "self", ".", "_get_section", "(", "name", ")", "block", "=", "self", ".", "current_block", "=", "section", ".", "new_block", "(", ")", "self", ".", "blocks", ".", "append", "(", "block", ")", "return", "block" ]
Mark 'entering a block'.
[ "Mark", "entering", "a", "block", "." ]
26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4
https://github.com/rbarrois/confutils/blob/26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4/confutils/configfile.py#L383-L388
248,458
rbarrois/confutils
confutils/configfile.py
ConfigFile.insert_line
def insert_line(self, line):
    """Insert a new line"""
    if self.current_block is not None:
        self.current_block.append(line)
    else:
        self.header.append(line)
python
[ "def", "insert_line", "(", "self", ",", "line", ")", ":", "if", "self", ".", "current_block", "is", "not", "None", ":", "self", ".", "current_block", ".", "append", "(", "line", ")", "else", ":", "self", ".", "header", ".", "append", "(", "line", ")" ]
Insert a new line
[ "Insert", "a", "new", "line" ]
26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4
https://github.com/rbarrois/confutils/blob/26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4/confutils/configfile.py#L390-L395
248,459
rbarrois/confutils
confutils/configfile.py
ConfigFile.handle_line
def handle_line(self, line):
    """Read one line."""
    if line.kind == ConfigLine.KIND_HEADER:
        self.enter_block(line.header)
    else:
        self.insert_line(line)
python
[ "def", "handle_line", "(", "self", ",", "line", ")", ":", "if", "line", ".", "kind", "==", "ConfigLine", ".", "KIND_HEADER", ":", "self", ".", "enter_block", "(", "line", ".", "header", ")", "else", ":", "self", ".", "insert_line", "(", "line", ")" ]
Read one line.
[ "Read", "one", "line", "." ]
26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4
https://github.com/rbarrois/confutils/blob/26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4/confutils/configfile.py#L397-L402
248,460
rbarrois/confutils
confutils/configfile.py
ConfigFile.parse
def parse(self, fileobj, name_hint='', parser=None):
    """Fill from a file-like object."""
    self.current_block = None  # Reset current block
    parser = parser or Parser()
    for line in parser.parse(fileobj, name_hint=name_hint):
        self.handle_line(line)
python
[ "def", "parse", "(", "self", ",", "fileobj", ",", "name_hint", "=", "''", ",", "parser", "=", "None", ")", ":", "self", ".", "current_block", "=", "None", "# Reset current block", "parser", "=", "parser", "or", "Parser", "(", ")", "for", "line", "in", "parser", ".", "parse", "(", "fileobj", ",", "name_hint", "=", "name_hint", ")", ":", "self", ".", "handle_line", "(", "line", ")" ]
Fill from a file-like object.
[ "Fill", "from", "a", "file", "-", "like", "object", "." ]
26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4
https://github.com/rbarrois/confutils/blob/26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4/confutils/configfile.py#L404-L409
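A compressed, self-contained model of the parse / handle_line / enter_block / insert_line flow above, using a regex for section headers instead of the confutils ConfigLine objects: header lines open a block in their section, everything else lands in the current block, and lines seen before any header go to a file-level header list.

import re

sections, header, current = {}, [], None

def handle_line(line):
    global current
    m = re.match(r'\[(.+)\]$', line)
    if m:                        # 'enter_block'
        current = sections.setdefault(m.group(1), [])
    elif current is not None:    # 'insert_line' into the open block
        current.append(line)
    else:
        header.append(line)      # before any section header

for raw in ['# top comment', '[main]', 'key = value']:
    handle_line(raw)

print(header)    # ['# top comment']
print(sections)  # {'main': ['key = value']}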
248,461
rbarrois/confutils
confutils/configfile.py
ConfigFile.remove_line
def remove_line(self, section, line):
    """Remove all instances of a line.

    Returns:
        int: the number of lines removed
    """
    try:
        s = self._get_section(section, create=False)
    except KeyError:
        # No such section, skip.
        return 0

    return s.remove(line)
python
[ "def", "remove_line", "(", "self", ",", "section", ",", "line", ")", ":", "try", ":", "s", "=", "self", ".", "_get_section", "(", "section", ",", "create", "=", "False", ")", "except", "KeyError", ":", "# No such section, skip.", "return", "0", "return", "s", ".", "remove", "(", "line", ")" ]
Remove all instances of a line. Returns: int: the number of lines removed
[ "Remove", "all", "instances", "of", "a", "line", "." ]
26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4
https://github.com/rbarrois/confutils/blob/26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4/confutils/configfile.py#L448-L460
248,462
rbarrois/confutils
confutils/configfile.py
ConfigFile.add_or_update
def add_or_update(self, section, key, value):
    """Update the key or, if no previous value existed, add it.

    Returns:
        int: Number of updated lines.
    """
    updates = self.update(section, key, value)
    if updates == 0:
        self.add(section, key, value)
    return updates
python
[ "def", "add_or_update", "(", "self", ",", "section", ",", "key", ",", "value", ")", ":", "updates", "=", "self", ".", "update", "(", "section", ",", "key", ",", "value", ")", "if", "updates", "==", "0", ":", "self", ".", "add", "(", "section", ",", "key", ",", "value", ")", "return", "updates" ]
Update the key or, if no previous value existed, add it. Returns: int: Number of updated lines.
[ "Update", "the", "key", "or", "if", "no", "previous", "value", "existed", "add", "it", "." ]
26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4
https://github.com/rbarrois/confutils/blob/26bbb3f31c09a99ee2104263a9e97d6d3fc8e4f4/confutils/configfile.py#L500-L509
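The return-value contract above (0 means freshly added, >= 1 means updated in place) can be traced with a dict-backed sketch; the store and helpers here are stand-ins, not the confutils API.

store = {}

def update(key, value):
    # overwrite only if the key already exists; report how many changed
    if key in store:
        store[key] = value
        return 1
    return 0

def add_or_update(key, value):
    updates = update(key, value)
    if updates == 0:
        store[key] = value
    return updates

print(add_or_update('lang', 'python'))  # 0: added
print(add_or_update('lang', 'py3'))     # 1: updated in place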
248,463
minhhoit/yacms
yacms/utils/cache.py
cache_set
def cache_set(key, value, timeout=None, refreshed=False):
    """
    Wrapper for ``cache.set``. Stores the cache entry packed with the
    desired cache expiry time. When the entry is retrieved from cache,
    the packed expiry time is also checked, and if past, the stale cache
    entry is stored again with an expiry that has
    ``CACHE_SET_DELAY_SECONDS`` added to it. In this case the entry is
    not returned, so that a cache miss occurs and the entry should be
    set by the caller, but all other callers will still get the stale
    entry, so no real cache misses ever occur.
    """
    if timeout is None:
        timeout = settings.CACHE_MIDDLEWARE_SECONDS
    refresh_time = timeout + time()
    real_timeout = timeout + settings.CACHE_SET_DELAY_SECONDS
    packed = (value, refresh_time, refreshed)
    return cache.set(_hashed_key(key), packed, real_timeout)
python
[ "def", "cache_set", "(", "key", ",", "value", ",", "timeout", "=", "None", ",", "refreshed", "=", "False", ")", ":", "if", "timeout", "is", "None", ":", "timeout", "=", "settings", ".", "CACHE_MIDDLEWARE_SECONDS", "refresh_time", "=", "timeout", "+", "time", "(", ")", "real_timeout", "=", "timeout", "+", "settings", ".", "CACHE_SET_DELAY_SECONDS", "packed", "=", "(", "value", ",", "refresh_time", ",", "refreshed", ")", "return", "cache", ".", "set", "(", "_hashed_key", "(", "key", ")", ",", "packed", ",", "real_timeout", ")" ]
Wrapper for ``cache.set``. Stores the cache entry packed with the desired cache expiry time. When the entry is retrieved from cache, the packed expiry time is also checked, and if past, the stale cache entry is stored again with an expiry that has ``CACHE_SET_DELAY_SECONDS`` added to it. In this case the entry is not returned, so that a cache miss occurs and the entry should be set by the caller, but all other callers will still get the stale entry, so no real cache misses ever occur.
[ "Wrapper", "for", "cache", ".", "set", ".", "Stores", "the", "cache", "entry", "packed", "with", "the", "desired", "cache", "expiry", "time", ".", "When", "the", "entry", "is", "retrieved", "from", "cache", "the", "packed", "expiry", "time", "is", "also", "checked", "and", "if", "past", "the", "stale", "cache", "entry", "is", "stored", "again", "with", "an", "expiry", "that", "has", "CACHE_SET_DELAY_SECONDS", "added", "to", "it", ".", "In", "this", "case", "the", "entry", "is", "not", "returned", "so", "that", "a", "cache", "miss", "occurs", "and", "the", "entry", "should", "be", "set", "by", "the", "caller", "but", "all", "other", "callers", "will", "still", "get", "the", "stale", "entry", "so", "no", "real", "cache", "misses", "ever", "occur", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/utils/cache.py#L26-L42
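To make the stale-entry dance concrete, here is a minimal sketch of a matching read path under the (value, refresh_time, refreshed) packing above. This illustrates the scheme only; it is not yacms' actual cache_get, and the dict stands in for the Django cache backend.

from time import time

_store = {}  # stand-in for the cache backend

def cache_set(key, value, timeout, refreshed=False):
    # real expiry is timeout + delay, so stale entries outlive refresh_time
    _store[key] = (value, time() + timeout, refreshed)

def cache_get(key):
    packed = _store.get(key)
    if packed is None:
        return None
    value, refresh_time, refreshed = packed
    if time() > refresh_time and not refreshed:
        # re-store the stale entry flagged as refreshed, then report a
        # miss so exactly one caller recomputes the value
        cache_set(key, value, 30, refreshed=True)
        return None
    return value

cache_set('page', '<html>', timeout=0)  # refresh_time is already past
print(cache_get('page'))                # None: this caller recomputes
print(cache_get('page'))                # '<html>': everyone else gets stale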
248,464
minhhoit/yacms
yacms/utils/cache.py
cache_installed
def cache_installed():
    """
    Returns ``True`` if a cache backend is configured, and the cache
    middleware classes or subclasses thereof are present.
    This will be evaluated once per run, and then cached.
    """
    has_key = bool(getattr(settings, "NEVERCACHE_KEY", ""))

    def flatten(seqs):
        return (item for seq in seqs for item in seq)

    middleware_classes = map(import_string, get_middleware_setting())
    middleware_ancestors = set(flatten(map(getmro, middleware_classes)))

    yacms_cache_middleware_classes = {
        import_string("yacms.core.middleware.UpdateCacheMiddleware"),
        import_string("yacms.core.middleware.FetchFromCacheMiddleware"),
    }

    return (has_key and settings.CACHES and not settings.TESTING and
            yacms_cache_middleware_classes.issubset(middleware_ancestors))
python
[ "def", "cache_installed", "(", ")", ":", "has_key", "=", "bool", "(", "getattr", "(", "settings", ",", "\"NEVERCACHE_KEY\"", ",", "\"\"", ")", ")", "def", "flatten", "(", "seqs", ")", ":", "return", "(", "item", "for", "seq", "in", "seqs", "for", "item", "in", "seq", ")", "middleware_classes", "=", "map", "(", "import_string", ",", "get_middleware_setting", "(", ")", ")", "middleware_ancestors", "=", "set", "(", "flatten", "(", "map", "(", "getmro", ",", "middleware_classes", ")", ")", ")", "yacms_cache_middleware_classes", "=", "{", "import_string", "(", "\"yacms.core.middleware.UpdateCacheMiddleware\"", ")", ",", "import_string", "(", "\"yacms.core.middleware.FetchFromCacheMiddleware\"", ")", ",", "}", "return", "(", "has_key", "and", "settings", ".", "CACHES", "and", "not", "settings", ".", "TESTING", "and", "yacms_cache_middleware_classes", ".", "issubset", "(", "middleware_ancestors", ")", ")" ]
Returns ``True`` if a cache backend is configured, and the cache middleware classes or subclasses thereof are present. This will be evaluated once per run, and then cached.
[ "Returns", "True", "if", "a", "cache", "backend", "is", "configured", "and", "the", "cache", "middleware", "classes", "or", "subclasses", "thereof", "are", "present", ".", "This", "will", "be", "evaluated", "once", "per", "run", "and", "then", "cached", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/utils/cache.py#L63-L83
248,465
minhhoit/yacms
yacms/utils/cache.py
cache_key_prefix
def cache_key_prefix(request):
    """
    Cache key for yacms's cache middleware. Adds the current
    device and site ID.
    """
    cache_key = "%s.%s.%s." % (
        settings.CACHE_MIDDLEWARE_KEY_PREFIX,
        current_site_id(),
        device_from_request(request) or "default",
    )
    return _i18n_cache_key_suffix(request, cache_key)
python
[ "def", "cache_key_prefix", "(", "request", ")", ":", "cache_key", "=", "\"%s.%s.%s.\"", "%", "(", "settings", ".", "CACHE_MIDDLEWARE_KEY_PREFIX", ",", "current_site_id", "(", ")", ",", "device_from_request", "(", "request", ")", "or", "\"default\"", ",", ")", "return", "_i18n_cache_key_suffix", "(", "request", ",", "cache_key", ")" ]
Cache key for yacms's cache middleware. Adds the current device and site ID.
[ "Cache", "key", "for", "yacms", "s", "cache", "middleware", ".", "Adds", "the", "current", "device", "and", "site", "ID", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/utils/cache.py#L86-L96
248,466
minhhoit/yacms
yacms/utils/cache.py
add_cache_bypass
def add_cache_bypass(url):
    """
    Adds the current time to the querystring of the URL to force a
    cache reload. Used for when a form post redirects back to a page
    that should display updated content, such as new comments or
    ratings.
    """
    if not cache_installed():
        return url

    hash_str = ""
    if "#" in url:
        url, hash_str = url.split("#", 1)
        hash_str = "#" + hash_str
    url += "?" if "?" not in url else "&"
    return url + "t=" + str(time()).replace(".", "") + hash_str
python
[ "def", "add_cache_bypass", "(", "url", ")", ":", "if", "not", "cache_installed", "(", ")", ":", "return", "url", "hash_str", "=", "\"\"", "if", "\"#\"", "in", "url", ":", "url", ",", "hash_str", "=", "url", ".", "split", "(", "\"#\"", ",", "1", ")", "hash_str", "=", "\"#\"", "+", "hash_str", "url", "+=", "\"?\"", "if", "\"?\"", "not", "in", "url", "else", "\"&\"", "return", "url", "+", "\"t=\"", "+", "str", "(", "time", "(", ")", ")", ".", "replace", "(", "\".\"", ",", "\"\"", ")", "+", "hash_str" ]
Adds the current time to the querystring of the URL to force a cache reload. Used for when a form post redirects back to a page that should display updated content, such as new comments or ratings.
[ "Adds", "the", "current", "time", "to", "the", "querystring", "of", "the", "URL", "to", "force", "a", "cache", "reload", ".", "Used", "for", "when", "a", "form", "post", "redirects", "back", "to", "a", "page", "that", "should", "display", "updated", "content", "such", "as", "new", "comments", "or", "ratings", "." ]
2921b706b7107c6e8c5f2bbf790ff11f85a2167f
https://github.com/minhhoit/yacms/blob/2921b706b7107c6e8c5f2bbf790ff11f85a2167f/yacms/utils/cache.py#L107-L121
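Two branches matter here: keeping any #fragment at the very end, and choosing ? versus & for the new parameter. A standalone check of both, with the timestamp fixed and the cache_installed guard stripped out for the demo:

def add_bypass(url, t='1700000000000'):
    hash_str = ''
    if '#' in url:
        # peel the fragment off so the bypass parameter lands before it
        url, hash_str = url.split('#', 1)
        hash_str = '#' + hash_str
    url += '?' if '?' not in url else '&'
    return url + 't=' + t + hash_str

print(add_bypass('/blog/post/'))             # /blog/post/?t=1700000000000
print(add_bypass('/blog/?page=2#comments'))  # /blog/?page=2&t=1700000000000#comments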
248,467
xtrementl/focus
focus/plugin/modules/im.py
_dbus_get_object
def _dbus_get_object(bus_name, object_name):
    """ Fetches DBUS proxy object given the specified parameters.

        `bus_name`
            Name of the bus interface.
        `object_name`
            Object path related to the interface.

        Returns object or ``None``.
        """
    try:
        bus = dbus.SessionBus()
        obj = bus.get_object(bus_name, object_name)
        return obj
    except (NameError, dbus.exceptions.DBusException):
        return None
python
[ "def", "_dbus_get_object", "(", "bus_name", ",", "object_name", ")", ":", "try", ":", "bus", "=", "dbus", ".", "SessionBus", "(", ")", "obj", "=", "bus", ".", "get_object", "(", "bus_name", ",", "object_name", ")", "return", "obj", "except", "(", "NameError", ",", "dbus", ".", "exceptions", ".", "DBusException", ")", ":", "return", "None" ]
Fetches DBUS proxy object given the specified parameters. `bus_name` Name of the bus interface. `object_name` Object path related to the interface. Returns object or ``None``.
[ "Fetches", "DBUS", "proxy", "object", "given", "the", "specified", "parameters", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/modules/im.py#L50-L67
248,468
xtrementl/focus
focus/plugin/modules/im.py
_dbus_get_interface
def _dbus_get_interface(bus_name, object_name, interface_name):
    """ Fetches DBUS interface proxy object given the specified
        parameters.

        `bus_name`
            Name of the bus interface.
        `object_name`
            Object path related to the interface.
        `interface_name`
            Name of the interface.

        Returns object or ``None``.
        """
    try:
        obj = _dbus_get_object(bus_name, object_name)
        if not obj:
            raise NameError
        return dbus.Interface(obj, interface_name)
    except (NameError, dbus.exceptions.DBusException):
        return None
python
[ "def", "_dbus_get_interface", "(", "bus_name", ",", "object_name", ",", "interface_name", ")", ":", "try", ":", "obj", "=", "_dbus_get_object", "(", "bus_name", ",", "object_name", ")", "if", "not", "obj", ":", "raise", "NameError", "return", "dbus", ".", "Interface", "(", "obj", ",", "interface_name", ")", "except", "(", "NameError", ",", "dbus", ".", "exceptions", ".", "DBusException", ")", ":", "return", "None" ]
Fetches DBUS interface proxy object given the specified parameters. `bus_name` Name of the bus interface. `object_name` Object path related to the interface. `interface_name` Name of the interface. Returns object or ``None``.
[ "Fetches", "DBUS", "interface", "proxy", "object", "given", "the", "specified", "parameters", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/modules/im.py#L70-L90
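A usage sketch for the two helpers above, assuming dbus-python is installed and a session bus is running (Linux desktop); org.freedesktop.Notifications is just a commonly available service, and GetServerInformation is part of its standard interface. Both helpers degrade to None when the bus or service is absent.

# assumes _dbus_get_interface from the module above is importable
iface = _dbus_get_interface('org.freedesktop.Notifications',
                            '/org/freedesktop/Notifications',
                            'org.freedesktop.Notifications')
if iface is not None:
    # (name, vendor, version, spec_version) per the Notifications spec
    print(iface.GetServerInformation())
else:
    print('no session bus or service; helper returned None')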
248,469
xtrementl/focus
focus/plugin/modules/im.py
_pidgin_status
def _pidgin_status(status, message):
    """ Updates status and message for Pidgin IM application.

        `status`
            Status type.
        `message`
            Status message.
        """
    try:
        iface = _dbus_get_interface('im.pidgin.purple.PurpleService',
                                    '/im/pidgin/purple/PurpleObject',
                                    'im.pidgin.purple.PurpleInterface')
        if iface:
            # create new transient status
            code = PIDGIN_CODE_MAP[status]
            saved_status = iface.PurpleSavedstatusNew('', code)

            # set the message, if provided
            iface.PurpleSavedstatusSetMessage(saved_status, message)

            # activate status
            iface.PurpleSavedstatusActivate(saved_status)
    except dbus.exceptions.DBusException:
        pass
python
[ "def", "_pidgin_status", "(", "status", ",", "message", ")", ":", "try", ":", "iface", "=", "_dbus_get_interface", "(", "'im.pidgin.purple.PurpleService'", ",", "'/im/pidgin/purple/PurpleObject'", ",", "'im.pidgin.purple.PurpleInterface'", ")", "if", "iface", ":", "# create new transient status", "code", "=", "PIDGIN_CODE_MAP", "[", "status", "]", "saved_status", "=", "iface", ".", "PurpleSavedstatusNew", "(", "''", ",", "code", ")", "# set the message, if provided", "iface", ".", "PurpleSavedstatusSetMessage", "(", "saved_status", ",", "message", ")", "# activate status", "iface", ".", "PurpleSavedstatusActivate", "(", "saved_status", ")", "except", "dbus", ".", "exceptions", ".", "DBusException", ":", "pass" ]
Updates status and message for Pidgin IM application. `status` Status type. `message` Status message.
[ "Updates", "status", "and", "message", "for", "Pidgin", "IM", "application", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/modules/im.py#L93-L119
248,470
xtrementl/focus
focus/plugin/modules/im.py
_empathy_status
def _empathy_status(status, message):
    """ Updates status and message for Empathy IM application.

        `status`
            Status type.
        `message`
            Status message.
        """
    ACCT_IFACE = 'org.freedesktop.Telepathy.Account'
    DBUS_PROP_IFACE = 'org.freedesktop.DBus.Properties'
    ACCT_MAN_IFACE = 'org.freedesktop.Telepathy.AccountManager'
    ACCT_MAN_PATH = '/org/freedesktop/Telepathy/AccountManager'
    SP_IFACE = ('org.freedesktop.Telepathy.'
                'Connection.Interface.SimplePresence')

    # fetch main account manager interface
    am_iface = _dbus_get_interface(ACCT_MAN_IFACE, ACCT_MAN_PATH,
                                   DBUS_PROP_IFACE)

    if am_iface:
        account_paths = am_iface.Get(ACCT_MAN_IFACE, 'ValidAccounts')

        for account_path in account_paths:
            try:
                # fetch account interface
                account = _dbus_get_object(ACCT_MAN_IFACE, account_path)

                # skip disconnected, disabled, etc.
                if account.Get(ACCT_IFACE, 'ConnectionStatus') != 0:
                    continue

                # fetch simple presence interface for account connection
                conn_path = account.Get(ACCT_IFACE, 'Connection')
                conn_iface = conn_path.replace("/", ".")[1:]
                sp_iface = _dbus_get_interface(conn_iface, conn_path,
                                               SP_IFACE)
            except dbus.exceptions.DBusException:
                continue

            # set status and message
            for code in EMPATHY_CODE_MAP[status]:
                try:
                    sp_iface.SetPresence(code, message)
                except dbus.exceptions.DBusException:
                    pass
                else:
                    break
python
[ "def", "_empathy_status", "(", "status", ",", "message", ")", ":", "ACCT_IFACE", "=", "'org.freedesktop.Telepathy.Account'", "DBUS_PROP_IFACE", "=", "'org.freedesktop.DBus.Properties'", "ACCT_MAN_IFACE", "=", "'org.freedesktop.Telepathy.AccountManager'", "ACCT_MAN_PATH", "=", "'/org/freedesktop/Telepathy/AccountManager'", "SP_IFACE", "=", "(", "'org.freedesktop.Telepathy.'", "'Connection.Interface.SimplePresence'", ")", "# fetch main account manager interface", "am_iface", "=", "_dbus_get_interface", "(", "ACCT_MAN_IFACE", ",", "ACCT_MAN_PATH", ",", "DBUS_PROP_IFACE", ")", "if", "am_iface", ":", "account_paths", "=", "am_iface", ".", "Get", "(", "ACCT_MAN_IFACE", ",", "'ValidAccounts'", ")", "for", "account_path", "in", "account_paths", ":", "try", ":", "# fetch account interface", "account", "=", "_dbus_get_object", "(", "ACCT_MAN_IFACE", ",", "account_path", ")", "# skip disconnected, disabled, etc.", "if", "account", ".", "Get", "(", "ACCT_IFACE", ",", "'ConnectionStatus'", ")", "!=", "0", ":", "continue", "# fetch simple presence interface for account connection", "conn_path", "=", "account", ".", "Get", "(", "ACCT_IFACE", ",", "'Connection'", ")", "conn_iface", "=", "conn_path", ".", "replace", "(", "\"/\"", ",", "\".\"", ")", "[", "1", ":", "]", "sp_iface", "=", "_dbus_get_interface", "(", "conn_iface", ",", "conn_path", ",", "SP_IFACE", ")", "except", "dbus", ".", "exceptions", ".", "DBusException", ":", "continue", "# set status and message", "for", "code", "in", "EMPATHY_CODE_MAP", "[", "status", "]", ":", "try", ":", "sp_iface", ".", "SetPresence", "(", "code", ",", "message", ")", "except", "dbus", ".", "exceptions", ".", "DBusException", ":", "pass", "else", ":", "break" ]
Updates status and message for Empathy IM application. `status` Status type. `message` Status message.
[ "Updates", "status", "and", "message", "for", "Empathy", "IM", "application", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/modules/im.py#L165-L213
248,471
xtrementl/focus
focus/plugin/modules/im.py
_linux_skype_status
def _linux_skype_status(status, message):
    """ Updates status and message for Skype IM application on Linux.

        `status`
            Status type.
        `message`
            Status message.
        """
    try:
        iface = _dbus_get_interface('com.Skype.API', '/com/Skype',
                                    'com.Skype.API')
        if iface:
            # authenticate
            if iface.Invoke('NAME focus') != 'OK':
                msg = 'User denied authorization'
                # the original spelled this dbus.exceptions.DbusException,
                # which would itself fail with AttributeError; the real
                # class is DBusException
                raise dbus.exceptions.DBusException(msg)
            iface.Invoke('PROTOCOL 5')

            # set status
            iface.Invoke('SET USERSTATUS {0}'.format(SKYPE_CODE_MAP[status]))

            # set the message, if provided
            iface.Invoke('SET PROFILE MOOD_TEXT {0}'.format(message))
    except dbus.exceptions.DBusException:
        pass
python
[ "def", "_linux_skype_status", "(", "status", ",", "message", ")", ":", "try", ":", "iface", "=", "_dbus_get_interface", "(", "'com.Skype.API'", ",", "'/com/Skype'", ",", "'com.Skype.API'", ")", "if", "iface", ":", "# authenticate", "if", "iface", ".", "Invoke", "(", "'NAME focus'", ")", "!=", "'OK'", ":", "msg", "=", "'User denied authorization'", "raise", "dbus", ".", "exceptions", ".", "DbusException", "(", "msg", ")", "iface", ".", "Invoke", "(", "'PROTOCOL 5'", ")", "# set status", "iface", ".", "Invoke", "(", "'SET USERSTATUS {0}'", ".", "format", "(", "SKYPE_CODE_MAP", "[", "status", "]", ")", ")", "# set the message, if provided", "iface", ".", "Invoke", "(", "'SET PROFILE MOOD_TEXT {0}'", ".", "format", "(", "message", ")", ")", "except", "dbus", ".", "exceptions", ".", "DBusException", ":", "pass" ]
Updates status and message for Skype IM application on Linux. `status` Status type. `message` Status message.
[ "Updates", "status", "and", "message", "for", "Skype", "IM", "application", "on", "Linux", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/modules/im.py#L216-L244
248,472
xtrementl/focus
focus/plugin/modules/im.py
IMStatus._set_status
def _set_status(self, status, message=''):
    """ Updates the status and message on all supported IM apps.

        `status`
            Status type (See ``VALID_STATUSES``).
        `message`
            Status message.
        """
    message = message.strip()

    # fetch away message from provided id
    if message.startswith(':'):
        msg_id = message[1:]
        message = self.messages.get(msg_id, '')

    message = message.encode('utf-8', 'replace')

    # attempt to set status for each supported application
    for func in self.set_status_funcs:
        func(status, message)
python
[ "def", "_set_status", "(", "self", ",", "status", ",", "message", "=", "''", ")", ":", "message", "=", "message", ".", "strip", "(", ")", "# fetch away message from provided id", "if", "message", ".", "startswith", "(", "':'", ")", ":", "msg_id", "=", "message", "[", "1", ":", "]", "message", "=", "self", ".", "messages", ".", "get", "(", "msg_id", ",", "''", ")", "message", "=", "message", ".", "encode", "(", "'utf-8'", ",", "'replace'", ")", "# attempt to set status for each supported application", "for", "func", "in", "self", ".", "set_status_funcs", ":", "func", "(", "status", ",", "message", ")" ]
Updates the status and message on all supported IM apps. `status` Status type (See ``VALID_STATUSES``). `message` Status message.
[ "Updates", "the", "status", "and", "message", "on", "all", "supported", "IM", "apps", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/modules/im.py#L389-L409
248,473
xtrementl/focus
focus/plugin/modules/im.py
IMStatus.parse_option
def parse_option(self, option, block_name, *values):
    """ Parse status, end_status, timer_status and status_msg options.
        """
    if option.endswith('status'):
        status = values[0]

        if status not in self.VALID_STATUSES:
            raise ValueError(u'Invalid IM status "{0}"'.format(status))

        if len(values) > 2:
            raise TypeError

        if option == 'status':
            option = 'start_' + option
        key = option.split('_', 1)[0]
        self.statuses[key] = values[:2]

    elif option == 'status_msg':
        if len(values) != 2:
            raise TypeError

        name, msg = values
        self.messages[name] = msg
python
[ "def", "parse_option", "(", "self", ",", "option", ",", "block_name", ",", "*", "values", ")", ":", "if", "option", ".", "endswith", "(", "'status'", ")", ":", "status", "=", "values", "[", "0", "]", "if", "status", "not", "in", "self", ".", "VALID_STATUSES", ":", "raise", "ValueError", "(", "u'Invalid IM status \"{0}\"'", ".", "format", "(", "status", ")", ")", "if", "len", "(", "values", ")", ">", "2", ":", "raise", "TypeError", "if", "option", "==", "'status'", ":", "option", "=", "'start_'", "+", "option", "key", "=", "option", ".", "split", "(", "'_'", ",", "1", ")", "[", "0", "]", "self", ".", "statuses", "[", "key", "]", "=", "values", "[", ":", "2", "]", "elif", "option", "==", "'status_msg'", ":", "if", "len", "(", "values", ")", "!=", "2", ":", "raise", "TypeError", "name", ",", "msg", "=", "values", "self", ".", "messages", "[", "name", "]", "=", "msg" ]
Parse status, end_status, timer_status and status_msg options.
[ "Parse", "status", "end_status", "timer_status", "and", "status_msg", "options", "." ]
cbbbc0b49a7409f9e0dc899de5b7e057f50838e4
https://github.com/xtrementl/focus/blob/cbbbc0b49a7409f9e0dc899de5b7e057f50838e4/focus/plugin/modules/im.py#L411-L434
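The key under which a status pair is stored comes from a small rewrite-then-split step: the bare `status` option is first renamed to `start_status`, so all three option names reduce to `start`, `end`, or `timer`. Traced in isolation:

```python
# Tracing the key derivation used by parse_option above.
for option in ('status', 'end_status', 'timer_status'):
    if option == 'status':
        option = 'start_' + option         # 'status' -> 'start_status'
    print(option.split('_', 1)[0])         # -> start, end, timer
```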
248,474
cirruscluster/cirruscluster
cirruscluster/core.py
GetNumCoresOnHosts
def GetNumCoresOnHosts(hosts, private_key): """ Returns list of the number of cores for each host requested in hosts. """ results = runner.Runner(host_list=hosts, private_key=private_key, module_name='setup').run() num_cores_list = [] for _, props in results['contacted'].iteritems(): cores = props['ansible_facts']['ansible_processor_cores'] val = 0 try: val = int(cores) except ValueError: pass num_cores_list.append(val) return num_cores_list
python
def GetNumCoresOnHosts(hosts, private_key): """ Returns list of the number of cores for each host requested in hosts. """ results = runner.Runner(host_list=hosts, private_key=private_key, module_name='setup').run() num_cores_list = [] for _, props in results['contacted'].iteritems(): cores = props['ansible_facts']['ansible_processor_cores'] val = 0 try: val = int(cores) except ValueError: pass num_cores_list.append(val) return num_cores_list
[ "def", "GetNumCoresOnHosts", "(", "hosts", ",", "private_key", ")", ":", "results", "=", "runner", ".", "Runner", "(", "host_list", "=", "hosts", ",", "private_key", "=", "private_key", ",", "module_name", "=", "'setup'", ")", ".", "run", "(", ")", "num_cores_list", "=", "[", "]", "for", "_", ",", "props", "in", "results", "[", "'contacted'", "]", ".", "iteritems", "(", ")", ":", "cores", "=", "props", "[", "'ansible_facts'", "]", "[", "'ansible_processor_cores'", "]", "val", "=", "0", "try", ":", "val", "=", "int", "(", "cores", ")", "except", "ValueError", ":", "pass", "num_cores_list", ".", "append", "(", "val", ")", "return", "num_cores_list" ]
Returns list of the number of cores for each host requested in hosts.
[ "Returns", "list", "of", "the", "number", "of", "cores", "for", "each", "host", "requested", "in", "hosts", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L91-L104
248,475
cirruscluster/cirruscluster
cirruscluster/core.py
RunPlaybookOnHosts
def RunPlaybookOnHosts(playbook_path, hosts, private_key, extra_vars=None): """ Runs the playbook and returns True if it completes successfully on all hosts. """ inventory = ansible_inventory.Inventory(hosts) if not inventory.list_hosts(): raise RuntimeError("Host list is empty.") stats = callbacks.AggregateStats() verbose = 0 playbook_cb = ansible.callbacks.PlaybookCallbacks(verbose=verbose) runner_cb = ansible.callbacks.PlaybookRunnerCallbacks(stats, verbose=verbose) pb = playbook.PlayBook(playbook=playbook_path, host_list=hosts, remote_user='ubuntu', private_key_file=None, private_key=private_key, stats=stats, callbacks=playbook_cb, runner_callbacks=runner_cb, extra_vars=extra_vars) results = pb.run() # Check if all hosts completed playbook without error success = True if 'dark' in results: if len(results['dark']) > 0: print "Contact failures:" for host, reason in results['dark'].iteritems(): print " %s (%s)" % (host, reason['msg']) success = False for host, status in results.iteritems(): if host == 'dark': continue failures = status['failures'] if failures: logging.info( '%s %s' % (host, status)) success = False return success
python
def RunPlaybookOnHosts(playbook_path, hosts, private_key, extra_vars=None): """ Runs the playbook and returns True if it completes successfully on all hosts. """ inventory = ansible_inventory.Inventory(hosts) if not inventory.list_hosts(): raise RuntimeError("Host list is empty.") stats = callbacks.AggregateStats() verbose = 0 playbook_cb = ansible.callbacks.PlaybookCallbacks(verbose=verbose) runner_cb = ansible.callbacks.PlaybookRunnerCallbacks(stats, verbose=verbose) pb = playbook.PlayBook(playbook=playbook_path, host_list=hosts, remote_user='ubuntu', private_key_file=None, private_key=private_key, stats=stats, callbacks=playbook_cb, runner_callbacks=runner_cb, extra_vars=extra_vars) results = pb.run() # Check if all hosts completed playbook without error success = True if 'dark' in results: if len(results['dark']) > 0: print "Contact failures:" for host, reason in results['dark'].iteritems(): print " %s (%s)" % (host, reason['msg']) success = False for host, status in results.iteritems(): if host == 'dark': continue failures = status['failures'] if failures: logging.info( '%s %s' % (host, status)) success = False return success
[ "def", "RunPlaybookOnHosts", "(", "playbook_path", ",", "hosts", ",", "private_key", ",", "extra_vars", "=", "None", ")", ":", "inventory", "=", "ansible_inventory", ".", "Inventory", "(", "hosts", ")", "if", "not", "inventory", ".", "list_hosts", "(", ")", ":", "raise", "RuntimeError", "(", "\"Host list is empty.\"", ")", "stats", "=", "callbacks", ".", "AggregateStats", "(", ")", "verbose", "=", "0", "playbook_cb", "=", "ansible", ".", "callbacks", ".", "PlaybookCallbacks", "(", "verbose", "=", "verbose", ")", "runner_cb", "=", "ansible", ".", "callbacks", ".", "PlaybookRunnerCallbacks", "(", "stats", ",", "verbose", "=", "verbose", ")", "pb", "=", "playbook", ".", "PlayBook", "(", "playbook", "=", "playbook_path", ",", "host_list", "=", "hosts", ",", "remote_user", "=", "'ubuntu'", ",", "private_key_file", "=", "None", ",", "private_key", "=", "private_key", ",", "stats", "=", "stats", ",", "callbacks", "=", "playbook_cb", ",", "runner_callbacks", "=", "runner_cb", ",", "extra_vars", "=", "extra_vars", ")", "results", "=", "pb", ".", "run", "(", ")", "# Check if all hosts completed playbook without error", "success", "=", "True", "if", "'dark'", "in", "results", ":", "if", "len", "(", "results", "[", "'dark'", "]", ")", ">", "0", ":", "print", "\"Contact failures:\"", "for", "host", ",", "reason", "in", "results", "[", "'dark'", "]", ".", "iteritems", "(", ")", ":", "print", "\" %s (%s)\"", "%", "(", "host", ",", "reason", "[", "'msg'", "]", ")", "success", "=", "False", "for", "host", ",", "status", "in", "results", ".", "iteritems", "(", ")", ":", "if", "host", "==", "'dark'", ":", "continue", "failures", "=", "status", "[", "'failures'", "]", "if", "failures", ":", "logging", ".", "info", "(", "'%s %s'", "%", "(", "host", ",", "status", ")", ")", "success", "=", "False", "return", "success" ]
Runs the playbook and returns True if it completes successfully on all hosts.
[ "Runs", "the", "playbook", "and", "returns", "True", "if", "it", "completes", "successfully", "on", "all", "hosts", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L106-L141
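The post-run bookkeeping above relies on the shape of the dict returned by `PlayBook.run()`: a special `'dark'` key maps unreachable hosts to a reason, and every other key is a host mapped to per-play counters. An illustrative value (not captured from a real run; hosts, messages, and counters are made up):

```python
results = {
    'dark': {'10.0.0.9': {'msg': 'SSH Error: timed out'}},  # unreachable
    '10.0.0.1': {'ok': 12, 'changed': 3, 'failures': 0,
                 'unreachable': 0, 'skipped': 1},
}
```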
248,476
cirruscluster/cirruscluster
cirruscluster/core.py
RunPlaybookOnHost
def RunPlaybookOnHost(playbook_path, host, private_key, extra_vars=None): """ Runs the playbook and returns True if it completes successfully on a single host. """ return RunPlaybookOnHosts(playbook_path, [host], private_key, extra_vars)
python
def RunPlaybookOnHost(playbook_path, host, private_key, extra_vars=None): """ Runs the playbook and returns True if it completes successfully on a single host. """ return RunPlaybookOnHosts(playbook_path, [host], private_key, extra_vars)
[ "def", "RunPlaybookOnHost", "(", "playbook_path", ",", "host", ",", "private_key", ",", "extra_vars", "=", "None", ")", ":", "return", "RunPlaybookOnHosts", "(", "playbook_path", ",", "[", "host", "]", ",", "private_key", ",", "extra_vars", ")" ]
Runs the playbook and returns True if it completes successfully on a single host.
[ "Runs", "the", "playbook", "and", "returns", "True", "if", "it", "completes", "successfully", "on", "a", "single", "host", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L143-L148
248,477
cirruscluster/cirruscluster
cirruscluster/core.py
ExecuteCmd
def ExecuteCmd(cmd, quiet=False): """ Run a command in a shell. """ result = None if quiet: with open(os.devnull, "w") as fnull: result = subprocess.call(cmd, shell=True, stdout=fnull, stderr=fnull) else: result = subprocess.call(cmd, shell=True) return result
python
def ExecuteCmd(cmd, quiet=False): """ Run a command in a shell. """ result = None if quiet: with open(os.devnull, "w") as fnull: result = subprocess.call(cmd, shell=True, stdout=fnull, stderr=fnull) else: result = subprocess.call(cmd, shell=True) return result
[ "def", "ExecuteCmd", "(", "cmd", ",", "quiet", "=", "False", ")", ":", "result", "=", "None", "if", "quiet", ":", "with", "open", "(", "os", ".", "devnull", ",", "\"w\"", ")", "as", "fnull", ":", "result", "=", "subprocess", ".", "call", "(", "cmd", ",", "shell", "=", "True", ",", "stdout", "=", "fnull", ",", "stderr", "=", "fnull", ")", "else", ":", "result", "=", "subprocess", ".", "call", "(", "cmd", ",", "shell", "=", "True", ")", "return", "result" ]
Run a command in a shell.
[ "Run", "a", "command", "in", "a", "shell", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L154-L162
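A usage sketch, assuming `ExecuteCmd` is imported from `cirruscluster.core`; the return value is the shell's exit status, so zero means success (the command names are placeholders):

```python
# Exit status 0 means the command succeeded.
if ExecuteCmd('ls /tmp > /dev/null') == 0:
    print('listing succeeded')

# quiet=True routes stdout and stderr to os.devnull.
ExecuteCmd('verbose_tool --chatty', quiet=True)
```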
248,478
cirruscluster/cirruscluster
cirruscluster/core.py
CheckOutput
def CheckOutput(*popenargs, **kwargs): """ Run command with arguments and return its output as a byte string. Backported from Python 2.7 as it's implemented as pure python on stdlib. """ process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) output, _ = process.communicate() retcode = process.poll() if retcode: cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] error = subprocess.CalledProcessError(retcode, cmd) error.output = output raise error return retcode, output
python
def CheckOutput(*popenargs, **kwargs): """ Run command with arguments and return its output as a byte string. Backported from Python 2.7 as it's implemented as pure python on stdlib. """ process = subprocess.Popen(stdout=subprocess.PIPE, *popenargs, **kwargs) output, _ = process.communicate() retcode = process.poll() if retcode: cmd = kwargs.get("args") if cmd is None: cmd = popenargs[0] error = subprocess.CalledProcessError(retcode, cmd) error.output = output raise error return retcode, output
[ "def", "CheckOutput", "(", "*", "popenargs", ",", "*", "*", "kwargs", ")", ":", "process", "=", "subprocess", ".", "Popen", "(", "stdout", "=", "subprocess", ".", "PIPE", ",", "*", "popenargs", ",", "*", "*", "kwargs", ")", "output", ",", "_", "=", "process", ".", "communicate", "(", ")", "retcode", "=", "process", ".", "poll", "(", ")", "if", "retcode", ":", "cmd", "=", "kwargs", ".", "get", "(", "\"args\"", ")", "if", "cmd", "is", "None", ":", "cmd", "=", "popenargs", "[", "0", "]", "error", "=", "subprocess", ".", "CalledProcessError", "(", "retcode", ",", "cmd", ")", "error", ".", "output", "=", "output", "raise", "error", "return", "retcode", ",", "output" ]
Run command with arguments and return its output as a byte string. Backported from Python 2.7 as it's implemented as pure python on stdlib.
[ "Run", "command", "with", "arguments", "and", "return", "its", "output", "as", "a", "byte", "string", ".", "Backported", "from", "Python", "2", ".", "7", "as", "it", "s", "implemented", "as", "pure", "python", "on", "stdlib", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L164-L179
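Unlike the stdlib `subprocess.check_output` it backports, this variant also returns the exit code on success, as a `(retcode, output)` tuple, while still raising `CalledProcessError` on a non-zero exit. A usage sketch:

```python
# Successful call: retcode is 0 and output holds captured stdout.
retcode, out = CheckOutput(['echo', 'hello'])
assert retcode == 0 and out.strip() == 'hello'

# Failing call: CalledProcessError carries the output on .output.
try:
    CheckOutput(['false'])
except subprocess.CalledProcessError as e:
    print(repr(e.output))
```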
248,479
cirruscluster/cirruscluster
cirruscluster/core.py
UrlGet
def UrlGet(url, timeout=10, retries=0): """ Retrieve content from the given URL. """ # in Python 2.6 we can pass timeout to urllib2.urlopen socket.setdefaulttimeout(timeout) attempts = 0 content = None while not content: try: content = urllib2.urlopen(url).read() except urllib2.URLError: attempts = attempts + 1 if attempts > retries: raise IOError('Failed to fetch url: %s' % url) return content
python
def UrlGet(url, timeout=10, retries=0): """ Retrieve content from the given URL. """ # in Python 2.6 we can pass timeout to urllib2.urlopen socket.setdefaulttimeout(timeout) attempts = 0 content = None while not content: try: content = urllib2.urlopen(url).read() except urllib2.URLError: attempts = attempts + 1 if attempts > retries: raise IOError('Failed to fetch url: %s' % url) return content
[ "def", "UrlGet", "(", "url", ",", "timeout", "=", "10", ",", "retries", "=", "0", ")", ":", "# in Python 2.6 we can pass timeout to urllib2.urlopen", "socket", ".", "setdefaulttimeout", "(", "timeout", ")", "attempts", "=", "0", "content", "=", "None", "while", "not", "content", ":", "try", ":", "content", "=", "urllib2", ".", "urlopen", "(", "url", ")", ".", "read", "(", ")", "except", "urllib2", ".", "URLError", ":", "attempts", "=", "attempts", "+", "1", "if", "attempts", ">", "retries", ":", "raise", "IOError", "(", "'Failed to fetch url: %s'", "%", "url", ")", "return", "content" ]
Retrieve content from the given URL.
[ "Retrieve", "content", "from", "the", "given", "URL", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L181-L194
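One caveat in the loop above: `while not content` treats an empty response body as a failure, but an empty body raises no exception, so `attempts` never increments and a URL that legitimately returns nothing retries forever. A sketch that bounds attempts explicitly, under the assumption that an empty 200 response should be returned rather than retried:

```python
def url_get(url, timeout=10, retries=0):
    # One initial attempt plus `retries` more; Python 2.6+ accepts the
    # timeout argument directly on urllib2.urlopen.
    for attempt in range(retries + 1):
        try:
            return urllib2.urlopen(url, timeout=timeout).read()
        except urllib2.URLError:
            if attempt == retries:
                raise IOError('Failed to fetch url: %s' % url)
```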
248,480
cirruscluster/cirruscluster
cirruscluster/core.py
ReadRemoteFile
def ReadRemoteFile(remote_file_path, hostname, ssh_key): """ Reads a remote file into a string. """ cmd = 'sudo cat %s' % remote_file_path exit_code, output = RunCommandOnHost(cmd, hostname, ssh_key) if exit_code: raise IOError('Can not read remote path: %s' % (remote_file_path)) return output
python
def ReadRemoteFile(remote_file_path, hostname, ssh_key): """ Reads a remote file into a string. """ cmd = 'sudo cat %s' % remote_file_path exit_code, output = RunCommandOnHost(cmd, hostname, ssh_key) if exit_code: raise IOError('Can not read remote path: %s' % (remote_file_path)) return output
[ "def", "ReadRemoteFile", "(", "remote_file_path", ",", "hostname", ",", "ssh_key", ")", ":", "cmd", "=", "'sudo cat %s'", "%", "remote_file_path", "exit_code", ",", "output", "=", "RunCommandOnHost", "(", "cmd", ",", "hostname", ",", "ssh_key", ")", "if", "exit_code", ":", "raise", "IOError", "(", "'Can not read remote path: %s'", "%", "(", "remote_file_path", ")", ")", "return", "output" ]
Reads a remote file into a string.
[ "Reads", "a", "remote", "file", "into", "a", "string", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L226-L232
248,481
cirruscluster/cirruscluster
cirruscluster/core.py
__RemoteExecuteHelper
def __RemoteExecuteHelper(args): """ Helper for multiprocessing. """ cmd, hostname, ssh_key = args #Random.atfork() # needed to fix bug in old python 2.6 interpreters private_key = paramiko.RSAKey.from_private_key(StringIO.StringIO(ssh_key)) client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) while True: try: client.connect(hostname, username='ubuntu', pkey=private_key, allow_agent=False, look_for_keys=False) break except socket.error as e: print '.' time.sleep(5) except paramiko.AuthenticationException as e: print e time.sleep(5) channel = client.get_transport().open_session() channel.exec_command(cmd) exit_code = channel.recv_exit_status() output = channel.recv(1000000) client.close() return exit_code, output
python
def __RemoteExecuteHelper(args): """ Helper for multiprocessing. """ cmd, hostname, ssh_key = args #Random.atfork() # needed to fix bug in old python 2.6 interpreters private_key = paramiko.RSAKey.from_private_key(StringIO.StringIO(ssh_key)) client = paramiko.SSHClient() client.set_missing_host_key_policy(paramiko.AutoAddPolicy()) while True: try: client.connect(hostname, username='ubuntu', pkey=private_key, allow_agent=False, look_for_keys=False) break except socket.error as e: print '.' time.sleep(5) except paramiko.AuthenticationException as e: print e time.sleep(5) channel = client.get_transport().open_session() channel.exec_command(cmd) exit_code = channel.recv_exit_status() output = channel.recv(1000000) client.close() return exit_code, output
[ "def", "__RemoteExecuteHelper", "(", "args", ")", ":", "cmd", ",", "hostname", ",", "ssh_key", "=", "args", "#Random.atfork() # needed to fix bug in old python 2.6 interpreters", "private_key", "=", "paramiko", ".", "RSAKey", ".", "from_private_key", "(", "StringIO", ".", "StringIO", "(", "ssh_key", ")", ")", "client", "=", "paramiko", ".", "SSHClient", "(", ")", "client", ".", "set_missing_host_key_policy", "(", "paramiko", ".", "AutoAddPolicy", "(", ")", ")", "while", "True", ":", "try", ":", "client", ".", "connect", "(", "hostname", ",", "username", "=", "'ubuntu'", ",", "pkey", "=", "private_key", ",", "allow_agent", "=", "False", ",", "look_for_keys", "=", "False", ")", "break", "except", "socket", ".", "error", "as", "e", ":", "print", "'.'", "time", ".", "sleep", "(", "5", ")", "except", "paramiko", ".", "AuthenticationException", "as", "e", ":", "print", "e", "time", ".", "sleep", "(", "5", ")", "channel", "=", "client", ".", "get_transport", "(", ")", ".", "open_session", "(", ")", "channel", ".", "exec_command", "(", "cmd", ")", "exit_code", "=", "channel", ".", "recv_exit_status", "(", ")", "output", "=", "channel", ".", "recv", "(", "1000000", ")", "client", ".", "close", "(", ")", "return", "exit_code", ",", "output" ]
Helper for multiprocessing.
[ "Helper", "for", "multiprocessing", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L241-L264
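The connect loop above retries forever on socket and authentication errors, which suits freshly booted instances but can hang indefinitely on a bad key. A standalone paramiko sketch with a bounded retry budget (the function name and attempt count are illustrative, not part of the module):

```python
import socket
import time

import paramiko

def ssh_exec(hostname, cmd, pkey, attempts=5):
    """Run cmd over ssh, retrying the connection a bounded number of times."""
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    for _ in range(attempts):
        try:
            client.connect(hostname, username='ubuntu', pkey=pkey,
                           allow_agent=False, look_for_keys=False)
            break
        except (socket.error, paramiko.AuthenticationException):
            time.sleep(5)
    else:
        raise IOError('could not connect to %s' % hostname)
    _, stdout, _ = client.exec_command(cmd)
    exit_code = stdout.channel.recv_exit_status()
    output = stdout.read()
    client.close()
    return exit_code, output
```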
248,482
cirruscluster/cirruscluster
cirruscluster/core.py
WaitForHostsReachable
def WaitForHostsReachable(hostnames, ssh_key): """ Blocks until host is reachable via ssh. """ while True: unreachable = GetUnreachableHosts(hostnames, ssh_key) if unreachable: print 'waiting for unreachable hosts: %s' % unreachable time.sleep(5) else: break return
python
def WaitForHostsReachable(hostnames, ssh_key): """ Blocks until host is reachable via ssh. """ while True: unreachable = GetUnreachableHosts(hostnames, ssh_key) if unreachable: print 'waiting for unreachable hosts: %s' % unreachable time.sleep(5) else: break return
[ "def", "WaitForHostsReachable", "(", "hostnames", ",", "ssh_key", ")", ":", "while", "True", ":", "unreachable", "=", "GetUnreachableHosts", "(", "hostnames", ",", "ssh_key", ")", "if", "unreachable", ":", "print", "'waiting for unreachable hosts: %s'", "%", "unreachable", "time", ".", "sleep", "(", "5", ")", "else", ":", "break", "return" ]
Blocks until host is reachable via ssh.
[ "Blocks", "until", "host", "is", "reachable", "via", "ssh", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L283-L292
248,483
cirruscluster/cirruscluster
cirruscluster/core.py
GetUnreachableInstances
def GetUnreachableInstances(instances, ssh_key): """ Returns list of instances unreachable via ssh. """ hostnames = [i.private_ip for i in instances] ssh_status = AreHostsReachable(hostnames, ssh_key) assert(len(hostnames) == len(ssh_status)) nonresponsive_instances = [instance for (instance, ssh_ok) in zip(instances, ssh_status) if not ssh_ok] return nonresponsive_instances
python
def GetUnreachableInstances(instances, ssh_key): """ Returns list of instances unreachable via ssh. """ hostnames = [i.private_ip for i in instances] ssh_status = AreHostsReachable(hostnames, ssh_key) assert(len(hostnames) == len(ssh_status)) nonresponsive_instances = [instance for (instance, ssh_ok) in zip(instances, ssh_status) if not ssh_ok] return nonresponsive_instances
[ "def", "GetUnreachableInstances", "(", "instances", ",", "ssh_key", ")", ":", "hostnames", "=", "[", "i", ".", "private_ip", "for", "i", "in", "instances", "]", "ssh_status", "=", "AreHostsReachable", "(", "hostnames", ",", "ssh_key", ")", "assert", "(", "len", "(", "hostnames", ")", "==", "len", "(", "ssh_status", ")", ")", "nonresponsive_instances", "=", "[", "instance", "for", "(", "instance", ",", "ssh_ok", ")", "in", "zip", "(", "instances", ",", "ssh_status", ")", "if", "not", "ssh_ok", "]", "return", "nonresponsive_instances" ]
Returns list of instances unreachable via ssh.
[ "Returns", "list", "of", "instances", "unreachable", "via", "ssh", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L294-L301
248,484
cirruscluster/cirruscluster
cirruscluster/core.py
GetUnreachableHosts
def GetUnreachableHosts(hostnames, ssh_key): """ Returns list of hosts unreachable via ssh. """ ssh_status = AreHostsReachable(hostnames, ssh_key) assert(len(hostnames) == len(ssh_status)) nonresponsive_hostnames = [host for (host, ssh_ok) in zip(hostnames, ssh_status) if not ssh_ok] return nonresponsive_hostnames
python
def GetUnreachableHosts(hostnames, ssh_key): """ Returns list of hosts unreachable via ssh. """ ssh_status = AreHostsReachable(hostnames, ssh_key) assert(len(hostnames) == len(ssh_status)) nonresponsive_hostnames = [host for (host, ssh_ok) in zip(hostnames, ssh_status) if not ssh_ok] return nonresponsive_hostnames
[ "def", "GetUnreachableHosts", "(", "hostnames", ",", "ssh_key", ")", ":", "ssh_status", "=", "AreHostsReachable", "(", "hostnames", ",", "ssh_key", ")", "assert", "(", "len", "(", "hostnames", ")", "==", "len", "(", "ssh_status", ")", ")", "nonresponsive_hostnames", "=", "[", "host", "for", "(", "host", ",", "ssh_ok", ")", "in", "zip", "(", "hostnames", ",", "ssh_status", ")", "if", "not", "ssh_ok", "]", "return", "nonresponsive_hostnames" ]
Returns list of hosts unreachable via ssh.
[ "Returns", "list", "of", "hosts", "unreachable", "via", "ssh", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L303-L309
248,485
cirruscluster/cirruscluster
cirruscluster/core.py
AreHostsReachable
def AreHostsReachable(hostnames, ssh_key): """ Returns list of bools indicating if host reachable via ssh. """ # validate input for hostname in hostnames: assert(len(hostname)) ssh_ok = [exit_code == 0 for (exit_code, _) in RunCommandOnHosts('echo test > /dev/null', hostnames, ssh_key)] return ssh_ok
python
def AreHostsReachable(hostnames, ssh_key): """ Returns list of bools indicating if host reachable via ssh. """ # validate input for hostname in hostnames: assert(len(hostname)) ssh_ok = [exit_code == 0 for (exit_code, _) in RunCommandOnHosts('echo test > /dev/null', hostnames, ssh_key)] return ssh_ok
[ "def", "AreHostsReachable", "(", "hostnames", ",", "ssh_key", ")", ":", "# validate input", "for", "hostname", "in", "hostnames", ":", "assert", "(", "len", "(", "hostname", ")", ")", "ssh_ok", "=", "[", "exit_code", "==", "0", "for", "(", "exit_code", ",", "_", ")", "in", "RunCommandOnHosts", "(", "'echo test > /dev/null'", ",", "hostnames", ",", "ssh_key", ")", "]", "return", "ssh_ok" ]
Returns list of bools indicating if host reachable via ssh.
[ "Returns", "list", "of", "bools", "indicating", "if", "host", "reachable", "via", "ssh", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L311-L318
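These three reachability helpers layer on one another: `AreHostsReachable` produces one bool per host, the two `GetUnreachable*` functions zip that mask back against the inputs, and `WaitForHostsReachable` polls until the unreachable set is empty. The zip-and-filter step in isolation (values are illustrative):

```python
hosts = ['10.0.0.1', '10.0.0.2']
ssh_ok = [True, False]                 # stand-in for real ssh probes
unreachable = [h for h, ok in zip(hosts, ssh_ok) if not ok]
assert unreachable == ['10.0.0.2']
```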
248,486
cirruscluster/cirruscluster
cirruscluster/core.py
AmiName
def AmiName(ami_release_name, ubuntu_release_name, virtualization_type, mapr_version, role): """ Returns AMI name using Cirrus ami naming convention. """ if not role in valid_instance_roles: raise RuntimeError('Specified role (%s) not a valid role: %s' % (role, valid_instance_roles)) if virtualization_type not in valid_virtualization_types: raise RuntimeError('Specified virtualization type (%s) not valid: %s' % (virtualization_type, valid_virtualization_types)) ami_name = 'cirrus-%s-ubuntu-%s-%s-mapr%s-%s' % (ami_release_name, ubuntu_release_name, virtualization_type, mapr_version, role) return ami_name
python
def AmiName(ami_release_name, ubuntu_release_name, virtualization_type, mapr_version, role): """ Returns AMI name using Cirrus ami naming convention. """ if not role in valid_instance_roles: raise RuntimeError('Specified role (%s) not a valid role: %s' % (role, valid_instance_roles)) if virtualization_type not in valid_virtualization_types: raise RuntimeError('Specified virtualization type (%s) not valid: %s' % (virtualization_type, valid_virtualization_types)) ami_name = 'cirrus-%s-ubuntu-%s-%s-mapr%s-%s' % (ami_release_name, ubuntu_release_name, virtualization_type, mapr_version, role) return ami_name
[ "def", "AmiName", "(", "ami_release_name", ",", "ubuntu_release_name", ",", "virtualization_type", ",", "mapr_version", ",", "role", ")", ":", "if", "not", "role", "in", "valid_instance_roles", ":", "raise", "RuntimeError", "(", "'Specified role (%s) not a valid role: %s'", "%", "(", "role", ",", "valid_instance_roles", ")", ")", "if", "virtualization_type", "not", "in", "valid_virtualization_types", ":", "raise", "RuntimeError", "(", "'Specified virtualization type (%s) not valid: %s'", "%", "(", "virtualization_type", ",", "valid_virtualization_types", ")", ")", "ami_name", "=", "'cirrus-%s-ubuntu-%s-%s-mapr%s-%s'", "%", "(", "ami_release_name", ",", "ubuntu_release_name", ",", "virtualization_type", ",", "mapr_version", ",", "role", ")", "return", "ami_name" ]
Returns AMI name using Cirrus ami naming convention.
[ "Returns", "AMI", "name", "using", "Cirrus", "ami", "naming", "convention", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L324-L337
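A worked example of the naming convention, assuming `'master'` is one of `valid_instance_roles` and `'hvm'` one of `valid_virtualization_types` (both lists live elsewhere in the module; the argument values are illustrative):

```python
# All five parts are joined into one dash-separated name.
name = AmiName('r1', 'precise', 'hvm', '2.1.3', 'master')
assert name == 'cirrus-r1-ubuntu-precise-hvm-mapr2.1.3-master'
```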
248,487
cirruscluster/cirruscluster
cirruscluster/core.py
LookupCirrusAmi
def LookupCirrusAmi(ec2, instance_type, ubuntu_release_name, mapr_version, role, ami_release_name, ami_owner_id): """ Returns AMI satisfying provided constraints. """ if not role in valid_instance_roles: raise RuntimeError('Specified role (%s) not a valid role: %s' % (role, valid_instance_roles)) virtualization_type = 'paravirtual' if IsHPCInstanceType(instance_type): virtualization_type = 'hvm' assert(ami_owner_id) images = ec2.get_all_images(owners=[ami_owner_id]) ami = None ami_name = AmiName(ami_release_name, ubuntu_release_name, virtualization_type, mapr_version, role) for image in images: if image.name == ami_name: ami = image break return ami
python
def LookupCirrusAmi(ec2, instance_type, ubuntu_release_name, mapr_version, role, ami_release_name, ami_owner_id): """ Returns AMI satisfying provided constraints. """ if not role in valid_instance_roles: raise RuntimeError('Specified role (%s) not a valid role: %s' % (role, valid_instance_roles)) virtualization_type = 'paravirtual' if IsHPCInstanceType(instance_type): virtualization_type = 'hvm' assert(ami_owner_id) images = ec2.get_all_images(owners=[ami_owner_id]) ami = None ami_name = AmiName(ami_release_name, ubuntu_release_name, virtualization_type, mapr_version, role) for image in images: if image.name == ami_name: ami = image break return ami
[ "def", "LookupCirrusAmi", "(", "ec2", ",", "instance_type", ",", "ubuntu_release_name", ",", "mapr_version", ",", "role", ",", "ami_release_name", ",", "ami_owner_id", ")", ":", "if", "not", "role", "in", "valid_instance_roles", ":", "raise", "RuntimeError", "(", "'Specified role (%s) not a valid role: %s'", "%", "(", "role", ",", "valid_instance_roles", ")", ")", "virtualization_type", "=", "'paravirtual'", "if", "IsHPCInstanceType", "(", "instance_type", ")", ":", "virtualization_type", "=", "'hvm'", "assert", "(", "ami_owner_id", ")", "images", "=", "ec2", ".", "get_all_images", "(", "owners", "=", "[", "ami_owner_id", "]", ")", "ami", "=", "None", "ami_name", "=", "AmiName", "(", "ami_release_name", ",", "ubuntu_release_name", ",", "virtualization_type", ",", "mapr_version", ",", "role", ")", "for", "image", "in", "images", ":", "if", "image", ".", "name", "==", "ami_name", ":", "ami", "=", "image", "break", "return", "ami" ]
Returns AMI satisfying provided constraints.
[ "Returns", "AMI", "satisfying", "provided", "constraints", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L342-L361
248,488
cirruscluster/cirruscluster
cirruscluster/core.py
GetRegion
def GetRegion(region_name): """ Converts region name string into boto Region object. """ regions = boto_ec2.regions() region = None valid_region_names = [] for r in regions: valid_region_names.append(r.name) if r.name == region_name: region = r break if not region: logging.info( 'invalid region name: %s ' % (region_name)) logging.info( 'Try one of these:\n %s' % ('\n'.join(valid_region_names))) assert(False) return region
python
def GetRegion(region_name): """ Converts region name string into boto Region object. """ regions = boto_ec2.regions() region = None valid_region_names = [] for r in regions: valid_region_names.append(r.name) if r.name == region_name: region = r break if not region: logging.info( 'invalid region name: %s ' % (region_name)) logging.info( 'Try one of these:\n %s' % ('\n'.join(valid_region_names))) assert(False) return region
[ "def", "GetRegion", "(", "region_name", ")", ":", "regions", "=", "boto_ec2", ".", "regions", "(", ")", "region", "=", "None", "valid_region_names", "=", "[", "]", "for", "r", "in", "regions", ":", "valid_region_names", ".", "append", "(", "r", ".", "name", ")", "if", "r", ".", "name", "==", "region_name", ":", "region", "=", "r", "break", "if", "not", "region", ":", "logging", ".", "info", "(", "'invalid region name: %s '", "%", "(", "region_name", ")", ")", "logging", ".", "info", "(", "'Try one of these:\\n %s'", "%", "(", "'\\n'", ".", "join", "(", "valid_region_names", ")", ")", ")", "assert", "(", "False", ")", "return", "region" ]
Converts region name string into boto Region object.
[ "Converts", "region", "name", "string", "into", "boto", "Region", "object", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L368-L382
248,489
cirruscluster/cirruscluster
cirruscluster/core.py
PrivateToPublicOpenSSH
def PrivateToPublicOpenSSH(key, host): """ Computes the OpenSSH public key format given a private key. """ # Create public key from private key. ssh_rsa = '00000007' + base64.b16encode('ssh-rsa') # Exponent. exponent = '%x' % (key.e,) if len(exponent) % 2: exponent = '0' + exponent ssh_rsa += '%08x' % (len(exponent) / 2,) ssh_rsa += exponent modulus = '%x' % (key.n,) if len(modulus) % 2: modulus = '0' + modulus if modulus[0] in '89abcdef': modulus = '00' + modulus ssh_rsa += '%08x' % (len(modulus) / 2,) ssh_rsa += modulus hash_string = base64.b64encode(base64.b16decode(ssh_rsa.upper())) public_key = 'ssh-rsa %s %s' % (hash_string, host) return public_key
python
def PrivateToPublicOpenSSH(key, host): """ Computes the OpenSSH public key format given a private key. """ # Create public key from private key. ssh_rsa = '00000007' + base64.b16encode('ssh-rsa') # Exponent. exponent = '%x' % (key.e,) if len(exponent) % 2: exponent = '0' + exponent ssh_rsa += '%08x' % (len(exponent) / 2,) ssh_rsa += exponent modulus = '%x' % (key.n,) if len(modulus) % 2: modulus = '0' + modulus if modulus[0] in '89abcdef': modulus = '00' + modulus ssh_rsa += '%08x' % (len(modulus) / 2,) ssh_rsa += modulus hash_string = base64.b64encode(base64.b16decode(ssh_rsa.upper())) public_key = 'ssh-rsa %s %s' % (hash_string, host) return public_key
[ "def", "PrivateToPublicOpenSSH", "(", "key", ",", "host", ")", ":", "# Create public key from private key.", "ssh_rsa", "=", "'00000007'", "+", "base64", ".", "b16encode", "(", "'ssh-rsa'", ")", "# Exponent.", "exponent", "=", "'%x'", "%", "(", "key", ".", "e", ",", ")", "if", "len", "(", "exponent", ")", "%", "2", ":", "exponent", "=", "'0'", "+", "exponent", "ssh_rsa", "+=", "'%08x'", "%", "(", "len", "(", "exponent", ")", "/", "2", ",", ")", "ssh_rsa", "+=", "exponent", "modulus", "=", "'%x'", "%", "(", "key", ".", "n", ",", ")", "if", "len", "(", "modulus", ")", "%", "2", ":", "modulus", "=", "'0'", "+", "modulus", "if", "modulus", "[", "0", "]", "in", "'89abcdef'", ":", "modulus", "=", "'00'", "+", "modulus", "ssh_rsa", "+=", "'%08x'", "%", "(", "len", "(", "modulus", ")", "/", "2", ",", ")", "ssh_rsa", "+=", "modulus", "hash_string", "=", "base64", ".", "b64encode", "(", "base64", ".", "b16decode", "(", "ssh_rsa", ".", "upper", "(", ")", ")", ")", "public_key", "=", "'ssh-rsa %s %s'", "%", "(", "hash_string", ",", "host", ")", "return", "public_key" ]
Computes the OpenSSH public key format given a private key.
[ "Computes", "the", "OpenSSH", "public", "key", "format", "given", "a", "private", "key", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L500-L519
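The hex-string concatenation above is building an RFC 4253 public-key blob: a length-prefixed `ssh-rsa` tag, then the exponent and modulus as length-prefixed big-endian integers, with a leading zero byte whenever the high bit is set so the value stays non-negative. A byte-level equivalent, written as a Python 3 sketch for clarity:

```python
import base64
import struct

def openssh_public_key(e, n, comment):
    # RFC 4253 mpint: big-endian bytes, zero-padded if the high bit is set.
    def mpint(x):
        b = x.to_bytes((x.bit_length() + 7) // 8, 'big')
        if b and b[0] & 0x80:
            b = b'\x00' + b
        return struct.pack('>I', len(b)) + b

    blob = struct.pack('>I', 7) + b'ssh-rsa' + mpint(e) + mpint(n)
    return 'ssh-rsa %s %s' % (base64.b64encode(blob).decode('ascii'), comment)
```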
248,490
cirruscluster/cirruscluster
cirruscluster/core.py
InitKeypair
def InitKeypair(aws_id, aws_secret, ec2, s3, keypair_name, src_region, dst_regions): """ Returns the ssh private key for the given keypair name Cirrus created bucket. Creates the keypair if it doesn't yet exist and stores private key in S3. """ # check if a keypair has been created metadata = CirrusAccessIdMetadata(s3, aws_id) keypair = ec2.get_key_pair(keypair_name) ssh_key = None if keypair: # if created, check that private key is available in s3 ssh_key = metadata.GetSshKey(keypair_name) # if the private key is not created or not available in s3, recreate it if not ssh_key: if keypair: ec2.delete_key_pair(keypair_name) print 'Recreating keypair: %s' % (keypair_name) # create new key in current region_name keypair = ec2.create_key_pair(keypair_name) ssh_key = keypair.material metadata.SetSshKey(keypair_name, ssh_key) DistributeKeyToRegions(src_region, dst_regions, keypair, aws_id, aws_secret) assert(keypair) assert(ssh_key) return ssh_key
python
def InitKeypair(aws_id, aws_secret, ec2, s3, keypair_name, src_region, dst_regions): """ Returns the ssh private key for the given keypair name Cirrus created bucket. Creates the keypair if it doesn't yet exist and stores private key in S3. """ # check if a keypair has been created metadata = CirrusAccessIdMetadata(s3, aws_id) keypair = ec2.get_key_pair(keypair_name) ssh_key = None if keypair: # if created, check that private key is available in s3 ssh_key = metadata.GetSshKey(keypair_name) # if the private key is not created or not available in s3, recreate it if not ssh_key: if keypair: ec2.delete_key_pair(keypair_name) print 'Recreating keypair: %s' % (keypair_name) # create new key in current region_name keypair = ec2.create_key_pair(keypair_name) ssh_key = keypair.material metadata.SetSshKey(keypair_name, ssh_key) DistributeKeyToRegions(src_region, dst_regions, keypair, aws_id, aws_secret) assert(keypair) assert(ssh_key) return ssh_key
[ "def", "InitKeypair", "(", "aws_id", ",", "aws_secret", ",", "ec2", ",", "s3", ",", "keypair_name", ",", "src_region", ",", "dst_regions", ")", ":", "# check if a keypair has been created", "metadata", "=", "CirrusAccessIdMetadata", "(", "s3", ",", "aws_id", ")", "keypair", "=", "ec2", ".", "get_key_pair", "(", "keypair_name", ")", "ssh_key", "=", "None", "if", "keypair", ":", "# if created, check that private key is available in s3", "ssh_key", "=", "metadata", ".", "GetSshKey", "(", "keypair_name", ")", "# if the private key is not created or not available in s3, recreate it", "if", "not", "ssh_key", ":", "if", "keypair", ":", "ec2", ".", "delete_key_pair", "(", "keypair_name", ")", "print", "'Recreating keypair: %s'", "%", "(", "keypair_name", ")", "# create new key in current region_name", "keypair", "=", "ec2", ".", "create_key_pair", "(", "keypair_name", ")", "ssh_key", "=", "keypair", ".", "material", "metadata", ".", "SetSshKey", "(", "keypair_name", ",", "ssh_key", ")", "DistributeKeyToRegions", "(", "src_region", ",", "dst_regions", ",", "keypair", ",", "aws_id", ",", "aws_secret", ")", "assert", "(", "keypair", ")", "assert", "(", "ssh_key", ")", "return", "ssh_key" ]
Returns the ssh private key for the given keypair name from the Cirrus-created bucket. Creates the keypair if it doesn't yet exist and stores the private key in S3.
[ "Returns", "the", "ssh", "private", "key", "for", "the", "given", "keypair", "name", "Cirrus", "created", "bucket", ".", "Creates", "the", "keypair", "if", "it", "doesn", "t", "yet", "exist", "and", "stores", "private", "key", "in", "S3", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L522-L549
248,491
cirruscluster/cirruscluster
cirruscluster/core.py
__WaitForVolume
def __WaitForVolume(volume, desired_state): """ Blocks until EBS volume is in desired state. """ print 'Waiting for volume %s to be %s...' % (volume.id, desired_state) while True: volume.update() sys.stdout.write('.') sys.stdout.flush() #print 'status is: %s' % volume.status if volume.status == desired_state: break time.sleep(5) return
python
def __WaitForVolume(volume, desired_state): """ Blocks until EBS volume is in desired state. """ print 'Waiting for volume %s to be %s...' % (volume.id, desired_state) while True: volume.update() sys.stdout.write('.') sys.stdout.flush() #print 'status is: %s' % volume.status if volume.status == desired_state: break time.sleep(5) return
[ "def", "__WaitForVolume", "(", "volume", ",", "desired_state", ")", ":", "print", "'Waiting for volume %s to be %s...'", "%", "(", "volume", ".", "id", ",", "desired_state", ")", "while", "True", ":", "volume", ".", "update", "(", ")", "sys", ".", "stdout", ".", "write", "(", "'.'", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "#print 'status is: %s' % volume.status", "if", "volume", ".", "status", "==", "desired_state", ":", "break", "time", ".", "sleep", "(", "5", ")", "return" ]
Blocks until EBS volume is in desired state.
[ "Blocks", "until", "EBS", "volume", "is", "in", "desired", "state", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L586-L597
248,492
cirruscluster/cirruscluster
cirruscluster/core.py
WaitForSnapshotCompleted
def WaitForSnapshotCompleted(snapshot): """ Blocks until snapshot is complete. """ print 'Waiting for snapshot %s to be completed...' % (snapshot) while True: snapshot.update() sys.stdout.write('.') sys.stdout.flush() #print 'status is: %s' % snapshot.status if snapshot.status == 'completed': break time.sleep(5) return
python
def WaitForSnapshotCompleted(snapshot): """ Blocks until snapshot is complete. """ print 'Waiting for snapshot %s to be completed...' % (snapshot) while True: snapshot.update() sys.stdout.write('.') sys.stdout.flush() #print 'status is: %s' % snapshot.status if snapshot.status == 'completed': break time.sleep(5) return
[ "def", "WaitForSnapshotCompleted", "(", "snapshot", ")", ":", "print", "'Waiting for snapshot %s to be completed...'", "%", "(", "snapshot", ")", "while", "True", ":", "snapshot", ".", "update", "(", ")", "sys", ".", "stdout", ".", "write", "(", "'.'", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "#print 'status is: %s' % snapshot.status", "if", "snapshot", ".", "status", "==", "'completed'", ":", "break", "time", ".", "sleep", "(", "5", ")", "return" ]
Blocks until snapshot is complete.
[ "Blocks", "until", "snapshot", "is", "complete", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L599-L610
248,493
cirruscluster/cirruscluster
cirruscluster/core.py
__WaitForInstance
def __WaitForInstance(instance, desired_state): """ Blocks until instance is in desired_state. """ print 'Waiting for instance %s to change to %s' % (instance.id, desired_state) while True: try: instance.update() state = instance.state sys.stdout.write('.') sys.stdout.flush() if state == desired_state: break except boto_exception.EC2ResponseError as e: logging.info(e) #except boto_exception.ResponseError as e: # This is an alias # logging.info(e) time.sleep(5) return
python
def __WaitForInstance(instance, desired_state): """ Blocks until instance is in desired_state. """ print 'Waiting for instance %s to change to %s' % (instance.id, desired_state) while True: try: instance.update() state = instance.state sys.stdout.write('.') sys.stdout.flush() if state == desired_state: break except boto_exception.EC2ResponseError as e: logging.info(e) #except boto_exception.ResponseError as e: # This is an alias # logging.info(e) time.sleep(5) return
[ "def", "__WaitForInstance", "(", "instance", ",", "desired_state", ")", ":", "print", "'Waiting for instance %s to change to %s'", "%", "(", "instance", ".", "id", ",", "desired_state", ")", "while", "True", ":", "try", ":", "instance", ".", "update", "(", ")", "state", "=", "instance", ".", "state", "sys", ".", "stdout", ".", "write", "(", "'.'", ")", "sys", ".", "stdout", ".", "flush", "(", ")", "if", "state", "==", "desired_state", ":", "break", "except", "boto_exception", ".", "EC2ResponseError", "as", "e", ":", "logging", ".", "info", "(", "e", ")", "#except boto_exception.ResponseError as e: # This is an alias", "# logging.info(e)", "time", ".", "sleep", "(", "5", ")", "return" ]
Blocks until instance is in desired_state.
[ "Blocks", "until", "instance", "is", "in", "desired_state", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L618-L634
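The volume, snapshot, and instance wait loops above share one shape: refresh the object, print a progress dot, compare against a desired state, sleep. A generic poller makes the pattern explicit (a consolidation sketch, not part of the original module):

```python
import sys
import time

def wait_until(check, interval=5):
    # Poll `check` until it returns True, printing a progress dot per try.
    while not check():
        sys.stdout.write('.')
        sys.stdout.flush()
        time.sleep(interval)

# Example check (assuming a boto volume object in scope):
# def volume_available():
#     volume.update()
#     return volume.status == 'available'
# wait_until(volume_available)
```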
248,494
cirruscluster/cirruscluster
cirruscluster/core.py
SearchUbuntuAmiDatabase
def SearchUbuntuAmiDatabase(release_name, region_name, root_store_type, virtualization_type): """ Returns the ubuntu created ami matching the given criteria. """ ami_list_url = 'http://cloud-images.ubuntu.com/query/%s/server/released.txt' \ % (release_name) url_file = urllib2.urlopen(ami_list_url) # The mapping of columns names to col ids in the ubuntu release txt file. release_name_col = 0 release_tag_col = 2 release_date_col = 3 root_store_type_col = 4 arch_col = 5 region_col = 6 ami_col = 7 matching_amis = [] # list of tuples (ami_id, tokens) for line in url_file: tokens = line.split() # lines have different number of columns (one fewer for hvm) if (len(tokens) == 9): virtualization_type_col = 8 elif (len(tokens) == 10): virtualization_type_col = 9 else: raise RuntimeError('invalid line format: %s' % line) if tokens[release_name_col] == release_name \ and tokens[release_tag_col] == 'release' \ and tokens[root_store_type_col] == root_store_type \ and tokens[arch_col] == 'amd64' \ and tokens[region_col] == region_name \ and tokens[virtualization_type_col] == virtualization_type: matching_amis.append((tokens[ami_col], tokens)) matching_amis.sort(key=lambda (ami, tokens) : tokens[release_date_col], reverse=True) # order newest first if not matching_amis: params = [release_name, root_store_type, region_name, virtualization_type] raise RuntimeError('Failed to find matching ubuntu ami: %s', params) selected_ami = matching_amis[0][0] return selected_ami
python
def SearchUbuntuAmiDatabase(release_name, region_name, root_store_type, virtualization_type): """ Returns the ubuntu created ami matching the given criteria. """ ami_list_url = 'http://cloud-images.ubuntu.com/query/%s/server/released.txt' \ % (release_name) url_file = urllib2.urlopen(ami_list_url) # The mapping of columns names to col ids in the ubuntu release txt file. release_name_col = 0 release_tag_col = 2 release_date_col = 3 root_store_type_col = 4 arch_col = 5 region_col = 6 ami_col = 7 matching_amis = [] # list of tuples (ami_id, tokens) for line in url_file: tokens = line.split() # lines have different number of columns (one fewer for hvm) if (len(tokens) == 9): virtualization_type_col = 8 elif (len(tokens) == 10): virtualization_type_col = 9 else: raise RuntimeError('invalid line format: %s' % line) if tokens[release_name_col] == release_name \ and tokens[release_tag_col] == 'release' \ and tokens[root_store_type_col] == root_store_type \ and tokens[arch_col] == 'amd64' \ and tokens[region_col] == region_name \ and tokens[virtualization_type_col] == virtualization_type: matching_amis.append((tokens[ami_col], tokens)) matching_amis.sort(key=lambda (ami, tokens) : tokens[release_date_col], reverse=True) # order newest first if not matching_amis: params = [release_name, root_store_type, region_name, virtualization_type] raise RuntimeError('Failed to find matching ubuntu ami: %s', params) selected_ami = matching_amis[0][0] return selected_ami
[ "def", "SearchUbuntuAmiDatabase", "(", "release_name", ",", "region_name", ",", "root_store_type", ",", "virtualization_type", ")", ":", "ami_list_url", "=", "'http://cloud-images.ubuntu.com/query/%s/server/released.txt'", "%", "(", "release_name", ")", "url_file", "=", "urllib2", ".", "urlopen", "(", "ami_list_url", ")", "# The mapping of columns names to col ids in the ubuntu release txt file.", "release_name_col", "=", "0", "release_tag_col", "=", "2", "release_date_col", "=", "3", "root_store_type_col", "=", "4", "arch_col", "=", "5", "region_col", "=", "6", "ami_col", "=", "7", "matching_amis", "=", "[", "]", "# list of tuples (ami_id, tokens)", "for", "line", "in", "url_file", ":", "tokens", "=", "line", ".", "split", "(", ")", "# lines have different number of columns (one fewer for hvm)", "if", "(", "len", "(", "tokens", ")", "==", "9", ")", ":", "virtualization_type_col", "=", "8", "elif", "(", "len", "(", "tokens", ")", "==", "10", ")", ":", "virtualization_type_col", "=", "9", "else", ":", "raise", "RuntimeError", "(", "'invalid line format: %s'", "%", "line", ")", "if", "tokens", "[", "release_name_col", "]", "==", "release_name", "and", "tokens", "[", "release_tag_col", "]", "==", "'release'", "and", "tokens", "[", "root_store_type_col", "]", "==", "root_store_type", "and", "tokens", "[", "arch_col", "]", "==", "'amd64'", "and", "tokens", "[", "region_col", "]", "==", "region_name", "and", "tokens", "[", "virtualization_type_col", "]", "==", "virtualization_type", ":", "matching_amis", ".", "append", "(", "(", "tokens", "[", "ami_col", "]", ",", "tokens", ")", ")", "matching_amis", ".", "sort", "(", "key", "=", "lambda", "(", "ami", ",", "tokens", ")", ":", "tokens", "[", "release_date_col", "]", ",", "reverse", "=", "True", ")", "# order newest first ", "if", "not", "matching_amis", ":", "params", "=", "[", "release_name", ",", "root_store_type", ",", "region_name", ",", "virtualization_type", "]", "raise", "RuntimeError", "(", "'Failed to find matching ubuntu ami: %s'", ",", "params", ")", "selected_ami", "=", "matching_amis", "[", "0", "]", "[", "0", "]", "return", "selected_ami" ]
Returns the ubuntu created ami matching the given criteria.
[ "Returns", "the", "ubuntu", "created", "ami", "matching", "the", "given", "criteria", "." ]
977409929dd81322d886425cdced10608117d5d7
https://github.com/cirruscluster/cirruscluster/blob/977409929dd81322d886425cdced10608117d5d7/cirruscluster/core.py#L666-L704
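Two portability nits in the code above: the sort key uses tuple parameter unpacking in a lambda, which Python 3 removed (PEP 3113), and the final `RuntimeError` passes `params` as a second positional argument instead of formatting it into the message. A Python 3-compatible rendering of those lines:

```python
# Index the (ami_id, tokens) pair instead of unpacking it in the signature.
matching_amis.sort(key=lambda pair: pair[1][release_date_col], reverse=True)
if not matching_amis:
    params = [release_name, root_store_type, region_name, virtualization_type]
    raise RuntimeError('Failed to find matching ubuntu ami: %s' % params)
```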
248,495
bruth/restlib2
restlib2/mixins.py
TemplateResponseMixin.render
def render(self, request, context, status=codes.ok, content_type=None, args=None, kwargs=None): "Expects the method handler to return the `context` for the template." if isinstance(self.template_name, (list, tuple)): template = loader.select_template(self.template_name) elif self.template_name: template = loader.get_template(self.template_name) else: template = loader.Template(self.template_string) context = RequestContext(request, context) content = template.render(context) return HttpResponse(content, status=status, content_type=content_type)
python
def render(self, request, context, status=codes.ok, content_type=None, args=None, kwargs=None): "Expects the method handler to return the `context` for the template." if isinstance(self.template_name, (list, tuple)): template = loader.select_template(self.template_name) elif self.template_name: template = loader.get_template(self.template_name) else: template = loader.Template(self.template_string) context = RequestContext(request, context) content = template.render(context) return HttpResponse(content, status=status, content_type=content_type)
[ "def", "render", "(", "self", ",", "request", ",", "context", ",", "status", "=", "codes", ".", "ok", ",", "content_type", "=", "None", ",", "args", "=", "None", ",", "kwargs", "=", "None", ")", ":", "if", "isinstance", "(", "self", ".", "template_name", ",", "(", "list", ",", "tuple", ")", ")", ":", "template", "=", "loader", ".", "select_template", "(", "self", ".", "template_name", ")", "elif", "self", ".", "template_name", ":", "template", "=", "loader", ".", "get_template", "(", "self", ".", "template_name", ")", "else", ":", "template", "=", "loader", ".", "Template", "(", "self", ".", "template_string", ")", "context", "=", "RequestContext", "(", "request", ",", "context", ")", "content", "=", "template", ".", "render", "(", "context", ")", "return", "HttpResponse", "(", "content", ",", "status", "=", "status", ",", "content_type", "=", "content_type", ")" ]
Expects the method handler to return the `context` for the template.
[ "Expects", "the", "method", "handler", "to", "return", "the", "context", "for", "the", "template", "." ]
cb147527496ddf08263364f1fb52e7c48f215667
https://github.com/bruth/restlib2/blob/cb147527496ddf08263364f1fb52e7c48f215667/restlib2/mixins.py#L11-L25
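A hypothetical resource using the mixin; per the docstring, the HTTP method handler returns the template context and the mixin renders it into an `HttpResponse`. The `Resource` base class and template name are assumptions for illustration, not taken from the record above:

```python
class AboutResource(TemplateResponseMixin, Resource):
    template_name = 'about.html'

    def get(self, request):
        # The handler's return value becomes the template context.
        return {'title': 'About', 'version': '1.0'}
```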
248,496
ddorn/superprompt
superprompt/core.py
prompt_autocomplete
def prompt_autocomplete(prompt, complete, default=None, contains_spaces=True, show_default=True, prompt_suffix=': ', color=None): """ Prompt a string with autocompletion :param complete: A function that returns a list of possible strings that should be completed on a given text. def complete(text: str) -> List[str]: ... """ def real_completer(text, state): possibilities = complete(text) + [None] if possibilities: return possibilities[state] return None readline.set_completer_delims('\t\n' + ' ' * (not contains_spaces)) readline.parse_and_bind("tab: complete") readline.set_completer(real_completer) if default is not None and show_default: prompt += ' [%s]' % default prompt += prompt_suffix colors = { 'red': Fore.RED, 'blue': Fore.BLUE, 'green': Fore.GREEN, 'cyan': Fore.CYAN, 'magenta': Fore.MAGENTA, 'yellow': Fore.YELLOW, 'white': Fore.WHITE, 'black': Fore.LIGHTBLACK_EX } if color: prompt = colors[color.lower()] + prompt + Fore.RESET if default is not None: r = input(prompt) else: while True: r = input(prompt) if r: break r = r or default # remove the autocompletion before quitting for future input() readline.parse_and_bind('tab: self-insert') readline.set_completer(None) return r
python
def prompt_autocomplete(prompt, complete, default=None, contains_spaces=True, show_default=True, prompt_suffix=': ', color=None): """ Prompt a string with autocompletion :param complete: A function that returns a list of possible strings that should be completed on a given text. def complete(text: str) -> List[str]: ... """ def real_completer(text, state): possibilities = complete(text) + [None] if possibilities: return possibilities[state] return None readline.set_completer_delims('\t\n' + ' ' * (not contains_spaces)) readline.parse_and_bind("tab: complete") readline.set_completer(real_completer) if default is not None and show_default: prompt += ' [%s]' % default prompt += prompt_suffix colors = { 'red': Fore.RED, 'blue': Fore.BLUE, 'green': Fore.GREEN, 'cyan': Fore.CYAN, 'magenta': Fore.MAGENTA, 'yellow': Fore.YELLOW, 'white': Fore.WHITE, 'black': Fore.LIGHTBLACK_EX } if color: prompt = colors[color.lower()] + prompt + Fore.RESET if default is not None: r = input(prompt) else: while True: r = input(prompt) if r: break r = r or default # remove the autocompletion before quitting for future input() readline.parse_and_bind('tab: self-insert') readline.set_completer(None) return r
[ "def", "prompt_autocomplete", "(", "prompt", ",", "complete", ",", "default", "=", "None", ",", "contains_spaces", "=", "True", ",", "show_default", "=", "True", ",", "prompt_suffix", "=", "': '", ",", "color", "=", "None", ")", ":", "def", "real_completer", "(", "text", ",", "state", ")", ":", "possibilities", "=", "complete", "(", "text", ")", "+", "[", "None", "]", "if", "possibilities", ":", "return", "possibilities", "[", "state", "]", "return", "None", "readline", ".", "set_completer_delims", "(", "'\\t\\n'", "+", "' '", "*", "(", "not", "contains_spaces", ")", ")", "readline", ".", "parse_and_bind", "(", "\"tab: complete\"", ")", "readline", ".", "set_completer", "(", "real_completer", ")", "if", "default", "is", "not", "None", "and", "show_default", ":", "prompt", "+=", "' [%s]'", "%", "default", "prompt", "+=", "prompt_suffix", "colors", "=", "{", "'red'", ":", "Fore", ".", "RED", ",", "'blue'", ":", "Fore", ".", "BLUE", ",", "'green'", ":", "Fore", ".", "GREEN", ",", "'cyan'", ":", "Fore", ".", "CYAN", ",", "'magenta'", ":", "Fore", ".", "MAGENTA", ",", "'yellow'", ":", "Fore", ".", "YELLOW", ",", "'white'", ":", "Fore", ".", "WHITE", ",", "'black'", ":", "Fore", ".", "LIGHTBLACK_EX", "}", "if", "color", ":", "prompt", "=", "colors", "[", "color", ".", "lower", "(", ")", "]", "+", "prompt", "+", "Fore", ".", "RESET", "if", "default", "is", "not", "None", ":", "r", "=", "input", "(", "prompt", ")", "else", ":", "while", "True", ":", "r", "=", "input", "(", "prompt", ")", "if", "r", ":", "break", "r", "=", "r", "or", "default", "# remove the autocompletion before quitting for future input()", "readline", ".", "parse_and_bind", "(", "'tab: self-insert'", ")", "readline", ".", "set_completer", "(", "None", ")", "return", "r" ]
Prompt a string with autocompletion :param complete: A function that returns a list of possible strings that should be completed on a given text. def complete(text: str) -> List[str]: ...
[ "Prompt", "a", "string", "with", "autocompletion" ]
f2ee13a71c0523663ca1740738b545e2ab1eab20
https://github.com/ddorn/superprompt/blob/f2ee13a71c0523663ca1740738b545e2ab1eab20/superprompt/core.py#L47-L101
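A usage sketch: `complete` receives the text typed so far and returns the candidate completions (here a fixed word list; real callers might glob the filesystem or query a database):

```python
fruits = ['apple', 'apricot', 'banana']

def complete(text):
    return [f for f in fruits if f.startswith(text)]

# Tab now cycles through matching fruits; Enter accepts 'apple' by default.
choice = prompt_autocomplete('Fruit', complete, default='apple', color='green')
```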
248,497
ddorn/superprompt
superprompt/core.py
prompt_file
def prompt_file(prompt, default=None, must_exist=True, is_dir=False, show_default=True, prompt_suffix=': ', color=None): """ Prompt a filename using glob for autocompletion. If must_exist is True (default) then you can be sure that the value returned is an existing filename or directory name. If is_dir is True, this will show only the directories for the completion. """ if must_exist: while True: r = prompt_autocomplete(prompt, path_complete(is_dir), default, show_default=show_default, prompt_suffix=prompt_suffix, color=color) if os.path.exists(r): break print('This path does not exist.') else: r = prompt_autocomplete(prompt, path_complete(is_dir), default, show_default=show_default, prompt_suffix=prompt_suffix, color=color) return r
python
def prompt_file(prompt, default=None, must_exist=True, is_dir=False, show_default=True, prompt_suffix=': ', color=None): """ Prompt a filename using glob for autocompletion. If must_exist is True (default) then you can be sure that the value returned is an existing filename or directory name. If is_dir is True, this will show only the directories for the completion. """ if must_exist: while True: r = prompt_autocomplete(prompt, path_complete(is_dir), default, show_default=show_default, prompt_suffix=prompt_suffix, color=color) if os.path.exists(r): break print('This path does not exist.') else: r = prompt_autocomplete(prompt, path_complete(is_dir), default, show_default=show_default, prompt_suffix=prompt_suffix, color=color) return r
[ "def", "prompt_file", "(", "prompt", ",", "default", "=", "None", ",", "must_exist", "=", "True", ",", "is_dir", "=", "False", ",", "show_default", "=", "True", ",", "prompt_suffix", "=", "': '", ",", "color", "=", "None", ")", ":", "if", "must_exist", ":", "while", "True", ":", "r", "=", "prompt_autocomplete", "(", "prompt", ",", "path_complete", "(", "is_dir", ")", ",", "default", ",", "show_default", "=", "show_default", ",", "prompt_suffix", "=", "prompt_suffix", ",", "color", "=", "color", ")", "if", "os", ".", "path", ".", "exists", "(", "r", ")", ":", "break", "print", "(", "'This path does not exist.'", ")", "else", ":", "r", "=", "prompt_autocomplete", "(", "prompt", ",", "path_complete", "(", "is_dir", ")", ",", "default", ",", "show_default", "=", "show_default", ",", "prompt_suffix", "=", "prompt_suffix", ",", "color", "=", "color", ")", "return", "r" ]
Prompt a filename using glob for autocompletion. If must_exist is True (default) then you can be sure that the value returned is an existing filename or directory name. If is_dir is True, this will show only the directories for the completion.
[ "Prompt", "a", "filename", "using", "using", "glob", "for", "autocompetion", "." ]
f2ee13a71c0523663ca1740738b545e2ab1eab20
https://github.com/ddorn/superprompt/blob/f2ee13a71c0523663ca1740738b545e2ab1eab20/superprompt/core.py#L104-L125
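A hedged sketch of prompt_file above, under the same superprompt.core import assumption; the paths and prompts here are illustrative.

from superprompt.core import prompt_file

# must_exist=True (the default) re-prompts until os.path.exists() passes;
# is_dir=True restricts the completion to directories.
out_dir = prompt_file('Output directory', default='.', is_dir=True)

# must_exist=False accepts a path that does not exist yet, e.g. a file to create.
save_path = prompt_file('Save as', must_exist=False)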
248,498
ddorn/superprompt
superprompt/core.py
prompt_choice
def prompt_choice(prompt, possibilities, default=None, only_in_poss=True, show_default=True, prompt_suffix=': ', color=None): """ Prompt for a string in a given range of possibilities. This also sets the history to the list of possibilities so the user can scroll it with the arrows to find what they want. If only_in_poss is False, you are not guaranteed that this will return one of the possibilities. """ assert len(possibilities) >= 1 assert not only_in_poss or default is None or default in possibilities, '%s not in possibilities' % default contains_spaces = any(' ' in poss for poss in possibilities) possibilities = sorted(possibilities) readline.clear_history() for kw in possibilities: readline.add_history(kw) def complete(text): return [t for t in possibilities if t.startswith(text)] while 1: r = prompt_autocomplete(prompt, complete, default, contains_spaces=contains_spaces, show_default=show_default, prompt_suffix=prompt_suffix, color=color) if not only_in_poss or r in possibilities: break print('%s is not a possibility.' % r) readline.clear_history() return r
python
def prompt_choice(prompt, possibilities, default=None, only_in_poss=True, show_default=True, prompt_suffix=': ', color=None): """ Prompt for a string in a given range of possibilities. This also sets the history to the list of possibilities so the user can scroll it with the arrows to find what they want. If only_in_poss is False, you are not guaranteed that this will return one of the possibilities. """ assert len(possibilities) >= 1 assert not only_in_poss or default is None or default in possibilities, '%s not in possibilities' % default contains_spaces = any(' ' in poss for poss in possibilities) possibilities = sorted(possibilities) readline.clear_history() for kw in possibilities: readline.add_history(kw) def complete(text): return [t for t in possibilities if t.startswith(text)] while 1: r = prompt_autocomplete(prompt, complete, default, contains_spaces=contains_spaces, show_default=show_default, prompt_suffix=prompt_suffix, color=color) if not only_in_poss or r in possibilities: break print('%s is not a possibility.' % r) readline.clear_history() return r
[ "def", "prompt_choice", "(", "prompt", ",", "possibilities", ",", "default", "=", "None", ",", "only_in_poss", "=", "True", ",", "show_default", "=", "True", ",", "prompt_suffix", "=", "': '", ",", "color", "=", "None", ")", ":", "assert", "len", "(", "possibilities", ")", ">=", "1", "assert", "not", "only_in_poss", "or", "default", "is", "None", "or", "default", "in", "possibilities", ",", "'$s not in possibilities'", "%", "default", "contains_spaces", "=", "any", "(", "' '", "in", "poss", "for", "poss", "in", "possibilities", ")", "possibilities", "=", "sorted", "(", "possibilities", ")", "readline", ".", "clear_history", "(", ")", "for", "kw", "in", "possibilities", ":", "readline", ".", "add_history", "(", "kw", ")", "def", "complete", "(", "text", ")", ":", "return", "[", "t", "for", "t", "in", "possibilities", "if", "t", ".", "startswith", "(", "text", ")", "]", "while", "1", ":", "r", "=", "prompt_autocomplete", "(", "prompt", ",", "complete", ",", "default", ",", "contains_spaces", "=", "contains_spaces", ",", "show_default", "=", "show_default", ",", "prompt_suffix", "=", "prompt_suffix", ",", "color", "=", "color", ")", "if", "not", "only_in_poss", "or", "r", "in", "possibilities", ":", "break", "print", "(", "'%s is not a possibility.'", "%", "r", ")", "readline", ".", "clear_history", "(", ")", "return", "r" ]
Prompt for a string in a given range of possibilities. This also sets the history to the list of possibilities so the user can scroll it with the arrows to find what they want. If only_in_poss is False, you are not guaranteed that this will return one of the possibilities.
[ "Prompt", "for", "a", "string", "in", "a", "given", "range", "of", "possibilities", "." ]
f2ee13a71c0523663ca1740738b545e2ab1eab20
https://github.com/ddorn/superprompt/blob/f2ee13a71c0523663ca1740738b545e2ab1eab20/superprompt/core.py#L128-L162
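A short sketch of prompt_choice above, again assuming the superprompt.core import; the option lists are hypothetical.

from superprompt.core import prompt_choice

# only_in_poss=True (the default) re-prompts until the answer is in the list.
level = prompt_choice('Log level', ['debug', 'info', 'warning', 'error'], default='info')

# With only_in_poss=False any free-form answer is accepted on the first try.
branch = prompt_choice('Branch', ['master', 'develop'], only_in_poss=False)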
248,499
ryanjdillon/pyotelem
pyotelem/plots/plotglides.py
plot_glide_depths
def plot_glide_depths(depths, mask_tag_filt): '''Plot depth at glides Args ---- depths: ndarray Depth values at each sensor sampling mask_tag_filt: ndarray Boolean mask to slice filtered sub-glides from tag data ''' import numpy from . import plotutils fig, ax = plt.subplots() ax = plotutils.plot_noncontiguous(ax, depths, numpy.where(mask_tag_filt)[0]) ax.invert_yaxis() plt.show() return None
python
def plot_glide_depths(depths, mask_tag_filt): '''Plot depth at glides Args ---- depths: ndarray Depth values at each sensor sampling mask_tag_filt: ndarray Boolean mask to slice filtered sub-glides from tag data ''' import numpy from . import plotutils fig, ax = plt.subplots() ax = plotutils.plot_noncontiguous(ax, depths, numpy.where(mask_tag_filt)[0]) ax.invert_yaxis() plt.show() return None
[ "def", "plot_glide_depths", "(", "depths", ",", "mask_tag_filt", ")", ":", "import", "numpy", "from", ".", "import", "plotutils", "fig", ",", "ax", "=", "plt", ".", "subplots", "(", ")", "ax", "=", "plotutils", ".", "plot_noncontiguous", "(", "ax", ",", "depths", ",", "numpy", ".", "where", "(", "mask_tag_filt", ")", "[", "0", "]", ")", "ax", ".", "invert_yaxis", "(", ")", "plt", ".", "show", "(", ")", "return", "None" ]
Plot depth at glides Args ---- depths: ndarray Depth values at each sensor sampling mask_tag_filt: ndarray Boolean mask to slice filtered sub-glides from tag data
[ "Plot", "depth", "at", "glides" ]
816563a9c3feb3fa416f1c2921c6b75db34111ad
https://github.com/ryanjdillon/pyotelem/blob/816563a9c3feb3fa416f1c2921c6b75db34111ad/pyotelem/plots/plotglides.py#L9-L30
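A minimal sketch of plot_glide_depths above, assuming the import path follows the record (pyotelem/plots/plotglides.py); the depth trace and mask are synthetic stand-ins for real tag data.

import numpy
from pyotelem.plots.plotglides import plot_glide_depths

# Synthetic depth trace and a boolean mask of equal length marking sub-glide samples.
depths = 60 + 50 * numpy.sin(numpy.linspace(0, 10, 500))
mask_tag_filt = depths > 80  # hypothetical filter standing in for the real glide filter

# Plots only the masked samples, with the depth axis inverted.
plot_glide_depths(depths, mask_tag_filt)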